repo_name (string, 6 distinct values) | pr_number (int64, 512 to 78.9k) | pr_title (string, 3 to 144 chars) | pr_description (string, 0 to 30.3k chars) | author (string, 2 to 21 chars) | date_created (timestamp[ns, tz=UTC]) | date_merged (timestamp[ns, tz=UTC]) | previous_commit (string, 40 chars) | pr_commit (string, 40 chars) | query (string, 17 to 30.4k chars) | filepath (string, 9 to 210 chars) | before_content (string, 0 to 112M chars) | after_content (string, 0 to 112M chars) | label (int64, -1 to 1) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them, move more of the setup into the shared test state - this improves perf because we avoid repeating the setup (copying files around) for each test case and instead do it once for the entire class.
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Added more cases to the "theory" tests
* Most notably, for framework resolution I added variations on the TFM (which will be needed when we implement disabling MLL)
* Added new tests, mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites, which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs` and just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
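As a concrete, purely illustrative sketch of the two techniques this description mentions - moving expensive setup into shared per-class test state and collapsing repeated tests into a "Theory" - the following hypothetical xUnit snippet shows a class fixture plus a data-driven test. The type, file, and TFM names here are made up for illustration and are not taken from the actual PR.

```csharp
// NOTE: hypothetical illustration only - names below are not from the dotnet/runtime PR.
using System;
using System.IO;
using Xunit;

// Shared test state: the expensive setup (e.g. copying test assets around) runs once
// per test class via an xUnit class fixture, instead of once per test case.
public sealed class SharedTestState : IDisposable
{
    public string TestAssetsDir { get; }

    public SharedTestState()
    {
        TestAssetsDir = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
        Directory.CreateDirectory(TestAssetsDir);
        File.WriteAllText(Path.Combine(TestAssetsDir, "app.runtimeconfig.json"), "{}");
    }

    public void Dispose() => Directory.Delete(TestAssetsDir, recursive: true);
}

public class FrameworkResolutionTests : IClassFixture<SharedTestState>
{
    private readonly SharedTestState _state;

    public FrameworkResolutionTests(SharedTestState state) => _state = state;

    // A single [Theory] with data rows replaces several near-identical [Fact] tests,
    // which also makes it easy to add more TFM variations later.
    [Theory]
    [InlineData("net6.0")]
    [InlineData("net7.0")]
    public void RuntimeConfigIsPresent(string tfm)
    {
        string configPath = Path.Combine(_state.TestAssetsDir, "app.runtimeconfig.json");
        Assert.True(File.Exists(configPath), $"Expected a runtime config for {tfm}");
    }
}
```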
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them, move more of the setup into the shared test state - this improves perf because we avoid repeating the setup (copying files around) for each test case and instead do it once for the entire class.
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Added more cases to the "theory" tests
* Most notably, for framework resolution I added variations on the TFM (which will be needed when we implement disabling MLL)
* Added new tests, mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites, which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs` and just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Net.Mail/src/System/Net/Mime/ContentDisposition.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Net.Mail;
using System.Text;
namespace System.Net.Mime
{
public class ContentDisposition
{
private const string CreationDateKey = "creation-date";
private const string ModificationDateKey = "modification-date";
private const string ReadDateKey = "read-date";
private const string FileNameKey = "filename";
private const string SizeKey = "size";
private TrackingValidationObjectDictionary? _parameters;
private string _disposition;
private string _dispositionType;
private bool _isChanged;
private bool _isPersisted;
private static readonly TrackingValidationObjectDictionary.ValidateAndParseValue s_dateParser =
new TrackingValidationObjectDictionary.ValidateAndParseValue(v => new SmtpDateTime(v.ToString()!));
// this will throw a FormatException if the value supplied is not a valid SmtpDateTime
private static readonly TrackingValidationObjectDictionary.ValidateAndParseValue s_longParser =
new TrackingValidationObjectDictionary.ValidateAndParseValue((object value) =>
{
long longValue;
if (!long.TryParse(value.ToString(), NumberStyles.None, CultureInfo.InvariantCulture, out longValue))
{
throw new FormatException(SR.ContentDispositionInvalid);
}
return longValue;
});
private static readonly Dictionary<string, TrackingValidationObjectDictionary.ValidateAndParseValue> s_validators =
new Dictionary<string, TrackingValidationObjectDictionary.ValidateAndParseValue>() {
{ CreationDateKey, s_dateParser },
{ ModificationDateKey, s_dateParser },
{ ReadDateKey, s_dateParser },
{ SizeKey, s_longParser }
};
public ContentDisposition()
{
_isChanged = true;
_disposition = _dispositionType = "attachment";
// no need to parse disposition since there's nothing to parse
}
public ContentDisposition(string disposition!!)
{
_isChanged = true;
_disposition = disposition;
ParseValue();
}
internal DateTime GetDateParameter(string parameterName)
{
SmtpDateTime? dateValue = ((TrackingValidationObjectDictionary)Parameters).InternalGet(parameterName) as SmtpDateTime;
return dateValue == null ? DateTime.MinValue : dateValue.Date;
}
/// <summary>
/// Gets the disposition type of the content.
/// </summary>
public string DispositionType
{
get { return _dispositionType; }
set
{
ArgumentException.ThrowIfNullOrEmpty(value);
_isChanged = true;
_dispositionType = value;
}
}
public StringDictionary Parameters => _parameters ??= new TrackingValidationObjectDictionary(s_validators);
/// <summary>
/// Gets the value of the Filename parameter.
/// </summary>
public string? FileName
{
get { return Parameters[FileNameKey]; }
set
{
if (string.IsNullOrEmpty(value))
{
Parameters.Remove(FileNameKey);
}
else
{
Parameters[FileNameKey] = value;
}
}
}
/// <summary>
/// Gets the value of the Creation-Date parameter.
/// </summary>
public DateTime CreationDate
{
get { return GetDateParameter(CreationDateKey); }
set
{
SmtpDateTime date = new SmtpDateTime(value);
((TrackingValidationObjectDictionary)Parameters).InternalSet(CreationDateKey, date);
}
}
/// <summary>
/// Gets the value of the Modification-Date parameter.
/// </summary>
public DateTime ModificationDate
{
get { return GetDateParameter(ModificationDateKey); }
set
{
SmtpDateTime date = new SmtpDateTime(value);
((TrackingValidationObjectDictionary)Parameters).InternalSet(ModificationDateKey, date);
}
}
public bool Inline
{
get { return _dispositionType == DispositionTypeNames.Inline; }
set
{
_isChanged = true;
_dispositionType = value ? DispositionTypeNames.Inline : DispositionTypeNames.Attachment;
}
}
/// <summary>
/// Gets the value of the Read-Date parameter.
/// </summary>
public DateTime ReadDate
{
get { return GetDateParameter(ReadDateKey); }
set
{
SmtpDateTime date = new SmtpDateTime(value);
((TrackingValidationObjectDictionary)Parameters).InternalSet(ReadDateKey, date);
}
}
/// <summary>
/// Gets the value of the Size parameter (-1 if unspecified).
/// </summary>
public long Size
{
get
{
object? sizeValue = ((TrackingValidationObjectDictionary)Parameters).InternalGet(SizeKey);
return sizeValue == null ? -1 : (long)sizeValue;
}
set
{
((TrackingValidationObjectDictionary)Parameters).InternalSet(SizeKey, value);
}
}
internal void Set(string contentDisposition, HeaderCollection headers)
{
// we don't set ischanged because persistence was already handled
// via the headers.
_disposition = contentDisposition;
ParseValue();
headers.InternalSet(MailHeaderInfo.GetString(MailHeaderID.ContentDisposition)!, ToString());
_isPersisted = true;
}
internal void PersistIfNeeded(HeaderCollection headers, bool forcePersist)
{
if (IsChanged || !_isPersisted || forcePersist)
{
headers.InternalSet(MailHeaderInfo.GetString(MailHeaderID.ContentDisposition)!, ToString());
_isPersisted = true;
}
}
internal bool IsChanged => _isChanged || _parameters != null && _parameters.IsChanged;
public override string ToString()
{
if (_disposition == null || _isChanged || _parameters != null && _parameters.IsChanged)
{
_disposition = Encode(false); // Legacy wire-safe format
_isChanged = false;
_parameters!.IsChanged = false;
_isPersisted = false;
}
return _disposition;
}
internal string Encode(bool allowUnicode)
{
var builder = new StringBuilder();
builder.Append(_dispositionType); // Must not have unicode, already validated
// Validate and encode unicode where required
foreach (string key in Parameters.Keys)
{
builder.Append("; ");
EncodeToBuffer(key, builder, allowUnicode);
builder.Append('=');
EncodeToBuffer(_parameters![key]!, builder, allowUnicode);
}
return builder.ToString();
}
private static void EncodeToBuffer(string value, StringBuilder builder, bool allowUnicode)
{
Encoding? encoding = MimeBasePart.DecodeEncoding(value);
if (encoding != null) // Manually encoded elsewhere, pass through
{
builder.Append('"').Append(value).Append('"');
}
else if ((allowUnicode && !MailBnfHelper.HasCROrLF(value)) // Unicode without CR or LFs
|| MimeBasePart.IsAscii(value, false)) // Ascii
{
MailBnfHelper.GetTokenOrQuotedString(value, builder, allowUnicode);
}
else
{
// MIME Encoding required
encoding = Encoding.GetEncoding(MimeBasePart.DefaultCharSet);
builder.Append('"').Append(MimeBasePart.EncodeHeaderValue(value, encoding, MimeBasePart.ShouldUseBase64Encoding(encoding))).Append('"');
}
}
public override bool Equals([NotNullWhen(true)] object? rparam)
{
return rparam == null ?
false :
string.Equals(ToString(), rparam.ToString(), StringComparison.OrdinalIgnoreCase);
}
public override int GetHashCode() => ToString().ToLowerInvariant().GetHashCode();
[MemberNotNull(nameof(_dispositionType))]
private void ParseValue()
{
int offset = 0;
try
{
// the disposition MUST be the first parameter in the string
_dispositionType = MailBnfHelper.ReadToken(_disposition, ref offset, null);
// disposition MUST not be empty
if (string.IsNullOrEmpty(_dispositionType))
{
throw new FormatException(SR.MailHeaderFieldMalformedHeader);
}
// now we know that there are parameters so we must initialize or clear
// and parse
if (_parameters == null)
{
_parameters = new TrackingValidationObjectDictionary(s_validators);
}
else
{
_parameters.Clear();
}
while (MailBnfHelper.SkipCFWS(_disposition, ref offset))
{
// ensure that the separator character is present
if (_disposition[offset++] != ';')
{
throw new FormatException(SR.Format(SR.MailHeaderFieldInvalidCharacter, _disposition[offset - 1]));
}
// skip whitespace and see if there's anything left to parse or if we're done
if (!MailBnfHelper.SkipCFWS(_disposition, ref offset))
{
break;
}
string? paramAttribute = MailBnfHelper.ReadParameterAttribute(_disposition, ref offset, null);
string? paramValue;
// verify the next character after the parameter is correct
if (_disposition[offset++] != '=')
{
throw new FormatException(SR.MailHeaderFieldMalformedHeader);
}
if (!MailBnfHelper.SkipCFWS(_disposition, ref offset))
{
// parameter was at end of string and has no value
// this is not valid
throw new FormatException(SR.ContentDispositionInvalid);
}
paramValue = _disposition[offset] == '"' ?
MailBnfHelper.ReadQuotedString(_disposition, ref offset, null) :
MailBnfHelper.ReadToken(_disposition, ref offset, null);
// paramValue could potentially still be empty if it was a valid quoted string that
// contained no inner value. this is invalid
if (string.IsNullOrEmpty(paramAttribute) || string.IsNullOrEmpty(paramValue))
{
throw new FormatException(SR.ContentDispositionInvalid);
}
// if validation is needed, the parameters dictionary will have a validator registered
// for the parameter that is being set so no additional formatting checks are needed here
Parameters.Add(paramAttribute, paramValue);
}
}
catch (FormatException exception)
{
// it's possible that something in MailBNFHelper could throw so ensure that we catch it and wrap it
// so that the exception has the correct text
throw new FormatException(SR.ContentDispositionInvalid, exception);
}
_parameters.IsChanged = false;
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Net.Mail;
using System.Text;
namespace System.Net.Mime
{
public class ContentDisposition
{
private const string CreationDateKey = "creation-date";
private const string ModificationDateKey = "modification-date";
private const string ReadDateKey = "read-date";
private const string FileNameKey = "filename";
private const string SizeKey = "size";
private TrackingValidationObjectDictionary? _parameters;
private string _disposition;
private string _dispositionType;
private bool _isChanged;
private bool _isPersisted;
private static readonly TrackingValidationObjectDictionary.ValidateAndParseValue s_dateParser =
new TrackingValidationObjectDictionary.ValidateAndParseValue(v => new SmtpDateTime(v.ToString()!));
// this will throw a FormatException if the value supplied is not a valid SmtpDateTime
private static readonly TrackingValidationObjectDictionary.ValidateAndParseValue s_longParser =
new TrackingValidationObjectDictionary.ValidateAndParseValue((object value) =>
{
long longValue;
if (!long.TryParse(value.ToString(), NumberStyles.None, CultureInfo.InvariantCulture, out longValue))
{
throw new FormatException(SR.ContentDispositionInvalid);
}
return longValue;
});
private static readonly Dictionary<string, TrackingValidationObjectDictionary.ValidateAndParseValue> s_validators =
new Dictionary<string, TrackingValidationObjectDictionary.ValidateAndParseValue>() {
{ CreationDateKey, s_dateParser },
{ ModificationDateKey, s_dateParser },
{ ReadDateKey, s_dateParser },
{ SizeKey, s_longParser }
};
public ContentDisposition()
{
_isChanged = true;
_disposition = _dispositionType = "attachment";
// no need to parse disposition since there's nothing to parse
}
public ContentDisposition(string disposition!!)
{
_isChanged = true;
_disposition = disposition;
ParseValue();
}
internal DateTime GetDateParameter(string parameterName)
{
SmtpDateTime? dateValue = ((TrackingValidationObjectDictionary)Parameters).InternalGet(parameterName) as SmtpDateTime;
return dateValue == null ? DateTime.MinValue : dateValue.Date;
}
/// <summary>
/// Gets the disposition type of the content.
/// </summary>
public string DispositionType
{
get { return _dispositionType; }
set
{
ArgumentException.ThrowIfNullOrEmpty(value);
_isChanged = true;
_dispositionType = value;
}
}
public StringDictionary Parameters => _parameters ??= new TrackingValidationObjectDictionary(s_validators);
/// <summary>
/// Gets the value of the Filename parameter.
/// </summary>
public string? FileName
{
get { return Parameters[FileNameKey]; }
set
{
if (string.IsNullOrEmpty(value))
{
Parameters.Remove(FileNameKey);
}
else
{
Parameters[FileNameKey] = value;
}
}
}
/// <summary>
/// Gets the value of the Creation-Date parameter.
/// </summary>
public DateTime CreationDate
{
get { return GetDateParameter(CreationDateKey); }
set
{
SmtpDateTime date = new SmtpDateTime(value);
((TrackingValidationObjectDictionary)Parameters).InternalSet(CreationDateKey, date);
}
}
/// <summary>
/// Gets the value of the Modification-Date parameter.
/// </summary>
public DateTime ModificationDate
{
get { return GetDateParameter(ModificationDateKey); }
set
{
SmtpDateTime date = new SmtpDateTime(value);
((TrackingValidationObjectDictionary)Parameters).InternalSet(ModificationDateKey, date);
}
}
public bool Inline
{
get { return _dispositionType == DispositionTypeNames.Inline; }
set
{
_isChanged = true;
_dispositionType = value ? DispositionTypeNames.Inline : DispositionTypeNames.Attachment;
}
}
/// <summary>
/// Gets the value of the Read-Date parameter.
/// </summary>
public DateTime ReadDate
{
get { return GetDateParameter(ReadDateKey); }
set
{
SmtpDateTime date = new SmtpDateTime(value);
((TrackingValidationObjectDictionary)Parameters).InternalSet(ReadDateKey, date);
}
}
/// <summary>
/// Gets the value of the Size parameter (-1 if unspecified).
/// </summary>
public long Size
{
get
{
object? sizeValue = ((TrackingValidationObjectDictionary)Parameters).InternalGet(SizeKey);
return sizeValue == null ? -1 : (long)sizeValue;
}
set
{
((TrackingValidationObjectDictionary)Parameters).InternalSet(SizeKey, value);
}
}
internal void Set(string contentDisposition, HeaderCollection headers)
{
// we don't set ischanged because persistence was already handled
// via the headers.
_disposition = contentDisposition;
ParseValue();
headers.InternalSet(MailHeaderInfo.GetString(MailHeaderID.ContentDisposition)!, ToString());
_isPersisted = true;
}
internal void PersistIfNeeded(HeaderCollection headers, bool forcePersist)
{
if (IsChanged || !_isPersisted || forcePersist)
{
headers.InternalSet(MailHeaderInfo.GetString(MailHeaderID.ContentDisposition)!, ToString());
_isPersisted = true;
}
}
internal bool IsChanged => _isChanged || _parameters != null && _parameters.IsChanged;
public override string ToString()
{
if (_disposition == null || _isChanged || _parameters != null && _parameters.IsChanged)
{
_disposition = Encode(false); // Legacy wire-safe format
_isChanged = false;
_parameters!.IsChanged = false;
_isPersisted = false;
}
return _disposition;
}
internal string Encode(bool allowUnicode)
{
var builder = new StringBuilder();
builder.Append(_dispositionType); // Must not have unicode, already validated
// Validate and encode unicode where required
foreach (string key in Parameters.Keys)
{
builder.Append("; ");
EncodeToBuffer(key, builder, allowUnicode);
builder.Append('=');
EncodeToBuffer(_parameters![key]!, builder, allowUnicode);
}
return builder.ToString();
}
private static void EncodeToBuffer(string value, StringBuilder builder, bool allowUnicode)
{
Encoding? encoding = MimeBasePart.DecodeEncoding(value);
if (encoding != null) // Manually encoded elsewhere, pass through
{
builder.Append('"').Append(value).Append('"');
}
else if ((allowUnicode && !MailBnfHelper.HasCROrLF(value)) // Unicode without CR or LFs
|| MimeBasePart.IsAscii(value, false)) // Ascii
{
MailBnfHelper.GetTokenOrQuotedString(value, builder, allowUnicode);
}
else
{
// MIME Encoding required
encoding = Encoding.GetEncoding(MimeBasePart.DefaultCharSet);
builder.Append('"').Append(MimeBasePart.EncodeHeaderValue(value, encoding, MimeBasePart.ShouldUseBase64Encoding(encoding))).Append('"');
}
}
public override bool Equals([NotNullWhen(true)] object? rparam)
{
return rparam == null ?
false :
string.Equals(ToString(), rparam.ToString(), StringComparison.OrdinalIgnoreCase);
}
public override int GetHashCode() => ToString().ToLowerInvariant().GetHashCode();
[MemberNotNull(nameof(_dispositionType))]
private void ParseValue()
{
int offset = 0;
try
{
// the disposition MUST be the first parameter in the string
_dispositionType = MailBnfHelper.ReadToken(_disposition, ref offset, null);
// disposition MUST not be empty
if (string.IsNullOrEmpty(_dispositionType))
{
throw new FormatException(SR.MailHeaderFieldMalformedHeader);
}
// now we know that there are parameters so we must initialize or clear
// and parse
if (_parameters == null)
{
_parameters = new TrackingValidationObjectDictionary(s_validators);
}
else
{
_parameters.Clear();
}
while (MailBnfHelper.SkipCFWS(_disposition, ref offset))
{
// ensure that the separator character is present
if (_disposition[offset++] != ';')
{
throw new FormatException(SR.Format(SR.MailHeaderFieldInvalidCharacter, _disposition[offset - 1]));
}
// skip whitespace and see if there's anything left to parse or if we're done
if (!MailBnfHelper.SkipCFWS(_disposition, ref offset))
{
break;
}
string? paramAttribute = MailBnfHelper.ReadParameterAttribute(_disposition, ref offset, null);
string? paramValue;
// verify the next character after the parameter is correct
if (_disposition[offset++] != '=')
{
throw new FormatException(SR.MailHeaderFieldMalformedHeader);
}
if (!MailBnfHelper.SkipCFWS(_disposition, ref offset))
{
// parameter was at end of string and has no value
// this is not valid
throw new FormatException(SR.ContentDispositionInvalid);
}
paramValue = _disposition[offset] == '"' ?
MailBnfHelper.ReadQuotedString(_disposition, ref offset, null) :
MailBnfHelper.ReadToken(_disposition, ref offset, null);
// paramValue could potentially still be empty if it was a valid quoted string that
// contained no inner value. this is invalid
if (string.IsNullOrEmpty(paramAttribute) || string.IsNullOrEmpty(paramValue))
{
throw new FormatException(SR.ContentDispositionInvalid);
}
// if validation is needed, the parameters dictionary will have a validator registered
// for the parameter that is being set so no additional formatting checks are needed here
Parameters.Add(paramAttribute, paramValue);
}
}
catch (FormatException exception)
{
// it's possible that something in MailBNFHelper could throw so ensure that we catch it and wrap it
// so that the exception has the correct text
throw new FormatException(SR.ContentDispositionInvalid, exception);
}
_parameters.IsChanged = false;
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them, move more of the setup into the shared test state - this improves perf because we avoid repeating the setup (copying files around) for each test case and instead do it once for the entire class.
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Added more cases to the "theory" tests
* Most notably, for framework resolution I added variations on the TFM (which will be needed when we implement disabling MLL)
* Added new tests, mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites, which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs` and just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them, move more of the setup into the shared test state - this improves perf because we avoid repeating the setup (copying files around) for each test case and instead do it once for the entire class.
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Added more cases to the "theory" tests
* Most notably, for framework resolution I added variations on the TFM (which will be needed when we implement disabling MLL)
* Added new tests, mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites, which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs` and just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Text.RegularExpressions/src/System/Text/RegularExpressions/Regex.Replace.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics.CodeAnalysis;
namespace System.Text.RegularExpressions
{
// Callback class
public delegate string MatchEvaluator(Match match);
internal delegate bool MatchCallback<TState>(ref TState state, Match match);
public partial class Regex
{
/// <summary>
/// Replaces all occurrences of the pattern with the <paramref name="replacement"/> pattern, starting at
/// the first character in the input string.
/// </summary>
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex)] string pattern, string replacement) =>
RegexCache.GetOrAdd(pattern).Replace(input, replacement);
/// <summary>
/// Replaces all occurrences of
/// the <paramref name="pattern "/>with the <paramref name="replacement "/>
/// pattern, starting at the first character in the input string.
/// </summary>
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex, "options")] string pattern, string replacement, RegexOptions options) =>
RegexCache.GetOrAdd(pattern, options, s_defaultMatchTimeout).Replace(input, replacement);
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex, "options")] string pattern, string replacement, RegexOptions options, TimeSpan matchTimeout) =>
RegexCache.GetOrAdd(pattern, options, matchTimeout).Replace(input, replacement);
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the
/// <paramref name="replacement"/> pattern, starting at the first character in the
/// input string.
/// </summary>
public string Replace(string input, string replacement)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
return Replace(input, replacement, -1, RightToLeft ? input.Length : 0);
}
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the
/// <paramref name="replacement"/> pattern, starting at the first character in the
/// input string.
/// </summary>
public string Replace(string input, string replacement, int count)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
return Replace(input, replacement, count, RightToLeft ? input.Length : 0);
}
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the
/// <paramref name="replacement"/> pattern, starting at the character position
/// <paramref name="startat"/>.
/// </summary>
public string Replace(string input, string replacement, int count, int startat)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
if (replacement is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.replacement);
}
// Gets the weakly cached replacement helper or creates one if there isn't one already,
// then uses it to perform the replace.
return
RegexReplacement.GetOrCreate(RegexReplacementWeakReference, replacement, caps!, capsize, capnames!, roptions).
Replace(this, input, count, startat);
}
/// <summary>
/// Replaces all occurrences of the <paramref name="pattern"/> with the recent
/// replacement pattern.
/// </summary>
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex)] string pattern, MatchEvaluator evaluator) =>
RegexCache.GetOrAdd(pattern).Replace(input, evaluator);
/// <summary>
/// Replaces all occurrences of the <paramref name="pattern"/> with the recent
/// replacement pattern, starting at the first character.
/// </summary>
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex, "options")] string pattern, MatchEvaluator evaluator, RegexOptions options) =>
RegexCache.GetOrAdd(pattern, options, s_defaultMatchTimeout).Replace(input, evaluator);
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex, "options")] string pattern, MatchEvaluator evaluator, RegexOptions options, TimeSpan matchTimeout) =>
RegexCache.GetOrAdd(pattern, options, matchTimeout).Replace(input, evaluator);
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the recent
/// replacement pattern, starting at the first character position.
/// </summary>
public string Replace(string input, MatchEvaluator evaluator)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
return Replace(evaluator, this, input, -1, RightToLeft ? input.Length : 0);
}
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the recent
/// replacement pattern, starting at the first character position.
/// </summary>
public string Replace(string input, MatchEvaluator evaluator, int count)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
return Replace(evaluator, this, input, count, RightToLeft ? input.Length : 0);
}
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the recent
/// replacement pattern, starting at the character position
/// <paramref name="startat"/>.
/// </summary>
public string Replace(string input, MatchEvaluator evaluator, int count, int startat)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
return Replace(evaluator, this, input, count, startat);
}
/// <summary>
/// Replaces all occurrences of the regex in the string with the
/// replacement evaluator.
///
/// Note that the special case of no matches is handled on its own:
/// with no matches, the input string is returned unchanged.
/// The right-to-left case is split out because StringBuilder
/// doesn't handle right-to-left string building directly very well.
/// </summary>
private static string Replace(MatchEvaluator evaluator, Regex regex, string input, int count, int startat)
{
if (evaluator is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.evaluator);
}
if (count < -1)
{
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.CountTooSmall);
}
if ((uint)startat > (uint)input.Length)
{
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.startat, ExceptionResource.BeginIndexNotNegative);
}
if (count == 0)
{
return input;
}
var state = (segments: SegmentStringBuilder.Create(), evaluator, prevat: 0, input, count);
if (!regex.RightToLeft)
{
regex.RunAllMatchesWithCallback(input, startat, ref state, static (ref (SegmentStringBuilder segments, MatchEvaluator evaluator, int prevat, string input, int count) state, Match match) =>
{
state.segments.Add(state.input.AsMemory(state.prevat, match.Index - state.prevat));
state.prevat = match.Index + match.Length;
state.segments.Add(state.evaluator(match).AsMemory());
return --state.count != 0;
}, reuseMatchObject: false);
if (state.segments.Count == 0)
{
return input;
}
state.segments.Add(input.AsMemory(state.prevat, input.Length - state.prevat));
}
else
{
state.prevat = input.Length;
regex.RunAllMatchesWithCallback(input, startat, ref state, static (ref (SegmentStringBuilder segments, MatchEvaluator evaluator, int prevat, string input, int count) state, Match match) =>
{
state.segments.Add(state.input.AsMemory(match.Index + match.Length, state.prevat - match.Index - match.Length));
state.prevat = match.Index;
state.segments.Add(state.evaluator(match).AsMemory());
return --state.count != 0;
}, reuseMatchObject: false);
if (state.segments.Count == 0)
{
return input;
}
state.segments.Add(input.AsMemory(0, state.prevat));
state.segments.AsSpan().Reverse();
}
return state.segments.ToString();
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics.CodeAnalysis;
namespace System.Text.RegularExpressions
{
// Callback class
public delegate string MatchEvaluator(Match match);
internal delegate bool MatchCallback<TState>(ref TState state, Match match);
public partial class Regex
{
/// <summary>
/// Replaces all occurrences of the pattern with the <paramref name="replacement"/> pattern, starting at
/// the first character in the input string.
/// </summary>
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex)] string pattern, string replacement) =>
RegexCache.GetOrAdd(pattern).Replace(input, replacement);
/// <summary>
/// Replaces all occurrences of
/// the <paramref name="pattern "/>with the <paramref name="replacement "/>
/// pattern, starting at the first character in the input string.
/// </summary>
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex, "options")] string pattern, string replacement, RegexOptions options) =>
RegexCache.GetOrAdd(pattern, options, s_defaultMatchTimeout).Replace(input, replacement);
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex, "options")] string pattern, string replacement, RegexOptions options, TimeSpan matchTimeout) =>
RegexCache.GetOrAdd(pattern, options, matchTimeout).Replace(input, replacement);
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the
/// <paramref name="replacement"/> pattern, starting at the first character in the
/// input string.
/// </summary>
public string Replace(string input, string replacement)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
return Replace(input, replacement, -1, RightToLeft ? input.Length : 0);
}
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the
/// <paramref name="replacement"/> pattern, starting at the first character in the
/// input string.
/// </summary>
public string Replace(string input, string replacement, int count)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
return Replace(input, replacement, count, RightToLeft ? input.Length : 0);
}
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the
/// <paramref name="replacement"/> pattern, starting at the character position
/// <paramref name="startat"/>.
/// </summary>
public string Replace(string input, string replacement, int count, int startat)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
if (replacement is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.replacement);
}
// Gets the weakly cached replacement helper or creates one if there isn't one already,
// then uses it to perform the replace.
return
RegexReplacement.GetOrCreate(RegexReplacementWeakReference, replacement, caps!, capsize, capnames!, roptions).
Replace(this, input, count, startat);
}
/// <summary>
/// Replaces all occurrences of the <paramref name="pattern"/> with the recent
/// replacement pattern.
/// </summary>
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex)] string pattern, MatchEvaluator evaluator) =>
RegexCache.GetOrAdd(pattern).Replace(input, evaluator);
/// <summary>
/// Replaces all occurrences of the <paramref name="pattern"/> with the recent
/// replacement pattern, starting at the first character.
/// </summary>
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex, "options")] string pattern, MatchEvaluator evaluator, RegexOptions options) =>
RegexCache.GetOrAdd(pattern, options, s_defaultMatchTimeout).Replace(input, evaluator);
public static string Replace(string input, [StringSyntax(StringSyntaxAttribute.Regex, "options")] string pattern, MatchEvaluator evaluator, RegexOptions options, TimeSpan matchTimeout) =>
RegexCache.GetOrAdd(pattern, options, matchTimeout).Replace(input, evaluator);
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the recent
/// replacement pattern, starting at the first character position.
/// </summary>
public string Replace(string input, MatchEvaluator evaluator)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
return Replace(evaluator, this, input, -1, RightToLeft ? input.Length : 0);
}
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the recent
/// replacement pattern, starting at the first character position.
/// </summary>
public string Replace(string input, MatchEvaluator evaluator, int count)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
return Replace(evaluator, this, input, count, RightToLeft ? input.Length : 0);
}
/// <summary>
/// Replaces all occurrences of the previously defined pattern with the recent
/// replacement pattern, starting at the character position
/// <paramref name="startat"/>.
/// </summary>
public string Replace(string input, MatchEvaluator evaluator, int count, int startat)
{
if (input is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.input);
}
return Replace(evaluator, this, input, count, startat);
}
/// <summary>
/// Replaces all occurrences of the regex in the string with the
/// replacement evaluator.
///
/// Note that the special case of no matches is handled on its own:
/// with no matches, the input string is returned unchanged.
/// The right-to-left case is split out because StringBuilder
/// doesn't handle right-to-left string building directly very well.
/// </summary>
private static string Replace(MatchEvaluator evaluator, Regex regex, string input, int count, int startat)
{
if (evaluator is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.evaluator);
}
if (count < -1)
{
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.CountTooSmall);
}
if ((uint)startat > (uint)input.Length)
{
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.startat, ExceptionResource.BeginIndexNotNegative);
}
if (count == 0)
{
return input;
}
var state = (segments: SegmentStringBuilder.Create(), evaluator, prevat: 0, input, count);
if (!regex.RightToLeft)
{
regex.RunAllMatchesWithCallback(input, startat, ref state, static (ref (SegmentStringBuilder segments, MatchEvaluator evaluator, int prevat, string input, int count) state, Match match) =>
{
state.segments.Add(state.input.AsMemory(state.prevat, match.Index - state.prevat));
state.prevat = match.Index + match.Length;
state.segments.Add(state.evaluator(match).AsMemory());
return --state.count != 0;
}, reuseMatchObject: false);
if (state.segments.Count == 0)
{
return input;
}
state.segments.Add(input.AsMemory(state.prevat, input.Length - state.prevat));
}
else
{
state.prevat = input.Length;
regex.RunAllMatchesWithCallback(input, startat, ref state, static (ref (SegmentStringBuilder segments, MatchEvaluator evaluator, int prevat, string input, int count) state, Match match) =>
{
state.segments.Add(state.input.AsMemory(match.Index + match.Length, state.prevat - match.Index - match.Length));
state.prevat = match.Index;
state.segments.Add(state.evaluator(match).AsMemory());
return --state.count != 0;
}, reuseMatchObject: false);
if (state.segments.Count == 0)
{
return input;
}
state.segments.Add(input.AsMemory(0, state.prevat));
state.segments.AsSpan().Reverse();
}
return state.segments.ToString();
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them, move more of the setup into the shared test state - this improves perf because we avoid repeating the setup (copying files around) for each test case and instead do it once for the entire class.
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Added more cases to the "theory" tests
* Most notably, for framework resolution I added variations on the TFM (which will be needed when we implement disabling MLL)
* Added new tests, mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites, which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs` and just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them, move more of the setup into the shared test state - this improves perf because we avoid repeating the setup (copying files around) for each test case and instead do it once for the entire class.
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Added more cases to the "theory" tests
* Most notably, for framework resolution I added variations on the TFM (which will be needed when we implement disabling MLL)
* Added new tests, mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites, which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs` and just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Reflection.Metadata/src/System/Reflection/Metadata/Internal/UserStringHeap.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Reflection.Internal;
namespace System.Reflection.Metadata.Ecma335
{
internal readonly struct UserStringHeap
{
internal readonly MemoryBlock Block;
public UserStringHeap(MemoryBlock block)
{
this.Block = block;
}
internal string GetString(UserStringHandle handle)
{
int offset, size;
if (!Block.PeekHeapValueOffsetAndSize(handle.GetHeapOffset(), out offset, out size))
{
return string.Empty;
}
// Spec: Furthermore, there is an additional terminal byte (so all byte counts are odd, not even).
// The size in the blob header is the length of the string in bytes + 1.
return Block.PeekUtf16(offset, size & ~1);
}
internal UserStringHandle GetNextHandle(UserStringHandle handle)
{
int offset, size;
if (!Block.PeekHeapValueOffsetAndSize(handle.GetHeapOffset(), out offset, out size))
{
return default(UserStringHandle);
}
int nextIndex = offset + size;
if (nextIndex >= Block.Length)
{
return default(UserStringHandle);
}
return UserStringHandle.FromOffset(nextIndex);
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Reflection.Internal;
namespace System.Reflection.Metadata.Ecma335
{
internal readonly struct UserStringHeap
{
internal readonly MemoryBlock Block;
public UserStringHeap(MemoryBlock block)
{
this.Block = block;
}
internal string GetString(UserStringHandle handle)
{
int offset, size;
if (!Block.PeekHeapValueOffsetAndSize(handle.GetHeapOffset(), out offset, out size))
{
return string.Empty;
}
// Spec: Furthermore, there is an additional terminal byte (so all byte counts are odd, not even).
// The size in the blob header is the length of the string in bytes + 1.
return Block.PeekUtf16(offset, size & ~1);
}
internal UserStringHandle GetNextHandle(UserStringHandle handle)
{
int offset, size;
if (!Block.PeekHeapValueOffsetAndSize(handle.GetHeapOffset(), out offset, out size))
{
return default(UserStringHandle);
}
int nextIndex = offset + size;
if (nextIndex >= Block.Length)
{
return default(UserStringHandle);
}
return UserStringHandle.FromOffset(nextIndex);
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them, move more of the setup into the shared test state - this improves perf because we avoid repeating the setup (copying files around) for each test case and instead do it once for the entire class.
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Added more cases to the "theory" tests
* Most notably, for framework resolution I added variations on the TFM (which will be needed when we implement disabling MLL)
* Added new tests, mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites, which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs` and just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them, move more of the setup into the shared test state - this improves perf because we avoid repeating the setup (copying files around) for each test case and instead do it once for the entire class.
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Added more cases to the "theory" tests
* Most notably, for framework resolution I added variations on the TFM (which will be needed when we implement disabling MLL)
* Added new tests, mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites, which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs` and just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Management/tests/System/Management/ManagementObjectTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics;
using System.IO;
using System.Runtime.Serialization.Formatters.Binary;
using Xunit;
using Xunit.Sdk;
namespace System.Management.Tests
{
public class ManagementObjectTests
{
[ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsWindowsNanoServer))]
public void PlatformNotSupportedException_On_Nano()
{
// The underlying delegate usage can cause some cases to have the PNSE as the inner exception but there is a best effort
// to throw PNSE for such case.
Assert.Throws<PlatformNotSupportedException>(() => new ManagementObject($"Win32_LogicalDisk.DeviceID=\"{WmiTestHelper.SystemDriveId}\""));
}
[ConditionalFact(typeof(WmiTestHelper), nameof(WmiTestHelper.IsWmiSupported))]
[ActiveIssue("https://github.com/dotnet/runtime/issues/34689", TestPlatforms.Windows, TargetFrameworkMonikers.Netcoreapp, TestRuntimes.Mono)]
public void Get_Win32_LogicalDisk()
{
using (ManagementObject obj = new ManagementObject($"Win32_LogicalDisk.DeviceID=\"{WmiTestHelper.SystemDriveId}\""))
{
obj.Get();
Assert.True(obj.Properties.Count > 0);
Assert.True(ulong.Parse(obj["Size"].ToString()) > 0);
var classPath = obj.ClassPath.Path;
Assert.Equal($@"\\{Environment.MachineName}\root\cimv2:Win32_LogicalDisk", classPath);
var clone = obj.Clone();
Assert.False(ReferenceEquals(clone, obj));
((ManagementObject)clone).Dispose();
}
}
[ConditionalFact(typeof(WmiTestHelper), nameof(WmiTestHelper.IsWmiSupported))]
[OuterLoop]
public void GetRelated_For_Win32_LogicalDisk()
{
using (ManagementObject obj = new ManagementObject($"Win32_LogicalDisk.DeviceID=\"{WmiTestHelper.SystemDriveId}\""))
using (ManagementObjectCollection relatedCollection = obj.GetRelated())
{
Assert.True(relatedCollection.Count > 0);
foreach (ManagementObject related in relatedCollection)
Assert.False(string.IsNullOrWhiteSpace(related.ClassPath.NamespacePath));
}
}
[ConditionalFact(typeof(WmiTestHelper), nameof(WmiTestHelper.IsWmiSupported))]
[ActiveIssue("https://github.com/dotnet/runtime/issues/34689", TestPlatforms.Windows, TargetFrameworkMonikers.Netcoreapp, TestRuntimes.Mono)]
public void Set_Property_Win32_ComputerSystem()
{
using (ManagementObject obj = new ManagementObject($"Win32_ComputerSystem.Name=\"{Environment.MachineName}\""))
{
obj.Get();
obj.SetPropertyValue("Workgroup", "WmiTests");
}
}
[ConditionalFact(typeof(WmiTestHelper), nameof(WmiTestHelper.IsWmiSupported))]
[OuterLoop]
public void Invoke_Instance_And_Static_Method_Win32_Process()
{
// Retries are sometimes necessary as underlying API call can return
// ERROR_NOT_READY or occasionally ERROR_INVALID_BLOCK or ERROR_NOT_ENOUGH_MEMORY
RetryHelper.Execute(() =>
{
var processClass = new ManagementClass("Win32_Process");
object[] methodArgs = { "notepad.exe", null, null, 0 };
object resultObj = processClass.InvokeMethod("Create", methodArgs);
var resultCode = (uint)resultObj;
Assert.Equal(0u, resultCode);
var processId = (uint)methodArgs[3];
Assert.True(0u != processId, $"Unexpected process ID: {processId}");
using (Process targetProcess = Process.GetProcessById((int)processId))
using (var process = new ManagementObject($"Win32_Process.Handle=\"{processId}\""))
{
Assert.False(targetProcess.HasExited);
resultObj = process.InvokeMethod("Terminate", new object[] { 0 });
resultCode = (uint)resultObj;
Assert.Equal(0u, resultCode);
Assert.True(targetProcess.HasExited);
}
}, maxAttempts: 10, retryWhen: e => e is XunitException);
}
[ConditionalFact(typeof(WmiTestHelper), nameof(WmiTestHelper.IsWmiSupported))]
[OuterLoop]
public void Serialize_ManagementException()
{
try
{
new ManagementObject("Win32_LogicalDisk.DeviceID=\"InvalidDeviceId\"").Get();
}
catch (ManagementException e)
{
using var ms = new MemoryStream();
var formatter = new BinaryFormatter();
formatter.Serialize(ms, e);
ms.Position = 0;
var exception = (ManagementException)formatter.Deserialize(ms);
Assert.Equal(e.ErrorCode, exception.ErrorCode);
// On .NET Framework the `ErrorInformation` underlying field is serialized
if (PlatformDetection.IsNetFramework)
{
Assert.Equal(e.ErrorInformation, exception.ErrorInformation);
return;
}
Assert.Null(exception.ErrorInformation);
}
}
}
}
| -1 |
./src/libraries/System.ComponentModel.Annotations/src/System/ComponentModel/DataAnnotations/DisplayFormatAttribute.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics.CodeAnalysis;
namespace System.ComponentModel.DataAnnotations
{
/// <summary>
/// Allows overriding various display-related options for a given field. The options have the same meaning as in
/// BoundField.
/// </summary>
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, AllowMultiple = false)]
public class DisplayFormatAttribute : Attribute
{
private readonly LocalizableString _nullDisplayText = new LocalizableString(nameof(NullDisplayText));
/// <summary>
/// Default constructor
/// </summary>
public DisplayFormatAttribute()
{
ConvertEmptyStringToNull = true; // default to true to match behavior in related components
HtmlEncode = true; // default to true to match behavior in related components
}
/// <summary>
/// Gets or sets the format string
/// </summary>
public string? DataFormatString { get; set; }
/// <summary>
/// Gets or sets the string to display when the value is null, which may be a resource key string.
/// <para>
/// Consumers should use the <see cref="GetNullDisplayText" /> method to retrieve the UI display string.
/// </para>
/// </summary>
/// <remarks>
/// The property contains either the literal, non-localized string or the resource key
/// to be used in conjunction with <see cref="NullDisplayTextResourceType" /> to configure a localized
/// name for display.
/// <para>
/// The <see cref="GetNullDisplayText" /> method will return either the literal, non-localized
/// string or the localized string when <see cref="NullDisplayTextResourceType" /> has been specified.
/// </para>
/// </remarks>
/// <value>
/// The null display text is generally used as placeholder when the value is not specified.
/// A <c>null</c> or empty string is legal, and consumers must allow for that.
/// </value>
public string? NullDisplayText
{
get => _nullDisplayText.Value;
set => _nullDisplayText.Value = value;
}
/// <summary>
/// Gets or sets a value indicating whether empty strings should be set to null
/// </summary>
public bool ConvertEmptyStringToNull { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the format string should be used in edit mode
/// </summary>
public bool ApplyFormatInEditMode { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the field should be html encoded
/// </summary>
public bool HtmlEncode { get; set; }
/// <summary>
/// Gets or sets the <see cref="Type" /> that contains the resources for <see cref="NullDisplayText" />.
/// Using <see cref="NullDisplayTextResourceType" /> along with <see cref="NullDisplayText" />, allows the <see cref="GetNullDisplayText" />
/// method to return localized values.
/// </summary>
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)]
public Type? NullDisplayTextResourceType
{
get => _nullDisplayText.ResourceType;
set => _nullDisplayText.ResourceType = value;
}
/// <summary>
/// Gets the UI display string for NullDisplayText.
/// <para>
/// This can be either a literal, non-localized string provided to <see cref="NullDisplayText" /> or the
/// localized string found when <see cref="NullDisplayTextResourceType" /> has been specified and <see cref="NullDisplayText" />
/// represents a resource key within that resource type.
/// </para>
/// </summary>
/// <returns>
/// When <see cref="NullDisplayTextResourceType" /> has not been specified, the value of
/// <see cref="NullDisplayText" /> will be returned.
/// <para>
/// When <see cref="NullDisplayTextResourceType" /> has been specified and <see cref="NullDisplayText" />
/// represents a resource key within that resource type, then the localized value will be returned.
/// </para>
/// <para>
/// When <see cref="NullDisplayText" /> and <see cref="NullDisplayTextResourceType" /> have not been set, returns <c>null</c>.
/// </para>
/// </returns>
/// <exception cref="InvalidOperationException">
        /// Thrown when both the <see cref="NullDisplayTextResourceType" /> property and the <see cref="NullDisplayText" /> property have been set,
        /// but a public static property with a name matching the <see cref="NullDisplayText" /> value could not be found
        /// on the <see cref="NullDisplayTextResourceType" />.
/// </exception>
public string? GetNullDisplayText() => _nullDisplayText.GetLocalizableValue();
}
}
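The documentation above describes GetNullDisplayText as returning either the literal NullDisplayText value or, when NullDisplayTextResourceType is set, a localized resource value. A minimal, self-contained usage sketch of that behavior (OrderModel and DisplayFormatExample are hypothetical names introduced only for illustration, not part of the file above):

using System.ComponentModel.DataAnnotations;
public class OrderModel
{
    // Literal, non-localized null display text; GetNullDisplayText() would return "(not set)".
    [DisplayFormat(ConvertEmptyStringToNull = true, NullDisplayText = "(not set)")]
    public string? Status { get; set; }
}
public static class DisplayFormatExample
{
    public static string? ReadNullDisplayText()
    {
        var attribute = new DisplayFormatAttribute { NullDisplayText = "(not set)" };
        // If NullDisplayTextResourceType were also set, "(not set)" would instead be treated as a
        // resource key and GetNullDisplayText() would return the localized value.
        return attribute.GetNullDisplayText();
    }
}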
| -1 |
./src/mono/mono/tests/verifier/valid_delegate_return_variant_iface.cs
|
using System;
using System.Collections.Generic;
interface IFoo {}
class Foo : IFoo {}
class Driver
{
static IEnumerable <Foo> Dele (bool b)
{
return null;
}
static void Main ()
{
Func<bool, IEnumerable<IFoo>> dele = Dele;
dele (true);
}
}
| -1 |
./src/libraries/System.Runtime/tests/System/IO/DirectoryNotFoundExceptionTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.IO;
using Xunit;
using System.Tests;
namespace System.IO.Tests
{
public static class DirectoryNotFoundExceptionTests
{
[Fact]
public static void Ctor_Empty()
{
var exception = new DirectoryNotFoundException();
ExceptionHelpers.ValidateExceptionProperties(exception, hResult: HResults.COR_E_DIRECTORYNOTFOUND, validateMessage: false);
}
[Fact]
public static void Ctor_String()
{
string message = "That page was missing from the directory.";
var exception = new DirectoryNotFoundException(message);
ExceptionHelpers.ValidateExceptionProperties(exception, hResult: HResults.COR_E_DIRECTORYNOTFOUND, message: message);
}
[Fact]
public static void Ctor_String_Exception()
{
string message = "That page was missing from the directory.";
var innerException = new Exception("Inner exception");
var exception = new DirectoryNotFoundException(message, innerException);
ExceptionHelpers.ValidateExceptionProperties(exception, hResult: HResults.COR_E_DIRECTORYNOTFOUND, innerException: innerException, message: message);
}
}
}
| -1 |
./src/libraries/System.Linq.Expressions/src/System/Linq/Expressions/Compiler/StackSpiller.Temps.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
namespace System.Linq.Expressions.Compiler
{
internal sealed partial class StackSpiller
{
/// <summary>
/// The source of temporary variables introduced during stack spilling.
/// </summary>
private readonly TempMaker _tm = new TempMaker();
/// <summary>
/// Creates a temporary variable of the specified <paramref name="type"/>.
/// </summary>
/// <param name="type">The type for the temporary variable to create.</param>
/// <returns>
/// A temporary variable of the specified <paramref name="type"/>. When the temporary
/// variable is no longer used, it should be returned by using the <see cref="Mark"/>
/// and <see cref="Free"/> mechanism provided.
/// </returns>
private ParameterExpression MakeTemp(Type type) => _tm.Temp(type);
/// <summary>
/// Gets a watermark into the stack of used temporary variables. The returned
/// watermark value can be passed to <see cref="Free"/> to free all variables
/// below the watermark value, allowing them to be reused.
/// </summary>
/// <returns>
/// A watermark value indicating the number of temporary variables currently in use.
/// </returns>
/// <remarks>
/// This is a performance optimization to lower the overall number of temporaries needed.
/// </remarks>
private int Mark() => _tm.Mark();
/// <summary>
/// Frees temporaries created since the last marking using <see cref="Mark"/>.
/// </summary>
/// <param name="mark">The watermark value up to which to recycle used temporary variables.</param>
/// <remarks>
/// This is a performance optimization to lower the overall number of temporaries needed.
/// </remarks>
private void Free(int mark) => _tm.Free(mark);
/// <summary>
/// Verifies that all temporary variables get properly returned to the free list
/// after stack spilling for a lambda expression has taken place. This is used
/// to detect misuse of the <see cref="Mark"/> and <see cref="Free"/> methods.
/// </summary>
[Conditional("DEBUG")]
private void VerifyTemps() => _tm.VerifyTemps();
/// <summary>
/// Creates and returns a temporary variable to store the result of evaluating
/// the specified <paramref name="expression"/>.
/// </summary>
/// <param name="expression">The expression to store in a temporary variable.</param>
/// <param name="save">An expression that assigns the <paramref name="expression"/> to the created temporary variable.</param>
/// <param name="byRef">Indicates whether the <paramref name="expression"/> represents a ByRef value.</param>
/// <returns>The temporary variable holding the result of evaluating <paramref name="expression"/>.</returns>
private ParameterExpression ToTemp(Expression expression, out Expression save, bool byRef)
{
Type tempType = byRef ? expression.Type.MakeByRefType() : expression.Type;
ParameterExpression temp = MakeTemp(tempType);
save = AssignBinaryExpression.Make(temp, expression, byRef);
return temp;
}
/// <summary>
/// Utility to create and recycle temporary variables.
/// </summary>
private sealed class TempMaker
{
/// <summary>
/// Index of the next temporary variable to create.
/// This value is used for naming temporary variables using an increasing index.
/// </summary>
private int _temp;
/// <summary>
/// List of free temporary variables. These can be recycled for new temporary variables.
/// </summary>
private List<ParameterExpression>? _freeTemps;
/// <summary>
/// Stack of temporary variables that are currently in use.
/// </summary>
private Stack<ParameterExpression>? _usedTemps;
/// <summary>
/// List of all temporary variables created by the stack spiller instance.
/// </summary>
internal List<ParameterExpression> Temps { get; } = new List<ParameterExpression>();
/// <summary>
/// Creates a temporary variable of the specified <paramref name="type"/>.
/// </summary>
/// <param name="type">The type for the temporary variable to create.</param>
/// <returns>
/// A temporary variable of the specified <paramref name="type"/>. When the temporary
/// variable is no longer used, it should be returned by using the <see cref="Mark"/>
/// and <see cref="Free"/> mechanism provided.
/// </returns>
internal ParameterExpression Temp(Type type)
{
ParameterExpression temp;
if (_freeTemps != null)
{
// Recycle from the free-list if possible.
for (int i = _freeTemps.Count - 1; i >= 0; i--)
{
temp = _freeTemps[i];
if (temp.Type == type)
{
_freeTemps.RemoveAt(i);
return UseTemp(temp);
}
}
}
// Not on the free-list, create a brand new one.
temp = ParameterExpression.Make(type, "$temp$" + _temp++, isByRef: false);
Temps.Add(temp);
return UseTemp(temp);
}
/// <summary>
/// Registers the temporary variable in the stack of used temporary variables.
/// The <see cref="Mark"/> and <see cref="Free"/> methods use a watermark index
/// into this stack to enable recycling temporary variables in bulk.
/// </summary>
/// <param name="temp">The temporary variable to mark as used.</param>
/// <returns>The original temporary variable.</returns>
private ParameterExpression UseTemp(ParameterExpression temp)
{
Debug.Assert(_freeTemps == null || !_freeTemps.Contains(temp));
Debug.Assert(_usedTemps == null || !_usedTemps.Contains(temp));
if (_usedTemps == null)
{
_usedTemps = new Stack<ParameterExpression>();
}
_usedTemps.Push(temp);
return temp;
}
/// <summary>
/// Puts the temporary variable on the free list which is used by the
/// <see cref="Temp"/> method to reuse temporary variables.
/// </summary>
/// <param name="temp">The temporary variable to mark as no longer in use.</param>
private void FreeTemp(ParameterExpression temp)
{
Debug.Assert(_freeTemps == null || !_freeTemps.Contains(temp));
if (_freeTemps == null)
{
_freeTemps = new List<ParameterExpression>();
}
_freeTemps.Add(temp);
}
/// <summary>
/// Gets a watermark into the stack of used temporary variables. The returned
/// watermark value can be passed to <see cref="Free"/> to free all variables
/// below the watermark value, allowing them to be reused.
/// </summary>
/// <returns>
/// A watermark value indicating the number of temporary variables currently in use.
/// </returns>
/// <remarks>
/// This is a performance optimization to lower the overall number of temporaries needed.
/// </remarks>
internal int Mark() => _usedTemps?.Count ?? 0;
/// <summary>
/// Frees temporaries created since the last marking using <see cref="Mark"/>.
/// </summary>
/// <param name="mark">The watermark value up to which to recycle used temporary variables.</param>
/// <remarks>
/// This is a performance optimization to lower the overall number of temporaries needed.
/// </remarks>
internal void Free(int mark)
{
// (_usedTemps != null) ==> (mark <= _usedTemps.Count)
Debug.Assert(_usedTemps == null || mark <= _usedTemps.Count);
// (_usedTemps == null) ==> (mark == 0)
Debug.Assert(mark == 0 || _usedTemps != null);
if (_usedTemps != null)
{
while (mark < _usedTemps.Count)
{
FreeTemp(_usedTemps.Pop());
}
}
}
/// <summary>
/// Verifies that all temporary variables get properly returned to the free list
/// after stack spilling for a lambda expression has taken place. This is used
/// to detect misuse of the <see cref="Mark"/> and <see cref="Free"/> methods.
/// </summary>
[Conditional("DEBUG")]
internal void VerifyTemps()
{
Debug.Assert(_usedTemps == null || _usedTemps.Count == 0);
}
}
}
}
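TempMaker above recycles temporaries through a Mark/Free watermark over a stack of in-use variables. A standalone sketch of the same pattern, assuming nothing beyond what the comments above describe (WatermarkPool is an illustrative name, not a type from this file):

using System;
using System.Collections.Generic;
// Illustrative analogue of TempMaker's Mark/Free watermark pattern.
internal sealed class WatermarkPool<T>
{
    private readonly Func<T> _factory;
    private readonly Stack<T> _used = new Stack<T>();
    private readonly List<T> _free = new List<T>();
    public WatermarkPool(Func<T> factory) => _factory = factory;
    // Reuse a free item when possible, otherwise create a new one; either way it becomes "in use".
    public T Rent()
    {
        T item;
        if (_free.Count > 0)
        {
            item = _free[_free.Count - 1];
            _free.RemoveAt(_free.Count - 1);
        }
        else
        {
            item = _factory();
        }
        _used.Push(item);
        return item;
    }
    // Watermark: how many items are currently in use.
    public int Mark() => _used.Count;
    // Return everything rented since the given watermark to the free list for reuse.
    public void Free(int mark)
    {
        while (_used.Count > mark)
        {
            _free.Add(_used.Pop());
        }
    }
}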
| -1 |
./src/libraries/Common/tests/System/Net/Http/HttpAgnosticLoopbackServer.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.IO;
using System.Net.Security;
using System.Net.Sockets;
using System.Security.Cryptography.X509Certificates;
using System.Threading;
using System.Threading.Tasks;
namespace System.Net.Test.Common
{
public class HttpAgnosticLoopbackServer : GenericLoopbackServer, IDisposable
{
private Socket _listenSocket;
private HttpAgnosticOptions _options;
private Uri _uri;
public override Uri Address => _uri;
public static HttpAgnosticLoopbackServer CreateServer()
{
return new HttpAgnosticLoopbackServer(new HttpAgnosticOptions());
}
public static HttpAgnosticLoopbackServer CreateServer(HttpAgnosticOptions options)
{
return new HttpAgnosticLoopbackServer(options);
}
private HttpAgnosticLoopbackServer(HttpAgnosticOptions options)
{
_options = options;
_listenSocket = new Socket(_options.Address.AddressFamily, SocketType.Stream, ProtocolType.Tcp);
_listenSocket.Bind(new IPEndPoint(_options.Address, 0));
_listenSocket.Listen(_options.ListenBacklog);
var localEndPoint = (IPEndPoint)_listenSocket.LocalEndPoint;
var host = _options.Address.AddressFamily == AddressFamily.InterNetworkV6 ? $"[{localEndPoint.Address}]" : localEndPoint.Address.ToString();
var scheme = _options.UseSsl ? "https" : "http";
_uri = new Uri($"{scheme}://{host}:{localEndPoint.Port}/");
}
public override void Dispose()
{
if (_listenSocket != null)
{
_listenSocket.Dispose();
_listenSocket = null;
}
}
public override async Task<GenericLoopbackConnection> EstablishGenericConnectionAsync()
{
Socket socket = await _listenSocket.AcceptAsync().ConfigureAwait(false);
Stream stream = new NetworkStream(socket, ownsSocket: true);
var options = new GenericLoopbackOptions()
{
Address = _options.Address,
SslProtocols = _options.SslProtocols,
UseSsl = false,
ListenBacklog = _options.ListenBacklog
};
GenericLoopbackConnection connection = null;
try
{
if (_options.UseSsl)
{
var sslStream = new SslStream(stream, false, delegate { return true; });
using (X509Certificate2 cert = Configuration.Certificates.GetServerCertificate())
{
SslServerAuthenticationOptions sslOptions = new SslServerAuthenticationOptions();
sslOptions.EnabledSslProtocols = _options.SslProtocols;
sslOptions.ApplicationProtocols = _options.SslApplicationProtocols;
sslOptions.ServerCertificate = cert;
await sslStream.AuthenticateAsServerAsync(sslOptions, CancellationToken.None).ConfigureAwait(false);
}
stream = sslStream;
if (sslStream.NegotiatedApplicationProtocol == SslApplicationProtocol.Http2)
{
// Do not pass original options so the CreateConnectionAsync won't try to do ALPN again.
return connection = await Http2LoopbackServerFactory.Singleton.CreateConnectionAsync(new SocketWrapper(socket), stream, options).ConfigureAwait(false);
}
if (sslStream.NegotiatedApplicationProtocol == SslApplicationProtocol.Http11 ||
sslStream.NegotiatedApplicationProtocol == default)
{
// Do not pass original options so the CreateConnectionAsync won't try to do ALPN again.
return connection = await Http11LoopbackServerFactory.Singleton.CreateConnectionAsync(new SocketWrapper(socket), stream, options).ConfigureAwait(false);
}
else
{
throw new Exception($"Unsupported negotiated protocol {sslStream.NegotiatedApplicationProtocol}");
}
}
if (_options.ClearTextVersion == HttpVersion.Version11)
{
return connection = await Http11LoopbackServerFactory.Singleton.CreateConnectionAsync(new SocketWrapper(socket), stream, options).ConfigureAwait(false);
}
else if (_options.ClearTextVersion == HttpVersion.Version20)
{
return connection = await Http2LoopbackServerFactory.Singleton.CreateConnectionAsync(new SocketWrapper(socket), stream, options).ConfigureAwait(false);
}
else
{
throw new Exception($"Invalid ClearTextVersion={_options.ClearTextVersion} specified");
}
}
catch
{
connection?.Dispose();
connection = null;
stream.Dispose();
throw;
}
finally
{
if (connection != null)
{
await connection.InitializeConnectionAsync().ConfigureAwait(false);
}
}
}
public override async Task<HttpRequestData> HandleRequestAsync(HttpStatusCode statusCode = HttpStatusCode.OK, IList<HttpHeaderData> headers = null, string content = "")
{
using (GenericLoopbackConnection connection = await EstablishGenericConnectionAsync().ConfigureAwait(false))
{
return await connection.HandleRequestAsync(statusCode, headers, content).ConfigureAwait(false);
}
}
public override async Task AcceptConnectionAsync(Func<GenericLoopbackConnection, Task> funcAsync)
{
using (GenericLoopbackConnection connection = await EstablishGenericConnectionAsync().ConfigureAwait(false))
{
await funcAsync(connection).ConfigureAwait(false);
}
}
public static Task CreateClientAndServerAsync(Func<Uri, Task> clientFunc, Func<GenericLoopbackServer, Task> serverFunc, int timeout = 60_000)
{
return CreateClientAndServerAsync(clientFunc, serverFunc, null, timeout);
}
public static async Task CreateClientAndServerAsync(Func<Uri, Task> clientFunc, Func<GenericLoopbackServer, Task> serverFunc, HttpAgnosticOptions httpOptions, int timeout = 60_000)
{
using (var server = HttpAgnosticLoopbackServer.CreateServer(httpOptions ?? new HttpAgnosticOptions()))
{
Task clientTask = clientFunc(server.Address);
Task serverTask = serverFunc(server);
await new Task[] { clientTask, serverTask }.WhenAllOrAnyFailed(timeout).ConfigureAwait(false);
}
}
}
public class HttpAgnosticOptions : GenericLoopbackOptions
{
        // Only consulted when UseSsl is false; must then be either HttpVersion.Version11 or HttpVersion.Version20.
public Version ClearTextVersion { get; set; }
public List<SslApplicationProtocol> SslApplicationProtocols { get; set; }
}
public sealed class HttpAgnosticLoopbackServerFactory : LoopbackServerFactory
{
public static readonly HttpAgnosticLoopbackServerFactory Singleton = new HttpAgnosticLoopbackServerFactory();
public static async Task CreateServerAsync(Func<HttpAgnosticLoopbackServer, Uri, Task> funcAsync, int millisecondsTimeout = 60_000)
{
using (var server = HttpAgnosticLoopbackServer.CreateServer())
{
await funcAsync(server, server.Address).WaitAsync(TimeSpan.FromMilliseconds(millisecondsTimeout));
}
}
public override GenericLoopbackServer CreateServer(GenericLoopbackOptions options = null)
{
return HttpAgnosticLoopbackServer.CreateServer(CreateOptions(options));
}
public override Task<GenericLoopbackConnection> CreateConnectionAsync(SocketWrapper socket, Stream stream, GenericLoopbackOptions options = null)
{
            // Creating a connection directly is not supported for the agnostic factory;
            // HttpAgnosticLoopbackServer negotiates the protocol for each accepted connection instead.
            throw new NotImplementedException("HttpAgnosticLoopbackServerFactory cannot create connection.");
}
private static HttpAgnosticOptions CreateOptions(GenericLoopbackOptions options)
{
HttpAgnosticOptions httpOptions = new HttpAgnosticOptions();
if (options != null)
{
httpOptions.Address = options.Address;
httpOptions.UseSsl = options.UseSsl;
httpOptions.SslProtocols = options.SslProtocols;
httpOptions.ListenBacklog = options.ListenBacklog;
}
return httpOptions;
}
public override async Task CreateServerAsync(Func<GenericLoopbackServer, Uri, Task> funcAsync, int millisecondsTimeout = 60_000, GenericLoopbackOptions options = null)
{
using (var server = CreateServer(options))
{
await funcAsync(server, server.Address).WaitAsync(TimeSpan.FromMilliseconds(millisecondsTimeout));
}
}
public override Version Version => HttpVersion.Unknown;
}
internal class ReturnBufferStream : Stream
{
private Stream _stream;
private Memory<byte> _buffer;
public ReturnBufferStream(Stream stream, Memory<byte> buffer)
{
_stream = stream;
_buffer = buffer;
}
        public override int Read(byte[] buffer, int offset, int count)
        {
            if (_buffer.IsEmpty)
            {
                return _stream.Read(buffer, offset, count);
            }
            // Serve the prepended ("returned") bytes first.
            var fromBuffer = Math.Min(_buffer.Length, count);
            _buffer.Slice(0, fromBuffer).CopyTo(new Memory<byte>(buffer, offset, count));
            _buffer = _buffer.Slice(fromBuffer);
            offset += fromBuffer;
            count -= fromBuffer;
            // If the caller asked for more than was buffered, continue from the wrapped stream.
            if (count > 0)
            {
                return _stream.Read(buffer, offset, count) + fromBuffer;
            }
            return fromBuffer;
        }
public override bool CanRead => _stream.CanRead;
public override bool CanSeek => _stream.CanSeek;
public override bool CanWrite => _stream.CanWrite;
public override long Length => _stream.Length;
public override long Position { get => _stream.Position; set => _stream.Position = value; }
public override void Flush() => _stream.Flush();
public override long Seek(long offset, SeekOrigin origin) => _stream.Seek(offset, origin);
public override void SetLength(long value) => _stream.SetLength(value);
public override void Write(byte[] buffer, int offset, int count) => _stream.Write(buffer, offset, count);
protected override void Dispose(bool disposing)
{
_stream.Dispose();
}
}
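    // --- Illustrative sketch (not part of the original sources) ---
    // Shows the intent of ReturnBufferStream: bytes already pulled off a stream
    // (for example while sniffing an incoming connection) can be handed back so the
    // next reader still observes them before the remaining stream data. The
    // MemoryStream input below is purely hypothetical.
    internal static class ReturnBufferStreamSketch
    {
        internal static void Demo()
        {
            byte[] alreadyRead = System.Text.Encoding.ASCII.GetBytes("GET /");
            using var remainder = new MemoryStream(System.Text.Encoding.ASCII.GetBytes(" HTTP/1.1\r\n"));
            using var combined = new ReturnBufferStream(remainder, alreadyRead);
            byte[] buffer = new byte[32];
            // The prepended bytes come back first; the same Read call may also pull
            // from the wrapped stream once the prepended buffer is drained.
            int read = combined.Read(buffer, 0, buffer.Length);
            _ = read;
        }
    }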
}
|
| -1 |
./src/libraries/Microsoft.Extensions.DependencyInjection/tests/DI.Tests/Fakes/CircularReferences/DirectCircularDependencyB.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace Microsoft.Extensions.DependencyInjection.Tests.Fakes
{
public class DirectCircularDependencyB
{
public DirectCircularDependencyB(DirectCircularDependencyA a)
{
}
}
}
|
| -1 |
./src/tests/JIT/HardwareIntrinsics/General/Vector128_1/op_OnesComplement.UInt16.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void op_OnesComplementUInt16()
{
var test = new VectorUnaryOpTest__op_OnesComplementUInt16();
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class VectorUnaryOpTest__op_OnesComplementUInt16
{
private struct DataTable
{
private byte[] inArray1;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle outHandle;
private ulong alignment;
public DataTable(UInt16[] inArray1, UInt16[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<UInt16>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt16>();
if ((alignment != 32 && alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<UInt16, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<UInt16> _fld1;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
return testStruct;
}
public void RunStructFldScenario(VectorUnaryOpTest__op_OnesComplementUInt16 testClass)
{
var result = ~_fld1;
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, testClass._dataTable.outArrayPtr);
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<UInt16>>() / sizeof(UInt16);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<UInt16>>() / sizeof(UInt16);
private static UInt16[] _data1 = new UInt16[Op1ElementCount];
private static Vector128<UInt16> _clsVar1;
private Vector128<UInt16> _fld1;
private DataTable _dataTable;
static VectorUnaryOpTest__op_OnesComplementUInt16()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _clsVar1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
}
public VectorUnaryOpTest__op_OnesComplementUInt16()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
_dataTable = new DataTable(_data1, new UInt16[RetElementCount], LargestVectorSize);
}
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = ~Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Vector128<UInt16>).GetMethod("op_OnesComplement", new Type[] { typeof(Vector128<UInt16>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt16>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = ~_clsVar1;
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr);
var result = ~op1;
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new VectorUnaryOpTest__op_OnesComplementUInt16();
var result = ~test._fld1;
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, _dataTable.outArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = ~_fld1;
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = ~test._fld1;
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
private void ValidateResult(Vector128<UInt16> op1, void* result, [CallerMemberName] string method = "")
{
UInt16[] inArray1 = new UInt16[Op1ElementCount];
UInt16[] outArray = new UInt16[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), op1);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
ValidateResult(inArray1, outArray, method);
}
private void ValidateResult(void* op1, void* result, [CallerMemberName] string method = "")
{
UInt16[] inArray1 = new UInt16[Op1ElementCount];
UInt16[] outArray = new UInt16[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
ValidateResult(inArray1, outArray, method);
}
private void ValidateResult(UInt16[] firstOp, UInt16[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (result[0] != (ushort)(~firstOp[0]))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (result[i] != (ushort)(~firstOp[i]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Vector128)}.op_OnesComplement<UInt16>(Vector128<UInt16>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
|
| -1 |
./src/tests/JIT/HardwareIntrinsics/Arm/AdvSimd/CompareGreaterThan.Vector128.UInt32.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void CompareGreaterThan_Vector128_UInt32()
{
var test = new SimpleBinaryOpTest__CompareGreaterThan_Vector128_UInt32();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__CompareGreaterThan_Vector128_UInt32
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(UInt32[] inArray1, UInt32[] inArray2, UInt32[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<UInt32>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<UInt32>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt32>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<UInt32, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<UInt32, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<UInt32> _fld1;
public Vector128<UInt32> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__CompareGreaterThan_Vector128_UInt32 testClass)
{
var result = AdvSimd.CompareGreaterThan(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__CompareGreaterThan_Vector128_UInt32 testClass)
{
fixed (Vector128<UInt32>* pFld1 = &_fld1)
fixed (Vector128<UInt32>* pFld2 = &_fld2)
{
var result = AdvSimd.CompareGreaterThan(
AdvSimd.LoadVector128((UInt32*)(pFld1)),
AdvSimd.LoadVector128((UInt32*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static UInt32[] _data1 = new UInt32[Op1ElementCount];
private static UInt32[] _data2 = new UInt32[Op2ElementCount];
private static Vector128<UInt32> _clsVar1;
private static Vector128<UInt32> _clsVar2;
private Vector128<UInt32> _fld1;
private Vector128<UInt32> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__CompareGreaterThan_Vector128_UInt32()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _clsVar1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _clsVar2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
}
public SimpleBinaryOpTest__CompareGreaterThan_Vector128_UInt32()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _fld1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _fld2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
_dataTable = new DataTable(_data1, _data2, new UInt32[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.CompareGreaterThan(
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.CompareGreaterThan(
AdvSimd.LoadVector128((UInt32*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector128((UInt32*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.CompareGreaterThan), new Type[] { typeof(Vector128<UInt32>), typeof(Vector128<UInt32>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.CompareGreaterThan), new Type[] { typeof(Vector128<UInt32>), typeof(Vector128<UInt32>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector128((UInt32*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector128((UInt32*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.CompareGreaterThan(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<UInt32>* pClsVar1 = &_clsVar1)
fixed (Vector128<UInt32>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.CompareGreaterThan(
AdvSimd.LoadVector128((UInt32*)(pClsVar1)),
AdvSimd.LoadVector128((UInt32*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr);
var result = AdvSimd.CompareGreaterThan(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector128((UInt32*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector128((UInt32*)(_dataTable.inArray2Ptr));
var result = AdvSimd.CompareGreaterThan(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__CompareGreaterThan_Vector128_UInt32();
var result = AdvSimd.CompareGreaterThan(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__CompareGreaterThan_Vector128_UInt32();
fixed (Vector128<UInt32>* pFld1 = &test._fld1)
fixed (Vector128<UInt32>* pFld2 = &test._fld2)
{
var result = AdvSimd.CompareGreaterThan(
AdvSimd.LoadVector128((UInt32*)(pFld1)),
AdvSimd.LoadVector128((UInt32*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.CompareGreaterThan(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<UInt32>* pFld1 = &_fld1)
fixed (Vector128<UInt32>* pFld2 = &_fld2)
{
var result = AdvSimd.CompareGreaterThan(
AdvSimd.LoadVector128((UInt32*)(pFld1)),
AdvSimd.LoadVector128((UInt32*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.CompareGreaterThan(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.CompareGreaterThan(
AdvSimd.LoadVector128((UInt32*)(&test._fld1)),
AdvSimd.LoadVector128((UInt32*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<UInt32> op1, Vector128<UInt32> op2, void* result, [CallerMemberName] string method = "")
{
UInt32[] inArray1 = new UInt32[Op1ElementCount];
UInt32[] inArray2 = new UInt32[Op2ElementCount];
UInt32[] outArray = new UInt32[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
UInt32[] inArray1 = new UInt32[Op1ElementCount];
UInt32[] inArray2 = new UInt32[Op2ElementCount];
UInt32[] outArray = new UInt32[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(UInt32[] left, UInt32[] right, UInt32[] result, [CallerMemberName] string method = "")
{
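// CompareGreaterThan yields a per-lane mask: all bits of a lane are set when left[i] > right[i] and cleared otherwise.
// Helpers.CompareGreaterThan is assumed to compute the same scalar reference value (UInt32.MaxValue or 0),
// so it can be compared directly against each result element.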
bool succeeded = true;
for (var i = 0; i < RetElementCount; i++)
{
if (Helpers.CompareGreaterThan(left[i], right[i]) != result[i])
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd)}.{nameof(AdvSimd.CompareGreaterThan)}<UInt32>(Vector128<UInt32>, Vector128<UInt32>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
|
| -1 |
./src/libraries/System.Memory/tests/ReadOnlyMemory/CopyTo.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace System.MemoryTests
{
public static partial class ReadOnlyMemoryTests
{
[Fact]
public static void TryCopyTo()
{
int[] src = { 1, 2, 3 };
int[] dst = { 99, 100, 101 };
ReadOnlyMemory<int> srcMemory = src;
bool success = srcMemory.TryCopyTo(dst);
Assert.True(success);
Assert.Equal<int>(src, dst);
}
[Fact]
public static void TryCopyToSingle()
{
int[] src = { 1 };
int[] dst = { 99 };
ReadOnlyMemory<int> srcMemory = src;
bool success = srcMemory.TryCopyTo(dst);
Assert.True(success);
Assert.Equal<int>(src, dst);
}
[Fact]
public static void TryCopyToArraySegmentImplicit()
{
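// ArraySegment<int> converts implicitly to Memory<int>, so TryCopyTo targets only the three-element slice
// of dst starting at index 1; the elements outside the segment are not part of the copy.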
int[] src = { 1, 2, 3 };
int[] dst = { 5, 99, 100, 101, 10 };
var segment = new ArraySegment<int>(dst, 1, 3);
ReadOnlyMemory<int> srcMemory = src;
bool success = srcMemory.TryCopyTo(segment);
Assert.True(success);
Assert.Equal<int>(src, segment);
}
[Fact]
public static void TryCopyToEmpty()
{
int[] src = { };
int[] dst = { 99, 100, 101 };
ReadOnlyMemory<int> srcMemory = src;
bool success = srcMemory.TryCopyTo(dst);
Assert.True(success);
int[] expected = { 99, 100, 101 };
Assert.Equal<int>(expected, dst);
}
[Fact]
public static void TryCopyToLonger()
{
int[] src = { 1, 2, 3 };
int[] dst = { 99, 100, 101, 102 };
ReadOnlyMemory<int> srcMemory = src;
bool success = srcMemory.TryCopyTo(dst);
Assert.True(success);
int[] expected = { 1, 2, 3, 102 };
Assert.Equal<int>(expected, dst);
}
[Fact]
public static void TryCopyToShorter()
{
int[] src = { 1, 2, 3 };
int[] dst = { 99, 100 };
ReadOnlyMemory<int> srcMemory = src;
bool success = srcMemory.TryCopyTo(dst);
Assert.False(success);
int[] expected = { 99, 100 };
Assert.Equal<int>(expected, dst); // TryCopyTo() checks for sufficient space before doing any copying.
}
[Fact]
public static void CopyToShorter()
{
int[] src = { 1, 2, 3 };
int[] dst = { 99, 100 };
ReadOnlyMemory<int> srcMemory = src;
Assert.Throws<ArgumentException>(() => srcMemory.CopyTo(dst));
int[] expected = { 99, 100 };
Assert.Equal<int>(expected, dst); // CopyTo() checks for sufficient space before doing any copying.
}
[Fact]
public static void Overlapping1()
{
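// CopyTo over overlapping source and destination regions of the same array is expected to behave like memmove:
// each source element is read before the aliasing destination slot is overwritten, which is what the expected
// array below encodes.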
int[] a = { 90, 91, 92, 93, 94, 95, 96, 97 };
var src = new ReadOnlyMemory<int>(a, 1, 6);
var dst = new Memory<int>(a, 2, 6);
src.CopyTo(dst);
int[] expected = { 90, 91, 91, 92, 93, 94, 95, 96 };
Assert.Equal<int>(expected, a);
}
[Fact]
public static void Overlapping2()
{
int[] a = { 90, 91, 92, 93, 94, 95, 96, 97 };
var src = new ReadOnlyMemory<int>(a, 2, 6);
var dst = new Memory<int>(a, 1, 6);
src.CopyTo(dst);
int[] expected = { 90, 92, 93, 94, 95, 96, 97, 97 };
Assert.Equal<int>(expected, a);
}
}
}
|
| -1 |
./src/coreclr/nativeaot/System.Private.Reflection.Core/src/System/Reflection/Runtime/TypeInfos/RuntimeTypeInfo.BindingFlags.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Reflection.Runtime.General;
using System.Reflection.Runtime.BindingFlagSupport;
namespace System.Reflection.Runtime.TypeInfos
{
internal abstract partial class RuntimeTypeInfo
{
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors | DynamicallyAccessedMemberTypes.NonPublicConstructors)]
public sealed override ConstructorInfo[] GetConstructors(BindingFlags bindingAttr) => Query<ConstructorInfo>(bindingAttr).ToArray();
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors | DynamicallyAccessedMemberTypes.NonPublicConstructors)]
protected sealed override ConstructorInfo GetConstructorImpl(BindingFlags bindingAttr, Binder binder, CallingConventions callConvention, Type[] types, ParameterModifier[] modifiers)
{
Debug.Assert(types != null);
QueryResult<ConstructorInfo> queryResult = Query<ConstructorInfo>(bindingAttr);
ListBuilder<ConstructorInfo> candidates = new ListBuilder<ConstructorInfo>();
foreach (ConstructorInfo candidate in queryResult)
{
if (candidate.QualifiesBasedOnParameterCount(bindingAttr, callConvention, types))
candidates.Add(candidate);
}
// For perf and desktop compat, fast-path these specific checks before calling on the binder to break ties.
if (candidates.Count == 0)
return null;
if (types.Length == 0 && candidates.Count == 1)
{
ConstructorInfo firstCandidate = candidates[0];
ParameterInfo[] parameters = firstCandidate.GetParametersNoCopy();
if (parameters.Length == 0)
return firstCandidate;
}
if ((bindingAttr & BindingFlags.ExactBinding) != 0)
return System.DefaultBinder.ExactBinding(candidates.ToArray(), types) as ConstructorInfo;
if (binder == null)
binder = DefaultBinder;
return binder.SelectMethod(bindingAttr, candidates.ToArray(), types, modifiers) as ConstructorInfo;
}
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicEvents | DynamicallyAccessedMemberTypes.NonPublicEvents)]
public sealed override EventInfo[] GetEvents(BindingFlags bindingAttr) => Query<EventInfo>(bindingAttr).ToArray();
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicEvents | DynamicallyAccessedMemberTypes.NonPublicEvents)]
public sealed override EventInfo GetEvent(string name, BindingFlags bindingAttr) => Query<EventInfo>(name, bindingAttr).Disambiguate();
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields | DynamicallyAccessedMemberTypes.NonPublicFields)]
public sealed override FieldInfo[] GetFields(BindingFlags bindingAttr) => Query<FieldInfo>(bindingAttr).ToArray();
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields | DynamicallyAccessedMemberTypes.NonPublicFields)]
public sealed override FieldInfo GetField(string name, BindingFlags bindingAttr) => Query<FieldInfo>(name, bindingAttr).Disambiguate();
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicMethods | DynamicallyAccessedMemberTypes.NonPublicMethods)]
public sealed override MethodInfo[] GetMethods(BindingFlags bindingAttr) => Query<MethodInfo>(bindingAttr).ToArray();
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicMethods | DynamicallyAccessedMemberTypes.NonPublicMethods)]
protected sealed override MethodInfo GetMethodImpl(string name, BindingFlags bindingAttr, Binder binder, CallingConventions callConvention, Type[] types, ParameterModifier[] modifiers)
{
return GetMethodImplCommon(name, GenericParameterCountAny, bindingAttr, binder, callConvention, types, modifiers);
}
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicMethods | DynamicallyAccessedMemberTypes.NonPublicMethods)]
protected sealed override MethodInfo GetMethodImpl(string name, int genericParameterCount, BindingFlags bindingAttr, Binder binder, CallingConventions callConvention, Type[] types, ParameterModifier[] modifiers)
{
return GetMethodImplCommon(name, genericParameterCount, bindingAttr, binder, callConvention, types, modifiers);
}
private MethodInfo GetMethodImplCommon(string name, int genericParameterCount, BindingFlags bindingAttr, Binder binder, CallingConventions callConvention, Type[] types, ParameterModifier[] modifiers)
{
Debug.Assert(name != null);
// GetMethodImpl() is a funnel for two groups of APIs. We can distinguish them by comparing "types" to null.
if (types == null)
{
// Group #1: These APIs accept only a name and BindingFlags. The other parameters are hard-wired by the non-virtual API entry points.
Debug.Assert(genericParameterCount == GenericParameterCountAny);
Debug.Assert(binder == null);
Debug.Assert(callConvention == CallingConventions.Any);
Debug.Assert(modifiers == null);
return Query<MethodInfo>(name, bindingAttr).Disambiguate();
}
else
{
// Group #2: These APIs take a set of parameter types and an optional binder.
QueryResult<MethodInfo> queryResult = Query<MethodInfo>(name, bindingAttr);
ListBuilder<MethodInfo> candidates = new ListBuilder<MethodInfo>();
foreach (MethodInfo candidate in queryResult)
{
if (genericParameterCount != GenericParameterCountAny && genericParameterCount != candidate.GenericParameterCount)
continue;
if (candidate.QualifiesBasedOnParameterCount(bindingAttr, callConvention, types))
candidates.Add(candidate);
}
if (candidates.Count == 0)
return null;
// For perf and desktop compat, fast-path these specific checks before calling on the binder to break ties.
if (types.Length == 0 && candidates.Count == 1)
return candidates[0];
if (binder == null)
binder = DefaultBinder;
return binder.SelectMethod(bindingAttr, candidates.ToArray(), types, modifiers) as MethodInfo;
}
}
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicNestedTypes | DynamicallyAccessedMemberTypes.NonPublicNestedTypes)]
public sealed override Type[] GetNestedTypes(BindingFlags bindingAttr) => Query<Type>(bindingAttr).ToArray();
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicNestedTypes | DynamicallyAccessedMemberTypes.NonPublicNestedTypes)]
public sealed override Type GetNestedType(string name, BindingFlags bindingAttr) => Query<Type>(name, bindingAttr).Disambiguate();
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)]
public sealed override PropertyInfo[] GetProperties(BindingFlags bindingAttr) => Query<PropertyInfo>(bindingAttr).ToArray();
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)]
protected sealed override PropertyInfo GetPropertyImpl(string name, BindingFlags bindingAttr, Binder binder, Type returnType, Type[] types, ParameterModifier[] modifiers)
{
Debug.Assert(name != null);
// GetPropertyImpl() is a funnel for two groups of APIs. We can distinguish them by comparing "types" and "returnType" to null.
if (types == null && returnType == null)
{
// Group #1: These APIs accept only a name and BindingFlags. The other parameters are hard-wired by the non-virtual API entry points.
Debug.Assert(binder == null);
Debug.Assert(modifiers == null);
return Query<PropertyInfo>(name, bindingAttr).Disambiguate();
}
else
{
// Group #2: These APIs take a set of parameter types, a return type (not both can be null), and an optional binder.
QueryResult<PropertyInfo> queryResult = Query<PropertyInfo>(name, bindingAttr);
ListBuilder<PropertyInfo> candidates = new ListBuilder<PropertyInfo>();
foreach (PropertyInfo candidate in queryResult)
{
if (types == null || (candidate.GetIndexParameters().Length == types.Length))
{
candidates.Add(candidate);
}
}
if (candidates.Count == 0)
return null;
// For perf and desktop compat, fast-path these specific checks before calling on the binder to break ties.
if (types == null || types.Length == 0)
{
// no arguments
if (candidates.Count == 1)
{
PropertyInfo firstCandidate = candidates[0];
if (returnType is not null && !returnType.IsEquivalentTo(firstCandidate.PropertyType))
return null;
return firstCandidate;
}
else
{
if (returnType is null)
// If we are here, we have no args or property type to select over and more than one property with that name.
throw new AmbiguousMatchException();
}
}
if ((bindingAttr & BindingFlags.ExactBinding) != 0)
return System.DefaultBinder.ExactPropertyBinding(candidates.ToArray(), returnType, types);
if (binder == null)
binder = DefaultBinder;
return binder.SelectProperty(bindingAttr, candidates.ToArray(), returnType, types, modifiers);
}
}
private QueryResult<M> Query<M>(BindingFlags bindingAttr) where M : MemberInfo
{
return Query<M>(null, bindingAttr, null);
}
private QueryResult<M> Query<M>(string name, BindingFlags bindingAttr) where M : MemberInfo
{
if (name == null)
throw new ArgumentNullException(nameof(name));
return Query<M>(name, bindingAttr, null);
}
private QueryResult<M> Query<M>(string optionalName, BindingFlags bindingAttr, Func<M, bool> optionalPredicate) where M : MemberInfo
{
MemberPolicies<M> policies = MemberPolicies<M>.Default;
bindingAttr = policies.ModifyBindingFlags(bindingAttr);
bool ignoreCase = (bindingAttr & BindingFlags.IgnoreCase) != 0;
TypeComponentsCache cache = Cache;
QueriedMemberList<M> queriedMembers;
if (optionalName == null)
queriedMembers = cache.GetQueriedMembers<M>();
else
queriedMembers = cache.GetQueriedMembers<M>(optionalName, ignoreCase: ignoreCase);
if (optionalPredicate != null)
queriedMembers = queriedMembers.Filter(optionalPredicate);
return new QueryResult<M>(bindingAttr, queriedMembers);
}
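// Lazy, race-tolerant initialization: several threads may construct a TypeComponentsCache concurrently, but
// publishing any one instance through the volatile field is safe, so no lock is needed.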
private TypeComponentsCache Cache => _lazyCache ?? (_lazyCache = new TypeComponentsCache(this));
private volatile TypeComponentsCache _lazyCache;
private const int GenericParameterCountAny = -1;
}
}
|
| -1 |
./src/libraries/Common/src/Interop/OSX/System.Security.Cryptography.Native.Apple/Interop.Pbkdf2.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
internal static partial class Interop
{
internal static partial class AppleCrypto
{
internal static unsafe void Pbkdf2(
PAL_HashAlgorithm prfAlgorithm,
ReadOnlySpan<byte> password,
ReadOnlySpan<byte> salt,
int iterations,
Span<byte> destination)
{
fixed (byte* pPassword = password)
fixed (byte* pSalt = salt)
fixed (byte* pDestination = destination)
{
int ccStatus;
int ret = AppleCryptoNative_Pbkdf2(
prfAlgorithm,
pPassword,
password.Length,
pSalt,
salt.Length,
iterations,
pDestination,
destination.Length,
&ccStatus);
if (ret == 0)
{
throw Interop.AppleCrypto.CreateExceptionForCCError(
ccStatus,
Interop.AppleCrypto.CCCryptorStatus);
}
if (ret != 1)
{
Debug.Fail($"Pbkdf2 failed with invalid input {ret}");
throw new CryptographicException();
}
}
}
[LibraryImport(Libraries.AppleCryptoNative)]
private static unsafe partial int AppleCryptoNative_Pbkdf2(
PAL_HashAlgorithm prfAlgorithm,
byte* password,
int passwordLen,
byte* salt,
int saltLen,
int iterations,
byte* derivedKey,
int derivedKeyLen,
int* errorCode);
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
internal static partial class Interop
{
internal static partial class AppleCrypto
{
internal static unsafe void Pbkdf2(
PAL_HashAlgorithm prfAlgorithm,
ReadOnlySpan<byte> password,
ReadOnlySpan<byte> salt,
int iterations,
Span<byte> destination)
{
fixed (byte* pPassword = password)
fixed (byte* pSalt = salt)
fixed (byte* pDestination = destination)
{
int ccStatus;
int ret = AppleCryptoNative_Pbkdf2(
prfAlgorithm,
pPassword,
password.Length,
pSalt,
salt.Length,
iterations,
pDestination,
destination.Length,
&ccStatus);
if (ret == 0)
{
throw Interop.AppleCrypto.CreateExceptionForCCError(
ccStatus,
Interop.AppleCrypto.CCCryptorStatus);
}
if (ret != 1)
{
Debug.Fail($"Pbkdf2 failed with invalid input {ret}");
throw new CryptographicException();
}
}
}
[LibraryImport(Libraries.AppleCryptoNative)]
private static unsafe partial int AppleCryptoNative_Pbkdf2(
PAL_HashAlgorithm prfAlgorithm,
byte* password,
int passwordLen,
byte* salt,
int saltLen,
int iterations,
byte* derivedKey,
int derivedKeyLen,
int* errorCode);
}
}
| -1 |
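For context on what an interop wrapper like the one above ultimately services, here is a small sketch using the public .NET PBKDF2 API (available since .NET 6). On macOS builds this public call path is expected to reach CommonCrypto through interop of this kind, though that routing is stated here as an assumption rather than a guarantee.

```csharp
using System;
using System.Security.Cryptography;

class Pbkdf2Demo
{
    static void Main()
    {
        byte[] salt = RandomNumberGenerator.GetBytes(16);

        // Derive a 32-byte key with 100,000 iterations of HMAC-SHA256.
        byte[] key = Rfc2898DeriveBytes.Pbkdf2(
            password: "correct horse battery staple",
            salt: salt,
            iterations: 100_000,
            hashAlgorithm: HashAlgorithmName.SHA256,
            outputLength: 32);

        Console.WriteLine(Convert.ToHexString(key));
    }
}
```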
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/HardwareIntrinsics/Arm/AdvSimd/And.Vector128.UInt16.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void And_Vector128_UInt16()
{
var test = new SimpleBinaryOpTest__And_Vector128_UInt16();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__And_Vector128_UInt16
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(UInt16[] inArray1, UInt16[] inArray2, UInt16[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<UInt16>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<UInt16>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt16>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<UInt16, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<UInt16, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<UInt16> _fld1;
public Vector128<UInt16> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__And_Vector128_UInt16 testClass)
{
var result = AdvSimd.And(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__And_Vector128_UInt16 testClass)
{
fixed (Vector128<UInt16>* pFld1 = &_fld1)
fixed (Vector128<UInt16>* pFld2 = &_fld2)
{
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(pFld1)),
AdvSimd.LoadVector128((UInt16*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<UInt16>>() / sizeof(UInt16);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<UInt16>>() / sizeof(UInt16);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<UInt16>>() / sizeof(UInt16);
private static UInt16[] _data1 = new UInt16[Op1ElementCount];
private static UInt16[] _data2 = new UInt16[Op2ElementCount];
private static Vector128<UInt16> _clsVar1;
private static Vector128<UInt16> _clsVar2;
private Vector128<UInt16> _fld1;
private Vector128<UInt16> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__And_Vector128_UInt16()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _clsVar1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _clsVar2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
}
public SimpleBinaryOpTest__And_Vector128_UInt16()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _fld2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
_dataTable = new DataTable(_data1, _data2, new UInt16[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.And(
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.And), new Type[] { typeof(Vector128<UInt16>), typeof(Vector128<UInt16>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt16>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.And), new Type[] { typeof(Vector128<UInt16>), typeof(Vector128<UInt16>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt16>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.And(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<UInt16>* pClsVar1 = &_clsVar1)
fixed (Vector128<UInt16>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(pClsVar1)),
AdvSimd.LoadVector128((UInt16*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray2Ptr);
var result = AdvSimd.And(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray2Ptr));
var result = AdvSimd.And(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__And_Vector128_UInt16();
var result = AdvSimd.And(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__And_Vector128_UInt16();
fixed (Vector128<UInt16>* pFld1 = &test._fld1)
fixed (Vector128<UInt16>* pFld2 = &test._fld2)
{
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(pFld1)),
AdvSimd.LoadVector128((UInt16*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.And(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<UInt16>* pFld1 = &_fld1)
fixed (Vector128<UInt16>* pFld2 = &_fld2)
{
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(pFld1)),
AdvSimd.LoadVector128((UInt16*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.And(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(&test._fld1)),
AdvSimd.LoadVector128((UInt16*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<UInt16> op1, Vector128<UInt16> op2, void* result, [CallerMemberName] string method = "")
{
UInt16[] inArray1 = new UInt16[Op1ElementCount];
UInt16[] inArray2 = new UInt16[Op2ElementCount];
UInt16[] outArray = new UInt16[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
UInt16[] inArray1 = new UInt16[Op1ElementCount];
UInt16[] inArray2 = new UInt16[Op2ElementCount];
UInt16[] outArray = new UInt16[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(UInt16[] left, UInt16[] right, UInt16[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
for (var i = 0; i < RetElementCount; i++)
{
if (Helpers.And(left[i], right[i]) != result[i])
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd)}.{nameof(AdvSimd.And)}<UInt16>(Vector128<UInt16>, Vector128<UInt16>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void And_Vector128_UInt16()
{
var test = new SimpleBinaryOpTest__And_Vector128_UInt16();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__And_Vector128_UInt16
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(UInt16[] inArray1, UInt16[] inArray2, UInt16[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<UInt16>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<UInt16>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt16>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<UInt16, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<UInt16, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<UInt16> _fld1;
public Vector128<UInt16> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__And_Vector128_UInt16 testClass)
{
var result = AdvSimd.And(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__And_Vector128_UInt16 testClass)
{
fixed (Vector128<UInt16>* pFld1 = &_fld1)
fixed (Vector128<UInt16>* pFld2 = &_fld2)
{
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(pFld1)),
AdvSimd.LoadVector128((UInt16*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<UInt16>>() / sizeof(UInt16);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<UInt16>>() / sizeof(UInt16);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<UInt16>>() / sizeof(UInt16);
private static UInt16[] _data1 = new UInt16[Op1ElementCount];
private static UInt16[] _data2 = new UInt16[Op2ElementCount];
private static Vector128<UInt16> _clsVar1;
private static Vector128<UInt16> _clsVar2;
private Vector128<UInt16> _fld1;
private Vector128<UInt16> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__And_Vector128_UInt16()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _clsVar1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _clsVar2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
}
public SimpleBinaryOpTest__And_Vector128_UInt16()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _fld2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt16(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt16(); }
_dataTable = new DataTable(_data1, _data2, new UInt16[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.And(
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.And), new Type[] { typeof(Vector128<UInt16>), typeof(Vector128<UInt16>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt16>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.And), new Type[] { typeof(Vector128<UInt16>), typeof(Vector128<UInt16>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt16>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.And(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<UInt16>* pClsVar1 = &_clsVar1)
fixed (Vector128<UInt16>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(pClsVar1)),
AdvSimd.LoadVector128((UInt16*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray2Ptr);
var result = AdvSimd.And(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector128((UInt16*)(_dataTable.inArray2Ptr));
var result = AdvSimd.And(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__And_Vector128_UInt16();
var result = AdvSimd.And(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__And_Vector128_UInt16();
fixed (Vector128<UInt16>* pFld1 = &test._fld1)
fixed (Vector128<UInt16>* pFld2 = &test._fld2)
{
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(pFld1)),
AdvSimd.LoadVector128((UInt16*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.And(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<UInt16>* pFld1 = &_fld1)
fixed (Vector128<UInt16>* pFld2 = &_fld2)
{
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(pFld1)),
AdvSimd.LoadVector128((UInt16*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.And(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.And(
AdvSimd.LoadVector128((UInt16*)(&test._fld1)),
AdvSimd.LoadVector128((UInt16*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<UInt16> op1, Vector128<UInt16> op2, void* result, [CallerMemberName] string method = "")
{
UInt16[] inArray1 = new UInt16[Op1ElementCount];
UInt16[] inArray2 = new UInt16[Op2ElementCount];
UInt16[] outArray = new UInt16[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
UInt16[] inArray1 = new UInt16[Op1ElementCount];
UInt16[] inArray2 = new UInt16[Op2ElementCount];
UInt16[] outArray = new UInt16[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt16>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(UInt16[] left, UInt16[] right, UInt16[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
for (var i = 0; i < RetElementCount; i++)
{
if (Helpers.And(left[i], right[i]) != result[i])
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd)}.{nameof(AdvSimd.And)}<UInt16>(Vector128<UInt16>, Vector128<UInt16>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| -1 |
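As a quick illustration of the operation the generated test above validates, here is a minimal, standalone sketch of `AdvSimd.And` on `Vector128<ushort>`, guarded by the same `IsSupported` check the test harness uses:

```csharp
using System;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;

class AdvSimdAndDemo
{
    static void Main()
    {
        if (!AdvSimd.IsSupported)
        {
            Console.WriteLine("AdvSimd is not supported on this machine.");
            return;
        }

        // Broadcast a single value into every 16-bit lane of each vector.
        Vector128<ushort> left = Vector128.Create((ushort)0b1100);
        Vector128<ushort> right = Vector128.Create((ushort)0b1010);

        // Element-wise bitwise AND, the operation the generated test exercises.
        Vector128<ushort> result = AdvSimd.And(left, right);

        Console.WriteLine(result); // every lane is 0b1000, i.e. 8
    }
}
```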
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/coreclr/utilcode/dlwrap.cpp
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
#include "stdafx.h" // Precompiled header key.
#include "utilcode.h"
#include "metadata.h"
#include "ex.h"
#include "pedecoder.h"
#include <wininet.h>
#include <urlmon.h>
DWORD
GetFileVersionInfoSizeW_NoThrow(
LPCWSTR lptstrFilename, /* Filename of version stamped file */
LPDWORD lpdwHandle
)
{
WRAPPER_NO_CONTRACT;
HRESULT hr=S_OK;
DWORD dwRet=0;
EX_TRY
{
dwRet=GetFileVersionInfoSize( (LPWSTR)lptstrFilename, lpdwHandle );
}
EX_CATCH_HRESULT(hr);
if (hr!=S_OK)
SetLastError(hr);
return dwRet;
}
BOOL
GetFileVersionInfoW_NoThrow(
LPCWSTR lptstrFilename, /* Filename of version stamped file */
DWORD dwHandle, /* Information from GetFileVersionSize */
DWORD dwLen, /* Length of buffer for info */
LPVOID lpData
)
{
WRAPPER_NO_CONTRACT;
HRESULT hr=S_OK;
BOOL bRet=FALSE;
EX_TRY
{
bRet=GetFileVersionInfo( (LPWSTR)lptstrFilename, dwHandle,dwLen,lpData );
}
EX_CATCH_HRESULT(hr);
if (hr!=S_OK)
SetLastError(hr);
return bRet;
}
BOOL
VerQueryValueW_NoThrow(
const LPVOID pBlock,
LPCWSTR lpSubBlock,
LPVOID * lplpBuffer,
PUINT puLen
)
{
WRAPPER_NO_CONTRACT;
HRESULT hr=S_OK;
BOOL bRet=FALSE;
EX_TRY
{
bRet=VerQueryValueW( pBlock, (LPWSTR)lpSubBlock,lplpBuffer,puLen );
}
EX_CATCH_HRESULT(hr);
if (hr!=S_OK)
SetLastError(hr);
return bRet;
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
#include "stdafx.h" // Precompiled header key.
#include "utilcode.h"
#include "metadata.h"
#include "ex.h"
#include "pedecoder.h"
#include <wininet.h>
#include <urlmon.h>
DWORD
GetFileVersionInfoSizeW_NoThrow(
LPCWSTR lptstrFilename, /* Filename of version stamped file */
LPDWORD lpdwHandle
)
{
WRAPPER_NO_CONTRACT;
HRESULT hr=S_OK;
DWORD dwRet=0;
EX_TRY
{
dwRet=GetFileVersionInfoSize( (LPWSTR)lptstrFilename, lpdwHandle );
}
EX_CATCH_HRESULT(hr);
if (hr!=S_OK)
SetLastError(hr);
return dwRet;
}
BOOL
GetFileVersionInfoW_NoThrow(
LPCWSTR lptstrFilename, /* Filename of version stamped file */
DWORD dwHandle, /* Information from GetFileVersionSize */
DWORD dwLen, /* Length of buffer for info */
LPVOID lpData
)
{
WRAPPER_NO_CONTRACT;
HRESULT hr=S_OK;
BOOL bRet=FALSE;
EX_TRY
{
bRet=GetFileVersionInfo( (LPWSTR)lptstrFilename, dwHandle,dwLen,lpData );
}
EX_CATCH_HRESULT(hr);
if (hr!=S_OK)
SetLastError(hr);
return bRet;
}
BOOL
VerQueryValueW_NoThrow(
const LPVOID pBlock,
LPCWSTR lpSubBlock,
LPVOID * lplpBuffer,
PUINT puLen
)
{
WRAPPER_NO_CONTRACT;
HRESULT hr=S_OK;
BOOL bRet=FALSE;
EX_TRY
{
bRet=VerQueryValueW( pBlock, (LPWSTR)lpSubBlock,lplpBuffer,puLen );
}
EX_CATCH_HRESULT(hr);
if (hr!=S_OK)
SetLastError(hr);
return bRet;
}
| -1 |
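The native wrappers above guard the Win32 file-version APIs against exceptions; for comparison, the managed surface for the same information is `System.Diagnostics.FileVersionInfo`. A minimal sketch (the probed path is only an example and any versioned PE file would do):

```csharp
using System;
using System.Diagnostics;

class FileVersionDemo
{
    static void Main()
    {
        // Example: inspect the currently running executable.
        string path = Environment.ProcessPath
            ?? throw new InvalidOperationException("No process path available.");

        FileVersionInfo info = FileVersionInfo.GetVersionInfo(path);
        Console.WriteLine($"File:    {info.FileName}");
        Console.WriteLine($"Version: {info.FileVersion}");
        Console.WriteLine($"Product: {info.ProductName} {info.ProductVersion}");
    }
}
```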
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Runtime.Serialization.Primitives/tests/System/Runtime/Serialization/CollectionDataContractAttributeTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace System.Runtime.Serialization.Tests
{
public class CollectionDataContractAttributeTests
{
[Fact]
public void Ctor_Default()
{
var attribute = new CollectionDataContractAttribute();
Assert.False(attribute.IsReference);
Assert.False(attribute.IsReferenceSetExplicitly);
Assert.Null(attribute.ItemName);
Assert.False(attribute.IsItemNameSetExplicitly);
Assert.Null(attribute.Name);
Assert.False(attribute.IsNameSetExplicitly);
Assert.Null(attribute.Namespace);
Assert.False(attribute.IsNamespaceSetExplicitly);
Assert.Null(attribute.KeyName);
Assert.False(attribute.IsKeyNameSetExplicitly);
Assert.Null(attribute.ValueName);
Assert.False(attribute.IsValueNameSetExplicitly);
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void IsReference_Set_GetReturnsExpected(bool value)
{
var attribute = new CollectionDataContractAttribute() { IsReference = value };
Assert.Equal(value, attribute.IsReference);
Assert.True(attribute.IsReferenceSetExplicitly);
}
public static TheoryData<string> StringValue_TestData => new TheoryData<string>()
{
{ null },
{ "" },
{ "value" }
};
[Theory]
[MemberData(nameof(StringValue_TestData))]
public void ItemName_Set_GetReturnsExpected(string value)
{
var attribute = new CollectionDataContractAttribute() { ItemName = value };
Assert.Equal(value, attribute.ItemName);
Assert.True(attribute.IsItemNameSetExplicitly);
}
[Theory]
[MemberData(nameof(StringValue_TestData))]
public void Name_Set_GetReturnsExpected(string value)
{
var attribute = new CollectionDataContractAttribute() { Name = value };
Assert.Equal(value, attribute.Name);
Assert.True(attribute.IsNameSetExplicitly);
}
[Theory]
[MemberData(nameof(StringValue_TestData))]
public void Namespace_Set_GetReturnsExpected(string value)
{
var attribute = new CollectionDataContractAttribute() { Namespace = value };
Assert.Equal(value, attribute.Namespace);
Assert.True(attribute.IsNamespaceSetExplicitly);
}
[Theory]
[MemberData(nameof(StringValue_TestData))]
public void KeyName_Set_GetReturnsExpected(string value)
{
var attribute = new CollectionDataContractAttribute() { KeyName = value };
Assert.Equal(value, attribute.KeyName);
Assert.True(attribute.IsKeyNameSetExplicitly);
}
[Theory]
[MemberData(nameof(StringValue_TestData))]
public void ValueName_Set_GetReturnsExpected(string value)
{
var attribute = new CollectionDataContractAttribute() { ValueName = value };
Assert.Equal(value, attribute.ValueName);
Assert.True(attribute.IsValueNameSetExplicitly);
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace System.Runtime.Serialization.Tests
{
public class CollectionDataContractAttributeTests
{
[Fact]
public void Ctor_Default()
{
var attribute = new CollectionDataContractAttribute();
Assert.False(attribute.IsReference);
Assert.False(attribute.IsReferenceSetExplicitly);
Assert.Null(attribute.ItemName);
Assert.False(attribute.IsItemNameSetExplicitly);
Assert.Null(attribute.Name);
Assert.False(attribute.IsNameSetExplicitly);
Assert.Null(attribute.Namespace);
Assert.False(attribute.IsNamespaceSetExplicitly);
Assert.Null(attribute.KeyName);
Assert.False(attribute.IsKeyNameSetExplicitly);
Assert.Null(attribute.ValueName);
Assert.False(attribute.IsValueNameSetExplicitly);
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void IsReference_Set_GetReturnsExpected(bool value)
{
var attribute = new CollectionDataContractAttribute() { IsReference = value };
Assert.Equal(value, attribute.IsReference);
Assert.True(attribute.IsReferenceSetExplicitly);
}
public static TheoryData<string> StringValue_TestData => new TheoryData<string>()
{
{ null },
{ "" },
{ "value" }
};
[Theory]
[MemberData(nameof(StringValue_TestData))]
public void ItemName_Set_GetReturnsExpected(string value)
{
var attribute = new CollectionDataContractAttribute() { ItemName = value };
Assert.Equal(value, attribute.ItemName);
Assert.True(attribute.IsItemNameSetExplicitly);
}
[Theory]
[MemberData(nameof(StringValue_TestData))]
public void Name_Set_GetReturnsExpected(string value)
{
var attribute = new CollectionDataContractAttribute() { Name = value };
Assert.Equal(value, attribute.Name);
Assert.True(attribute.IsNameSetExplicitly);
}
[Theory]
[MemberData(nameof(StringValue_TestData))]
public void Namespace_Set_GetReturnsExpected(string value)
{
var attribute = new CollectionDataContractAttribute() { Namespace = value };
Assert.Equal(value, attribute.Namespace);
Assert.True(attribute.IsNamespaceSetExplicitly);
}
[Theory]
[MemberData(nameof(StringValue_TestData))]
public void KeyName_Set_GetReturnsExpected(string value)
{
var attribute = new CollectionDataContractAttribute() { KeyName = value };
Assert.Equal(value, attribute.KeyName);
Assert.True(attribute.IsKeyNameSetExplicitly);
}
[Theory]
[MemberData(nameof(StringValue_TestData))]
public void ValueName_Set_GetReturnsExpected(string value)
{
var attribute = new CollectionDataContractAttribute() { ValueName = value };
Assert.Equal(value, attribute.ValueName);
Assert.True(attribute.IsValueNameSetExplicitly);
}
}
}
| -1 |
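To show the attribute properties these tests exercise in a realistic setting, here is a small, hypothetical contract type (the type name and namespace URI are invented): the `Name`, `Namespace`, `ItemName`, `KeyName`, and `ValueName` settings control how the dictionary's elements are named in the serialized XML.

```csharp
using System.Collections.Generic;
using System.IO;
using System.Runtime.Serialization;

// Custom dictionary whose serialized element names are overridden by the attribute.
[CollectionDataContract(
    Name = "Scores",
    Namespace = "http://example.org/scores",
    ItemName = "Entry",
    KeyName = "Player",
    ValueName = "Points")]
public class ScoreBoard : Dictionary<string, int>
{
}

class CollectionContractDemo
{
    static void Main()
    {
        var scores = new ScoreBoard { ["alice"] = 42, ["bob"] = 17 };

        var serializer = new DataContractSerializer(typeof(ScoreBoard));
        using var stream = new MemoryStream();
        serializer.WriteObject(stream, scores);

        // The emitted XML uses <Scores>/<Entry>/<Player>/<Points> element names.
        System.Console.WriteLine(System.Text.Encoding.UTF8.GetString(stream.ToArray()));
    }
}
```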
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Dynamic.Runtime/tests/Dynamic.DynamicType/Conformance.dynamic.dynamicType.generics.derived.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.errorverifier.errorverifier
{
using System.Reflection;
using System.Resources;
public enum ErrorElementId
{
None,
SK_METHOD, // method
SK_CLASS, // type
SK_NAMESPACE, // namespace
SK_FIELD, // field
SK_PROPERTY, // property
SK_UNKNOWN, // element
SK_VARIABLE, // variable
SK_EVENT, // event
SK_TYVAR, // type parameter
SK_ALIAS, // using alias
ERRORSYM, // <error>
NULL, // <null>
GlobalNamespace, // <global namespace>
MethodGroup, // method group
AnonMethod, // anonymous method
Lambda, // lambda expression
AnonymousType, // anonymous type
}
public enum ErrorMessageId
{
None,
BadBinaryOps, // Operator '{0}' cannot be applied to operands of type '{1}' and '{2}'
IntDivByZero, // Division by constant zero
BadIndexLHS, // Cannot apply indexing with [] to an expression of type '{0}'
BadIndexCount, // Wrong number of indices inside []; expected '{0}'
BadUnaryOp, // Operator '{0}' cannot be applied to operand of type '{1}'
NoImplicitConv, // Cannot implicitly convert type '{0}' to '{1}'
NoExplicitConv, // Cannot convert type '{0}' to '{1}'
ConstOutOfRange, // Constant value '{0}' cannot be converted to a '{1}'
AmbigBinaryOps, // Operator '{0}' is ambiguous on operands of type '{1}' and '{2}'
AmbigUnaryOp, // Operator '{0}' is ambiguous on an operand of type '{1}'
ValueCantBeNull, // Cannot convert null to '{0}' because it is a non-nullable value type
WrongNestedThis, // Cannot access a non-static member of outer type '{0}' via nested type '{1}'
NoSuchMember, // '{0}' does not contain a definition for '{1}'
ObjectRequired, // An object reference is required for the non-static field, method, or property '{0}'
AmbigCall, // The call is ambiguous between the following methods or properties: '{0}' and '{1}'
BadAccess, // '{0}' is inaccessible due to its protection level
MethDelegateMismatch, // No overload for '{0}' matches delegate '{1}'
AssgLvalueExpected, // The left-hand side of an assignment must be a variable, property or indexer
NoConstructors, // The type '{0}' has no constructors defined
BadDelegateConstructor, // The delegate '{0}' does not have a valid constructor
PropertyLacksGet, // The property or indexer '{0}' cannot be used in this context because it lacks the get accessor
ObjectProhibited, // Member '{0}' cannot be accessed with an instance reference; qualify it with a type name instead
AssgReadonly, // A readonly field cannot be assigned to (except in a constructor or a variable initializer)
RefReadonly, // A readonly field cannot be passed ref or out (except in a constructor)
AssgReadonlyStatic, // A static readonly field cannot be assigned to (except in a static constructor or a variable initializer)
RefReadonlyStatic, // A static readonly field cannot be passed ref or out (except in a static constructor)
AssgReadonlyProp, // Property or indexer '{0}' cannot be assigned to -- it is read only
AbstractBaseCall, // Cannot call an abstract base member: '{0}'
RefProperty, // A property or indexer may not be passed as an out or ref parameter
ManagedAddr, // Cannot take the address of, get the size of, or declare a pointer to a managed type ('{0}')
FixedNotNeeded, // You cannot use the fixed statement to take the address of an already fixed expression
UnsafeNeeded, // Dynamic calls cannot be used in conjunction with pointers
BadBoolOp, // In order to be applicable as a short circuit operator a user-defined logical operator ('{0}') must have the same return type as the type of its 2 parameters
MustHaveOpTF, // The type ('{0}') must contain declarations of operator true and operator false
CheckedOverflow, // The operation overflows at compile time in checked mode
ConstOutOfRangeChecked, // Constant value '{0}' cannot be converted to a '{1}' (use 'unchecked' syntax to override)
AmbigMember, // Ambiguity between '{0}' and '{1}'
SizeofUnsafe, // '{0}' does not have a predefined size, therefore sizeof can only be used in an unsafe context (consider using System.Runtime.InteropServices.Marshal.SizeOf)
FieldInitRefNonstatic, // A field initializer cannot reference the non-static field, method, or property '{0}'
CallingFinalizeDepracated, // Destructors and object.Finalize cannot be called directly. Consider calling IDisposable.Dispose if available.
CallingBaseFinalizeDeprecated, // Do not directly call your base class Finalize method. It is called automatically from your destructor.
BadCastInFixed, // The right hand side of a fixed statement assignment may not be a cast expression
NoImplicitConvCast, // Cannot implicitly convert type '{0}' to '{1}'. An explicit conversion exists (are you missing a cast?)
InaccessibleGetter, // The property or indexer '{0}' cannot be used in this context because the get accessor is inaccessible
InaccessibleSetter, // The property or indexer '{0}' cannot be used in this context because the set accessor is inaccessible
BadArity, // Using the generic {1} '{0}' requires '{2}' type arguments
BadTypeArgument, // The type '{0}' may not be used as a type argument
TypeArgsNotAllowed, // The {1} '{0}' cannot be used with type arguments
HasNoTypeVars, // The non-generic {1} '{0}' cannot be used with type arguments
NewConstraintNotSatisfied, // '{2}' must be a non-abstract type with a public parameterless constructor in order to use it as parameter '{1}' in the generic type or method '{0}'
GenericConstraintNotSatisfiedRefType, // The type '{3}' cannot be used as type parameter '{2}' in the generic type or method '{0}'. There is no implicit reference conversion from '{3}' to '{1}'.
GenericConstraintNotSatisfiedNullableEnum, // The type '{3}' cannot be used as type parameter '{2}' in the generic type or method '{0}'. The nullable type '{3}' does not satisfy the constraint of '{1}'.
GenericConstraintNotSatisfiedNullableInterface, // The type '{3}' cannot be used as type parameter '{2}' in the generic type or method '{0}'. The nullable type '{3}' does not satisfy the constraint of '{1}'. Nullable types can not satisfy any interface constraints.
GenericConstraintNotSatisfiedTyVar, // The type '{3}' cannot be used as type parameter '{2}' in the generic type or method '{0}'. There is no boxing conversion or type parameter conversion from '{3}' to '{1}'.
GenericConstraintNotSatisfiedValType, // The type '{3}' cannot be used as type parameter '{2}' in the generic type or method '{0}'. There is no boxing conversion from '{3}' to '{1}'.
TypeVarCantBeNull, // Cannot convert null to type parameter '{0}' because it could be a non-nullable value type. Consider using 'default({0})' instead.
BadRetType, // '{1} {0}' has the wrong return type
CantInferMethTypeArgs, // The type arguments for method '{0}' cannot be inferred from the usage. Try specifying the type arguments explicitly.
MethGrpToNonDel, // Cannot convert method group '{0}' to non-delegate type '{1}'. Did you intend to invoke the method?
RefConstraintNotSatisfied, // The type '{2}' must be a reference type in order to use it as parameter '{1}' in the generic type or method '{0}'
ValConstraintNotSatisfied, // The type '{2}' must be a non-nullable value type in order to use it as parameter '{1}' in the generic type or method '{0}'
CircularConstraint, // Circular constraint dependency involving '{0}' and '{1}'
BaseConstraintConflict, // Type parameter '{0}' inherits conflicting constraints '{1}' and '{2}'
ConWithValCon, // Type parameter '{1}' has the 'struct' constraint so '{1}' cannot be used as a constraint for '{0}'
AmbigUDConv, // Ambiguous user defined conversions '{0}' and '{1}' when converting from '{2}' to '{3}'
PredefinedTypeNotFound, // Predefined type '{0}' is not defined or imported
PredefinedTypeBadType, // Predefined type '{0}' is declared incorrectly
BindToBogus, // '{0}' is not supported by the language
CantCallSpecialMethod, // '{0}': cannot explicitly call operator or accessor
BogusType, // '{0}' is a type not supported by the language
MissingPredefinedMember, // Missing compiler required member '{0}.{1}'
LiteralDoubleCast, // Literal of type double cannot be implicitly converted to type '{1}'; use an '{0}' suffix to create a literal of this type
UnifyingInterfaceInstantiations, // '{0}' cannot implement both '{1}' and '{2}' because they may unify for some type parameter substitutions
ConvertToStaticClass, // Cannot convert to static type '{0}'
GenericArgIsStaticClass, // '{0}': static types cannot be used as type arguments
PartialMethodToDelegate, // Cannot create delegate from method '{0}' because it is a partial method without an implementing declaration
IncrementLvalueExpected, // The operand of an increment or decrement operator must be a variable, property or indexer
NoSuchMemberOrExtension, // '{0}' does not contain a definition for '{1}' and no extension method '{1}' accepting a first argument of type '{0}' could be found (are you missing a using directive or an assembly reference?)
ValueTypeExtDelegate, // Extension methods '{0}' defined on value type '{1}' cannot be used to create delegates
BadArgCount, // No overload for method '{0}' takes '{1}' arguments
BadArgTypes, // The best overloaded method match for '{0}' has some invalid arguments
BadArgType, // Argument '{0}': cannot convert from '{1}' to '{2}'
RefLvalueExpected, // A ref or out argument must be an assignable variable
BadProtectedAccess, // Cannot access protected member '{0}' via a qualifier of type '{1}'; the qualifier must be of type '{2}' (or derived from it)
BindToBogusProp2, // Property, indexer, or event '{0}' is not supported by the language; try directly calling accessor methods '{1}' or '{2}'
BindToBogusProp1, // Property, indexer, or event '{0}' is not supported by the language; try directly calling accessor method '{1}'
BadDelArgCount, // Delegate '{0}' does not take '{1}' arguments
BadDelArgTypes, // Delegate '{0}' has some invalid arguments
AssgReadonlyLocal, // Cannot assign to '{0}' because it is read-only
RefReadonlyLocal, // Cannot pass '{0}' as a ref or out argument because it is read-only
ReturnNotLValue, // Cannot modify the return value of '{0}' because it is not a variable
BadArgExtraRef, // Argument '{0}' should not be passed with the '{1}' keyword
// DelegateOnConditional, // Cannot create delegate with '{0}' because it has a Conditional attribute (REMOVED)
BadArgRef, // Argument '{0}' must be passed with the '{1}' keyword
AssgReadonly2, // Members of readonly field '{0}' cannot be modified (except in a constructor or a variable initializer)
RefReadonly2, // Members of readonly field '{0}' cannot be passed ref or out (except in a constructor)
AssgReadonlyStatic2, // Fields of static readonly field '{0}' cannot be assigned to (except in a static constructor or a variable initializer)
RefReadonlyStatic2, // Fields of static readonly field '{0}' cannot be passed ref or out (except in a static constructor)
AssgReadonlyLocalCause, // Cannot assign to '{0}' because it is a '{1}'
RefReadonlyLocalCause, // Cannot pass '{0}' as a ref or out argument because it is a '{1}'
ThisStructNotInAnonMeth, // Anonymous methods, lambda expressions, and query expressions inside structs cannot access instance members of 'this'. Consider copying 'this' to a local variable outside the anonymous method, lambda expression or query expression and using the local instead.
DelegateOnNullable, // Cannot bind delegate to '{0}' because it is a member of 'System.Nullable<T>'
BadCtorArgCount, // '{0}' does not contain a constructor that takes '{1}' arguments
BadExtensionArgTypes, // '{0}' does not contain a definition for '{1}' and the best extension method overload '{2}' has some invalid arguments
BadInstanceArgType, // Instance argument: cannot convert from '{0}' to '{1}'
BadArgTypesForCollectionAdd, // The best overloaded Add method '{0}' for the collection initializer has some invalid arguments
InitializerAddHasParamModifiers, // The best overloaded method match '{0}' for the collection initializer element cannot be used. Collection initializer 'Add' methods cannot have ref or out parameters.
NonInvocableMemberCalled, // Non-invocable member '{0}' cannot be used like a method.
NamedArgumentSpecificationBeforeFixedArgument, // Named argument specifications must appear after all fixed arguments have been specified
BadNamedArgument, // The best overload for '{0}' does not have a parameter named '{1}'
BadNamedArgumentForDelegateInvoke, // The delegate '{0}' does not have a parameter named '{1}'
DuplicateNamedArgument, // Named argument '{0}' cannot be specified multiple times
NamedArgumentUsedInPositional, // Named argument '{0}' specifies a parameter for which a positional argument has already been given
}
public enum RuntimeErrorId
{
None,
// RuntimeBinderInternalCompilerException
InternalCompilerError, // An unexpected exception occurred while binding a dynamic operation
// ArgumentException
BindRequireArguments, // Cannot bind call with no calling object
// RuntimeBinderException
BindCallFailedOverloadResolution, // Overload resolution failed
// ArgumentException
BindBinaryOperatorRequireTwoArguments, // Binary operators must be invoked with two arguments
// ArgumentException
BindUnaryOperatorRequireOneArgument, // Unary operators must be invoked with one argument
// RuntimeBinderException
BindPropertyFailedMethodGroup, // The name '{0}' is bound to a method and cannot be used like a property
// RuntimeBinderException
BindPropertyFailedEvent, // The event '{0}' can only appear on the left hand side of += or -=
// RuntimeBinderException
BindInvokeFailedNonDelegate, // Cannot invoke a non-delegate type
// ArgumentException
BindImplicitConversionRequireOneArgument, // Implicit conversion takes exactly one argument
// ArgumentException
BindExplicitConversionRequireOneArgument, // Explicit conversion takes exactly one argument
// ArgumentException
BindBinaryAssignmentRequireTwoArguments, // Binary operators cannot be invoked with one argument
// RuntimeBinderException
BindBinaryAssignmentFailedNullReference, // Cannot perform member assignment on a null reference
// RuntimeBinderException
NullReferenceOnMemberException, // Cannot perform runtime binding on a null reference
// RuntimeBinderException
BindCallToConditionalMethod, // Cannot dynamically invoke method '{0}' because it has a Conditional attribute
// RuntimeBinderException
BindToVoidMethodButExpectResult, // Cannot implicitly convert type 'void' to 'object'
// EE?
EmptyDynamicView, // No further information on this object could be discovered
// MissingMemberException
GetValueonWriteOnlyProperty, // Write Only properties are not supported
}
public class ErrorVerifier
{
private static Assembly s_asm;
private static ResourceManager s_rm1;
private static ResourceManager s_rm2;
public static string GetErrorElement(ErrorElementId id)
{
return string.Empty;
}
public static bool Verify(ErrorMessageId id, string actualError, params string[] args)
{
return true;
}
public static bool Verify(RuntimeErrorId id, string actualError, params string[] args)
{
return true;
}
}
}
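// Note (editorial): the ErrorVerifier above is a no-op stub -- both Verify overloads
// always return true, GetErrorElement returns string.Empty, and the s_asm/s_rm1/s_rm2
// fields are never used -- so the tests below only assert that the expected exception
// *type* is thrown, not the exact binder message. A hedged sketch of what a stricter
// verifier could look like (illustrative only; GetResourceString is a hypothetical
// helper, not part of this file):
//
//     public static bool Verify(ErrorMessageId id, string actualError, params string[] args)
//     {
//         // Hypothetical: format the expected message from a resource table and compare.
//         string expected = string.Format(GetResourceString(id), args);
//         return actualError == expected;
//     }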
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.derived001.derived001
{
using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.errorverifier.errorverifier;
// <Title>Generic constraints</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
// <Expects Status=success></Expects>
// <Code>
using System.Collections.Generic;
public class MyClass<T, U>
where T : List<object>, new()
{
internal T myList = new T();
}
public class MyClassDerived<U> : MyClass<List<dynamic>, U>
{
public void Foo()
{
if (myList.Count != 0)
Test.Status = 2;
myList.Add(1);
try
{
myList[0].Foo(); //This should compile into a call site
Test.Status = 2;
}
catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException e)
{
Test.Status = 2;
if (ErrorVerifier.Verify(ErrorMessageId.NoSuchMember, e.Message, "int", "Foo"))
Test.Status = 1;
}
}
}
public class Test
{
public static int Status;
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod(null));
}
public static int MainMethod(string[] args)
{
MyClassDerived<object> mc = new MyClassDerived<dynamic>();
mc.Foo();
if (Test.Status != 1)
return 1;
return 0;
}
}
// </Code>
}
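// Note (editorial): in derived001 the constraint "where T : List<object>, new()" is
// satisfied by List<dynamic>, so myList[0] has static type dynamic and
// myList[0].Foo() compiles into a runtime-bound call site. At run time the element is
// an int, which has no Foo, so the binder throws RuntimeBinderException and the catch
// block sets Test.Status = 1. Minimal sketch of the same behavior (illustrative only,
// not part of the original test):
//
//     dynamic n = 1;
//     try { n.Foo(); }   // binds at run time and fails: int has no member Foo
//     catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException) { /* expected */ }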
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.derived003.derived003
{
// <Title>Generic constraints</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class MyClass<T, U>
where T : U
{
}
public class MyClassDerived<T> : MyClass<T, object>
{
public void Foo()
{
Test.Status = 1;
}
}
public class Test
{
public static int Status;
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod(null));
}
public static int MainMethod(string[] args)
{
MyClassDerived<dynamic> mc = new MyClassDerived<dynamic>();
mc.Foo();
if (Test.Status != 1)
return 1;
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.derived004.derived004
{
// <Title>Generic constraints</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
// <Expects Status=success></Expects>
// <Code>
public class MyClass<T, U>
where T : U
{
}
public class MyClassDerived<T> : MyClass<T, dynamic>
{
public void Foo()
{
Test.Status = 1;
}
}
public class Test
{
public static int Status;
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod(null));
}
public static int MainMethod(string[] args)
{
MyClassDerived<dynamic> mc = new MyClassDerived<dynamic>();
mc.Foo();
if (Test.Status != 1)
return 1;
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.derived005.derived005
{
// <Title>Generic constraints</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
// <Expects Status=success></Expects>
// <Code>
public class MyClass<T, U>
where T : U
{
}
public class MyClassDerived<T> : MyClass<T, dynamic>
{
public void Foo()
{
Test.Status = 1;
}
}
public class Test
{
public static int Status;
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod(null));
}
public static int MainMethod(string[] args)
{
MyClassDerived<object> mc = new MyClassDerived<dynamic>();
mc.Foo();
if (Test.Status != 1)
return 1;
return 0;
}
}
// </Code>
}
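// Note (editorial): derived003, derived004 and derived005 exercise the same idea from
// different directions: for generic constraint checking and for conversions, 'dynamic'
// and 'object' are identity-convertible, so MyClass<T, object> / MyClass<T, dynamic>
// both accept T = dynamic, and a MyClassDerived<dynamic> instance can be assigned to a
// MyClassDerived<object> variable. Minimal sketch (illustrative only):
//
//     MyClassDerived<object> a = new MyClassDerived<dynamic>();   // identity conversion
//     MyClassDerived<dynamic> b = new MyClassDerived<dynamic>();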
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.b813045bindfield01.b813045bindfield01
{
// <Title>Generic constraints</Title>
// <Description>Event invocation throws an ArgumentNullException incorrectly
// </Description>
// <RelatedBugs></RelatedBugs>
// <Expects Status=success></Expects>
using System;
public interface I<T>
{
T Prop
{
get;
set;
}
void M(T t, out char ret);
}
public class DC<T> : I<T>
{
private T _tt = default(T);
public T Prop
{
get
{
return _tt;
}
set
{
_tt = value;
}
}
public void M(T t, out char ret)
{
_tt = t;
ret = 'i';
}
}
public struct DS<T> : I<T>
{
public T Prop
{
get
{
return default(T);
}
set
{
}
}
public void M(T t, out char ret)
{
T tt = t;
ret = 'y';
}
}
/// <summary>
    /// No public instance field in this generic type
/// </summary>
/// <typeparam name = "T"></typeparam>
public class C<T>
{
public event EventHandler E = delegate
{
}
;
public static void Foo()
{
dynamic c = new C<T>();
c.E(null, null);
}
// field
private T _tt;
public char Bar(T t1, T t2)
{
_tt = t1;
return 'q'; // t1 == t2;
}
}
public struct S<T, V>
{
public event EventHandler E;
public S(EventHandler e)
{
E = e;
}
public static void Foo()
{
dynamic d = new S<T, V>(delegate
{
}
);
d.E(null, null);
}
// local var
public char Bar(T t, V v)
{
T t1 = t;
return 'c';
}
}
public class Test
{
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
C<int>.Foo();
S<string, char>.Foo();
dynamic d1 = new C<Test>();
bool ret = 'q' == d1.Bar(null, null);
dynamic d2 = new S<sbyte, dynamic>(new EventHandler((p, q) =>
{
}
));
ret &= 'c' == d2.Bar(-1, null);
dynamic v = new DC<object>();
ret &= default(object) == v.Prop;
char c = ' ';
v.M(null, out c);
ret &= 'i' == c;
d2 = new DS<dynamic>();
ret &= default(dynamic) == d2.Prop;
d2.M(new object(), out c);
ret &= 'y' == c;
System.Console.WriteLine(ret);
return ret ? 0 : 1;
}
}
}
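// Note (editorial): b813045bindfield01 is a regression test for the bug named in the
// <Description> above -- invoking a field-like event through a dynamic receiver
// (c.E(null, null), d.E(null, null)) used to raise an ArgumentNullException. The rest
// of MainMethod then drives generic members (a method with an out parameter, a
// property) through dynamic call sites on both the class DC<T> and the struct DS<T>
// and folds every check into 'ret', so the test returns 0 only if all of them bind.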
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.changetypearg001.changetypearg001
{
// <Title>Specializing generic overloads </Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
var TestValue2 = new GC1<string>();
int rez = 0;
dynamic d = new SubGenericClass<string>();
dynamic d1 = default(GC1<GC1<string>>);
d[d1, TestValue2] = d1;
if (d[d1, TestValue2] != null)
rez++;
if (d.Method1(i: d1, t: TestValue2) != null)
rez++;
var TestValue = new GC1<GC1<string>>();
dynamic dt = new VeryDerived<string>();
dynamic d2 = default(GC1<GC1<GC1<string>>>);
dt[d2, TestValue] = d2;
if (dt[d2, TestValue] != null)
rez++;
if (dt.Method1(i: d2, t: TestValue) != null)
rez++;
return rez;
}
}
public class GC1<T>
{
}
public abstract class GenericClass<T>
{
public abstract GC1<T> this[GC1<T> i, T t]
{
get;
set;
}
public abstract GC1<T> Method1(GC1<T> i, T t);
}
public class SubGenericClass<T> : GenericClass<GC1<T>>
{
public override GC1<GC1<T>> this[GC1<GC1<T>> i, GC1<T> t]
{
get
{
return null;
}
set
{
}
}
public override GC1<GC1<T>> Method1(GC1<GC1<T>> i, GC1<T> t)
{
return null;
}
}
public class VeryDerived<T> : SubGenericClass<GC1<T>>
{
public override GC1<GC1<GC1<T>>> Method1(GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t)
{
return base.Method1(i, t);
}
public override GC1<GC1<GC1<T>>> this[GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t]
{
get
{
return base[i, t];
}
set
{
base[i, t] = value;
}
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.changetypearg002.changetypearg002
{
// <Title>Specializing generic overloads </Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
var TestValue2 = new GC1<string>();
int rez = 0;
dynamic d = new SubGenericClass<string>();
dynamic d1 = default(GC1<GC1<string>>);
d[d1, TestValue2] = d1;
if (d[d1, TestValue2] != null)
rez++;
if (d.Method1(i: d1, t: TestValue2) != null)
rez++;
var TestValue = new GC1<GC1<string>>();
dynamic dt = new VeryDerived<string>();
dynamic d2 = default(GC1<GC1<GC1<string>>>);
dt[d2, TestValue] = d2; // System.InvalidProgramException
if (dt[d2, TestValue] != null)
rez++;
if (dt.Method1(i: d2, t: TestValue) != null)
rez++;
return rez;
}
}
public class GC1<T>
{
}
public class GenericClass<T>
{
public virtual GC1<T> this[GC1<T> i, T t]
{
get
{
return null;
}
set
{
}
}
public virtual GC1<T> Method1(GC1<T> i, T t)
{
return null;
}
}
public class SubGenericClass<T> : GenericClass<GC1<T>>
{
public override GC1<GC1<T>> this[GC1<GC1<T>> i, GC1<T> t]
{
get
{
return null;
}
set
{
}
}
public override GC1<GC1<T>> Method1(GC1<GC1<T>> i, GC1<T> t)
{
return null;
}
}
public class VeryDerived<T> : SubGenericClass<GC1<T>>
{
public override GC1<GC1<GC1<T>>> Method1(GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t)
{
return base.Method1(i, t);
}
public override GC1<GC1<GC1<T>>> this[GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t]
{
get
{
return base[i, t];
}
set
{
base[i, t] = value;
}
}
}
//</Code>
}
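// Note (editorial): changetypearg001/002 cover overrides that change the shape of the
// type argument: SubGenericClass<T> derives from GenericClass<GC1<T>>, so its
// overridden indexer and Method1 take GC1<GC1<T>>, and VeryDerived<T> nests one level
// deeper again. The dynamic call sites (d[d1, TestValue2], d.Method1(i: d1, t: ...))
// must still bind to the most specialized override, including with named arguments;
// 'rez' stays 0 only because every override returns null. The only difference between
// the two namespaces is an abstract versus a virtual base declaration.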
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.changetypearg003.changetypearg003
{
using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.errorverifier.errorverifier;
// <Title>Specializing generic overloads </Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
using Microsoft.CSharp.RuntimeBinder;
public class Program
{
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var TestValue2 = new GC1<string>();
dynamic d = new SubGenericClass<string>();
dynamic d1 = default(GC1<GC1<string>>);
d[d1, TestValue2] = d1;
if (d[d1, TestValue2] != null)
rez++;
try
{
if (d.Method1(i: d1, t: TestValue2) != null)
rez++;
}
catch (RuntimeBinderException exc)
{
if (ErrorVerifier.Verify(ErrorMessageId.BadArgTypes, exc.Message, "GenericClass<GC1<string>>.Method1(ref GC1<GC1<string>>, GC1<string>)") == false)
rez++;
}
var TestValue = new GC1<GC1<string>>();
dynamic dt = new VeryDerived<string>();
dynamic d2 = default(GC1<GC1<GC1<string>>>);
dt[d2, TestValue] = d2;
if (dt[d2, TestValue] != null)
rez++;
try
{
if (dt.Method1(i: ref d2, t: TestValue) != null)
rez++;
}
catch (RuntimeBinderException exc)
{
if (ErrorVerifier.Verify(ErrorMessageId.BadArgTypes, exc.Message, "GenericClass<GC1<GC1<string>>>.Method1(ref GC1<GC1<GC1<string>>>, GC1<GC1<string>>)") == false)
rez++;
}
return rez;
}
}
public class GC1<T>
{
}
public class GenericClass<T>
{
public virtual GC1<T> this[GC1<T> i, T t]
{
get
{
return null;
}
set
{
}
}
public virtual GC1<T> Method1(ref GC1<T> i, T t)
{
return null;
}
}
public class SubGenericClass<T> : GenericClass<GC1<T>>
{
public override GC1<GC1<T>> this[GC1<GC1<T>> i, GC1<T> t]
{
get
{
return null;
}
set
{
}
}
public override GC1<GC1<T>> Method1(ref GC1<GC1<T>> i, GC1<T> t)
{
return null;
}
}
public class VeryDerived<T> : SubGenericClass<GC1<T>>
{
public override GC1<GC1<GC1<T>>> Method1(ref GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t)
{
return null;
}
public override GC1<GC1<GC1<T>>> this[GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t]
{
get
{
return base[i, t];
}
set
{
base[i, t] = value;
}
}
}
//</Code>
}
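// Note (editorial): in changetypearg003 the virtual Method1 takes its first parameter
// by ref. The first dynamic invocation (d.Method1(i: d1, t: TestValue2)) omits the ref
// modifier, and if that fails the catch block checks for the BadArgTypes binder message;
// the second invocation passes 'i: ref d2' explicitly. Either way 'rez' must stay 0.
// Sketch of a ref-supplied dynamic call against the same override (illustrative only):
//
//     dynamic obj = new SubGenericClass<string>();
//     GC1<GC1<string>> arg = null;
//     obj.Method1(ref arg, new GC1<string>());   // ref supplied: binds to the override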
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr001.ovr001
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public class C<T>
{
public virtual int M(T x)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T> ev;
public virtual void Raise(T t)
{
ev(t);
}
}
public class D : C<int>
{
public override int M(int y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
}
}
public override int this[int x]
{
get
{
return 0;
}
set
{
}
}
public override event Foo<int> ev;
public override void Raise(int t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
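// Note (editorial): the ovr00x namespaces all follow the same accumulator pattern:
// the base class C<T> members return 1, the overrides in D return 0, and MainMethod
// sums every result into 'rez'. Passing dynamic arguments ((dynamic)0, (dynamic)3)
// forces each member access through a runtime call site, so the test passes (returns 0)
// only if dynamic binding still dispatches to the overriding members -- for the method,
// the property on a dynamic receiver, the indexer, named arguments, and the event raise.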
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr002.ovr002
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public class A
{
public virtual int M(int t)
{
return 2;
}
public virtual int this[int x]
{
get
{
return 1;
}
set
{
}
}
}
public class C<T> : A
{
public virtual int M(T x)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T> ev;
public virtual void Raise(T t)
{
ev(t);
}
}
public class D : C<int>
{
public override int M(int y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
}
}
public override int this[int x]
{
get
{
return 0;
}
set
{
}
}
public override event Foo<int> ev;
public override void Raise(int t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
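// Note (editorial): ovr002 adds a further base class A whose own M(int) and indexer
// return 2 and 1. Because applicable members declared in a more derived type hide those
// declared in base types during overload resolution, the call sites must still pick D's
// overrides of C<int> (returning 0) rather than A's members, both for static receivers
// with dynamic arguments and for the dynamic receiver property access.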
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr003.ovr003
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T, U>(T t, U u);
public class C<T, U>
{
public virtual int M(T t, U u)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x, U u]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T, U> ev;
public virtual void Raise(T t, U u)
{
ev(t, u);
}
}
public class D<T> : C<T, int>
{
public override int M(T t, int y)
{
return 1;
}
public override int P
{
get
{
return 1;
}
set
{
}
}
public override int this[T t, int x]
{
get
{
return 1;
}
set
{
}
}
public override event Foo<T, int> ev;
public override void Raise(T t, int u)
{
ev(t, u);
}
}
public class E : D<string>
{
public override event Foo<string, int> ev;
public override int M(string t, int y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
base.P = value;
}
}
public override int this[string t, int x]
{
get
{
return 0;
}
set
{
base[t, x] = value;
}
}
public override void Raise(string t, int u)
{
ev(t, u);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D<int>();
rez += x.M((dynamic)0, (dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3, (dynamic)3];
rez += x.M(t: (dynamic)0, y: (dynamic)0);
rez += x[t: (dynamic)3, x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3, (dynamic)3);
//at this point rez should be 5
rez -= 5;
var y = new E();
rez += y.M("", (dynamic)0);
rez += ((dynamic)y).P;
rez += y["a", (dynamic)3];
rez += y.M(t: "adfsa", y: (dynamic)0);
rez += y[t: "", x: (dynamic)3];
y.ev += Foo;
y.Raise("safs", (dynamic)3);
dynamic t = new E();
rez += t.M("", (dynamic)0);
rez += ((dynamic)t).P;
rez += t["a", (dynamic)3];
rez += t.M(t: "adfsa", y: (dynamic)0);
rez += t[t: "", x: (dynamic)3];
t.ev += (Foo<string, int>)Foo;
t.Raise("safs", (dynamic)3);
return rez;
}
private static int Foo(int x, int y)
{
return 0;
}
private static int Foo(string x, int y)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr004.ovr004
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T, U>(T t, U u);
public class A<T>
{
}
public class C<T, U>
{
public virtual int M(T t, U u)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x, U u]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T, U> ev;
public virtual void Raise(T t, U u)
{
ev(t, u);
}
}
public class D<T> : C<T, A<A<long>>>
{
public override int M(T t, A<A<long>> y)
{
return 1;
}
public override int P
{
get
{
return 1;
}
set
{
}
}
public override int this[T t, A<A<long>> x]
{
get
{
return 1;
}
set
{
}
}
public override event Foo<T, A<A<long>>> ev;
public override void Raise(T t, A<A<long>> u)
{
ev(t, u);
}
}
public class E : D<A<A<string>>>
{
public override event Foo<A<A<string>>, A<A<long>>> ev;
public override int M(A<A<string>> t, A<A<long>> y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
base.P = value;
}
}
public override int this[A<A<string>> t, A<A<long>> x]
{
get
{
return 0;
}
set
{
base[t, x] = value;
}
}
public override void Raise(A<A<string>> t, A<A<long>> u)
{
ev(t, u);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D<A<A<string>>>();
rez += x.M((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
rez += ((dynamic)x).P;
rez += x[(dynamic)new A<A<string>>(), (dynamic)new A<A<long>>()];
rez += x.M(t: (dynamic)new A<A<string>>(), y: (dynamic)new A<A<long>>());
rez += x[t: (dynamic)new A<A<string>>(), x: (dynamic)new A<A<long>>()];
x.ev += Foo;
x.Raise((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
//at this point rez should be 5
rez -= 5;
var y = new E();
rez += y.M((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
rez += ((dynamic)y).P;
rez += y[(dynamic)new A<A<string>>(), (dynamic)new A<A<long>>()];
rez += y.M(t: (dynamic)new A<A<string>>(), y: (dynamic)new A<A<long>>());
rez += y[t: (dynamic)new A<A<string>>(), x: (dynamic)new A<A<long>>()];
y.ev += Foo;
y.Raise((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
var t = new E();
rez += t.M((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
rez += ((dynamic)y).P;
rez += t[(dynamic)new A<A<string>>(), (dynamic)new A<A<long>>()];
rez += t.M(t: (dynamic)new A<A<string>>(), y: (dynamic)new A<A<long>>());
rez += t[t: (dynamic)new A<A<string>>(), x: (dynamic)new A<A<long>>()];
t.ev += Foo;
t.Raise((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
return rez;
}
private static int Foo(A<A<long>> x, A<A<long>> y)
{
return 0;
}
private static int Foo(A<A<string>> x, A<A<long>> y)
{
return 0;
}
}
// </Code>
}
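// Note (editorial): ovr003 and ovr004 stretch the same check across two levels of
// derivation. The intermediate D<T> overrides still return 1, which is why MainMethod
// subtracts 5 after exercising it ("rez -= 5"), while the most derived E returns 0;
// ovr004 repeats ovr003 with nested generic type arguments (A<A<string>>, A<A<long>>)
// in place of string and int.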
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr005.ovr005
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public class C<T>
{
public virtual int M<U>(T x, U u)
{
return 1;
}
}
public class D : C<int>
{
public override int M<U>(int y, U u)
{
return 0;
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M<int>((dynamic)0, (dynamic)4);
rez += x.M<long>(y: (dynamic)0, u: (dynamic)4);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
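// Note (editorial): ovr005 narrows the pattern to a generic virtual method: D overrides
// M<U> from C<int>, and the dynamic call sites supply explicit type arguments
// (x.M<int>(...), x.M<long>(y: ..., u: ...)), so the binder has to combine explicit
// generic arguments, dynamic actual arguments and named parameters while still selecting
// the override that returns 0.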
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr006.ovr006
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
//<Expects Status=warning>\(29,16\).*CS0114</Expects>
//<Expects Status=warning>\(30,16\).*CS0114</Expects>
//<Expects Status=warning>\(31,16\).*CS0114</Expects>
//<Expects Status=warning>\(32,27\).*CS0114</Expects>
//<Expects Status=warning>\(33,17\).*CS0114</Expects>
public delegate int Foo<T>(T t);
public class C<T>
{
public virtual int M(T x)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T> ev;
public virtual void Raise(T t)
{
ev(t);
}
}
public class D : C<int>
{
public int M(int y)
{
return 0;
}
public int P
{
get
{
return 0;
}
set
{
}
}
public int this[int x]
{
get
{
return 0;
}
set
{
}
}
public event Foo<int> ev;
public void Raise(int t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
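// Note (editorial): ovr006 is the hiding variant -- D re-declares M, P, the indexer,
// the event and Raise without 'override' or 'new', which is why the <Expects> tags
// above anticipate CS0114 warnings. Because the receiver 'x' is statically typed as D,
// both the static and the dynamic call sites resolve to D's hiding members (returning 0);
// ovr007 below repeats the same shape with the 'new' modifier spelled out, and ovr009
// does so against an abstract base.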
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr007.ovr007
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public class C<T>
{
public virtual int M(T x)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T> ev;
public virtual void Raise(T t)
{
ev(t);
}
}
public class D : C<int>
{
public new int M(int y)
{
return 0;
}
public new int P
{
get
{
return 0;
}
set
{
}
}
public new int this[int x]
{
get
{
return 0;
}
set
{
}
}
public new event Foo<int> ev;
public new void Raise(int t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr009.ovr009
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public abstract class C<T>
{
public abstract int M(T x);
public abstract int P
{
get;
set;
}
public abstract int this[T x]
{
get;
set;
}
public abstract event Foo<T> ev;
public abstract void Raise(T t);
}
public class D : C<int>
{
public override int M(int y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
}
}
public override int this[int x]
{
get
{
return 0;
}
set
{
}
}
public override event Foo<int> ev;
public override void Raise(int t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr011.ovr011
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public class C<T>
{
public virtual int M(T x)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T> ev;
public virtual void Raise(T t)
{
ev(t);
}
}
public class D : C<int?>
{
public override int M(int? y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
}
}
public override int this[int? x]
{
get
{
return 0;
}
set
{
}
}
public override event Foo<int?> ev;
public override void Raise(int? t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int? x)
{
return 0;
}
}
// </Code>
}
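// Note (editorial): ovr011 closes the group with a nullable type argument: D overrides
// C<int?>, and the dynamic int arguments ((dynamic)0, (dynamic)3) are converted to int?
// at the call sites, so the overriding members (which return 0) are still selected and
// MainMethod returns 0.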
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.errorverifier.errorverifier
{
using System.Reflection;
using System.Resources;
public enum ErrorElementId
{
None,
SK_METHOD, // method
SK_CLASS, // type
SK_NAMESPACE, // namespace
SK_FIELD, // field
SK_PROPERTY, // property
SK_UNKNOWN, // element
SK_VARIABLE, // variable
SK_EVENT, // event
SK_TYVAR, // type parameter
SK_ALIAS, // using alias
ERRORSYM, // <error>
NULL, // <null>
GlobalNamespace, // <global namespace>
MethodGroup, // method group
AnonMethod, // anonymous method
Lambda, // lambda expression
AnonymousType, // anonymous type
}
public enum ErrorMessageId
{
None,
BadBinaryOps, // Operator '{0}' cannot be applied to operands of type '{1}' and '{2}'
IntDivByZero, // Division by constant zero
BadIndexLHS, // Cannot apply indexing with [] to an expression of type '{0}'
BadIndexCount, // Wrong number of indices inside []; expected '{0}'
BadUnaryOp, // Operator '{0}' cannot be applied to operand of type '{1}'
NoImplicitConv, // Cannot implicitly convert type '{0}' to '{1}'
NoExplicitConv, // Cannot convert type '{0}' to '{1}'
ConstOutOfRange, // Constant value '{0}' cannot be converted to a '{1}'
AmbigBinaryOps, // Operator '{0}' is ambiguous on operands of type '{1}' and '{2}'
AmbigUnaryOp, // Operator '{0}' is ambiguous on an operand of type '{1}'
ValueCantBeNull, // Cannot convert null to '{0}' because it is a non-nullable value type
WrongNestedThis, // Cannot access a non-static member of outer type '{0}' via nested type '{1}'
NoSuchMember, // '{0}' does not contain a definition for '{1}'
ObjectRequired, // An object reference is required for the non-static field, method, or property '{0}'
AmbigCall, // The call is ambiguous between the following methods or properties: '{0}' and '{1}'
BadAccess, // '{0}' is inaccessible due to its protection level
MethDelegateMismatch, // No overload for '{0}' matches delegate '{1}'
AssgLvalueExpected, // The left-hand side of an assignment must be a variable, property or indexer
NoConstructors, // The type '{0}' has no constructors defined
BadDelegateConstructor, // The delegate '{0}' does not have a valid constructor
PropertyLacksGet, // The property or indexer '{0}' cannot be used in this context because it lacks the get accessor
ObjectProhibited, // Member '{0}' cannot be accessed with an instance reference; qualify it with a type name instead
AssgReadonly, // A readonly field cannot be assigned to (except in a constructor or a variable initializer)
RefReadonly, // A readonly field cannot be passed ref or out (except in a constructor)
AssgReadonlyStatic, // A static readonly field cannot be assigned to (except in a static constructor or a variable initializer)
RefReadonlyStatic, // A static readonly field cannot be passed ref or out (except in a static constructor)
AssgReadonlyProp, // Property or indexer '{0}' cannot be assigned to -- it is read only
AbstractBaseCall, // Cannot call an abstract base member: '{0}'
RefProperty, // A property or indexer may not be passed as an out or ref parameter
ManagedAddr, // Cannot take the address of, get the size of, or declare a pointer to a managed type ('{0}')
FixedNotNeeded, // You cannot use the fixed statement to take the address of an already fixed expression
UnsafeNeeded, // Dynamic calls cannot be used in conjunction with pointers
BadBoolOp, // In order to be applicable as a short circuit operator a user-defined logical operator ('{0}') must have the same return type as the type of its 2 parameters
MustHaveOpTF, // The type ('{0}') must contain declarations of operator true and operator false
CheckedOverflow, // The operation overflows at compile time in checked mode
ConstOutOfRangeChecked, // Constant value '{0}' cannot be converted to a '{1}' (use 'unchecked' syntax to override)
AmbigMember, // Ambiguity between '{0}' and '{1}'
SizeofUnsafe, // '{0}' does not have a predefined size, therefore sizeof can only be used in an unsafe context (consider using System.Runtime.InteropServices.Marshal.SizeOf)
FieldInitRefNonstatic, // A field initializer cannot reference the non-static field, method, or property '{0}'
CallingFinalizeDepracated, // Destructors and object.Finalize cannot be called directly. Consider calling IDisposable.Dispose if available.
CallingBaseFinalizeDeprecated, // Do not directly call your base class Finalize method. It is called automatically from your destructor.
BadCastInFixed, // The right hand side of a fixed statement assignment may not be a cast expression
NoImplicitConvCast, // Cannot implicitly convert type '{0}' to '{1}'. An explicit conversion exists (are you missing a cast?)
InaccessibleGetter, // The property or indexer '{0}' cannot be used in this context because the get accessor is inaccessible
InaccessibleSetter, // The property or indexer '{0}' cannot be used in this context because the set accessor is inaccessible
BadArity, // Using the generic {1} '{0}' requires '{2}' type arguments
BadTypeArgument, // The type '{0}' may not be used as a type argument
TypeArgsNotAllowed, // The {1} '{0}' cannot be used with type arguments
HasNoTypeVars, // The non-generic {1} '{0}' cannot be used with type arguments
NewConstraintNotSatisfied, // '{2}' must be a non-abstract type with a public parameterless constructor in order to use it as parameter '{1}' in the generic type or method '{0}'
GenericConstraintNotSatisfiedRefType, // The type '{3}' cannot be used as type parameter '{2}' in the generic type or method '{0}'. There is no implicit reference conversion from '{3}' to '{1}'.
GenericConstraintNotSatisfiedNullableEnum, // The type '{3}' cannot be used as type parameter '{2}' in the generic type or method '{0}'. The nullable type '{3}' does not satisfy the constraint of '{1}'.
GenericConstraintNotSatisfiedNullableInterface, // The type '{3}' cannot be used as type parameter '{2}' in the generic type or method '{0}'. The nullable type '{3}' does not satisfy the constraint of '{1}'. Nullable types can not satisfy any interface constraints.
GenericConstraintNotSatisfiedTyVar, // The type '{3}' cannot be used as type parameter '{2}' in the generic type or method '{0}'. There is no boxing conversion or type parameter conversion from '{3}' to '{1}'.
GenericConstraintNotSatisfiedValType, // The type '{3}' cannot be used as type parameter '{2}' in the generic type or method '{0}'. There is no boxing conversion from '{3}' to '{1}'.
TypeVarCantBeNull, // Cannot convert null to type parameter '{0}' because it could be a non-nullable value type. Consider using 'default({0})' instead.
BadRetType, // '{1} {0}' has the wrong return type
CantInferMethTypeArgs, // The type arguments for method '{0}' cannot be inferred from the usage. Try specifying the type arguments explicitly.
MethGrpToNonDel, // Cannot convert method group '{0}' to non-delegate type '{1}'. Did you intend to invoke the method?
RefConstraintNotSatisfied, // The type '{2}' must be a reference type in order to use it as parameter '{1}' in the generic type or method '{0}'
ValConstraintNotSatisfied, // The type '{2}' must be a non-nullable value type in order to use it as parameter '{1}' in the generic type or method '{0}'
CircularConstraint, // Circular constraint dependency involving '{0}' and '{1}'
BaseConstraintConflict, // Type parameter '{0}' inherits conflicting constraints '{1}' and '{2}'
ConWithValCon, // Type parameter '{1}' has the 'struct' constraint so '{1}' cannot be used as a constraint for '{0}'
AmbigUDConv, // Ambiguous user defined conversions '{0}' and '{1}' when converting from '{2}' to '{3}'
PredefinedTypeNotFound, // Predefined type '{0}' is not defined or imported
PredefinedTypeBadType, // Predefined type '{0}' is declared incorrectly
BindToBogus, // '{0}' is not supported by the language
CantCallSpecialMethod, // '{0}': cannot explicitly call operator or accessor
BogusType, // '{0}' is a type not supported by the language
MissingPredefinedMember, // Missing compiler required member '{0}.{1}'
LiteralDoubleCast, // Literal of type double cannot be implicitly converted to type '{1}'; use an '{0}' suffix to create a literal of this type
UnifyingInterfaceInstantiations, // '{0}' cannot implement both '{1}' and '{2}' because they may unify for some type parameter substitutions
ConvertToStaticClass, // Cannot convert to static type '{0}'
GenericArgIsStaticClass, // '{0}': static types cannot be used as type arguments
PartialMethodToDelegate, // Cannot create delegate from method '{0}' because it is a partial method without an implementing declaration
IncrementLvalueExpected, // The operand of an increment or decrement operator must be a variable, property or indexer
NoSuchMemberOrExtension, // '{0}' does not contain a definition for '{1}' and no extension method '{1}' accepting a first argument of type '{0}' could be found (are you missing a using directive or an assembly reference?)
ValueTypeExtDelegate, // Extension methods '{0}' defined on value type '{1}' cannot be used to create delegates
BadArgCount, // No overload for method '{0}' takes '{1}' arguments
BadArgTypes, // The best overloaded method match for '{0}' has some invalid arguments
BadArgType, // Argument '{0}': cannot convert from '{1}' to '{2}'
RefLvalueExpected, // A ref or out argument must be an assignable variable
BadProtectedAccess, // Cannot access protected member '{0}' via a qualifier of type '{1}'; the qualifier must be of type '{2}' (or derived from it)
BindToBogusProp2, // Property, indexer, or event '{0}' is not supported by the language; try directly calling accessor methods '{1}' or '{2}'
BindToBogusProp1, // Property, indexer, or event '{0}' is not supported by the language; try directly calling accessor method '{1}'
BadDelArgCount, // Delegate '{0}' does not take '{1}' arguments
BadDelArgTypes, // Delegate '{0}' has some invalid arguments
AssgReadonlyLocal, // Cannot assign to '{0}' because it is read-only
RefReadonlyLocal, // Cannot pass '{0}' as a ref or out argument because it is read-only
ReturnNotLValue, // Cannot modify the return value of '{0}' because it is not a variable
BadArgExtraRef, // Argument '{0}' should not be passed with the '{1}' keyword
// DelegateOnConditional, // Cannot create delegate with '{0}' because it has a Conditional attribute (REMOVED)
BadArgRef, // Argument '{0}' must be passed with the '{1}' keyword
AssgReadonly2, // Members of readonly field '{0}' cannot be modified (except in a constructor or a variable initializer)
RefReadonly2, // Members of readonly field '{0}' cannot be passed ref or out (except in a constructor)
AssgReadonlyStatic2, // Fields of static readonly field '{0}' cannot be assigned to (except in a static constructor or a variable initializer)
RefReadonlyStatic2, // Fields of static readonly field '{0}' cannot be passed ref or out (except in a static constructor)
AssgReadonlyLocalCause, // Cannot assign to '{0}' because it is a '{1}'
RefReadonlyLocalCause, // Cannot pass '{0}' as a ref or out argument because it is a '{1}'
ThisStructNotInAnonMeth, // Anonymous methods, lambda expressions, and query expressions inside structs cannot access instance members of 'this'. Consider copying 'this' to a local variable outside the anonymous method, lambda expression or query expression and using the local instead.
DelegateOnNullable, // Cannot bind delegate to '{0}' because it is a member of 'System.Nullable<T>'
BadCtorArgCount, // '{0}' does not contain a constructor that takes '{1}' arguments
BadExtensionArgTypes, // '{0}' does not contain a definition for '{1}' and the best extension method overload '{2}' has some invalid arguments
BadInstanceArgType, // Instance argument: cannot convert from '{0}' to '{1}'
BadArgTypesForCollectionAdd, // The best overloaded Add method '{0}' for the collection initializer has some invalid arguments
InitializerAddHasParamModifiers, // The best overloaded method match '{0}' for the collection initializer element cannot be used. Collection initializer 'Add' methods cannot have ref or out parameters.
NonInvocableMemberCalled, // Non-invocable member '{0}' cannot be used like a method.
NamedArgumentSpecificationBeforeFixedArgument, // Named argument specifications must appear after all fixed arguments have been specified
BadNamedArgument, // The best overload for '{0}' does not have a parameter named '{1}'
BadNamedArgumentForDelegateInvoke, // The delegate '{0}' does not have a parameter named '{1}'
DuplicateNamedArgument, // Named argument '{0}' cannot be specified multiple times
NamedArgumentUsedInPositional, // Named argument '{0}' specifies a parameter for which a positional argument has already been given
}
public enum RuntimeErrorId
{
None,
// RuntimeBinderInternalCompilerException
InternalCompilerError, // An unexpected exception occurred while binding a dynamic operation
// ArgumentException
BindRequireArguments, // Cannot bind call with no calling object
// RuntimeBinderException
BindCallFailedOverloadResolution, // Overload resolution failed
// ArgumentException
BindBinaryOperatorRequireTwoArguments, // Binary operators must be invoked with two arguments
// ArgumentException
BindUnaryOperatorRequireOneArgument, // Unary operators must be invoked with one argument
// RuntimeBinderException
BindPropertyFailedMethodGroup, // The name '{0}' is bound to a method and cannot be used like a property
// RuntimeBinderException
BindPropertyFailedEvent, // The event '{0}' can only appear on the left hand side of += or -=
// RuntimeBinderException
BindInvokeFailedNonDelegate, // Cannot invoke a non-delegate type
// ArgumentException
BindImplicitConversionRequireOneArgument, // Implicit conversion takes exactly one argument
// ArgumentException
BindExplicitConversionRequireOneArgument, // Explicit conversion takes exactly one argument
// ArgumentException
BindBinaryAssignmentRequireTwoArguments, // Binary operators cannot be invoked with one argument
// RuntimeBinderException
BindBinaryAssignmentFailedNullReference, // Cannot perform member assignment on a null reference
// RuntimeBinderException
NullReferenceOnMemberException, // Cannot perform runtime binding on a null reference
// RuntimeBinderException
BindCallToConditionalMethod, // Cannot dynamically invoke method '{0}' because it has a Conditional attribute
// RuntimeBinderException
BindToVoidMethodButExpectResult, // Cannot implicitly convert type 'void' to 'object'
// EE?
EmptyDynamicView, // No further information on this object could be discovered
// MissingMemberException
GetValueonWriteOnlyProperty, // Write Only properties are not supported
}
public class ErrorVerifier
{
private static Assembly s_asm;
private static ResourceManager s_rm1;
private static ResourceManager s_rm2;
public static string GetErrorElement(ErrorElementId id)
{
return string.Empty;
}
public static bool Verify(ErrorMessageId id, string actualError, params string[] args)
{
return true;
}
public static bool Verify(RuntimeErrorId id, string actualError, params string[] args)
{
return true;
}
}
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.derived001.derived001
{
using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.errorverifier.errorverifier;
// <Title>Generic constraints</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
// <Expects Status=success></Expects>
// <Code>
using System.Collections.Generic;
public class MyClass<T, U>
where T : List<object>, new()
{
internal T myList = new T();
}
public class MyClassDerived<U> : MyClass<List<dynamic>, U>
{
public void Foo()
{
if (myList.Count != 0)
Test.Status = 2;
myList.Add(1);
try
{
myList[0].Foo(); //This should compile into a call site
Test.Status = 2;
}
catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException e)
{
Test.Status = 2;
if (ErrorVerifier.Verify(ErrorMessageId.NoSuchMember, e.Message, "int", "Foo"))
Test.Status = 1;
}
}
}
public class Test
{
public static int Status;
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod(null));
}
public static int MainMethod(string[] args)
{
MyClassDerived<object> mc = new MyClassDerived<dynamic>();
mc.Foo();
if (Test.Status != 1)
return 1;
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.derived003.derived003
{
// <Title>Generic constraints</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class MyClass<T, U>
where T : U
{
}
public class MyClassDerived<T> : MyClass<T, object>
{
public void Foo()
{
Test.Status = 1;
}
}
public class Test
{
public static int Status;
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod(null));
}
public static int MainMethod(string[] args)
{
MyClassDerived<dynamic> mc = new MyClassDerived<dynamic>();
mc.Foo();
if (Test.Status != 1)
return 1;
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.derived004.derived004
{
// <Title>Generic constraints</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
// <Expects Status=success></Expects>
// <Code>
public class MyClass<T, U>
where T : U
{
}
public class MyClassDerived<T> : MyClass<T, dynamic>
{
public void Foo()
{
Test.Status = 1;
}
}
public class Test
{
public static int Status;
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod(null));
}
public static int MainMethod(string[] args)
{
MyClassDerived<dynamic> mc = new MyClassDerived<dynamic>();
mc.Foo();
if (Test.Status != 1)
return 1;
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.derived005.derived005
{
// <Title>Generic constraints</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
// <Expects Status=success></Expects>
// <Code>
public class MyClass<T, U>
where T : U
{
}
public class MyClassDerived<T> : MyClass<T, dynamic>
{
public void Foo()
{
Test.Status = 1;
}
}
public class Test
{
public static int Status;
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod(null));
}
public static int MainMethod(string[] args)
{
MyClassDerived<object> mc = new MyClassDerived<dynamic>();
mc.Foo();
if (Test.Status != 1)
return 1;
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.b813045bindfield01.b813045bindfield01
{
// <Title>Generic constraints</Title>
// <Description>Event invocation throws an ArgumentNullException incorrectly
// </Description>
// <RelatedBugs></RelatedBugs>
// <Expects Status=success></Expects>
using System;
public interface I<T>
{
T Prop
{
get;
set;
}
void M(T t, out char ret);
}
public class DC<T> : I<T>
{
private T _tt = default(T);
public T Prop
{
get
{
return _tt;
}
set
{
_tt = value;
}
}
public void M(T t, out char ret)
{
_tt = t;
ret = 'i';
}
}
public struct DS<T> : I<T>
{
public T Prop
{
get
{
return default(T);
}
set
{
}
}
public void M(T t, out char ret)
{
T tt = t;
ret = 'y';
}
}
/// <summary>
/// NO public, no instance field in generic type
/// </summary>
/// <typeparam name = "T"></typeparam>
public class C<T>
{
public event EventHandler E = delegate
{
}
;
public static void Foo()
{
dynamic c = new C<T>();
c.E(null, null);
}
// field
private T _tt;
public char Bar(T t1, T t2)
{
_tt = t1;
return 'q'; // t1 == t2;
}
}
public struct S<T, V>
{
public event EventHandler E;
public S(EventHandler e)
{
E = e;
}
public static void Foo()
{
dynamic d = new S<T, V>(delegate
{
}
);
d.E(null, null);
}
// local var
public char Bar(T t, V v)
{
T t1 = t;
return 'c';
}
}
public class Test
{
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
C<int>.Foo();
S<string, char>.Foo();
dynamic d1 = new C<Test>();
bool ret = 'q' == d1.Bar(null, null);
dynamic d2 = new S<sbyte, dynamic>(new EventHandler((p, q) =>
{
}
));
ret &= 'c' == d2.Bar(-1, null);
dynamic v = new DC<object>();
ret &= default(object) == v.Prop;
char c = ' ';
v.M(null, out c);
ret &= 'i' == c;
d2 = new DS<dynamic>();
ret &= default(dynamic) == d2.Prop;
d2.M(new object(), out c);
ret &= 'y' == c;
System.Console.WriteLine(ret);
return ret ? 0 : 1;
}
}
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.changetypearg001.changetypearg001
{
// <Title>Specializing generic overloads </Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
var TestValue2 = new GC1<string>();
int rez = 0;
dynamic d = new SubGenericClass<string>();
dynamic d1 = default(GC1<GC1<string>>);
d[d1, TestValue2] = d1;
if (d[d1, TestValue2] != null)
rez++;
if (d.Method1(i: d1, t: TestValue2) != null)
rez++;
var TestValue = new GC1<GC1<string>>();
dynamic dt = new VeryDerived<string>();
dynamic d2 = default(GC1<GC1<GC1<string>>>);
dt[d2, TestValue] = d2;
if (dt[d2, TestValue] != null)
rez++;
if (dt.Method1(i: d2, t: TestValue) != null)
rez++;
return rez;
}
}
public class GC1<T>
{
}
public abstract class GenericClass<T>
{
public abstract GC1<T> this[GC1<T> i, T t]
{
get;
set;
}
public abstract GC1<T> Method1(GC1<T> i, T t);
}
public class SubGenericClass<T> : GenericClass<GC1<T>>
{
public override GC1<GC1<T>> this[GC1<GC1<T>> i, GC1<T> t]
{
get
{
return null;
}
set
{
}
}
public override GC1<GC1<T>> Method1(GC1<GC1<T>> i, GC1<T> t)
{
return null;
}
}
public class VeryDerived<T> : SubGenericClass<GC1<T>>
{
public override GC1<GC1<GC1<T>>> Method1(GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t)
{
return base.Method1(i, t);
}
public override GC1<GC1<GC1<T>>> this[GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t]
{
get
{
return base[i, t];
}
set
{
base[i, t] = value;
}
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.changetypearg002.changetypearg002
{
// <Title>Specializing generic overloads </Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
var TestValue2 = new GC1<string>();
int rez = 0;
dynamic d = new SubGenericClass<string>();
dynamic d1 = default(GC1<GC1<string>>);
d[d1, TestValue2] = d1;
if (d[d1, TestValue2] != null)
rez++;
if (d.Method1(i: d1, t: TestValue2) != null)
rez++;
var TestValue = new GC1<GC1<string>>();
dynamic dt = new VeryDerived<string>();
dynamic d2 = default(GC1<GC1<GC1<string>>>);
dt[d2, TestValue] = d2; // System.InvalidProgramException
if (dt[d2, TestValue] != null)
rez++;
if (dt.Method1(i: d2, t: TestValue) != null)
rez++;
return rez;
}
}
public class GC1<T>
{
}
public class GenericClass<T>
{
public virtual GC1<T> this[GC1<T> i, T t]
{
get
{
return null;
}
set
{
}
}
public virtual GC1<T> Method1(GC1<T> i, T t)
{
return null;
}
}
public class SubGenericClass<T> : GenericClass<GC1<T>>
{
public override GC1<GC1<T>> this[GC1<GC1<T>> i, GC1<T> t]
{
get
{
return null;
}
set
{
}
}
public override GC1<GC1<T>> Method1(GC1<GC1<T>> i, GC1<T> t)
{
return null;
}
}
public class VeryDerived<T> : SubGenericClass<GC1<T>>
{
public override GC1<GC1<GC1<T>>> Method1(GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t)
{
return base.Method1(i, t);
}
public override GC1<GC1<GC1<T>>> this[GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t]
{
get
{
return base[i, t];
}
set
{
base[i, t] = value;
}
}
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.changetypearg003.changetypearg003
{
using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.errorverifier.errorverifier;
// <Title>Specializing generic overloads </Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
using Microsoft.CSharp.RuntimeBinder;
public class Program
{
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var TestValue2 = new GC1<string>();
dynamic d = new SubGenericClass<string>();
dynamic d1 = default(GC1<GC1<string>>);
d[d1, TestValue2] = d1;
if (d[d1, TestValue2] != null)
rez++;
try
{
if (d.Method1(i: d1, t: TestValue2) != null)
rez++;
}
catch (RuntimeBinderException exc)
{
if (ErrorVerifier.Verify(ErrorMessageId.BadArgTypes, exc.Message, "GenericClass<GC1<string>>.Method1(ref GC1<GC1<string>>, GC1<string>)") == false)
rez++;
}
var TestValue = new GC1<GC1<string>>();
dynamic dt = new VeryDerived<string>();
dynamic d2 = default(GC1<GC1<GC1<string>>>);
dt[d2, TestValue] = d2;
if (dt[d2, TestValue] != null)
rez++;
try
{
if (dt.Method1(i: ref d2, t: TestValue) != null)
rez++;
}
catch (RuntimeBinderException exc)
{
if (ErrorVerifier.Verify(ErrorMessageId.BadArgTypes, exc.Message, "GenericClass<GC1<GC1<string>>>.Method1(ref GC1<GC1<GC1<string>>>, GC1<GC1<string>>)") == false)
rez++;
}
return rez;
}
}
public class GC1<T>
{
}
public class GenericClass<T>
{
public virtual GC1<T> this[GC1<T> i, T t]
{
get
{
return null;
}
set
{
}
}
public virtual GC1<T> Method1(ref GC1<T> i, T t)
{
return null;
}
}
public class SubGenericClass<T> : GenericClass<GC1<T>>
{
public override GC1<GC1<T>> this[GC1<GC1<T>> i, GC1<T> t]
{
get
{
return null;
}
set
{
}
}
public override GC1<GC1<T>> Method1(ref GC1<GC1<T>> i, GC1<T> t)
{
return null;
}
}
public class VeryDerived<T> : SubGenericClass<GC1<T>>
{
public override GC1<GC1<GC1<T>>> Method1(ref GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t)
{
return null;
}
public override GC1<GC1<GC1<T>>> this[GC1<GC1<GC1<T>>> i, GC1<GC1<T>> t]
{
get
{
return base[i, t];
}
set
{
base[i, t] = value;
}
}
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr001.ovr001
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public class C<T>
{
public virtual int M(T x)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T> ev;
public virtual void Raise(T t)
{
ev(t);
}
}
public class D : C<int>
{
public override int M(int y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
}
}
public override int this[int x]
{
get
{
return 0;
}
set
{
}
}
public override event Foo<int> ev;
public override void Raise(int t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr002.ovr002
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public class A
{
public virtual int M(int t)
{
return 2;
}
public virtual int this[int x]
{
get
{
return 1;
}
set
{
}
}
}
public class C<T> : A
{
public virtual int M(T x)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T> ev;
public virtual void Raise(T t)
{
ev(t);
}
}
public class D : C<int>
{
public override int M(int y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
}
}
public override int this[int x]
{
get
{
return 0;
}
set
{
}
}
public override event Foo<int> ev;
public override void Raise(int t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr003.ovr003
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T, U>(T t, U u);
public class C<T, U>
{
public virtual int M(T t, U u)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x, U u]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T, U> ev;
public virtual void Raise(T t, U u)
{
ev(t, u);
}
}
public class D<T> : C<T, int>
{
public override int M(T t, int y)
{
return 1;
}
public override int P
{
get
{
return 1;
}
set
{
}
}
public override int this[T t, int x]
{
get
{
return 1;
}
set
{
}
}
public override event Foo<T, int> ev;
public override void Raise(T t, int u)
{
ev(t, u);
}
}
public class E : D<string>
{
public override event Foo<string, int> ev;
public override int M(string t, int y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
base.P = value;
}
}
public override int this[string t, int x]
{
get
{
return 0;
}
set
{
base[t, x] = value;
}
}
public override void Raise(string t, int u)
{
ev(t, u);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D<int>();
rez += x.M((dynamic)0, (dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3, (dynamic)3];
rez += x.M(t: (dynamic)0, y: (dynamic)0);
rez += x[t: (dynamic)3, x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3, (dynamic)3);
//at this point rez should be 5
rez -= 5;
var y = new E();
rez += y.M("", (dynamic)0);
rez += ((dynamic)y).P;
rez += y["a", (dynamic)3];
rez += y.M(t: "adfsa", y: (dynamic)0);
rez += y[t: "", x: (dynamic)3];
y.ev += Foo;
y.Raise("safs", (dynamic)3);
dynamic t = new E();
rez += t.M("", (dynamic)0);
rez += ((dynamic)t).P;
rez += t["a", (dynamic)3];
rez += t.M(t: "adfsa", y: (dynamic)0);
rez += t[t: "", x: (dynamic)3];
t.ev += (Foo<string, int>)Foo;
t.Raise("safs", (dynamic)3);
return rez;
}
private static int Foo(int x, int y)
{
return 0;
}
private static int Foo(string x, int y)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr004.ovr004
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T, U>(T t, U u);
public class A<T>
{
}
public class C<T, U>
{
public virtual int M(T t, U u)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x, U u]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T, U> ev;
public virtual void Raise(T t, U u)
{
ev(t, u);
}
}
public class D<T> : C<T, A<A<long>>>
{
public override int M(T t, A<A<long>> y)
{
return 1;
}
public override int P
{
get
{
return 1;
}
set
{
}
}
public override int this[T t, A<A<long>> x]
{
get
{
return 1;
}
set
{
}
}
public override event Foo<T, A<A<long>>> ev;
public override void Raise(T t, A<A<long>> u)
{
ev(t, u);
}
}
public class E : D<A<A<string>>>
{
public override event Foo<A<A<string>>, A<A<long>>> ev;
public override int M(A<A<string>> t, A<A<long>> y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
base.P = value;
}
}
public override int this[A<A<string>> t, A<A<long>> x]
{
get
{
return 0;
}
set
{
base[t, x] = value;
}
}
public override void Raise(A<A<string>> t, A<A<long>> u)
{
ev(t, u);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D<A<A<string>>>();
rez += x.M((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
rez += ((dynamic)x).P;
rez += x[(dynamic)new A<A<string>>(), (dynamic)new A<A<long>>()];
rez += x.M(t: (dynamic)new A<A<string>>(), y: (dynamic)new A<A<long>>());
rez += x[t: (dynamic)new A<A<string>>(), x: (dynamic)new A<A<long>>()];
x.ev += Foo;
x.Raise((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
//at this point rez should be 5
rez -= 5;
var y = new E();
rez += y.M((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
rez += ((dynamic)y).P;
rez += y[(dynamic)new A<A<string>>(), (dynamic)new A<A<long>>()];
rez += y.M(t: (dynamic)new A<A<string>>(), y: (dynamic)new A<A<long>>());
rez += y[t: (dynamic)new A<A<string>>(), x: (dynamic)new A<A<long>>()];
y.ev += Foo;
y.Raise((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
var t = new E();
rez += t.M((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
rez += ((dynamic)y).P;
rez += t[(dynamic)new A<A<string>>(), (dynamic)new A<A<long>>()];
rez += t.M(t: (dynamic)new A<A<string>>(), y: (dynamic)new A<A<long>>());
rez += t[t: (dynamic)new A<A<string>>(), x: (dynamic)new A<A<long>>()];
t.ev += Foo;
t.Raise((dynamic)new A<A<string>>(), (dynamic)new A<A<long>>());
return rez;
}
private static int Foo(A<A<long>> x, A<A<long>> y)
{
return 0;
}
private static int Foo(A<A<string>> x, A<A<long>> y)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr005.ovr005
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public class C<T>
{
public virtual int M<U>(T x, U u)
{
return 1;
}
}
public class D : C<int>
{
public override int M<U>(int y, U u)
{
return 0;
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M<int>((dynamic)0, (dynamic)4);
rez += x.M<long>(y: (dynamic)0, u: (dynamic)4);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr006.ovr006
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
//<Expects Status=warning>\(29,16\).*CS0114</Expects>
//<Expects Status=warning>\(30,16\).*CS0114</Expects>
//<Expects Status=warning>\(31,16\).*CS0114</Expects>
//<Expects Status=warning>\(32,27\).*CS0114</Expects>
//<Expects Status=warning>\(33,17\).*CS0114</Expects>
public delegate int Foo<T>(T t);
public class C<T>
{
public virtual int M(T x)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T> ev;
public virtual void Raise(T t)
{
ev(t);
}
}
public class D : C<int>
{
public int M(int y)
{
return 0;
}
public int P
{
get
{
return 0;
}
set
{
}
}
public int this[int x]
{
get
{
return 0;
}
set
{
}
}
public event Foo<int> ev;
public void Raise(int t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr007.ovr007
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public class C<T>
{
public virtual int M(T x)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T> ev;
public virtual void Raise(T t)
{
ev(t);
}
}
public class D : C<int>
{
public new int M(int y)
{
return 0;
}
public new int P
{
get
{
return 0;
}
set
{
}
}
public new int this[int x]
{
get
{
return 0;
}
set
{
}
}
public new event Foo<int> ev;
public new void Raise(int t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr009.ovr009
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public abstract class C<T>
{
public abstract int M(T x);
public abstract int P
{
get;
set;
}
public abstract int this[T x]
{
get;
set;
}
public abstract event Foo<T> ev;
public abstract void Raise(T t);
}
public class D : C<int>
{
public override int M(int y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
}
}
public override int this[int x]
{
get
{
return 0;
}
set
{
}
}
public override event Foo<int> ev;
public override void Raise(int t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int x)
{
return 0;
}
}
// </Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.derived.ovr011.ovr011
{
// <Title>Virtual generic methods</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public delegate int Foo<T>(T t);
public class C<T>
{
public virtual int M(T x)
{
return 1;
}
public virtual int P
{
get
{
return 1;
}
set
{
}
}
public virtual int this[T x]
{
get
{
return 1;
}
set
{
}
}
public virtual event Foo<T> ev;
public virtual void Raise(T t)
{
ev(t);
}
}
public class D : C<int?>
{
public override int M(int? y)
{
return 0;
}
public override int P
{
get
{
return 0;
}
set
{
}
}
public override int this[int? x]
{
get
{
return 0;
}
set
{
}
}
public override event Foo<int?> ev;
public override void Raise(int? t)
{
ev(t);
}
}
public class Program
{
[Fact]
public static void DynamicCSharpRunTest()
{
Assert.Equal(0, MainMethod());
}
public static int MainMethod()
{
int rez = 0;
var x = new D();
rez += x.M((dynamic)0);
rez += ((dynamic)x).P;
rez += x[(dynamic)3];
rez += x.M(y: (dynamic)0);
rez += x[x: (dynamic)3];
x.ev += Foo;
x.Raise((dynamic)3);
return rez;
}
private static int Foo(int? x)
{
return 0;
}
}
// </Code>
}
| -1 |
./src/libraries/System.Diagnostics.EventLog/ref/System.Diagnostics.EventLog.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// ------------------------------------------------------------------------------
// Changes to this file must follow the https://aka.ms/api-review process.
// ------------------------------------------------------------------------------
namespace System.Diagnostics
{
public partial class EntryWrittenEventArgs : System.EventArgs
{
public EntryWrittenEventArgs() { }
public EntryWrittenEventArgs(System.Diagnostics.EventLogEntry entry) { }
public System.Diagnostics.EventLogEntry Entry { get { throw null; } }
}
public delegate void EntryWrittenEventHandler(object sender, System.Diagnostics.EntryWrittenEventArgs e);
public partial class EventInstance
{
public EventInstance(long instanceId, int categoryId) { }
public EventInstance(long instanceId, int categoryId, System.Diagnostics.EventLogEntryType entryType) { }
public int CategoryId { get { throw null; } set { } }
public System.Diagnostics.EventLogEntryType EntryType { get { throw null; } set { } }
public long InstanceId { get { throw null; } set { } }
}
[System.ComponentModel.DefaultEventAttribute("EntryWritten")]
public partial class EventLog : System.ComponentModel.Component, System.ComponentModel.ISupportInitialize
{
public EventLog() { }
public EventLog(string logName) { }
public EventLog(string logName, string machineName) { }
public EventLog(string logName, string machineName, string source) { }
[System.ComponentModel.BrowsableAttribute(false)]
[System.ComponentModel.DefaultValueAttribute(false)]
public bool EnableRaisingEvents { get { throw null; } set { } }
[System.ComponentModel.BrowsableAttribute(false)]
[System.ComponentModel.DesignerSerializationVisibilityAttribute(System.ComponentModel.DesignerSerializationVisibility.Hidden)]
public System.Diagnostics.EventLogEntryCollection Entries { get { throw null; } }
[System.ComponentModel.DefaultValueAttribute("")]
[System.ComponentModel.ReadOnlyAttribute(true)]
[System.ComponentModel.SettingsBindableAttribute(true)]
public string Log { get { throw null; } set { } }
[System.ComponentModel.BrowsableAttribute(false)]
public string LogDisplayName { get { throw null; } }
[System.ComponentModel.DefaultValueAttribute(".")]
[System.ComponentModel.ReadOnlyAttribute(true)]
[System.ComponentModel.SettingsBindableAttribute(true)]
public string MachineName { get { throw null; } set { } }
[System.ComponentModel.BrowsableAttribute(false)]
[System.ComponentModel.DesignerSerializationVisibilityAttribute(System.ComponentModel.DesignerSerializationVisibility.Hidden)]
public long MaximumKilobytes { get { throw null; } set { } }
[System.ComponentModel.BrowsableAttribute(false)]
public int MinimumRetentionDays { get { throw null; } }
[System.ComponentModel.BrowsableAttribute(false)]
public System.Diagnostics.OverflowAction OverflowAction { get { throw null; } }
[System.ComponentModel.DefaultValueAttribute("")]
[System.ComponentModel.ReadOnlyAttribute(true)]
[System.ComponentModel.SettingsBindableAttribute(true)]
public string Source { get { throw null; } set { } }
[System.ComponentModel.BrowsableAttribute(false)]
[System.ComponentModel.DefaultValueAttribute(null)]
public System.ComponentModel.ISynchronizeInvoke SynchronizingObject { get { throw null; } set { } }
public event System.Diagnostics.EntryWrittenEventHandler EntryWritten { add { } remove { } }
public void BeginInit() { }
public void Clear() { }
public void Close() { }
public static void CreateEventSource(System.Diagnostics.EventSourceCreationData sourceData) { }
public static void CreateEventSource(string source, string logName) { }
[System.ObsoleteAttribute("EventLog.CreateEventSource has been deprecated. Use System.Diagnostics.EventLog.CreateEventSource(EventSourceCreationData sourceData) instead.")]
public static void CreateEventSource(string source, string logName, string machineName) { }
public static void Delete(string logName) { }
public static void Delete(string logName, string machineName) { }
public static void DeleteEventSource(string source) { }
public static void DeleteEventSource(string source, string machineName) { }
protected override void Dispose(bool disposing) { }
public void EndInit() { }
public static bool Exists(string logName) { throw null; }
public static bool Exists(string logName, string machineName) { throw null; }
public static System.Diagnostics.EventLog[] GetEventLogs() { throw null; }
public static System.Diagnostics.EventLog[] GetEventLogs(string machineName) { throw null; }
public static string LogNameFromSourceName(string source, string machineName) { throw null; }
public void ModifyOverflowPolicy(System.Diagnostics.OverflowAction action, int retentionDays) { }
public void RegisterDisplayName(string resourceFile, long resourceId) { }
public static bool SourceExists(string source) { throw null; }
public static bool SourceExists(string source, string machineName) { throw null; }
public void WriteEntry(string message) { }
public void WriteEntry(string message, System.Diagnostics.EventLogEntryType type) { }
public void WriteEntry(string message, System.Diagnostics.EventLogEntryType type, int eventID) { }
public void WriteEntry(string message, System.Diagnostics.EventLogEntryType type, int eventID, short category) { }
public void WriteEntry(string message, System.Diagnostics.EventLogEntryType type, int eventID, short category, byte[] rawData) { }
public static void WriteEntry(string source, string message) { }
public static void WriteEntry(string source, string message, System.Diagnostics.EventLogEntryType type) { }
public static void WriteEntry(string source, string message, System.Diagnostics.EventLogEntryType type, int eventID) { }
public static void WriteEntry(string source, string message, System.Diagnostics.EventLogEntryType type, int eventID, short category) { }
public static void WriteEntry(string source, string message, System.Diagnostics.EventLogEntryType type, int eventID, short category, byte[] rawData) { }
public void WriteEvent(System.Diagnostics.EventInstance instance, byte[] data, params object[] values) { }
public void WriteEvent(System.Diagnostics.EventInstance instance, params object[] values) { }
public static void WriteEvent(string source, System.Diagnostics.EventInstance instance, byte[] data, params object[] values) { }
public static void WriteEvent(string source, System.Diagnostics.EventInstance instance, params object[] values) { }
}
[System.ComponentModel.DesignTimeVisibleAttribute(false)]
[System.ComponentModel.ToolboxItemAttribute(false)]
public sealed partial class EventLogEntry : System.ComponentModel.Component, System.Runtime.Serialization.ISerializable
{
internal EventLogEntry() { }
public string Category { get { throw null; } }
public short CategoryNumber { get { throw null; } }
public byte[] Data { get { throw null; } }
public System.Diagnostics.EventLogEntryType EntryType { get { throw null; } }
[System.ObsoleteAttribute("EventLogEntry.EventID has been deprecated. Use System.Diagnostics.EventLogEntry.InstanceId instead.")]
public int EventID { get { throw null; } }
public int Index { get { throw null; } }
public long InstanceId { get { throw null; } }
public string MachineName { get { throw null; } }
[System.ComponentModel.EditorAttribute("System.ComponentModel.Design.BinaryEditor, System.Design, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", "System.Drawing.Design.UITypeEditor, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
public string Message { get { throw null; } }
public string[] ReplacementStrings { get { throw null; } }
public string Source { get { throw null; } }
public System.DateTime TimeGenerated { get { throw null; } }
public System.DateTime TimeWritten { get { throw null; } }
public string UserName { get { throw null; } }
public bool Equals(System.Diagnostics.EventLogEntry otherEntry) { throw null; }
void System.Runtime.Serialization.ISerializable.GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
}
public partial class EventLogEntryCollection : System.Collections.ICollection, System.Collections.IEnumerable
{
internal EventLogEntryCollection() { }
public int Count { get { throw null; } }
public virtual System.Diagnostics.EventLogEntry this[int index] { get { throw null; } }
bool System.Collections.ICollection.IsSynchronized { get { throw null; } }
object System.Collections.ICollection.SyncRoot { get { throw null; } }
public void CopyTo(System.Diagnostics.EventLogEntry[] entries, int index) { }
public System.Collections.IEnumerator GetEnumerator() { throw null; }
void System.Collections.ICollection.CopyTo(System.Array array, int index) { }
}
public enum EventLogEntryType
{
Error = 1,
Warning = 2,
Information = 4,
SuccessAudit = 8,
FailureAudit = 16,
}
public sealed partial class EventLogTraceListener : System.Diagnostics.TraceListener
{
public EventLogTraceListener() { }
public EventLogTraceListener(System.Diagnostics.EventLog eventLog) { }
public EventLogTraceListener(string source) { }
public System.Diagnostics.EventLog EventLog { get { throw null; } set { } }
public override string Name { get { throw null; } set { } }
public override void Close() { }
protected override void Dispose(bool disposing) { }
[System.Runtime.InteropServices.ComVisibleAttribute(false)]
public override void TraceData(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType severity, int id, object data) { }
[System.Runtime.InteropServices.ComVisibleAttribute(false)]
public override void TraceData(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType severity, int id, params object[] data) { }
[System.Runtime.InteropServices.ComVisibleAttribute(false)]
public override void TraceEvent(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType severity, int id, string message) { }
[System.Runtime.InteropServices.ComVisibleAttribute(false)]
public override void TraceEvent(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType severity, int id, string format, params object[] args) { }
public override void Write(string message) { }
public override void WriteLine(string message) { }
}
public partial class EventSourceCreationData
{
public EventSourceCreationData(string source, string logName) { }
public int CategoryCount { get { throw null; } set { } }
public string CategoryResourceFile { get { throw null; } set { } }
public string LogName { get { throw null; } set { } }
public string MachineName { get { throw null; } set { } }
public string MessageResourceFile { get { throw null; } set { } }
public string ParameterResourceFile { get { throw null; } set { } }
public string Source { get { throw null; } set { } }
}
public enum OverflowAction
{
DoNotOverwrite = -1,
OverwriteAsNeeded = 0,
OverwriteOlder = 1,
}
}
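// Editorial note: this is the reference (API-surface) source, so the member bodies above
// are placeholders ("throw null") rather than real implementations. A rough usage sketch
// against the implementation assembly (Windows only; "MyApp" is a hypothetical source
// name, and creating a source typically requires administrative rights):
//
//   using System.Diagnostics;
//
//   if (!EventLog.SourceExists("MyApp"))
//       EventLog.CreateEventSource("MyApp", "Application");
//   EventLog.WriteEntry("MyApp", "Service started.", EventLogEntryType.Information);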
namespace System.Diagnostics.Eventing.Reader
{
public partial class EventBookmark
{
internal EventBookmark() { }
}
public sealed partial class EventKeyword
{
internal EventKeyword() { }
public string DisplayName { get { throw null; } }
public string Name { get { throw null; } }
public long Value { get { throw null; } }
}
public sealed partial class EventLevel
{
internal EventLevel() { }
public string DisplayName { get { throw null; } }
public string Name { get { throw null; } }
public int Value { get { throw null; } }
}
public partial class EventLogConfiguration : System.IDisposable
{
public EventLogConfiguration(string logName) { }
public EventLogConfiguration(string logName, System.Diagnostics.Eventing.Reader.EventLogSession session) { }
public bool IsClassicLog { get { throw null; } }
public bool IsEnabled { get { throw null; } set { } }
public string LogFilePath { get { throw null; } set { } }
public System.Diagnostics.Eventing.Reader.EventLogIsolation LogIsolation { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventLogMode LogMode { get { throw null; } set { } }
public string LogName { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventLogType LogType { get { throw null; } }
public long MaximumSizeInBytes { get { throw null; } set { } }
public string OwningProviderName { get { throw null; } }
public int? ProviderBufferSize { get { throw null; } }
public System.Guid? ProviderControlGuid { get { throw null; } }
public long? ProviderKeywords { get { throw null; } set { } }
public int? ProviderLatency { get { throw null; } }
public int? ProviderLevel { get { throw null; } set { } }
public int? ProviderMaximumNumberOfBuffers { get { throw null; } }
public int? ProviderMinimumNumberOfBuffers { get { throw null; } }
public System.Collections.Generic.IEnumerable<string> ProviderNames { get { throw null; } }
public string SecurityDescriptor { get { throw null; } set { } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public void SaveChanges() { }
}
public partial class EventLogException : System.Exception
{
public EventLogException() { }
protected EventLogException(int errorCode) { }
protected EventLogException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { }
public EventLogException(string message) { }
public EventLogException(string message, System.Exception innerException) { }
public override string Message { get { throw null; } }
public override void GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
}
public sealed partial class EventLogInformation
{
internal EventLogInformation() { }
public int? Attributes { get { throw null; } }
public System.DateTime? CreationTime { get { throw null; } }
public long? FileSize { get { throw null; } }
public bool? IsLogFull { get { throw null; } }
public System.DateTime? LastAccessTime { get { throw null; } }
public System.DateTime? LastWriteTime { get { throw null; } }
public long? OldestRecordNumber { get { throw null; } }
public long? RecordCount { get { throw null; } }
}
public partial class EventLogInvalidDataException : System.Diagnostics.Eventing.Reader.EventLogException
{
public EventLogInvalidDataException() { }
protected EventLogInvalidDataException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { }
public EventLogInvalidDataException(string message) { }
public EventLogInvalidDataException(string message, System.Exception innerException) { }
}
public enum EventLogIsolation
{
Application = 0,
System = 1,
Custom = 2,
}
public sealed partial class EventLogLink
{
internal EventLogLink() { }
public string DisplayName { get { throw null; } }
public bool IsImported { get { throw null; } }
public string LogName { get { throw null; } }
}
public enum EventLogMode
{
Circular = 0,
AutoBackup = 1,
Retain = 2,
}
public partial class EventLogNotFoundException : System.Diagnostics.Eventing.Reader.EventLogException
{
public EventLogNotFoundException() { }
protected EventLogNotFoundException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { }
public EventLogNotFoundException(string message) { }
public EventLogNotFoundException(string message, System.Exception innerException) { }
}
public partial class EventLogPropertySelector : System.IDisposable
{
public EventLogPropertySelector(System.Collections.Generic.IEnumerable<string> propertyQueries) { }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
}
public partial class EventLogProviderDisabledException : System.Diagnostics.Eventing.Reader.EventLogException
{
public EventLogProviderDisabledException() { }
protected EventLogProviderDisabledException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { }
public EventLogProviderDisabledException(string message) { }
public EventLogProviderDisabledException(string message, System.Exception innerException) { }
}
public partial class EventLogQuery
{
public EventLogQuery(string path, System.Diagnostics.Eventing.Reader.PathType pathType) { }
public EventLogQuery(string path, System.Diagnostics.Eventing.Reader.PathType pathType, string query) { }
public bool ReverseDirection { get { throw null; } set { } }
public System.Diagnostics.Eventing.Reader.EventLogSession Session { get { throw null; } set { } }
public bool TolerateQueryErrors { get { throw null; } set { } }
}
public partial class EventLogReader : System.IDisposable
{
public EventLogReader(System.Diagnostics.Eventing.Reader.EventLogQuery eventQuery) { }
public EventLogReader(System.Diagnostics.Eventing.Reader.EventLogQuery eventQuery, System.Diagnostics.Eventing.Reader.EventBookmark bookmark) { }
public EventLogReader(string path) { }
public EventLogReader(string path, System.Diagnostics.Eventing.Reader.PathType pathType) { }
public int BatchSize { get { throw null; } set { } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventLogStatus> LogStatus { get { throw null; } }
public void CancelReading() { }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public System.Diagnostics.Eventing.Reader.EventRecord ReadEvent() { throw null; }
public System.Diagnostics.Eventing.Reader.EventRecord ReadEvent(System.TimeSpan timeout) { throw null; }
public void Seek(System.Diagnostics.Eventing.Reader.EventBookmark bookmark) { }
public void Seek(System.Diagnostics.Eventing.Reader.EventBookmark bookmark, long offset) { }
public void Seek(System.IO.SeekOrigin origin, long offset) { }
}
public partial class EventLogReadingException : System.Diagnostics.Eventing.Reader.EventLogException
{
public EventLogReadingException() { }
protected EventLogReadingException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { }
public EventLogReadingException(string message) { }
public EventLogReadingException(string message, System.Exception innerException) { }
}
public partial class EventLogRecord : System.Diagnostics.Eventing.Reader.EventRecord
{
internal EventLogRecord() { }
public override System.Guid? ActivityId { get { throw null; } }
public override System.Diagnostics.Eventing.Reader.EventBookmark Bookmark { get { throw null; } }
public string ContainerLog { get { throw null; } }
public override int Id { get { throw null; } }
public override long? Keywords { get { throw null; } }
public override System.Collections.Generic.IEnumerable<string> KeywordsDisplayNames { get { throw null; } }
public override byte? Level { get { throw null; } }
public override string LevelDisplayName { get { throw null; } }
public override string LogName { get { throw null; } }
public override string MachineName { get { throw null; } }
public System.Collections.Generic.IEnumerable<int> MatchedQueryIds { get { throw null; } }
public override short? Opcode { get { throw null; } }
public override string OpcodeDisplayName { get { throw null; } }
public override int? ProcessId { get { throw null; } }
public override System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventProperty> Properties { get { throw null; } }
public override System.Guid? ProviderId { get { throw null; } }
public override string ProviderName { get { throw null; } }
public override int? Qualifiers { get { throw null; } }
public override long? RecordId { get { throw null; } }
public override System.Guid? RelatedActivityId { get { throw null; } }
public override int? Task { get { throw null; } }
public override string TaskDisplayName { get { throw null; } }
public override int? ThreadId { get { throw null; } }
public override System.DateTime? TimeCreated { get { throw null; } }
public override System.Security.Principal.SecurityIdentifier UserId { get { throw null; } }
public override byte? Version { get { throw null; } }
protected override void Dispose(bool disposing) { }
public override string FormatDescription() { throw null; }
public override string FormatDescription(System.Collections.Generic.IEnumerable<object> values) { throw null; }
public System.Collections.Generic.IList<object> GetPropertyValues(System.Diagnostics.Eventing.Reader.EventLogPropertySelector propertySelector) { throw null; }
public override string ToXml() { throw null; }
}
public partial class EventLogSession : System.IDisposable
{
public EventLogSession() { }
public EventLogSession(string server) { }
public EventLogSession(string server, string domain, string user, System.Security.SecureString password, System.Diagnostics.Eventing.Reader.SessionAuthentication logOnType) { }
public static System.Diagnostics.Eventing.Reader.EventLogSession GlobalSession { get { throw null; } }
public void CancelCurrentOperations() { }
public void ClearLog(string logName) { }
public void ClearLog(string logName, string backupPath) { }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public void ExportLog(string path, System.Diagnostics.Eventing.Reader.PathType pathType, string query, string targetFilePath) { }
public void ExportLog(string path, System.Diagnostics.Eventing.Reader.PathType pathType, string query, string targetFilePath, bool tolerateQueryErrors) { }
public void ExportLogAndMessages(string path, System.Diagnostics.Eventing.Reader.PathType pathType, string query, string targetFilePath) { }
public void ExportLogAndMessages(string path, System.Diagnostics.Eventing.Reader.PathType pathType, string query, string targetFilePath, bool tolerateQueryErrors, System.Globalization.CultureInfo targetCultureInfo) { }
public System.Diagnostics.Eventing.Reader.EventLogInformation GetLogInformation(string logName, System.Diagnostics.Eventing.Reader.PathType pathType) { throw null; }
public System.Collections.Generic.IEnumerable<string> GetLogNames() { throw null; }
public System.Collections.Generic.IEnumerable<string> GetProviderNames() { throw null; }
}
public sealed partial class EventLogStatus
{
internal EventLogStatus() { }
public string LogName { get { throw null; } }
public int StatusCode { get { throw null; } }
}
public enum EventLogType
{
Administrative = 0,
Operational = 1,
Analytical = 2,
Debug = 3,
}
public partial class EventLogWatcher : System.IDisposable
{
public EventLogWatcher(System.Diagnostics.Eventing.Reader.EventLogQuery eventQuery) { }
public EventLogWatcher(System.Diagnostics.Eventing.Reader.EventLogQuery eventQuery, System.Diagnostics.Eventing.Reader.EventBookmark bookmark) { }
public EventLogWatcher(System.Diagnostics.Eventing.Reader.EventLogQuery eventQuery, System.Diagnostics.Eventing.Reader.EventBookmark bookmark, bool readExistingEvents) { }
public EventLogWatcher(string path) { }
public bool Enabled { get { throw null; } set { } }
public event System.EventHandler<System.Diagnostics.Eventing.Reader.EventRecordWrittenEventArgs> EventRecordWritten { add { } remove { } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
}
public sealed partial class EventMetadata
{
internal EventMetadata() { }
public string Description { get { throw null; } }
public long Id { get { throw null; } }
public System.Collections.Generic.IEnumerable<System.Diagnostics.Eventing.Reader.EventKeyword> Keywords { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventLevel Level { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventLogLink LogLink { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventOpcode Opcode { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventTask Task { get { throw null; } }
public string Template { get { throw null; } }
public byte Version { get { throw null; } }
}
public sealed partial class EventOpcode
{
internal EventOpcode() { }
public string DisplayName { get { throw null; } }
public string Name { get { throw null; } }
public int Value { get { throw null; } }
}
public sealed partial class EventProperty
{
internal EventProperty() { }
public object Value { get { throw null; } }
}
public abstract partial class EventRecord : System.IDisposable
{
protected EventRecord() { }
public abstract System.Guid? ActivityId { get; }
public abstract System.Diagnostics.Eventing.Reader.EventBookmark Bookmark { get; }
public abstract int Id { get; }
public abstract long? Keywords { get; }
public abstract System.Collections.Generic.IEnumerable<string> KeywordsDisplayNames { get; }
public abstract byte? Level { get; }
public abstract string LevelDisplayName { get; }
public abstract string LogName { get; }
public abstract string MachineName { get; }
public abstract short? Opcode { get; }
public abstract string OpcodeDisplayName { get; }
public abstract int? ProcessId { get; }
public abstract System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventProperty> Properties { get; }
public abstract System.Guid? ProviderId { get; }
public abstract string ProviderName { get; }
public abstract int? Qualifiers { get; }
public abstract long? RecordId { get; }
public abstract System.Guid? RelatedActivityId { get; }
public abstract int? Task { get; }
public abstract string TaskDisplayName { get; }
public abstract int? ThreadId { get; }
public abstract System.DateTime? TimeCreated { get; }
public abstract System.Security.Principal.SecurityIdentifier UserId { get; }
public abstract byte? Version { get; }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public abstract string FormatDescription();
public abstract string FormatDescription(System.Collections.Generic.IEnumerable<object> values);
public abstract string ToXml();
}
public sealed partial class EventRecordWrittenEventArgs : System.EventArgs
{
internal EventRecordWrittenEventArgs() { }
public System.Exception EventException { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventRecord EventRecord { get { throw null; } }
}
public sealed partial class EventTask
{
internal EventTask() { }
public string DisplayName { get { throw null; } }
public System.Guid EventGuid { get { throw null; } }
public string Name { get { throw null; } }
public int Value { get { throw null; } }
}
public enum PathType
{
LogName = 1,
FilePath = 2,
}
public partial class ProviderMetadata : System.IDisposable
{
public ProviderMetadata(string providerName) { }
public ProviderMetadata(string providerName, System.Diagnostics.Eventing.Reader.EventLogSession session, System.Globalization.CultureInfo targetCultureInfo) { }
public string DisplayName { get { throw null; } }
public System.Collections.Generic.IEnumerable<System.Diagnostics.Eventing.Reader.EventMetadata> Events { get { throw null; } }
public System.Uri HelpLink { get { throw null; } }
public System.Guid Id { get { throw null; } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventKeyword> Keywords { get { throw null; } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventLevel> Levels { get { throw null; } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventLogLink> LogLinks { get { throw null; } }
public string MessageFilePath { get { throw null; } }
public string Name { get { throw null; } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventOpcode> Opcodes { get { throw null; } }
public string ParameterFilePath { get { throw null; } }
public string ResourceFilePath { get { throw null; } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventTask> Tasks { get { throw null; } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
}
public enum SessionAuthentication
{
Default = 0,
Negotiate = 1,
Kerberos = 2,
Ntlm = 3,
}
[System.FlagsAttribute]
public enum StandardEventKeywords : long
{
None = (long)0,
ResponseTime = (long)281474976710656,
WdiContext = (long)562949953421312,
WdiDiagnostic = (long)1125899906842624,
Sqm = (long)2251799813685248,
AuditFailure = (long)4503599627370496,
[System.ObsoleteAttribute("StandardEventKeywords.CorrelationHint has an incorrect value and has been deprecated. Use CorrelationHint2 instead.")]
CorrelationHint = (long)4503599627370496,
AuditSuccess = (long)9007199254740992,
CorrelationHint2 = (long)18014398509481984,
EventLogClassic = (long)36028797018963968,
}
public enum StandardEventLevel
{
LogAlways = 0,
Critical = 1,
Error = 2,
Warning = 3,
Informational = 4,
Verbose = 5,
}
public enum StandardEventOpcode
{
Info = 0,
Start = 1,
Stop = 2,
DataCollectionStart = 3,
DataCollectionStop = 4,
Extension = 5,
Reply = 6,
Resume = 7,
Suspend = 8,
Send = 9,
Receive = 240,
}
public enum StandardEventTask
{
None = 0,
}
}
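// Editorial note: a rough sketch of reading the Application log through the
// Eventing.Reader surface above (Windows only; the log name and XPath filter are
// illustrative, not taken from this file):
//
//   var query = new System.Diagnostics.Eventing.Reader.EventLogQuery(
//       "Application", System.Diagnostics.Eventing.Reader.PathType.LogName, "*[System/Level=2]");
//   using (var reader = new System.Diagnostics.Eventing.Reader.EventLogReader(query))
//   {
//       for (var record = reader.ReadEvent(); record != null; record = reader.ReadEvent())
//           System.Console.WriteLine(record.FormatDescription());
//   }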
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// ------------------------------------------------------------------------------
// Changes to this file must follow the https://aka.ms/api-review process.
// ------------------------------------------------------------------------------
namespace System.Diagnostics
{
public partial class EntryWrittenEventArgs : System.EventArgs
{
public EntryWrittenEventArgs() { }
public EntryWrittenEventArgs(System.Diagnostics.EventLogEntry entry) { }
public System.Diagnostics.EventLogEntry Entry { get { throw null; } }
}
public delegate void EntryWrittenEventHandler(object sender, System.Diagnostics.EntryWrittenEventArgs e);
public partial class EventInstance
{
public EventInstance(long instanceId, int categoryId) { }
public EventInstance(long instanceId, int categoryId, System.Diagnostics.EventLogEntryType entryType) { }
public int CategoryId { get { throw null; } set { } }
public System.Diagnostics.EventLogEntryType EntryType { get { throw null; } set { } }
public long InstanceId { get { throw null; } set { } }
}
[System.ComponentModel.DefaultEventAttribute("EntryWritten")]
public partial class EventLog : System.ComponentModel.Component, System.ComponentModel.ISupportInitialize
{
public EventLog() { }
public EventLog(string logName) { }
public EventLog(string logName, string machineName) { }
public EventLog(string logName, string machineName, string source) { }
[System.ComponentModel.BrowsableAttribute(false)]
[System.ComponentModel.DefaultValueAttribute(false)]
public bool EnableRaisingEvents { get { throw null; } set { } }
[System.ComponentModel.BrowsableAttribute(false)]
[System.ComponentModel.DesignerSerializationVisibilityAttribute(System.ComponentModel.DesignerSerializationVisibility.Hidden)]
public System.Diagnostics.EventLogEntryCollection Entries { get { throw null; } }
[System.ComponentModel.DefaultValueAttribute("")]
[System.ComponentModel.ReadOnlyAttribute(true)]
[System.ComponentModel.SettingsBindableAttribute(true)]
public string Log { get { throw null; } set { } }
[System.ComponentModel.BrowsableAttribute(false)]
public string LogDisplayName { get { throw null; } }
[System.ComponentModel.DefaultValueAttribute(".")]
[System.ComponentModel.ReadOnlyAttribute(true)]
[System.ComponentModel.SettingsBindableAttribute(true)]
public string MachineName { get { throw null; } set { } }
[System.ComponentModel.BrowsableAttribute(false)]
[System.ComponentModel.DesignerSerializationVisibilityAttribute(System.ComponentModel.DesignerSerializationVisibility.Hidden)]
public long MaximumKilobytes { get { throw null; } set { } }
[System.ComponentModel.BrowsableAttribute(false)]
public int MinimumRetentionDays { get { throw null; } }
[System.ComponentModel.BrowsableAttribute(false)]
public System.Diagnostics.OverflowAction OverflowAction { get { throw null; } }
[System.ComponentModel.DefaultValueAttribute("")]
[System.ComponentModel.ReadOnlyAttribute(true)]
[System.ComponentModel.SettingsBindableAttribute(true)]
public string Source { get { throw null; } set { } }
[System.ComponentModel.BrowsableAttribute(false)]
[System.ComponentModel.DefaultValueAttribute(null)]
public System.ComponentModel.ISynchronizeInvoke SynchronizingObject { get { throw null; } set { } }
public event System.Diagnostics.EntryWrittenEventHandler EntryWritten { add { } remove { } }
public void BeginInit() { }
public void Clear() { }
public void Close() { }
public static void CreateEventSource(System.Diagnostics.EventSourceCreationData sourceData) { }
public static void CreateEventSource(string source, string logName) { }
[System.ObsoleteAttribute("EventLog.CreateEventSource has been deprecated. Use System.Diagnostics.EventLog.CreateEventSource(EventSourceCreationData sourceData) instead.")]
public static void CreateEventSource(string source, string logName, string machineName) { }
public static void Delete(string logName) { }
public static void Delete(string logName, string machineName) { }
public static void DeleteEventSource(string source) { }
public static void DeleteEventSource(string source, string machineName) { }
protected override void Dispose(bool disposing) { }
public void EndInit() { }
public static bool Exists(string logName) { throw null; }
public static bool Exists(string logName, string machineName) { throw null; }
public static System.Diagnostics.EventLog[] GetEventLogs() { throw null; }
public static System.Diagnostics.EventLog[] GetEventLogs(string machineName) { throw null; }
public static string LogNameFromSourceName(string source, string machineName) { throw null; }
public void ModifyOverflowPolicy(System.Diagnostics.OverflowAction action, int retentionDays) { }
public void RegisterDisplayName(string resourceFile, long resourceId) { }
public static bool SourceExists(string source) { throw null; }
public static bool SourceExists(string source, string machineName) { throw null; }
public void WriteEntry(string message) { }
public void WriteEntry(string message, System.Diagnostics.EventLogEntryType type) { }
public void WriteEntry(string message, System.Diagnostics.EventLogEntryType type, int eventID) { }
public void WriteEntry(string message, System.Diagnostics.EventLogEntryType type, int eventID, short category) { }
public void WriteEntry(string message, System.Diagnostics.EventLogEntryType type, int eventID, short category, byte[] rawData) { }
public static void WriteEntry(string source, string message) { }
public static void WriteEntry(string source, string message, System.Diagnostics.EventLogEntryType type) { }
public static void WriteEntry(string source, string message, System.Diagnostics.EventLogEntryType type, int eventID) { }
public static void WriteEntry(string source, string message, System.Diagnostics.EventLogEntryType type, int eventID, short category) { }
public static void WriteEntry(string source, string message, System.Diagnostics.EventLogEntryType type, int eventID, short category, byte[] rawData) { }
public void WriteEvent(System.Diagnostics.EventInstance instance, byte[] data, params object[] values) { }
public void WriteEvent(System.Diagnostics.EventInstance instance, params object[] values) { }
public static void WriteEvent(string source, System.Diagnostics.EventInstance instance, byte[] data, params object[] values) { }
public static void WriteEvent(string source, System.Diagnostics.EventInstance instance, params object[] values) { }
}
[System.ComponentModel.DesignTimeVisibleAttribute(false)]
[System.ComponentModel.ToolboxItemAttribute(false)]
public sealed partial class EventLogEntry : System.ComponentModel.Component, System.Runtime.Serialization.ISerializable
{
internal EventLogEntry() { }
public string Category { get { throw null; } }
public short CategoryNumber { get { throw null; } }
public byte[] Data { get { throw null; } }
public System.Diagnostics.EventLogEntryType EntryType { get { throw null; } }
[System.ObsoleteAttribute("EventLogEntry.EventID has been deprecated. Use System.Diagnostics.EventLogEntry.InstanceId instead.")]
public int EventID { get { throw null; } }
public int Index { get { throw null; } }
public long InstanceId { get { throw null; } }
public string MachineName { get { throw null; } }
[System.ComponentModel.EditorAttribute("System.ComponentModel.Design.BinaryEditor, System.Design, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", "System.Drawing.Design.UITypeEditor, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
public string Message { get { throw null; } }
public string[] ReplacementStrings { get { throw null; } }
public string Source { get { throw null; } }
public System.DateTime TimeGenerated { get { throw null; } }
public System.DateTime TimeWritten { get { throw null; } }
public string UserName { get { throw null; } }
public bool Equals(System.Diagnostics.EventLogEntry otherEntry) { throw null; }
void System.Runtime.Serialization.ISerializable.GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
}
public partial class EventLogEntryCollection : System.Collections.ICollection, System.Collections.IEnumerable
{
internal EventLogEntryCollection() { }
public int Count { get { throw null; } }
public virtual System.Diagnostics.EventLogEntry this[int index] { get { throw null; } }
bool System.Collections.ICollection.IsSynchronized { get { throw null; } }
object System.Collections.ICollection.SyncRoot { get { throw null; } }
public void CopyTo(System.Diagnostics.EventLogEntry[] entries, int index) { }
public System.Collections.IEnumerator GetEnumerator() { throw null; }
void System.Collections.ICollection.CopyTo(System.Array array, int index) { }
}
public enum EventLogEntryType
{
Error = 1,
Warning = 2,
Information = 4,
SuccessAudit = 8,
FailureAudit = 16,
}
public sealed partial class EventLogTraceListener : System.Diagnostics.TraceListener
{
public EventLogTraceListener() { }
public EventLogTraceListener(System.Diagnostics.EventLog eventLog) { }
public EventLogTraceListener(string source) { }
public System.Diagnostics.EventLog EventLog { get { throw null; } set { } }
public override string Name { get { throw null; } set { } }
public override void Close() { }
protected override void Dispose(bool disposing) { }
[System.Runtime.InteropServices.ComVisibleAttribute(false)]
public override void TraceData(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType severity, int id, object data) { }
[System.Runtime.InteropServices.ComVisibleAttribute(false)]
public override void TraceData(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType severity, int id, params object[] data) { }
[System.Runtime.InteropServices.ComVisibleAttribute(false)]
public override void TraceEvent(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType severity, int id, string message) { }
[System.Runtime.InteropServices.ComVisibleAttribute(false)]
public override void TraceEvent(System.Diagnostics.TraceEventCache eventCache, string source, System.Diagnostics.TraceEventType severity, int id, string format, params object[] args) { }
public override void Write(string message) { }
public override void WriteLine(string message) { }
}
public partial class EventSourceCreationData
{
public EventSourceCreationData(string source, string logName) { }
public int CategoryCount { get { throw null; } set { } }
public string CategoryResourceFile { get { throw null; } set { } }
public string LogName { get { throw null; } set { } }
public string MachineName { get { throw null; } set { } }
public string MessageResourceFile { get { throw null; } set { } }
public string ParameterResourceFile { get { throw null; } set { } }
public string Source { get { throw null; } set { } }
}
public enum OverflowAction
{
DoNotOverwrite = -1,
OverwriteAsNeeded = 0,
OverwriteOlder = 1,
}
}
namespace System.Diagnostics.Eventing.Reader
{
public partial class EventBookmark
{
internal EventBookmark() { }
}
public sealed partial class EventKeyword
{
internal EventKeyword() { }
public string DisplayName { get { throw null; } }
public string Name { get { throw null; } }
public long Value { get { throw null; } }
}
public sealed partial class EventLevel
{
internal EventLevel() { }
public string DisplayName { get { throw null; } }
public string Name { get { throw null; } }
public int Value { get { throw null; } }
}
public partial class EventLogConfiguration : System.IDisposable
{
public EventLogConfiguration(string logName) { }
public EventLogConfiguration(string logName, System.Diagnostics.Eventing.Reader.EventLogSession session) { }
public bool IsClassicLog { get { throw null; } }
public bool IsEnabled { get { throw null; } set { } }
public string LogFilePath { get { throw null; } set { } }
public System.Diagnostics.Eventing.Reader.EventLogIsolation LogIsolation { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventLogMode LogMode { get { throw null; } set { } }
public string LogName { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventLogType LogType { get { throw null; } }
public long MaximumSizeInBytes { get { throw null; } set { } }
public string OwningProviderName { get { throw null; } }
public int? ProviderBufferSize { get { throw null; } }
public System.Guid? ProviderControlGuid { get { throw null; } }
public long? ProviderKeywords { get { throw null; } set { } }
public int? ProviderLatency { get { throw null; } }
public int? ProviderLevel { get { throw null; } set { } }
public int? ProviderMaximumNumberOfBuffers { get { throw null; } }
public int? ProviderMinimumNumberOfBuffers { get { throw null; } }
public System.Collections.Generic.IEnumerable<string> ProviderNames { get { throw null; } }
public string SecurityDescriptor { get { throw null; } set { } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public void SaveChanges() { }
}
public partial class EventLogException : System.Exception
{
public EventLogException() { }
protected EventLogException(int errorCode) { }
protected EventLogException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { }
public EventLogException(string message) { }
public EventLogException(string message, System.Exception innerException) { }
public override string Message { get { throw null; } }
public override void GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
}
public sealed partial class EventLogInformation
{
internal EventLogInformation() { }
public int? Attributes { get { throw null; } }
public System.DateTime? CreationTime { get { throw null; } }
public long? FileSize { get { throw null; } }
public bool? IsLogFull { get { throw null; } }
public System.DateTime? LastAccessTime { get { throw null; } }
public System.DateTime? LastWriteTime { get { throw null; } }
public long? OldestRecordNumber { get { throw null; } }
public long? RecordCount { get { throw null; } }
}
public partial class EventLogInvalidDataException : System.Diagnostics.Eventing.Reader.EventLogException
{
public EventLogInvalidDataException() { }
protected EventLogInvalidDataException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { }
public EventLogInvalidDataException(string message) { }
public EventLogInvalidDataException(string message, System.Exception innerException) { }
}
public enum EventLogIsolation
{
Application = 0,
System = 1,
Custom = 2,
}
public sealed partial class EventLogLink
{
internal EventLogLink() { }
public string DisplayName { get { throw null; } }
public bool IsImported { get { throw null; } }
public string LogName { get { throw null; } }
}
public enum EventLogMode
{
Circular = 0,
AutoBackup = 1,
Retain = 2,
}
public partial class EventLogNotFoundException : System.Diagnostics.Eventing.Reader.EventLogException
{
public EventLogNotFoundException() { }
protected EventLogNotFoundException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { }
public EventLogNotFoundException(string message) { }
public EventLogNotFoundException(string message, System.Exception innerException) { }
}
public partial class EventLogPropertySelector : System.IDisposable
{
public EventLogPropertySelector(System.Collections.Generic.IEnumerable<string> propertyQueries) { }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
}
public partial class EventLogProviderDisabledException : System.Diagnostics.Eventing.Reader.EventLogException
{
public EventLogProviderDisabledException() { }
protected EventLogProviderDisabledException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { }
public EventLogProviderDisabledException(string message) { }
public EventLogProviderDisabledException(string message, System.Exception innerException) { }
}
public partial class EventLogQuery
{
public EventLogQuery(string path, System.Diagnostics.Eventing.Reader.PathType pathType) { }
public EventLogQuery(string path, System.Diagnostics.Eventing.Reader.PathType pathType, string query) { }
public bool ReverseDirection { get { throw null; } set { } }
public System.Diagnostics.Eventing.Reader.EventLogSession Session { get { throw null; } set { } }
public bool TolerateQueryErrors { get { throw null; } set { } }
}
public partial class EventLogReader : System.IDisposable
{
public EventLogReader(System.Diagnostics.Eventing.Reader.EventLogQuery eventQuery) { }
public EventLogReader(System.Diagnostics.Eventing.Reader.EventLogQuery eventQuery, System.Diagnostics.Eventing.Reader.EventBookmark bookmark) { }
public EventLogReader(string path) { }
public EventLogReader(string path, System.Diagnostics.Eventing.Reader.PathType pathType) { }
public int BatchSize { get { throw null; } set { } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventLogStatus> LogStatus { get { throw null; } }
public void CancelReading() { }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public System.Diagnostics.Eventing.Reader.EventRecord ReadEvent() { throw null; }
public System.Diagnostics.Eventing.Reader.EventRecord ReadEvent(System.TimeSpan timeout) { throw null; }
public void Seek(System.Diagnostics.Eventing.Reader.EventBookmark bookmark) { }
public void Seek(System.Diagnostics.Eventing.Reader.EventBookmark bookmark, long offset) { }
public void Seek(System.IO.SeekOrigin origin, long offset) { }
}
public partial class EventLogReadingException : System.Diagnostics.Eventing.Reader.EventLogException
{
public EventLogReadingException() { }
protected EventLogReadingException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { }
public EventLogReadingException(string message) { }
public EventLogReadingException(string message, System.Exception innerException) { }
}
public partial class EventLogRecord : System.Diagnostics.Eventing.Reader.EventRecord
{
internal EventLogRecord() { }
public override System.Guid? ActivityId { get { throw null; } }
public override System.Diagnostics.Eventing.Reader.EventBookmark Bookmark { get { throw null; } }
public string ContainerLog { get { throw null; } }
public override int Id { get { throw null; } }
public override long? Keywords { get { throw null; } }
public override System.Collections.Generic.IEnumerable<string> KeywordsDisplayNames { get { throw null; } }
public override byte? Level { get { throw null; } }
public override string LevelDisplayName { get { throw null; } }
public override string LogName { get { throw null; } }
public override string MachineName { get { throw null; } }
public System.Collections.Generic.IEnumerable<int> MatchedQueryIds { get { throw null; } }
public override short? Opcode { get { throw null; } }
public override string OpcodeDisplayName { get { throw null; } }
public override int? ProcessId { get { throw null; } }
public override System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventProperty> Properties { get { throw null; } }
public override System.Guid? ProviderId { get { throw null; } }
public override string ProviderName { get { throw null; } }
public override int? Qualifiers { get { throw null; } }
public override long? RecordId { get { throw null; } }
public override System.Guid? RelatedActivityId { get { throw null; } }
public override int? Task { get { throw null; } }
public override string TaskDisplayName { get { throw null; } }
public override int? ThreadId { get { throw null; } }
public override System.DateTime? TimeCreated { get { throw null; } }
public override System.Security.Principal.SecurityIdentifier UserId { get { throw null; } }
public override byte? Version { get { throw null; } }
protected override void Dispose(bool disposing) { }
public override string FormatDescription() { throw null; }
public override string FormatDescription(System.Collections.Generic.IEnumerable<object> values) { throw null; }
public System.Collections.Generic.IList<object> GetPropertyValues(System.Diagnostics.Eventing.Reader.EventLogPropertySelector propertySelector) { throw null; }
public override string ToXml() { throw null; }
}
public partial class EventLogSession : System.IDisposable
{
public EventLogSession() { }
public EventLogSession(string server) { }
public EventLogSession(string server, string domain, string user, System.Security.SecureString password, System.Diagnostics.Eventing.Reader.SessionAuthentication logOnType) { }
public static System.Diagnostics.Eventing.Reader.EventLogSession GlobalSession { get { throw null; } }
public void CancelCurrentOperations() { }
public void ClearLog(string logName) { }
public void ClearLog(string logName, string backupPath) { }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public void ExportLog(string path, System.Diagnostics.Eventing.Reader.PathType pathType, string query, string targetFilePath) { }
public void ExportLog(string path, System.Diagnostics.Eventing.Reader.PathType pathType, string query, string targetFilePath, bool tolerateQueryErrors) { }
public void ExportLogAndMessages(string path, System.Diagnostics.Eventing.Reader.PathType pathType, string query, string targetFilePath) { }
public void ExportLogAndMessages(string path, System.Diagnostics.Eventing.Reader.PathType pathType, string query, string targetFilePath, bool tolerateQueryErrors, System.Globalization.CultureInfo targetCultureInfo) { }
public System.Diagnostics.Eventing.Reader.EventLogInformation GetLogInformation(string logName, System.Diagnostics.Eventing.Reader.PathType pathType) { throw null; }
public System.Collections.Generic.IEnumerable<string> GetLogNames() { throw null; }
public System.Collections.Generic.IEnumerable<string> GetProviderNames() { throw null; }
}
public sealed partial class EventLogStatus
{
internal EventLogStatus() { }
public string LogName { get { throw null; } }
public int StatusCode { get { throw null; } }
}
public enum EventLogType
{
Administrative = 0,
Operational = 1,
Analytical = 2,
Debug = 3,
}
public partial class EventLogWatcher : System.IDisposable
{
public EventLogWatcher(System.Diagnostics.Eventing.Reader.EventLogQuery eventQuery) { }
public EventLogWatcher(System.Diagnostics.Eventing.Reader.EventLogQuery eventQuery, System.Diagnostics.Eventing.Reader.EventBookmark bookmark) { }
public EventLogWatcher(System.Diagnostics.Eventing.Reader.EventLogQuery eventQuery, System.Diagnostics.Eventing.Reader.EventBookmark bookmark, bool readExistingEvents) { }
public EventLogWatcher(string path) { }
public bool Enabled { get { throw null; } set { } }
public event System.EventHandler<System.Diagnostics.Eventing.Reader.EventRecordWrittenEventArgs> EventRecordWritten { add { } remove { } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
}
public sealed partial class EventMetadata
{
internal EventMetadata() { }
public string Description { get { throw null; } }
public long Id { get { throw null; } }
public System.Collections.Generic.IEnumerable<System.Diagnostics.Eventing.Reader.EventKeyword> Keywords { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventLevel Level { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventLogLink LogLink { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventOpcode Opcode { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventTask Task { get { throw null; } }
public string Template { get { throw null; } }
public byte Version { get { throw null; } }
}
public sealed partial class EventOpcode
{
internal EventOpcode() { }
public string DisplayName { get { throw null; } }
public string Name { get { throw null; } }
public int Value { get { throw null; } }
}
public sealed partial class EventProperty
{
internal EventProperty() { }
public object Value { get { throw null; } }
}
public abstract partial class EventRecord : System.IDisposable
{
protected EventRecord() { }
public abstract System.Guid? ActivityId { get; }
public abstract System.Diagnostics.Eventing.Reader.EventBookmark Bookmark { get; }
public abstract int Id { get; }
public abstract long? Keywords { get; }
public abstract System.Collections.Generic.IEnumerable<string> KeywordsDisplayNames { get; }
public abstract byte? Level { get; }
public abstract string LevelDisplayName { get; }
public abstract string LogName { get; }
public abstract string MachineName { get; }
public abstract short? Opcode { get; }
public abstract string OpcodeDisplayName { get; }
public abstract int? ProcessId { get; }
public abstract System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventProperty> Properties { get; }
public abstract System.Guid? ProviderId { get; }
public abstract string ProviderName { get; }
public abstract int? Qualifiers { get; }
public abstract long? RecordId { get; }
public abstract System.Guid? RelatedActivityId { get; }
public abstract int? Task { get; }
public abstract string TaskDisplayName { get; }
public abstract int? ThreadId { get; }
public abstract System.DateTime? TimeCreated { get; }
public abstract System.Security.Principal.SecurityIdentifier UserId { get; }
public abstract byte? Version { get; }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public abstract string FormatDescription();
public abstract string FormatDescription(System.Collections.Generic.IEnumerable<object> values);
public abstract string ToXml();
}
public sealed partial class EventRecordWrittenEventArgs : System.EventArgs
{
internal EventRecordWrittenEventArgs() { }
public System.Exception EventException { get { throw null; } }
public System.Diagnostics.Eventing.Reader.EventRecord EventRecord { get { throw null; } }
}
public sealed partial class EventTask
{
internal EventTask() { }
public string DisplayName { get { throw null; } }
public System.Guid EventGuid { get { throw null; } }
public string Name { get { throw null; } }
public int Value { get { throw null; } }
}
public enum PathType
{
LogName = 1,
FilePath = 2,
}
public partial class ProviderMetadata : System.IDisposable
{
public ProviderMetadata(string providerName) { }
public ProviderMetadata(string providerName, System.Diagnostics.Eventing.Reader.EventLogSession session, System.Globalization.CultureInfo targetCultureInfo) { }
public string DisplayName { get { throw null; } }
public System.Collections.Generic.IEnumerable<System.Diagnostics.Eventing.Reader.EventMetadata> Events { get { throw null; } }
public System.Uri HelpLink { get { throw null; } }
public System.Guid Id { get { throw null; } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventKeyword> Keywords { get { throw null; } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventLevel> Levels { get { throw null; } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventLogLink> LogLinks { get { throw null; } }
public string MessageFilePath { get { throw null; } }
public string Name { get { throw null; } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventOpcode> Opcodes { get { throw null; } }
public string ParameterFilePath { get { throw null; } }
public string ResourceFilePath { get { throw null; } }
public System.Collections.Generic.IList<System.Diagnostics.Eventing.Reader.EventTask> Tasks { get { throw null; } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
}
public enum SessionAuthentication
{
Default = 0,
Negotiate = 1,
Kerberos = 2,
Ntlm = 3,
}
[System.FlagsAttribute]
public enum StandardEventKeywords : long
{
None = (long)0,
ResponseTime = (long)281474976710656,
WdiContext = (long)562949953421312,
WdiDiagnostic = (long)1125899906842624,
Sqm = (long)2251799813685248,
AuditFailure = (long)4503599627370496,
[System.ObsoleteAttribute("StandardEventKeywords.CorrelationHint has an incorrect value and has been deprecated. Use CorrelationHint2 instead.")]
CorrelationHint = (long)4503599627370496,
AuditSuccess = (long)9007199254740992,
CorrelationHint2 = (long)18014398509481984,
EventLogClassic = (long)36028797018963968,
}
public enum StandardEventLevel
{
LogAlways = 0,
Critical = 1,
Error = 2,
Warning = 3,
Informational = 4,
Verbose = 5,
}
public enum StandardEventOpcode
{
Info = 0,
Start = 1,
Stop = 2,
DataCollectionStart = 3,
DataCollectionStop = 4,
Extension = 5,
Reply = 6,
Resume = 7,
Suspend = 8,
Send = 9,
Receive = 240,
}
public enum StandardEventTask
{
None = 0,
}
}
| -1 |
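// Illustrative sketch (not part of the PR contents above): typical use of the classic
// System.Diagnostics.EventLog API declared in this reference file. The source and log
// names are assumptions for the example; CreateEventSource requires elevated rights and
// is therefore normally done at install time rather than at run time.
using System.Diagnostics;

internal static class EventLogWriterSample
{
    internal static void WriteSampleEntry()
    {
        const string source = "SampleApp";    // hypothetical event source
        const string logName = "Application";

        if (!EventLog.SourceExists(source))
        {
            EventLog.CreateEventSource(source, logName);
        }

        using (var log = new EventLog(logName) { Source = source })
        {
            log.WriteEntry("Sample informational entry.", EventLogEntryType.Information, eventID: 1000);
        }
    }
}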
./src/libraries/Microsoft.Win32.Registry/src/Microsoft/Win32/SafeHandles/SafeRegistryHandle.FileSystem.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Runtime.InteropServices;
namespace Microsoft.Win32.SafeHandles
{
public sealed partial class SafeRegistryHandle : SafeHandleZeroOrMinusOneIsInvalid
{
// TODO: implement this if necessary
protected override bool ReleaseHandle() => true;
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Runtime.InteropServices;
namespace Microsoft.Win32.SafeHandles
{
public sealed partial class SafeRegistryHandle : SafeHandleZeroOrMinusOneIsInvalid
{
// TODO: implement this if necessary
protected override bool ReleaseHandle() => true;
}
}
| -1 |
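// Illustrative sketch (not part of the PR contents above): how a SafeRegistryHandle is
// typically obtained through the public Microsoft.Win32.Registry API. On the file-system
// backed (non-Windows) build shown above, ReleaseHandle is a no-op stub, so there is no
// OS handle to release. The registry path and value name are assumptions for the example.
using Microsoft.Win32;
using Microsoft.Win32.SafeHandles;

internal static class SafeRegistryHandleSample
{
    internal static string ReadProductName()
    {
        using (RegistryKey key = Registry.LocalMachine.OpenSubKey(@"SOFTWARE\Microsoft\Windows NT\CurrentVersion"))
        {
            if (key == null)
            {
                return null;
            }

            // The live handle backing the key; its lifetime is owned by the RegistryKey,
            // and it can also be rewrapped via RegistryKey.FromHandle if needed.
            SafeRegistryHandle handle = key.Handle;

            return handle.IsInvalid ? null : (string)key.GetValue("ProductName");
        }
    }
}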
./src/libraries/Common/src/Interop/Windows/User32/Interop.SetClassLong.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Runtime.InteropServices;
internal static partial class Interop
{
internal static partial class User32
{
[LibraryImport(Libraries.User32)]
public static partial IntPtr SetClassLongW(IntPtr hwnd, int nIndex, IntPtr dwNewLong);
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Runtime.InteropServices;
internal static partial class Interop
{
internal static partial class User32
{
[LibraryImport(Libraries.User32)]
public static partial IntPtr SetClassLongW(IntPtr hwnd, int nIndex, IntPtr dwNewLong);
}
}
| -1 |
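// Illustrative sketch (not part of the PR contents above): how the SetClassLongW shim is
// typically called. GCL_HICON and the window/icon handles are assumptions for the example;
// on 64-bit Windows the SetClassLongPtrW variant is normally used instead, because
// SetClassLongW truncates pointer-sized values.
using System;

internal static class SetClassLongSample
{
    private const int GCL_HICON = -14; // class icon slot, per WinUser.h

    internal static IntPtr ReplaceClassIcon(IntPtr hwnd, IntPtr hIcon)
    {
        // Returns the previous value stored at the given class-memory index.
        return Interop.User32.SetClassLongW(hwnd, GCL_HICON, hIcon);
    }
}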
./src/libraries/System.Security.Cryptography.Csp/tests/RSACryptoServiceProviderTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Security.Cryptography.Rsa.Tests;
using Xunit;
namespace System.Security.Cryptography.Csp.Tests
{
public class RSACryptoServiceProviderTests
{
const int PROV_RSA_FULL = 1;
const int PROV_RSA_AES = 24;
[Fact]
public static void DefaultKeySize()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Equal(1024, rsa.KeySize);
}
}
[Fact]
public static void PublicOnly_DefaultKey()
{
using (var rsa = new RSACryptoServiceProvider())
{
// This will call the key into being, which should create a public/private pair,
// therefore it should not be public-only.
Assert.False(rsa.PublicOnly);
}
}
[Fact]
public static void PublicOnly_WithPrivateKey()
{
using (var rsa = new RSACryptoServiceProvider())
{
rsa.ImportParameters(TestData.RSA1024Params);
Assert.False(rsa.PublicOnly);
}
}
[Fact]
public static void PublicOnly_WithNoPrivate()
{
using (var rsa = new RSACryptoServiceProvider())
{
RSAParameters publicParams = new RSAParameters
{
Modulus = TestData.RSA1024Params.Modulus,
Exponent = TestData.RSA1024Params.Exponent,
};
rsa.ImportParameters(publicParams);
Assert.True(rsa.PublicOnly);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters on Unix
public static void CreateKey_LegacyProvider()
{
CspParameters cspParameters = new CspParameters(PROV_RSA_FULL);
using (var rsa = new RSACryptoServiceProvider(cspParameters))
{
CspKeyContainerInfo containerInfo = rsa.CspKeyContainerInfo;
Assert.Equal(PROV_RSA_FULL, containerInfo.ProviderType);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters\CspKeyContainerInfo on Unix
public static void CreateKey_LegacyProvider_RoundtripBlob()
{
const int KeySize = 512;
CspParameters cspParameters = new CspParameters(PROV_RSA_FULL);
byte[] blob;
using (var rsa = new RSACryptoServiceProvider(KeySize, cspParameters))
{
CspKeyContainerInfo containerInfo = rsa.CspKeyContainerInfo;
Assert.Equal(PROV_RSA_FULL, containerInfo.ProviderType);
Assert.Equal(KeySize, rsa.KeySize);
blob = rsa.ExportCspBlob(true);
}
using (var rsa = new RSACryptoServiceProvider())
{
rsa.ImportCspBlob(blob);
CspKeyContainerInfo containerInfo = rsa.CspKeyContainerInfo;
// The provider information is not persisted in the blob
Assert.Equal(PROV_RSA_AES, containerInfo.ProviderType);
Assert.Equal(KeySize, rsa.KeySize);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters\CspKeyContainerInfo on Unix
public static void DefaultKey_Parameters()
{
using (var rsa = new RSACryptoServiceProvider())
{
CspKeyContainerInfo keyContainerInfo = rsa.CspKeyContainerInfo;
Assert.NotNull(keyContainerInfo);
Assert.Equal(PROV_RSA_AES, keyContainerInfo.ProviderType);
// This shouldn't be localized, so it should be safe to test on all cultures
Assert.Equal("Microsoft Enhanced RSA and AES Cryptographic Provider", keyContainerInfo.ProviderName);
Assert.Null(keyContainerInfo.KeyContainerName);
Assert.Equal(string.Empty, keyContainerInfo.UniqueKeyContainerName);
Assert.False(keyContainerInfo.HardwareDevice, "HardwareDevice");
Assert.False(keyContainerInfo.MachineKeyStore, "MachineKeyStore");
Assert.False(keyContainerInfo.Protected, "Protected");
Assert.False(keyContainerInfo.Removable, "Removable");
// Ephemeral keys don't successfully request the exportable bit.
Assert.ThrowsAny<CryptographicException>(() => keyContainerInfo.Exportable);
Assert.True(keyContainerInfo.RandomlyGenerated, "RandomlyGenerated");
Assert.Equal(KeyNumber.Exchange, keyContainerInfo.KeyNumber);
}
}
[Fact]
public static void DefaultKey_NotPersisted()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.False(rsa.PersistKeyInCsp);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters on Unix
public static void NamedKey_DefaultProvider()
{
const int KeySize = 2048;
CspParameters cspParameters = new CspParameters
{
KeyContainerName = Guid.NewGuid().ToString(),
};
using (new RsaKeyLifetime(cspParameters))
{
byte[] privateBlob;
string uniqueKeyContainerName;
using (var rsa = new RSACryptoServiceProvider(KeySize, cspParameters))
{
Assert.True(rsa.PersistKeyInCsp, "rsa.PersistKeyInCsp");
Assert.Equal(cspParameters.KeyContainerName, rsa.CspKeyContainerInfo.KeyContainerName);
uniqueKeyContainerName = rsa.CspKeyContainerInfo.UniqueKeyContainerName;
Assert.NotNull(uniqueKeyContainerName);
Assert.NotEqual(string.Empty, uniqueKeyContainerName);
privateBlob = rsa.ExportCspBlob(true);
Assert.True(rsa.CspKeyContainerInfo.Exportable, "rsa.CspKeyContainerInfo.Exportable");
}
// Fail if the key didn't persist
cspParameters.Flags |= CspProviderFlags.UseExistingKey;
using (var rsa = new RSACryptoServiceProvider(cspParameters))
{
Assert.True(rsa.PersistKeyInCsp);
Assert.Equal(KeySize, rsa.KeySize);
Assert.Equal(uniqueKeyContainerName, rsa.CspKeyContainerInfo.UniqueKeyContainerName);
byte[] blob2 = rsa.ExportCspBlob(true);
Assert.Equal(privateBlob, blob2);
}
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters on Unix
public static void NamedKey_AlternateProvider()
{
const int KeySize = 512;
CspParameters cspParameters = new CspParameters(PROV_RSA_FULL)
{
KeyContainerName = Guid.NewGuid().ToString(),
};
using (new RsaKeyLifetime(cspParameters))
{
byte[] privateBlob;
string uniqueKeyContainerName;
using (var rsa = new RSACryptoServiceProvider(KeySize, cspParameters))
{
Assert.True(rsa.PersistKeyInCsp);
Assert.Equal(PROV_RSA_FULL, rsa.CspKeyContainerInfo.ProviderType);
privateBlob = rsa.ExportCspBlob(true);
Assert.Equal(cspParameters.KeyContainerName, rsa.CspKeyContainerInfo.KeyContainerName);
uniqueKeyContainerName = rsa.CspKeyContainerInfo.UniqueKeyContainerName;
Assert.NotNull(uniqueKeyContainerName);
Assert.NotEqual(string.Empty, uniqueKeyContainerName);
}
// Fail if the key didn't persist
cspParameters.Flags |= CspProviderFlags.UseExistingKey;
using (var rsa = new RSACryptoServiceProvider(cspParameters))
{
Assert.True(rsa.PersistKeyInCsp);
Assert.Equal(KeySize, rsa.KeySize);
// Since we're specifying the provider explicitly it should still match.
Assert.Equal(PROV_RSA_FULL, rsa.CspKeyContainerInfo.ProviderType);
Assert.Equal(uniqueKeyContainerName, rsa.CspKeyContainerInfo.UniqueKeyContainerName);
byte[] blob2 = rsa.ExportCspBlob(true);
Assert.Equal(privateBlob, blob2);
}
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters on Unix
public static void NonExportable_Ephemeral()
{
CspParameters cspParameters = new CspParameters
{
Flags = CspProviderFlags.UseNonExportableKey,
};
using (var rsa = new RSACryptoServiceProvider(cspParameters))
{
// Ephemeral keys don't successfully request the exportable bit.
Assert.ThrowsAny<CryptographicException>(() => rsa.CspKeyContainerInfo.Exportable);
Assert.ThrowsAny<CryptographicException>(() => rsa.ExportCspBlob(true));
Assert.ThrowsAny<CryptographicException>(() => rsa.ExportParameters(true));
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters on Unix
public static void NonExportable_Persisted()
{
CspParameters cspParameters = new CspParameters
{
KeyContainerName = Guid.NewGuid().ToString(),
Flags = CspProviderFlags.UseNonExportableKey,
};
using (new RsaKeyLifetime(cspParameters))
{
using (var rsa = new RSACryptoServiceProvider(cspParameters))
{
Assert.False(rsa.CspKeyContainerInfo.Exportable, "rsa.CspKeyContainerInfo.Exportable");
Assert.ThrowsAny<CryptographicException>(() => rsa.ExportCspBlob(true));
Assert.ThrowsAny<CryptographicException>(() => rsa.ExportParameters(true));
}
}
}
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)]
public static void Ctor_UseCspParameter_Throws_Unix()
{
var cspParameters = new CspParameters();
Assert.Throws<PlatformNotSupportedException>(() => new RSACryptoServiceProvider(cspParameters));
Assert.Throws<PlatformNotSupportedException>(() => new RSACryptoServiceProvider(0, cspParameters));
}
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)]
public static void CspKeyContainerInfo_Throws_Unix()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Throws<PlatformNotSupportedException>(() => (rsa.CspKeyContainerInfo));
}
}
[Fact]
public static void ImportParameters_ExponentTooBig_Throws()
{
using (var rsa = new RSACryptoServiceProvider())
{
// Verify that the Unix shims and the Windows CSP both throw the same exception when a large Exponent is imported
Assert.ThrowsAny<CryptographicException>(() => rsa.ImportParameters(TestData.RsaBigExponentParams));
}
}
[Fact]
public static void SignHash_DefaultAlgorithm_Success()
{
byte[] hashVal = SHA1.HashData(TestData.HelloBytes);
using (var rsa = new RSACryptoServiceProvider())
{
byte[] signVal = rsa.SignHash(hashVal, null);
Assert.True(rsa.VerifyHash(hashVal, null, signVal));
}
}
[Fact]
public static void VerifyHash_DefaultAlgorithm_Success()
{
byte[] hashVal = SHA1.HashData(TestData.HelloBytes);
using (var rsa = new RSACryptoServiceProvider())
{
byte[] signVal = rsa.SignData(TestData.HelloBytes, "SHA1");
Assert.True(rsa.VerifyHash(hashVal, null, signVal));
}
}
[Fact]
public static void Encrypt_InvalidPaddingMode_Throws()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Throws<CryptographicException>(() => rsa.Encrypt(TestData.HelloBytes, RSAEncryptionPadding.OaepSHA256));
}
}
[Fact]
public static void Decrypt_InvalidPaddingMode_Throws()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Throws<CryptographicException>(() => rsa.Decrypt(TestData.HelloBytes, RSAEncryptionPadding.OaepSHA256));
}
}
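        // Illustrative sketch (not part of the original test suite): a roundtrip with a
        // padding mode RSACryptoServiceProvider does support, for contrast with the
        // unsupported-padding cases above.
        [Fact]
        public static void EncryptDecrypt_Pkcs1_Roundtrip()
        {
            using (var rsa = new RSACryptoServiceProvider())
            {
                byte[] cipherText = rsa.Encrypt(TestData.HelloBytes, RSAEncryptionPadding.Pkcs1);
                byte[] plainText = rsa.Decrypt(cipherText, RSAEncryptionPadding.Pkcs1);
                Assert.Equal(TestData.HelloBytes, plainText);
            }
        }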
[Fact]
public static void Sign_InvalidPaddingMode_Throws()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Throws<CryptographicException>(() => rsa.SignData(TestData.HelloBytes, HashAlgorithmName.SHA1, RSASignaturePadding.Pss));
}
}
[Fact]
public static void Verify_InvalidPaddingMode_Throws()
{
using (var rsa = new RSACryptoServiceProvider())
{
byte[] sig = rsa.SignData(TestData.HelloBytes, "SHA1");
Assert.Throws<CryptographicException>(() => rsa.VerifyData(TestData.HelloBytes, sig, HashAlgorithmName.SHA1, RSASignaturePadding.Pss));
}
}
[Fact]
public static void SignatureAlgorithm_Success()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Equal("http://www.w3.org/2000/09/xmldsig#rsa-sha1", rsa.SignatureAlgorithm);
}
}
[Fact]
public static void SignData_VerifyHash_CaseInsensitive_Success()
{
byte[] hashVal = SHA1.HashData(TestData.HelloBytes);
using (var rsa = new RSACryptoServiceProvider())
{
byte[] signVal = rsa.SignData(TestData.HelloBytes, "SHA1");
Assert.True(rsa.VerifyHash(hashVal, "SHA1", signVal));
signVal = rsa.SignData(TestData.HelloBytes, "sha1");
Assert.True(rsa.VerifyHash(hashVal, "sha1", signVal));
}
}
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)] // Only Unix has _impl shim pattern
public static void TestShimOverloads_Unix()
{
ShimHelpers.VerifyAllBaseMembersOverloaded(typeof(RSACryptoServiceProvider));
}
private sealed class RsaKeyLifetime : IDisposable
{
private readonly CspParameters _cspParameters;
internal RsaKeyLifetime(CspParameters cspParameters)
{
const CspProviderFlags CopyableFlags =
CspProviderFlags.UseMachineKeyStore;
_cspParameters = new CspParameters(
cspParameters.ProviderType,
cspParameters.ProviderName,
cspParameters.KeyContainerName)
{
// If the test failed before creating the key, don't bother recreating it.
Flags = (cspParameters.Flags & CopyableFlags) | CspProviderFlags.UseExistingKey,
};
}
public void Dispose()
{
try
{
using (var rsa = new RSACryptoServiceProvider(_cspParameters))
{
// Delete the key at the end of this using
rsa.PersistKeyInCsp = false;
}
}
catch (CryptographicException)
{
}
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Security.Cryptography.Rsa.Tests;
using Xunit;
namespace System.Security.Cryptography.Csp.Tests
{
public class RSACryptoServiceProviderTests
{
const int PROV_RSA_FULL = 1;
const int PROV_RSA_AES = 24;
[Fact]
public static void DefaultKeySize()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Equal(1024, rsa.KeySize);
}
}
[Fact]
public static void PublicOnly_DefaultKey()
{
using (var rsa = new RSACryptoServiceProvider())
{
// This will call the key into being, which should create a public/private pair,
// therefore it should not be public-only.
Assert.False(rsa.PublicOnly);
}
}
[Fact]
public static void PublicOnly_WithPrivateKey()
{
using (var rsa = new RSACryptoServiceProvider())
{
rsa.ImportParameters(TestData.RSA1024Params);
Assert.False(rsa.PublicOnly);
}
}
[Fact]
public static void PublicOnly_WithNoPrivate()
{
using (var rsa = new RSACryptoServiceProvider())
{
RSAParameters publicParams = new RSAParameters
{
Modulus = TestData.RSA1024Params.Modulus,
Exponent = TestData.RSA1024Params.Exponent,
};
rsa.ImportParameters(publicParams);
Assert.True(rsa.PublicOnly);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters on Unix
public static void CreateKey_LegacyProvider()
{
CspParameters cspParameters = new CspParameters(PROV_RSA_FULL);
using (var rsa = new RSACryptoServiceProvider(cspParameters))
{
CspKeyContainerInfo containerInfo = rsa.CspKeyContainerInfo;
Assert.Equal(PROV_RSA_FULL, containerInfo.ProviderType);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters\CspKeyContainerInfo on Unix
public static void CreateKey_LegacyProvider_RoundtripBlob()
{
const int KeySize = 512;
CspParameters cspParameters = new CspParameters(PROV_RSA_FULL);
byte[] blob;
using (var rsa = new RSACryptoServiceProvider(KeySize, cspParameters))
{
CspKeyContainerInfo containerInfo = rsa.CspKeyContainerInfo;
Assert.Equal(PROV_RSA_FULL, containerInfo.ProviderType);
Assert.Equal(KeySize, rsa.KeySize);
blob = rsa.ExportCspBlob(true);
}
using (var rsa = new RSACryptoServiceProvider())
{
rsa.ImportCspBlob(blob);
CspKeyContainerInfo containerInfo = rsa.CspKeyContainerInfo;
// The provider information is not persisted in the blob
Assert.Equal(PROV_RSA_AES, containerInfo.ProviderType);
Assert.Equal(KeySize, rsa.KeySize);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters\CspKeyContainerInfo on Unix
public static void DefaultKey_Parameters()
{
using (var rsa = new RSACryptoServiceProvider())
{
CspKeyContainerInfo keyContainerInfo = rsa.CspKeyContainerInfo;
Assert.NotNull(keyContainerInfo);
Assert.Equal(PROV_RSA_AES, keyContainerInfo.ProviderType);
// This shouldn't be localized, so it should be safe to test on all cultures
Assert.Equal("Microsoft Enhanced RSA and AES Cryptographic Provider", keyContainerInfo.ProviderName);
Assert.Null(keyContainerInfo.KeyContainerName);
Assert.Equal(string.Empty, keyContainerInfo.UniqueKeyContainerName);
Assert.False(keyContainerInfo.HardwareDevice, "HardwareDevice");
Assert.False(keyContainerInfo.MachineKeyStore, "MachineKeyStore");
Assert.False(keyContainerInfo.Protected, "Protected");
Assert.False(keyContainerInfo.Removable, "Removable");
// Ephemeral keys don't successfully request the exportable bit.
Assert.ThrowsAny<CryptographicException>(() => keyContainerInfo.Exportable);
Assert.True(keyContainerInfo.RandomlyGenerated, "RandomlyGenerated");
Assert.Equal(KeyNumber.Exchange, keyContainerInfo.KeyNumber);
}
}
[Fact]
public static void DefaultKey_NotPersisted()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.False(rsa.PersistKeyInCsp);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters on Unix
public static void NamedKey_DefaultProvider()
{
const int KeySize = 2048;
CspParameters cspParameters = new CspParameters
{
KeyContainerName = Guid.NewGuid().ToString(),
};
using (new RsaKeyLifetime(cspParameters))
{
byte[] privateBlob;
string uniqueKeyContainerName;
using (var rsa = new RSACryptoServiceProvider(KeySize, cspParameters))
{
Assert.True(rsa.PersistKeyInCsp, "rsa.PersistKeyInCsp");
Assert.Equal(cspParameters.KeyContainerName, rsa.CspKeyContainerInfo.KeyContainerName);
uniqueKeyContainerName = rsa.CspKeyContainerInfo.UniqueKeyContainerName;
Assert.NotNull(uniqueKeyContainerName);
Assert.NotEqual(string.Empty, uniqueKeyContainerName);
privateBlob = rsa.ExportCspBlob(true);
Assert.True(rsa.CspKeyContainerInfo.Exportable, "rsa.CspKeyContainerInfo.Exportable");
}
// Fail if the key didn't persist
cspParameters.Flags |= CspProviderFlags.UseExistingKey;
using (var rsa = new RSACryptoServiceProvider(cspParameters))
{
Assert.True(rsa.PersistKeyInCsp);
Assert.Equal(KeySize, rsa.KeySize);
Assert.Equal(uniqueKeyContainerName, rsa.CspKeyContainerInfo.UniqueKeyContainerName);
byte[] blob2 = rsa.ExportCspBlob(true);
Assert.Equal(privateBlob, blob2);
}
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters on Unix
public static void NamedKey_AlternateProvider()
{
const int KeySize = 512;
CspParameters cspParameters = new CspParameters(PROV_RSA_FULL)
{
KeyContainerName = Guid.NewGuid().ToString(),
};
using (new RsaKeyLifetime(cspParameters))
{
byte[] privateBlob;
string uniqueKeyContainerName;
using (var rsa = new RSACryptoServiceProvider(KeySize, cspParameters))
{
Assert.True(rsa.PersistKeyInCsp);
Assert.Equal(PROV_RSA_FULL, rsa.CspKeyContainerInfo.ProviderType);
privateBlob = rsa.ExportCspBlob(true);
Assert.Equal(cspParameters.KeyContainerName, rsa.CspKeyContainerInfo.KeyContainerName);
uniqueKeyContainerName = rsa.CspKeyContainerInfo.UniqueKeyContainerName;
Assert.NotNull(uniqueKeyContainerName);
Assert.NotEqual(string.Empty, uniqueKeyContainerName);
}
// Fail if the key didn't persist
cspParameters.Flags |= CspProviderFlags.UseExistingKey;
using (var rsa = new RSACryptoServiceProvider(cspParameters))
{
Assert.True(rsa.PersistKeyInCsp);
Assert.Equal(KeySize, rsa.KeySize);
// Since we're specifying the provider explicitly it should still match.
Assert.Equal(PROV_RSA_FULL, rsa.CspKeyContainerInfo.ProviderType);
Assert.Equal(uniqueKeyContainerName, rsa.CspKeyContainerInfo.UniqueKeyContainerName);
byte[] blob2 = rsa.ExportCspBlob(true);
Assert.Equal(privateBlob, blob2);
}
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters on Unix
public static void NonExportable_Ephemeral()
{
CspParameters cspParameters = new CspParameters
{
Flags = CspProviderFlags.UseNonExportableKey,
};
using (var rsa = new RSACryptoServiceProvider(cspParameters))
{
// Ephemeral keys don't successfully request the exportable bit.
Assert.ThrowsAny<CryptographicException>(() => rsa.CspKeyContainerInfo.Exportable);
Assert.ThrowsAny<CryptographicException>(() => rsa.ExportCspBlob(true));
Assert.ThrowsAny<CryptographicException>(() => rsa.ExportParameters(true));
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // No support for CspParameters on Unix
public static void NonExportable_Persisted()
{
CspParameters cspParameters = new CspParameters
{
KeyContainerName = Guid.NewGuid().ToString(),
Flags = CspProviderFlags.UseNonExportableKey,
};
using (new RsaKeyLifetime(cspParameters))
{
using (var rsa = new RSACryptoServiceProvider(cspParameters))
{
Assert.False(rsa.CspKeyContainerInfo.Exportable, "rsa.CspKeyContainerInfo.Exportable");
Assert.ThrowsAny<CryptographicException>(() => rsa.ExportCspBlob(true));
Assert.ThrowsAny<CryptographicException>(() => rsa.ExportParameters(true));
}
}
}
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)]
public static void Ctor_UseCspParameter_Throws_Unix()
{
var cspParameters = new CspParameters();
Assert.Throws<PlatformNotSupportedException>(() => new RSACryptoServiceProvider(cspParameters));
Assert.Throws<PlatformNotSupportedException>(() => new RSACryptoServiceProvider(0, cspParameters));
}
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)]
public static void CspKeyContainerInfo_Throws_Unix()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Throws<PlatformNotSupportedException>(() => (rsa.CspKeyContainerInfo));
}
}
[Fact]
public static void ImportParameters_ExponentTooBig_Throws()
{
using (var rsa = new RSACryptoServiceProvider())
{
// Verify that the Unix shims and the Windows CSP both throw the same exception when a large Exponent is imported
Assert.ThrowsAny<CryptographicException>(() => rsa.ImportParameters(TestData.RsaBigExponentParams));
}
}
[Fact]
public static void SignHash_DefaultAlgorithm_Success()
{
byte[] hashVal = SHA1.HashData(TestData.HelloBytes);
using (var rsa = new RSACryptoServiceProvider())
{
byte[] signVal = rsa.SignHash(hashVal, null);
Assert.True(rsa.VerifyHash(hashVal, null, signVal));
}
}
[Fact]
public static void VerifyHash_DefaultAlgorithm_Success()
{
byte[] hashVal = SHA1.HashData(TestData.HelloBytes);
using (var rsa = new RSACryptoServiceProvider())
{
byte[] signVal = rsa.SignData(TestData.HelloBytes, "SHA1");
Assert.True(rsa.VerifyHash(hashVal, null, signVal));
}
}
[Fact]
public static void Encrypt_InvalidPaddingMode_Throws()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Throws<CryptographicException>(() => rsa.Encrypt(TestData.HelloBytes, RSAEncryptionPadding.OaepSHA256));
}
}
[Fact]
public static void Decrypt_InvalidPaddingMode_Throws()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Throws<CryptographicException>(() => rsa.Decrypt(TestData.HelloBytes, RSAEncryptionPadding.OaepSHA256));
}
}
[Fact]
public static void Sign_InvalidPaddingMode_Throws()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Throws<CryptographicException>(() => rsa.SignData(TestData.HelloBytes, HashAlgorithmName.SHA1, RSASignaturePadding.Pss));
}
}
[Fact]
public static void Verify_InvalidPaddingMode_Throws()
{
using (var rsa = new RSACryptoServiceProvider())
{
byte[] sig = rsa.SignData(TestData.HelloBytes, "SHA1");
Assert.Throws<CryptographicException>(() => rsa.VerifyData(TestData.HelloBytes, sig, HashAlgorithmName.SHA1, RSASignaturePadding.Pss));
}
}
[Fact]
public static void SignatureAlgorithm_Success()
{
using (var rsa = new RSACryptoServiceProvider())
{
Assert.Equal("http://www.w3.org/2000/09/xmldsig#rsa-sha1", rsa.SignatureAlgorithm);
}
}
[Fact]
public static void SignData_VerifyHash_CaseInsensitive_Success()
{
byte[] hashVal = SHA1.HashData(TestData.HelloBytes);
using (var rsa = new RSACryptoServiceProvider())
{
byte[] signVal = rsa.SignData(TestData.HelloBytes, "SHA1");
Assert.True(rsa.VerifyHash(hashVal, "SHA1", signVal));
signVal = rsa.SignData(TestData.HelloBytes, "sha1");
Assert.True(rsa.VerifyHash(hashVal, "sha1", signVal));
}
}
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)] // Only Unix has _impl shim pattern
public static void TestShimOverloads_Unix()
{
ShimHelpers.VerifyAllBaseMembersOverloaded(typeof(RSACryptoServiceProvider));
}
private sealed class RsaKeyLifetime : IDisposable
{
private readonly CspParameters _cspParameters;
internal RsaKeyLifetime(CspParameters cspParameters)
{
const CspProviderFlags CopyableFlags =
CspProviderFlags.UseMachineKeyStore;
_cspParameters = new CspParameters(
cspParameters.ProviderType,
cspParameters.ProviderName,
cspParameters.KeyContainerName)
{
// If the test failed before creating the key, don't bother recreating it.
Flags = (cspParameters.Flags & CopyableFlags) | CspProviderFlags.UseExistingKey,
};
}
public void Dispose()
{
try
{
using (var rsa = new RSACryptoServiceProvider(_cspParameters))
{
// Delete the key at the end of this using
rsa.PersistKeyInCsp = false;
}
}
catch (CryptographicException)
{
}
}
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
|
./src/libraries/System.CodeDom/src/System/CodeDom/CodeAttachEventStatement.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.CodeDom
{
public class CodeAttachEventStatement : CodeStatement
{
private CodeEventReferenceExpression _eventRef;
public CodeAttachEventStatement() { }
public CodeAttachEventStatement(CodeEventReferenceExpression eventRef, CodeExpression listener)
{
_eventRef = eventRef;
Listener = listener;
}
public CodeAttachEventStatement(CodeExpression targetObject, string eventName, CodeExpression listener) :
this(new CodeEventReferenceExpression(targetObject, eventName), listener)
{
}
public CodeEventReferenceExpression Event
{
get => _eventRef ?? (_eventRef = new CodeEventReferenceExpression());
set => _eventRef = value;
}
public CodeExpression Listener { get; set; }
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.CodeDom
{
public class CodeAttachEventStatement : CodeStatement
{
private CodeEventReferenceExpression _eventRef;
public CodeAttachEventStatement() { }
public CodeAttachEventStatement(CodeEventReferenceExpression eventRef, CodeExpression listener)
{
_eventRef = eventRef;
Listener = listener;
}
public CodeAttachEventStatement(CodeExpression targetObject, string eventName, CodeExpression listener) :
this(new CodeEventReferenceExpression(targetObject, eventName), listener)
{
}
public CodeEventReferenceExpression Event
{
get => _eventRef ?? (_eventRef = new CodeEventReferenceExpression());
set => _eventRef = value;
}
public CodeExpression Listener { get; set; }
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
|
./src/tests/GC/Features/KeepAlive/keepaliveother/keepalivedirectedgraph.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// Build a Directed Graph with 100 nodes
// Test KeepAlive for huge directed graphs
namespace Default {
using System;
public class Graph
{
private Vertex Vfirst = null;
private Vertex Vlast = null;
private Edge Efirst = null;
private Edge Elast = null;
private int WeightSum = 0;
public static int Nodes;
public static bool flag;
public Graph(int n) { Nodes = n;}
public void SetWeightSum() {
Edge temp = Efirst;
WeightSum = 0;
while(temp != null) {
WeightSum += temp.Weight;
temp = temp.Next;
}
}
public int GetWeightSum() {
return WeightSum;
}
public void BuildEdge(int v1,int v2) {
Vertex n1 = null,n2 = null;
Vertex temp = Vfirst;
while(temp != null) {
if (v1 == temp.Name)
{
//found 1st node..
n1 = temp;
break;
}
else temp = temp.Next;
}
//check if edge already exists
for(int i=0;i<n1.Num_Edges;i++) {
if (v2 == n1.Adjacent[i].Name)
return;
}
temp = Vfirst;
while(temp != null) {
if (v2 == temp.Name)
{
//found 2nd node..
n2 = temp;
break;
}
else temp = temp.Next;
}
n1.Adjacent[n1.Num_Edges++]=n2;
Edge temp2 = new Edge(n1,n2);
if(Efirst==null) {
Efirst = temp2;
Elast = temp2;
}
else {
temp2.AddEdge(Elast,temp2);
Elast = temp2;
}
}
public void BuildGraph() {
// Build Nodes
Console.WriteLine("Building Vertices...");
for(int i=0;i< Nodes; i++) {
Vertex temp = new Vertex(i);
if(Vfirst==null) {
Vfirst = temp;
Vlast = temp;
}
else {
temp.AddVertex(Vlast,temp);
Vlast = temp;
}
}
// Build Edges
Console.WriteLine("Building Edges...");
Int32 seed = Environment.TickCount;
Random rand = new Random(seed);
for(int i=0;i< Nodes;i++) {
int j = rand.Next(0,Nodes);
for(int k=0;k<j;k++) {
int v2;
while((v2 = rand.Next(0,Nodes))==i); //select a random node, also avoid self-loops
BuildEdge(i,v2); //build edge between node i and v2
}
}
}
public void CheckIfReachable() {
int[] temp = new int[Nodes];
Vertex t1 = Vfirst;
Console.WriteLine("Making all vertices reachable...");
while(t1 != null) {
for(int i=0;i<t1.Num_Edges;i++) {
if(temp[t1.Adjacent[i].Name] == 0)
temp[t1.Adjacent[i].Name]=1;
}
t1 = t1.Next;
}
for(int v2=0;v2<Nodes;v2++) {
if(temp[v2]==0) { //this vertex is not connected
Int32 seed = Environment.TickCount;
Random rand = new Random(seed);
int v1;
while((v1 = rand.Next(0,Nodes))==v2); //select a random node, also avoid self-loops
BuildEdge(v1,v2);
temp[v2]=1;
}
}
}
public void DeleteVertex() {
DeleteVertex(Vfirst);
}
public void DeleteVertex(Vertex v) {
if(v == Vlast) {
Vfirst=null;
Vlast=null;
GC.Collect();
GC.WaitForPendingFinalizers();
return;
}
Vertex temp = v.Next;
v=null;
GC.Collect();
GC.WaitForPendingFinalizers();
DeleteVertex(temp);
temp=null;
GC.Collect();
GC.WaitForPendingFinalizers();
}
public Vertex ReturnVfirst() {
return(Vfirst);
}
}
public class Vertex
{
public int Name;
//public bool Visited = false;
public Vertex Next;
public Vertex[] Adjacent;
public Edge[] Edges;
public int Num_Edges = 0;
public static int count=0;
public Vertex(int val) {
Name = val;
Next = null;
Adjacent = new Vertex[Graph.Nodes];
}
~Vertex() {
//Console.WriteLine("In Finalize of Vertex");
count++;
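// If all 100 vertices were finalized before Main set the flag, the graph was
// collected too early (KeepAlive did not hold it) and the test fails.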
if((count==100) && (Graph.flag==false)) {
Test.exitCode = 1;
}
}
public void AddVertex(Vertex x, Vertex y) {
x.Next = y;
}
public void DeleteAdjacentEntry(int n) {
int temp=Num_Edges;
for(int i=0;i< temp;i++) {
if(n == Adjacent[i].Name) {
for(int j=i;j<Num_Edges;j++)
Adjacent[j] = Adjacent[j+1];
Num_Edges--;
return;
}
}
}
}
public class Edge
{
public int Weight;
public Vertex v1,v2;
public Edge Next;
public Edge(Vertex n1, Vertex n2) {
v1=n1;
v2=n2;
int seed = n1.Name+n2.Name;
Random rand = new Random(seed);
Weight = rand.Next(0,50);
}
public void AddEdge(Edge x, Edge y) {
x.Next = y;
}
}
public class Test
{
public static int exitCode = 0;
public static int Main()
{
Graph.flag=false;
exitCode = 100;
Console.WriteLine("Test should pass with ExitCode 100");
Console.WriteLine("Building Graph with 100 vertices...");
Graph MyGraph = new Graph(100);
MyGraph.BuildGraph();
MyGraph.CheckIfReachable();
Console.WriteLine("Deleting all vertices...");
MyGraph.DeleteVertex();
GC.Collect();
GC.WaitForPendingFinalizers();
Vertex temp = MyGraph.ReturnVfirst();
GC.KeepAlive(temp); // will keep alive the graph till here
Console.WriteLine("Done...");
Graph.flag=true; // to check if finalizers ran at shutdown or earlier
return exitCode;
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// Build a Directed Graph with 100 nodes
// Test KeepAlive for huge directed graphs
namespace Default {
using System;
public class Graph
{
private Vertex Vfirst = null;
private Vertex Vlast = null;
private Edge Efirst = null;
private Edge Elast = null;
private int WeightSum = 0;
public static int Nodes;
public static bool flag;
public Graph(int n) { Nodes = n;}
public void SetWeightSum() {
Edge temp = Efirst;
WeightSum = 0;
while(temp != null) {
WeightSum += temp.Weight;
temp = temp.Next;
}
}
public int GetWeightSum() {
return WeightSum;
}
public void BuildEdge(int v1,int v2) {
Vertex n1 = null,n2 = null;
Vertex temp = Vfirst;
while(temp != null) {
if (v1 == temp.Name)
{
//found 1st node..
n1 = temp;
break;
}
else temp = temp.Next;
}
//check if edge already exists
for(int i=0;i<n1.Num_Edges;i++) {
if (v2 == n1.Adjacent[i].Name)
return;
}
temp = Vfirst;
while(temp != null) {
if (v2 == temp.Name)
{
//found 2nd node..
n2 = temp;
break;
}
else temp = temp.Next;
}
n1.Adjacent[n1.Num_Edges++]=n2;
Edge temp2 = new Edge(n1,n2);
if(Efirst==null) {
Efirst = temp2;
Elast = temp2;
}
else {
temp2.AddEdge(Elast,temp2);
Elast = temp2;
}
}
public void BuildGraph() {
// Build Nodes
Console.WriteLine("Building Vertices...");
for(int i=0;i< Nodes; i++) {
Vertex temp = new Vertex(i);
if(Vfirst==null) {
Vfirst = temp;
Vlast = temp;
}
else {
temp.AddVertex(Vlast,temp);
Vlast = temp;
}
}
// Build Edges
Console.WriteLine("Building Edges...");
Int32 seed = Environment.TickCount;
Random rand = new Random(seed);
for(int i=0;i< Nodes;i++) {
int j = rand.Next(0,Nodes);
for(int k=0;k<j;k++) {
int v2;
while((v2 = rand.Next(0,Nodes))==i); //select a random node, also avoid self-loops
BuildEdge(i,v2); //build edge between node i and v2
}
}
}
public void CheckIfReachable() {
int[] temp = new int[Nodes];
Vertex t1 = Vfirst;
Console.WriteLine("Making all vertices reachable...");
while(t1 != null) {
for(int i=0;i<t1.Num_Edges;i++) {
if(temp[t1.Adjacent[i].Name] == 0)
temp[t1.Adjacent[i].Name]=1;
}
t1 = t1.Next;
}
for(int v2=0;v2<Nodes;v2++) {
if(temp[v2]==0) { //this vertex is not connected
Int32 seed = Environment.TickCount;
Random rand = new Random(seed);
int v1;
while((v1 = rand.Next(0,Nodes))==v2); //select a random node, also avoid self-loops
BuildEdge(v1,v2);
temp[v2]=1;
}
}
}
public void DeleteVertex() {
DeleteVertex(Vfirst);
}
public void DeleteVertex(Vertex v) {
if(v == Vlast) {
Vfirst=null;
Vlast=null;
GC.Collect();
GC.WaitForPendingFinalizers();
return;
}
Vertex temp = v.Next;
v=null;
GC.Collect();
GC.WaitForPendingFinalizers();
DeleteVertex(temp);
temp=null;
GC.Collect();
GC.WaitForPendingFinalizers();
}
public Vertex ReturnVfirst() {
return(Vfirst);
}
}
public class Vertex
{
public int Name;
//public bool Visited = false;
public Vertex Next;
public Vertex[] Adjacent;
public Edge[] Edges;
public int Num_Edges = 0;
public static int count=0;
public Vertex(int val) {
Name = val;
Next = null;
Adjacent = new Vertex[Graph.Nodes];
}
~Vertex() {
//Console.WriteLine("In Finalize of Vertex");
count++;
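// If all 100 vertices were finalized before Main set the flag, the graph was
// collected too early (KeepAlive did not hold it) and the test fails.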
if((count==100) && (Graph.flag==false)) {
Test.exitCode = 1;
}
}
public void AddVertex(Vertex x, Vertex y) {
x.Next = y;
}
public void DeleteAdjacentEntry(int n) {
int temp=Num_Edges;
for(int i=0;i< temp;i++) {
if(n == Adjacent[i].Name) {
for(int j=i;j<Num_Edges;j++)
Adjacent[j] = Adjacent[j+1];
Num_Edges--;
return;
}
}
}
}
public class Edge
{
public int Weight;
public Vertex v1,v2;
public Edge Next;
public Edge(Vertex n1, Vertex n2) {
v1=n1;
v2=n2;
int seed = n1.Name+n2.Name;
Random rand = new Random(seed);
Weight = rand.Next(0,50);
}
public void AddEdge(Edge x, Edge y) {
x.Next = y;
}
}
public class Test
{
public static int exitCode = 0;
public static int Main()
{
Graph.flag=false;
exitCode = 100;
Console.WriteLine("Test should pass with ExitCode 100");
Console.WriteLine("Building Graph with 100 vertices...");
Graph MyGraph = new Graph(100);
MyGraph.BuildGraph();
MyGraph.CheckIfReachable();
Console.WriteLine("Deleting all vertices...");
MyGraph.DeleteVertex();
GC.Collect();
GC.WaitForPendingFinalizers();
Vertex temp = MyGraph.ReturnVfirst();
GC.KeepAlive(temp); // will keep alive the graph till here
Console.WriteLine("Done...");
Graph.flag=true; // to check if finalizers ran at shutdown or earlier
return exitCode;
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
|
./src/coreclr/vm/method.cpp
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// ===========================================================================
// File: Method.CPP
//
//
// See the book of the runtime entry for overall design:
// file:../../doc/BookOfTheRuntime/ClassLoader/MethodDescDesign.doc
//
#include "common.h"
#include "excep.h"
#include "dbginterface.h"
#include "ecall.h"
#include "eeconfig.h"
#include "mlinfo.h"
#include "dllimport.h"
#include "generics.h"
#include "genericdict.h"
#include "typedesc.h"
#include "typestring.h"
#include "virtualcallstub.h"
#include "jitinterface.h"
#include "runtimehandles.h"
#include "eventtrace.h"
#include "interoputil.h"
#include "prettyprintsig.h"
#include "formattype.h"
#include "fieldmarshaler.h"
#include "versionresilienthashcode.h"
#include "typehashingalgorithms.h"
#ifdef FEATURE_COMINTEROP
#include "comcallablewrapper.h"
#include "clrtocomcall.h"
#endif
#ifdef FEATURE_MINIMETADATA_IN_TRIAGEDUMPS
GVAL_IMPL(DWORD, g_MiniMetaDataBuffMaxSize);
GVAL_IMPL(TADDR, g_MiniMetaDataBuffAddress);
#endif // FEATURE_MINIMETADATA_IN_TRIAGEDUMPS
// forward decl
bool FixupSignatureContainingInternalTypes(
DataImage * image,
PCCOR_SIGNATURE pSig,
DWORD cSig,
bool checkOnly = false);
// Alias ComPlusCallMethodDesc to regular MethodDesc to simplify definition of the size table
#ifndef FEATURE_COMINTEROP
#define ComPlusCallMethodDesc MethodDesc
#endif
// Verify that the structure sizes of our MethodDescs support proper
// aligning for atomic stub replacement.
//
static_assert_no_msg((sizeof(MethodDescChunk) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(MethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(FCallMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(NDirectMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(EEImplMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(ArrayMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(ComPlusCallMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(DynamicMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
#define METHOD_DESC_SIZES(adjustment) \
adjustment + sizeof(MethodDesc), /* mcIL */ \
adjustment + sizeof(FCallMethodDesc), /* mcFCall */ \
adjustment + sizeof(NDirectMethodDesc), /* mcNDirect */ \
adjustment + sizeof(EEImplMethodDesc), /* mcEEImpl */ \
adjustment + sizeof(ArrayMethodDesc), /* mcArray */ \
adjustment + sizeof(InstantiatedMethodDesc), /* mcInstantiated */ \
adjustment + sizeof(ComPlusCallMethodDesc), /* mcComInterOp */ \
adjustment + sizeof(DynamicMethodDesc) /* mcDynamic */
const BYTE MethodDesc::s_ClassificationSizeTable[] = {
// This is the raw
METHOD_DESC_SIZES(0),
// This extended part of the table is used for faster MethodDesc size lookup.
// We index using optional slot flags into it
METHOD_DESC_SIZES(sizeof(NonVtableSlot)),
METHOD_DESC_SIZES(sizeof(MethodImpl)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(MethodImpl)),
METHOD_DESC_SIZES(sizeof(NativeCodeSlot)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(NativeCodeSlot)),
METHOD_DESC_SIZES(sizeof(MethodImpl) + sizeof(NativeCodeSlot)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(MethodImpl) + sizeof(NativeCodeSlot)),
#ifdef FEATURE_COMINTEROP
METHOD_DESC_SIZES(sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(MethodImpl) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(MethodImpl) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(NativeCodeSlot) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(NativeCodeSlot) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(MethodImpl) + sizeof(NativeCodeSlot) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(MethodImpl) + sizeof(NativeCodeSlot) + sizeof(ComPlusCallInfo))
#endif
};
#ifndef FEATURE_COMINTEROP
#undef ComPlusCallMethodDesc
#endif
class ArgIteratorBaseForPInvoke : public ArgIteratorBase
{
protected:
FORCEINLINE BOOL IsRegPassedStruct(MethodTable* pMT)
{
return pMT->GetNativeLayoutInfo()->IsNativeStructPassedInRegisters();
}
};
class PInvokeArgIterator : public ArgIteratorTemplate<ArgIteratorBaseForPInvoke>
{
public:
PInvokeArgIterator(MetaSig* pSig)
{
m_pSig = pSig;
}
};
//*******************************************************************************
SIZE_T MethodDesc::SizeOf()
{
LIMITED_METHOD_DAC_CONTRACT;
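// The classification and optional-slot flag bits together form a direct index into
// s_ClassificationSizeTable, so one lookup returns the size including any optional slots.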
SIZE_T size = s_ClassificationSizeTable[m_wFlags &
(mdcClassification
| mdcHasNonVtableSlot
| mdcMethodImpl
#ifdef FEATURE_COMINTEROP
| mdcHasComPlusCallInfo
#endif
| mdcHasNativeCodeSlot)];
return size;
}
/*********************************************************************/
#ifndef DACCESS_COMPILE
BOOL NDirectMethodDesc::HasDefaultDllImportSearchPathsAttribute()
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
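// The result of the attribute lookup is cached in the NDirect flags, so the metadata is only consulted once.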
if(IsDefaultDllImportSearchPathsAttributeCached())
{
return (ndirect.m_wFlags & kDefaultDllImportSearchPathsStatus) != 0;
}
BOOL attributeIsFound = GetDefaultDllImportSearchPathsAttributeValue(GetModule(),GetMemberDef(),&ndirect.m_DefaultDllImportSearchPathsAttributeValue);
if(attributeIsFound )
{
InterlockedSetNDirectFlags(kDefaultDllImportSearchPathsIsCached | kDefaultDllImportSearchPathsStatus);
}
else
{
InterlockedSetNDirectFlags(kDefaultDllImportSearchPathsIsCached);
}
return (ndirect.m_wFlags & kDefaultDllImportSearchPathsStatus) != 0;
}
#endif //!DACCESS_COMPILE
//*******************************************************************************
#ifndef DACCESS_COMPILE
VOID MethodDesc::EnsureActive()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END;
GetMethodTable()->EnsureInstanceActive();
if (HasMethodInstantiation() && !IsGenericMethodDefinition())
{
Instantiation methodInst = GetMethodInstantiation();
for (DWORD i = 0; i < methodInst.GetNumArgs(); ++i)
{
MethodTable * pMT = methodInst[i].GetMethodTable();
if (pMT)
pMT->EnsureInstanceActive();
}
}
}
#endif //!DACCESS_COMPILE
//*******************************************************************************
CHECK MethodDesc::CheckActivated()
{
WRAPPER_NO_CONTRACT;
CHECK(GetModule()->CheckActivated());
CHECK_OK;
}
//*******************************************************************************
BaseDomain *MethodDesc::GetDomain()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END
return AppDomain::GetCurrentDomain();
}
#ifndef DACCESS_COMPILE
//*******************************************************************************
LoaderAllocator * MethodDesc::GetDomainSpecificLoaderAllocator()
{
if (GetLoaderModule()->IsCollectible())
{
return GetLoaderAllocator();
}
else
{
return ::GetAppDomain()->GetLoaderAllocator();
}
}
#endif //!DACCESS_COMPILE
//*******************************************************************************
LPCUTF8 MethodDesc::GetName(USHORT slot)
{
// MethodDesc::GetDeclMethodDesc can throw.
WRAPPER_NO_CONTRACT;
MethodDesc *pDeclMD = GetDeclMethodDesc((UINT32)slot);
CONSISTENCY_CHECK(IsInterface() || !pDeclMD->IsInterface());
return pDeclMD->GetName();
}
//*******************************************************************************
LPCUTF8 MethodDesc::GetName()
{
CONTRACTL
{
if (FORBIDGC_LOADER_USE_ENABLED()) NOTHROW; else THROWS; // MethodImpl::FindMethodDesc can throw.
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END;
g_IBCLogger.LogMethodDescAccess(this);
if (IsArray())
{
// Array classes don't have metadata tokens
return dac_cast<PTR_ArrayMethodDesc>(this)->GetMethodName();
}
else if (IsNoMetadata())
{
// LCG methods don't have metadata tokens
return dac_cast<PTR_DynamicMethodDesc>(this)->GetMethodName();
}
else
{
// Get the metadata string name for this method
LPCUTF8 result = NULL;
if (FAILED(GetMDImport()->GetNameOfMethodDef(GetMemberDef(), &result)))
{
result = NULL;
}
return(result);
}
}
#ifndef DACCESS_COMPILE
/*
* Function to get a method's name, its namespace
*/
VOID MethodDesc::GetMethodInfoNoSig(SString &namespaceOrClassName, SString &methodName)
{
static LPCWSTR pDynamicClassName = W("dynamicClass");
// namespace
if(IsDynamicMethod())
namespaceOrClassName.Append(pDynamicClassName);
else
TypeString::AppendType(namespaceOrClassName, TypeHandle(GetMethodTable()));
// name
methodName.AppendUTF8(GetName());
}
/*
* Function to get a method's name, its namespace and signature (legacy format)
*/
VOID MethodDesc::GetMethodInfo(SString &namespaceOrClassName, SString &methodName, SString &methodSignature)
{
GetMethodInfoNoSig(namespaceOrClassName, methodName);
// signature
CQuickBytes qbOut;
ULONG cSig = 0;
PCCOR_SIGNATURE pSig;
GetSig(&pSig, &cSig);
PrettyPrintSigInternalLegacy(pSig, cSig, " ", &qbOut, GetMDImport());
methodSignature.AppendUTF8((char *)qbOut.Ptr());
}
/*
* Function to get a method's name, its namespace and signature (new format)
*/
VOID MethodDesc::GetMethodInfoWithNewSig(SString &namespaceOrClassName, SString &methodName, SString &methodSignature)
{
GetMethodInfoNoSig(namespaceOrClassName, methodName);
// signature
CQuickBytes qbOut;
ULONG cSig = 0;
PCCOR_SIGNATURE pSig;
GetSig(&pSig, &cSig);
PrettyPrintSig(pSig, (DWORD)cSig, "", &qbOut, GetMDImport(), NULL);
methodSignature.AppendUTF8((char *)qbOut.Ptr());
}
/*
* Function to get a method's full name, something like
* void [mscorlib]System.StubHelpers.BSTRMarshaler::ClearNative(native int)
*/
VOID MethodDesc::GetFullMethodInfo(SString& fullMethodSigName)
{
SString namespaceOrClassName, methodName;
GetMethodInfoNoSig(namespaceOrClassName, methodName);
// signature
CQuickBytes qbOut;
ULONG cSig = 0;
PCCOR_SIGNATURE pSig;
SString methodFullName;
StackScratchBuffer namespaceNameBuffer, methodNameBuffer;
methodFullName.AppendPrintf(
(LPCUTF8)"[%s] %s::%s",
GetModule()->GetAssembly()->GetSimpleName(),
namespaceOrClassName.GetUTF8(namespaceNameBuffer),
methodName.GetUTF8(methodNameBuffer));
GetSig(&pSig, &cSig);
StackScratchBuffer buffer;
PrettyPrintSig(pSig, (DWORD)cSig, methodFullName.GetUTF8(buffer), &qbOut, GetMDImport(), NULL);
fullMethodSigName.AppendUTF8((char *)qbOut.Ptr());
}
#endif
//*******************************************************************************
BOOL MethodDesc::MightHaveName(ULONG nameHashValue)
{
LIMITED_METHOD_CONTRACT;
// We only have space for a name hash when we are using the packed slot layout
if (RequiresFullSlotNumber())
{
return TRUE;
}
WORD thisHashValue = m_wSlotNumber & enum_packedSlotLayout_NameHashMask;
// A zero value might mean no hash has ever been set
// (checking this way is better than dedicating a bit to tell us)
if (thisHashValue == 0)
{
return TRUE;
}
WORD testHashValue = (WORD) nameHashValue & enum_packedSlotLayout_NameHashMask;
return (thisHashValue == testHashValue);
}
//*******************************************************************************
void MethodDesc::GetSig(PCCOR_SIGNATURE *ppSig, DWORD *pcSig)
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END
if (HasStoredSig())
{
PTR_StoredSigMethodDesc pSMD = dac_cast<PTR_StoredSigMethodDesc>(this);
if (pSMD->HasStoredMethodSig() || GetClassification()==mcDynamic)
{
*ppSig = pSMD->GetStoredMethodSig(pcSig);
PREFIX_ASSUME(*ppSig != NULL);
return;
}
}
GetSigFromMetadata(GetMDImport(), ppSig, pcSig);
PREFIX_ASSUME(*ppSig != NULL);
}
//*******************************************************************************
// get a function signature from its metadata
// Arguments:
// input:
// importer the metadata importer to be used
// output:
// ppSig the function signature
// pcSig number of elements in the signature
void MethodDesc::GetSigFromMetadata(IMDInternalImport * importer,
PCCOR_SIGNATURE * ppSig,
DWORD * pcSig)
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END
if (FAILED(importer->GetSigOfMethodDef(GetMemberDef(), pcSig, ppSig)))
{ // Class loader already asked for signature, so this should always succeed (unless there's a
// bug or a new code path)
_ASSERTE(!"If this ever fires, then this method should return HRESULT");
*ppSig = NULL;
*pcSig = 0;
}
}
//*******************************************************************************
PCCOR_SIGNATURE MethodDesc::GetSig()
{
WRAPPER_NO_CONTRACT;
PCCOR_SIGNATURE pSig;
DWORD cSig;
GetSig(&pSig, &cSig);
PREFIX_ASSUME(pSig != NULL);
return pSig;
}
Signature MethodDesc::GetSignature()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
PCCOR_SIGNATURE pSig;
DWORD cSig;
GetSig(&pSig, &cSig);
PREFIX_ASSUME(pSig != NULL);
return Signature(pSig, cSig);
}
PCODE MethodDesc::GetMethodEntryPoint()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
SUPPORTS_DAC;
}
CONTRACTL_END;
// Similarly to SetMethodEntryPoint(), it is up to the caller to ensure that calls to this function are appropriately
// synchronized
// Keep implementations of MethodDesc::GetMethodEntryPoint and MethodDesc::GetAddrOfSlot in sync!
g_IBCLogger.LogMethodDescAccess(this);
if (HasNonVtableSlot())
{
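// The non-vtable slot is laid out immediately after the fixed-size part of this MethodDesc.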
SIZE_T size = GetBaseSize();
TADDR pSlot = dac_cast<TADDR>(this) + size;
return *PTR_PCODE(pSlot);
}
_ASSERTE(GetMethodTable()->IsCanonicalMethodTable());
return GetMethodTable_NoLogging()->GetSlot(GetSlot());
}
PTR_PCODE MethodDesc::GetAddrOfSlot()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
SUPPORTS_DAC;
}
CONTRACTL_END;
// Keep implementations of MethodDesc::GetMethodEntryPoint and MethodDesc::GetAddrOfSlot in sync!
if (HasNonVtableSlot())
{
SIZE_T size = GetBaseSize();
return PTR_PCODE(dac_cast<TADDR>(this) + size);
}
_ASSERTE(GetMethodTable()->IsCanonicalMethodTable());
return GetMethodTable()->GetSlotPtr(GetSlot());
}
//*******************************************************************************
PTR_MethodDesc MethodDesc::GetDeclMethodDesc(UINT32 slotNumber)
{
CONTRACTL {
WRAPPER(THROWS);
WRAPPER(GC_TRIGGERS);
INSTANCE_CHECK;
} CONTRACTL_END;
MethodDesc *pMDResult = this;
// If the MethodDesc is not itself a methodImpl, but it is not in its native
// slot, then someone (perhaps itself) must have overridden a methodImpl
// in a parent, which causes the method to get put into all of the methodImpl
// slots. So, the MethodDesc is implicitly a methodImpl without containing
// the data. To find the real methodImpl MethodDesc, climb the inheritance
// hierarchy checking the native slot on the way.
if ((UINT32)pMDResult->GetSlot() != slotNumber)
{
while (!pMDResult->IsMethodImpl())
{
CONSISTENCY_CHECK(CheckPointer(pMDResult->GetMethodTable()->GetParentMethodTable()));
CONSISTENCY_CHECK(slotNumber < pMDResult->GetMethodTable()->GetParentMethodTable()->GetNumVirtuals());
pMDResult = pMDResult->GetMethodTable()->GetParentMethodTable()->GetMethodDescForSlot(slotNumber);
}
{
CONSISTENCY_CHECK(pMDResult->IsMethodImpl());
MethodImpl *pImpl = pMDResult->GetMethodImpl();
pMDResult = pImpl->FindMethodDesc(slotNumber, PTR_MethodDesc(pMDResult));
}
// It is possible that a methodImpl'd slot got copied into another slot because
// of slot unification, for example:
// C1::A is methodImpled with C2::B
// C1::B is methodImpled with C2::C
// this means that through slot unification that A is tied to B and B is tied to C,
// so A is tied to C even though C does not have a methodImpl entry specifically
// relating to that slot. In this case, we recurse to the parent type and ask the
// same question again.
if (pMDResult->GetSlot() != slotNumber)
{
MethodTable * pMTOfMD = pMDResult->GetMethodTable();
CONSISTENCY_CHECK(slotNumber < pMTOfMD->GetParentMethodTable()->GetNumVirtuals());
pMDResult = pMTOfMD->GetParentMethodTable()->GetMethodDescForSlot(slotNumber);
pMDResult = pMDResult->GetDeclMethodDesc(slotNumber);
}
}
CONSISTENCY_CHECK(CheckPointer(pMDResult));
CONSISTENCY_CHECK((UINT32)pMDResult->GetSlot() == slotNumber);
return PTR_MethodDesc(pMDResult);
}
//*******************************************************************************
// Returns a hash for the method.
// The hash will be the same for the method across multiple process runs.
#ifndef DACCESS_COMPILE
COUNT_T MethodDesc::GetStableHash()
{
WRAPPER_NO_CONTRACT;
const char * className = NULL;
if (IsLCGMethod())
{
className = "DynamicClass";
}
else if (IsILStub())
{
className = ILStubResolver::GetStubClassName(this);
}
if (className == NULL)
{
return GetVersionResilientMethodHashCode(this);
}
else
{
int typeHash = ComputeNameHashCode("", className);
return typeHash ^ ComputeNameHashCode(GetName());
}
}
#endif // DACCESS_COMPILE
//*******************************************************************************
// Get the number of type parameters to a generic method
DWORD MethodDesc::GetNumGenericMethodArgs()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
CANNOT_TAKE_LOCK;
SUPPORTS_DAC;
}
CONTRACTL_END
g_IBCLogger.LogMethodDescAccess(this);
if (GetClassification() == mcInstantiated)
{
InstantiatedMethodDesc *pIMD = AsInstantiatedMethodDesc();
return pIMD->m_wNumGenericArgs;
}
else return 0;
}
//*******************************************************************************
MethodTable * MethodDesc::GetExactDeclaringType(MethodTable * ownerOrSubType)
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
MethodTable * pMT = GetMethodTable();
// Fast path for typical case.
if (ownerOrSubType == pMT)
return pMT;
// If we come here for array method, the typedef tokens inside GetMethodTableMatchingParentClass
// will match, but the types are actually from unrelated arrays, so the result would be incorrect.
_ASSERTE(!IsArray());
return ownerOrSubType->GetMethodTableMatchingParentClass(pMT);
}
//*******************************************************************************
Instantiation MethodDesc::GetExactClassInstantiation(TypeHandle possibleObjType)
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END
return (possibleObjType.IsNull()
? GetClassInstantiation()
: possibleObjType.GetInstantiationOfParentClass(GetMethodTable()));
}
//*******************************************************************************
BOOL MethodDesc::HasSameMethodDefAs(MethodDesc * pMD)
{
LIMITED_METHOD_CONTRACT;
if (this == pMD)
return TRUE;
return (GetMemberDef() == pMD->GetMemberDef()) && (GetModule() == pMD->GetModule());
}
//*******************************************************************************
BOOL MethodDesc::IsTypicalSharedInstantiation()
{
WRAPPER_NO_CONTRACT;
Instantiation classInst = GetMethodTable()->GetInstantiation();
if (!ClassLoader::IsTypicalSharedInstantiation(classInst))
return FALSE;
if (IsGenericMethodDefinition())
return FALSE;
Instantiation methodInst = GetMethodInstantiation();
if (!ClassLoader::IsTypicalSharedInstantiation(methodInst))
return FALSE;
return TRUE;
}
//*******************************************************************************
Instantiation MethodDesc::LoadMethodInstantiation()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
}
CONTRACTL_END
if (IsGenericMethodDefinition() && !IsTypicalMethodDefinition())
{
return LoadTypicalMethodDefinition()->GetMethodInstantiation();
}
else
return GetMethodInstantiation();
}
//*******************************************************************************
Module *MethodDesc::GetDefiningModuleForOpenMethod()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
}
CONTRACTL_END
Module *pModule = GetMethodTable()->GetDefiningModuleForOpenType();
if (pModule != NULL)
return pModule;
if (IsGenericMethodDefinition())
return GetModule();
Instantiation inst = GetMethodInstantiation();
for (DWORD i = 0; i < inst.GetNumArgs(); i++)
{
// Encoded types are never open
if (!inst[i].IsEncodedFixup())
{
pModule = inst[i].GetDefiningModuleForOpenType();
if (pModule != NULL)
return pModule;
}
}
return NULL;
}
//*******************************************************************************
BOOL MethodDesc::ContainsGenericVariables()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
}
CONTRACTL_END
// If this is a method of a generic type, does the type have
// non-instantiated type arguments
if (TypeHandle(GetMethodTable()).ContainsGenericVariables())
return TRUE;
if (IsGenericMethodDefinition())
return TRUE;
// If this is an instantiated generic method, check whether any of its type arguments contain generic variables
if (GetNumGenericMethodArgs() != 0)
{
Instantiation methodInst = GetMethodInstantiation();
for (DWORD i = 0; i < methodInst.GetNumArgs(); i++)
{
if (methodInst[i].ContainsGenericVariables())
return TRUE;
}
}
return FALSE;
}
//*******************************************************************************
BOOL MethodDesc::IsTightlyBoundToMethodTable()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
// Anything with the real vtable slot is tightly bound
if (!HasNonVtableSlot())
return TRUE;
// All instantiations of generic methods are stored in the InstMethHashTable.
if (HasMethodInstantiation())
{
if (IsGenericMethodDefinition())
return TRUE;
else
return FALSE;
}
// Wrapper stubs are stored in the InstMethHashTable, e.g. for static methods in generic classes
if (IsWrapperStub())
return FALSE;
return TRUE;
}
#ifndef DACCESS_COMPILE
//*******************************************************************************
// Update flags in a thread safe manner.
WORD MethodDesc::InterlockedUpdateFlags(WORD wMask, BOOL fSet)
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
WORD wOldState = m_wFlags;
DWORD dwMask = wMask;
// We need to make this operation atomic (multiple threads can play with the flags field at the same time). But the flags field
// is a word and we only have interlock operations over dwords. So we round down the flags field address to the nearest aligned
// dword (along with the intended bitfield mask). Note that we make the assumption that the flags word is aligned itself, so we
// only have two possibilities: the field already lies on a dword boundary or it's precisely one word out.
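// For example, on a little-endian machine where m_wFlags sits in the upper half of its
// containing dword, pdwFlags is rounded down to that dword and the 16-bit mask is shifted
// left by 16 below so that it targets the correct half.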
DWORD* pdwFlags = (DWORD*)((ULONG_PTR)&m_wFlags - (offsetof(MethodDesc, m_wFlags) & 0x3));
#ifdef _PREFAST_
#pragma warning(push)
#pragma warning(disable:6326) // "Suppress PREFast warning about comparing two constants"
#endif // _PREFAST_
#if BIGENDIAN
if ((offsetof(MethodDesc, m_wFlags) & 0x3) == 0) {
#else // !BIGENDIAN
if ((offsetof(MethodDesc, m_wFlags) & 0x3) != 0) {
#endif // !BIGENDIAN
static_assert_no_msg(sizeof(m_wFlags) == 2);
dwMask <<= 16;
}
#ifdef _PREFAST_
#pragma warning(pop)
#endif
g_IBCLogger.LogMethodDescWriteAccess(this);
if (fSet)
FastInterlockOr(pdwFlags, dwMask);
else
FastInterlockAnd(pdwFlags, ~dwMask);
return wOldState;
}
WORD MethodDesc::InterlockedUpdateFlags3(WORD wMask, BOOL fSet)
{
LIMITED_METHOD_CONTRACT;
WORD wOldState = m_wFlags3AndTokenRemainder;
DWORD dwMask = wMask;
// We need to make this operation atomic (multiple threads can play with the flags field at the same time). But the flags field
// is a word and we only have interlock operations over dwords. So we round down the flags field address to the nearest aligned
// dword (along with the intended bitfield mask). Note that we make the assumption that the flags word is aligned itself, so we
// only have two possibilities: the field already lies on a dword boundary or it's precisely one word out.
DWORD* pdwFlags = (DWORD*)((ULONG_PTR)&m_wFlags3AndTokenRemainder - (offsetof(MethodDesc, m_wFlags3AndTokenRemainder) & 0x3));
#ifdef _PREFAST_
#pragma warning(push)
#pragma warning(disable:6326) // "Suppress PREFast warning about comparing two constants"
#endif // _PREFAST_
#if BIGENDIAN
if ((offsetof(MethodDesc, m_wFlags3AndTokenRemainder) & 0x3) == 0) {
#else // !BIGENDIAN
if ((offsetof(MethodDesc, m_wFlags3AndTokenRemainder) & 0x3) != 0) {
#endif // !BIGENDIAN
static_assert_no_msg(sizeof(m_wFlags3AndTokenRemainder) == 2);
dwMask <<= 16;
}
#ifdef _PREFAST_
#pragma warning(pop)
#endif
g_IBCLogger.LogMethodDescWriteAccess(this);
if (fSet)
FastInterlockOr(pdwFlags, dwMask);
else
FastInterlockAnd(pdwFlags, ~dwMask);
return wOldState;
}
#endif // !DACCESS_COMPILE
//*******************************************************************************
// Returns the address of the native code.
//
// Methods which have no native code are either implemented by stubs or not jitted yet.
// For example, NDirectMethodDesc's have no native code. They are treated as
// implemented by stubs. On WIN64, these stubs are IL stubs, which DO have native code.
//
// This function returns null if the method has no native code.
PCODE MethodDesc::GetNativeCode()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
_ASSERTE(!IsDefaultInterfaceMethod() || HasNativeCodeSlot());
g_IBCLogger.LogMethodDescAccess(this);
if (HasNativeCodeSlot())
{
// When a profiler is enabled, it may ask to rejit the code even though
// we already have ngen code for this MethodDesc. (See MethodDesc::DoPrestub).
// This means that *GetAddrOfNativeCodeSlot()
// is not stable. It can turn from non-zero to zero.
PCODE pCode = *GetAddrOfNativeCodeSlot();
#ifdef TARGET_ARM
if (pCode != NULL)
pCode |= THUMB_CODE;
#endif
return pCode;
}
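// Without a native code slot, the only native code we can report is a stable entry point
// that is not a precode stub.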
if (!HasStableEntryPoint() || HasPrecode())
return NULL;
return GetStableEntryPoint();
}
//*******************************************************************************
PTR_PCODE MethodDesc::GetAddrOfNativeCodeSlot()
{
WRAPPER_NO_CONTRACT;
_ASSERTE(HasNativeCodeSlot());
SIZE_T size = s_ClassificationSizeTable[m_wFlags & (mdcClassification | mdcHasNonVtableSlot | mdcMethodImpl)];
return (PTR_PCODE)(dac_cast<TADDR>(this) + size);
}
//*******************************************************************************
BOOL MethodDesc::IsVoid()
{
WRAPPER_NO_CONTRACT;
MetaSig sig(this);
return sig.IsReturnTypeVoid();
}
//*******************************************************************************
BOOL MethodDesc::HasRetBuffArg()
{
WRAPPER_NO_CONTRACT;
MetaSig sig(this);
ArgIterator argit(&sig);
return argit.HasRetBuffArg();
}
//*******************************************************************************
// This returns the offset of the IL.
// The offset is relative to the base of the IL image.
ULONG MethodDesc::GetRVA()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END
if (IsRuntimeSupplied())
{
return 0;
}
// Methods without metadata don't have an RVA. Examples are IL stubs and LCG methods.
if (IsNoMetadata())
{
return 0;
}
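// A zero RID means there is no metadata row backing this method, so there is no RVA to report.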
if (GetMemberDef() & 0x00FFFFFF)
{
Module *pModule = GetModule();
PREFIX_ASSUME(pModule != NULL);
DWORD dwDescrOffset;
DWORD dwImplFlags;
if (FAILED(pModule->GetMDImport()->GetMethodImplProps(GetMemberDef(), &dwDescrOffset, &dwImplFlags)))
{ // Class loader already asked for MethodImpls, so this should always succeed (unless there's a
// bug or a new code path)
_ASSERTE(!"If this ever fires, then this method should return HRESULT");
return 0;
}
BAD_FORMAT_NOTHROW_ASSERT(IsNDirect() || IsMiIL(dwImplFlags) || IsMiOPTIL(dwImplFlags) || dwDescrOffset == 0);
return dwDescrOffset;
}
return 0;
}
//*******************************************************************************
BOOL MethodDesc::IsVarArg()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
SUPPORTS_DAC;
Signature signature = GetSignature();
_ASSERTE(!signature.IsEmpty());
return MetaSig::IsVarArg(signature);
}
//*******************************************************************************
COR_ILMETHOD* MethodDesc::GetILHeader(BOOL fAllowOverrides /*=FALSE*/)
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
PRECONDITION(IsIL());
PRECONDITION(!IsUnboxingStub());
}
CONTRACTL_END
Module *pModule = GetModule();
// Always pick up 'permanent' overrides like reflection emit, EnC, etc.
// but only grab temporary overrides (like profiler rewrites) if asked to
TADDR pIL = pModule->GetDynamicIL(GetMemberDef(), fAllowOverrides);
if (pIL == NULL)
{
pIL = pModule->GetIL(GetRVA());
}
#ifdef _DEBUG_IMPL
if (pIL != NULL)
{
//
// This is a convenient place to verify that COR_ILMETHOD_DECODER::GetOnDiskSize is in sync
// with our private DACized copy in PEDecoder::ComputeILMethodSize
//
COR_ILMETHOD_DECODER header((COR_ILMETHOD *)pIL);
SIZE_T size1 = header.GetOnDiskSize((COR_ILMETHOD *)pIL);
SIZE_T size2 = PEDecoder::ComputeILMethodSize(pIL);
_ASSERTE(size1 == size2);
}
#endif
#ifdef DACCESS_COMPILE
return (pIL != NULL) ? DacGetIlMethod(pIL) : NULL;
#else // !DACCESS_COMPILE
return PTR_COR_ILMETHOD(pIL);
#endif // !DACCESS_COMPILE
}
//*******************************************************************************
ReturnKind MethodDesc::ParseReturnKindFromSig(INDEBUG(bool supportStringConstructors))
{
CONTRACTL
{
if (FORBIDGC_LOADER_USE_ENABLED()) NOTHROW; else THROWS;
GC_NOTRIGGER;
FORBID_FAULT;
}
CONTRACTL_END
ENABLE_FORBID_GC_LOADER_USE_IN_THIS_SCOPE();
TypeHandle thValueType;
MetaSig sig(this);
CorElementType et = sig.GetReturnTypeNormalized(&thValueType);
switch (et)
{
case ELEMENT_TYPE_STRING:
case ELEMENT_TYPE_CLASS:
case ELEMENT_TYPE_SZARRAY:
case ELEMENT_TYPE_ARRAY:
case ELEMENT_TYPE_OBJECT:
case ELEMENT_TYPE_VAR:
return RT_Object;
#ifdef ENREGISTERED_RETURNTYPE_INTEGER_MAXSIZE
case ELEMENT_TYPE_VALUETYPE:
// We return value types in registers if they fit in ENREGISTERED_RETURNTYPE_MAXSIZE
// These valuetypes could contain gc refs.
{
ArgIterator argit(&sig);
if (!argit.HasRetBuffArg())
{
// the type must already be loaded
_ASSERTE(!thValueType.IsNull());
if (!thValueType.IsTypeDesc())
{
MethodTable * pReturnTypeMT = thValueType.AsMethodTable();
#ifdef UNIX_AMD64_ABI
if (pReturnTypeMT->IsRegPassedStruct())
{
// The multi-reg return case using the class handle is only implemented for the AMD64 SystemV ABI.
// On other platforms, multi-reg return is not supported with GcInfo v1.
// So, the relevant information must be obtained from the GcInfo tables (which requires version2).
EEClass* eeClass = pReturnTypeMT->GetClass();
ReturnKind regKinds[2] = { RT_Unset, RT_Unset };
int orefCount = 0;
for (int i = 0; i < 2; i++)
{
if (eeClass->GetEightByteClassification(i) == SystemVClassificationTypeIntegerReference)
{
regKinds[i] = RT_Object;
}
else if (eeClass->GetEightByteClassification(i) == SystemVClassificationTypeIntegerByRef)
{
regKinds[i] = RT_ByRef;
}
else
{
regKinds[i] = RT_Scalar;
}
}
ReturnKind structReturnKind = GetStructReturnKind(regKinds[0], regKinds[1]);
return structReturnKind;
}
#endif // UNIX_AMD64_ABI
if (pReturnTypeMT->ContainsPointers() || pReturnTypeMT->IsByRefLike())
{
if (pReturnTypeMT->GetNumInstanceFields() == 1)
{
_ASSERTE(pReturnTypeMT->GetNumInstanceFieldBytes() == sizeof(void*));
// Note: we can't distinguish RT_Object from RT_ByRef, the caller has to tolerate that.
return RT_Object;
}
else
{
// Multi reg return case with pointers, can't restore the actual kind.
return RT_Illegal;
}
}
}
}
}
break;
#endif // ENREGISTERED_RETURNTYPE_INTEGER_MAXSIZE
#ifdef _DEBUG
case ELEMENT_TYPE_VOID:
// String constructors return objects. We should not have any ecall string
// constructors, except when called from GC coverage code (which is only
// done under debug). We will therefore optimize the retail version of this
// method to not support string constructors.
if (IsCtor() && GetMethodTable()->HasComponentSize())
{
_ASSERTE(supportStringConstructors);
return RT_Object;
}
break;
#endif // _DEBUG
case ELEMENT_TYPE_BYREF:
return RT_ByRef;
default:
break;
}
return RT_Scalar;
}
ReturnKind MethodDesc::GetReturnKind(INDEBUG(bool supportStringConstructors))
{
// For simplicity, we don't hijack in funclets, but if you ever change that,
// be sure to choose the OnHijack... callback type to match that of the FUNCLET
// not the main method (it would probably be Scalar).
ENABLE_FORBID_GC_LOADER_USE_IN_THIS_SCOPE();
// Mark that we are performing a stackwalker like operation on the current thread.
// This is necessary to allow the signature parsing functions to work without triggering any loads
StackWalkerWalkingThreadHolder threadStackWalking(GetThread());
#ifdef TARGET_X86
MetaSig msig(this);
if (msig.HasFPReturn())
{
// Figuring out whether the function returns FP or not is hard to do
// on-the-fly, so we use a different callback helper on x86 where this
// piece of information is needed in order to perform the right save &
// restore of the return value around the call to OnHijackScalarWorker.
return RT_Float;
}
#endif // TARGET_X86
return ParseReturnKindFromSig(INDEBUG(supportStringConstructors));
}
#ifdef FEATURE_COMINTEROP
#ifndef DACCESS_COMPILE
//*******************************************************************************
LONG MethodDesc::GetComDispid()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END
ULONG dispid = -1;
HRESULT hr = GetMDImport()->GetDispIdOfMemberDef(
GetMemberDef(), // The member for which to get props.
&dispid // return dispid.
);
if (FAILED(hr))
return -1;
return (LONG)dispid;
}
//*******************************************************************************
WORD MethodDesc::GetComSlot()
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
FORBID_FAULT;
}
CONTRACTL_END
MethodTable * pMT = GetMethodTable();
_ASSERTE(pMT->IsInterface());
// COM slots are biased from MethodTable slots depending on interface type
WORD numExtraSlots = ComMethodTable::GetNumExtraSlots(pMT->GetComInterfaceType());
// Normal interfaces are laid out the same way as in the MethodTable, while
// sparse interfaces need to go through an extra layer of mapping.
WORD slot;
if (pMT->IsSparseForCOMInterop())
slot = numExtraSlots + pMT->GetClass()->GetSparseCOMInteropVTableMap()->LookupVTSlot(GetSlot());
else
slot = numExtraSlots + GetSlot();
return slot;
}
#endif // !DACCESS_COMPILE
#endif // FEATURE_COMINTEROP
//*******************************************************************************
DWORD MethodDesc::GetAttrs() const
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END
if (IsArray())
return dac_cast<PTR_ArrayMethodDesc>(this)->GetAttrs();
else if (IsNoMetadata())
return dac_cast<PTR_DynamicMethodDesc>(this)->GetAttrs();
DWORD dwAttributes;
if (FAILED(GetMDImport()->GetMethodDefProps(GetMemberDef(), &dwAttributes)))
{ // Class loader already asked for attributes, so this should always succeed (unless there's a
// bug or a new code path)
_ASSERTE(!"If this ever fires, then this method should return HRESULT");
return 0;
}
return dwAttributes;
}
//*******************************************************************************
DWORD MethodDesc::GetImplAttrs()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END
DWORD props;
if (FAILED(GetMDImport()->GetMethodImplProps(GetMemberDef(), NULL, &props)))
{ // Class loader already asked for MethodImpls, so this should always succeed (unless there's a
// bug or a new code path)
_ASSERTE(!"If this ever fires, then this method should return HRESULT");
return 0;
}
return props;
}
//*******************************************************************************
Module* MethodDesc::GetLoaderModule()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
if (HasMethodInstantiation() && !IsGenericMethodDefinition())
{
Module *retVal = ClassLoader::ComputeLoaderModule(GetMethodTable(),
GetMemberDef(),
GetMethodInstantiation());
return retVal;
}
else
{
return GetMethodTable()->GetLoaderModule();
}
}
//*******************************************************************************
Module *MethodDesc::GetModule() const
{
STATIC_CONTRACT_NOTHROW;
STATIC_CONTRACT_GC_NOTRIGGER;
STATIC_CONTRACT_FORBID_FAULT;
SUPPORTS_DAC;
g_IBCLogger.LogMethodDescAccess(this);
Module *pModule = GetModule_NoLogging();
return pModule;
}
//*******************************************************************************
Module *MethodDesc::GetModule_NoLogging() const
{
STATIC_CONTRACT_NOTHROW;
STATIC_CONTRACT_GC_NOTRIGGER;
STATIC_CONTRACT_FORBID_FAULT;
SUPPORTS_DAC;
MethodTable* pMT = GetMethodDescChunk()->GetMethodTable();
return pMT->GetModule();
}
//*******************************************************************************
// Is this an instantiating stub for generics? This does not include those
// BoxedEntryPointStubs which call an instantiating stub.
BOOL MethodDesc::IsInstantiatingStub()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
return
(GetClassification() == mcInstantiated)
&& !IsUnboxingStub()
&& AsInstantiatedMethodDesc()->IMD_IsWrapperStubWithInstantiations();
}
//*******************************************************************************
BOOL MethodDesc::IsWrapperStub()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
return (IsUnboxingStub() || IsInstantiatingStub());
}
#ifndef DACCESS_COMPILE
//*******************************************************************************
MethodDesc *MethodDesc::GetWrappedMethodDesc()
{
WRAPPER_NO_CONTRACT;
_ASSERTE(IsWrapperStub());
if (IsUnboxingStub())
{
return this->GetMethodTable()->GetUnboxedEntryPointMD(this);
}
if (IsInstantiatingStub())
{
MethodDesc *pRet = AsInstantiatedMethodDesc()->IMD_GetWrappedMethodDesc();
#ifdef _DEBUG
MethodDesc *pAltMD =
MethodDesc::FindOrCreateAssociatedMethodDesc(this,
this->GetMethodTable(),
FALSE, /* no unboxing entrypoint */
this->GetMethodInstantiation(),
TRUE /* get shared code */ );
_ASSERTE(pAltMD == pRet);
#endif // _DEBUG
return pRet;
}
return NULL;
}
MethodDesc *MethodDesc::GetExistingWrappedMethodDesc()
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
_ASSERTE(IsWrapperStub());
if (IsUnboxingStub())
{
return this->GetMethodTable()->GetExistingUnboxedEntryPointMD(this);
}
if (IsInstantiatingStub())
{
MethodDesc *pRet = AsInstantiatedMethodDesc()->IMD_GetWrappedMethodDesc();
return pRet;
}
return NULL;
}
#endif // !DACCESS_COMPILE
//*******************************************************************************
BOOL MethodDesc::IsSharedByGenericInstantiations()
{
LIMITED_METHOD_DAC_CONTRACT;
if (IsWrapperStub())
return FALSE;
else if (GetMethodTable()->IsSharedByGenericInstantiations())
return TRUE;
else return IsSharedByGenericMethodInstantiations();
}
//*******************************************************************************
BOOL MethodDesc::IsSharedByGenericMethodInstantiations()
{
LIMITED_METHOD_DAC_CONTRACT;
if (GetClassification() == mcInstantiated)
return AsInstantiatedMethodDesc()->IMD_IsSharedByGenericMethodInstantiations();
else return FALSE;
}
//*******************************************************************************
// Does this method require an extra MethodTable argument for instantiation information?
// This is the case for
// * per-inst static methods in shared-code instantiated generic classes (e.g. static void MyClass<string>::m())
// - there is no this pointer providing generic dictionary info
// * shared-code instance methods in instantiated generic structs (e.g. void MyValueType<string>::m())
// - the unboxed 'this' pointer in value-type instance methods doesn't have a MethodTable pointer by definition
// * shared instance and default interface methods called via interface dispatch (e.g. IFoo<string>.Foo calling into IFoo<object>::Foo())
// - this pointer is ambiguous as it can implement more than one IFoo<T>
BOOL MethodDesc::RequiresInstMethodTableArg()
{
LIMITED_METHOD_DAC_CONTRACT;
return
IsSharedByGenericInstantiations() &&
!HasMethodInstantiation() &&
(IsStatic() || GetMethodTable()->IsValueType() || (GetMethodTable()->IsInterface() && !IsAbstract()));
}
//*******************************************************************************
// Does this method require an extra InstantiatedMethodDesc argument for instantiation information?
// This is the case for
// * shared-code instantiated generic methods
BOOL MethodDesc::RequiresInstMethodDescArg()
{
LIMITED_METHOD_DAC_CONTRACT;
return IsSharedByGenericInstantiations() &&
HasMethodInstantiation();
}
//*******************************************************************************
// Does this method require any kind of extra argument for instantiation information?
BOOL MethodDesc::RequiresInstArg()
{
LIMITED_METHOD_DAC_CONTRACT;
BOOL fRet = IsSharedByGenericInstantiations() &&
(HasMethodInstantiation() || IsStatic() || GetMethodTable()->IsValueType() || (GetMethodTable()->IsInterface() && !IsAbstract()));
_ASSERT(fRet == (RequiresInstMethodTableArg() || RequiresInstMethodDescArg()));
return fRet;
}
//*******************************************************************************
BOOL MethodDesc::IsRuntimeMethodHandle()
{
WRAPPER_NO_CONTRACT;
// <TODO> Refine this check further for BoxedEntryPointStubs </TODO>
return (!HasMethodInstantiation() || !IsSharedByGenericMethodInstantiations());
}
//*******************************************************************************
// Strip off method and class instantiation if present e.g.
// C1<int>.m1<string> -> C1.m1
// C1<int>.m2 -> C1.m2
// C2.m2<int> -> C2.m2
// C2.m2 -> C2.m2
MethodDesc* MethodDesc::LoadTypicalMethodDefinition()
{
CONTRACT(MethodDesc*)
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
POSTCONDITION(CheckPointer(RETVAL));
POSTCONDITION(RETVAL->IsTypicalMethodDefinition());
}
CONTRACT_END
#ifndef DACCESS_COMPILE
if (HasClassOrMethodInstantiation())
{
MethodTable *pMT = GetMethodTable();
if (!pMT->IsTypicalTypeDefinition())
pMT = ClassLoader::LoadTypeDefThrowing(pMT->GetModule(),
pMT->GetCl(),
ClassLoader::ThrowIfNotFound,
ClassLoader::PermitUninstDefOrRef).GetMethodTable();
CONSISTENCY_CHECK(TypeHandle(pMT).CheckFullyLoaded());
MethodDesc *resultMD = pMT->GetParallelMethodDesc(this);
PREFIX_ASSUME(resultMD != NULL);
resultMD->CheckRestore();
RETURN (resultMD);
}
else
#endif // !DACCESS_COMPILE
RETURN(this);
}
//*******************************************************************************
BOOL MethodDesc::IsTypicalMethodDefinition() const
{
LIMITED_METHOD_CONTRACT;
if (HasMethodInstantiation() && !IsGenericMethodDefinition())
return FALSE;
if (HasClassInstantiation() && !GetMethodTable()->IsGenericTypeDefinition())
return FALSE;
return TRUE;
}
//*******************************************************************************
BOOL MethodDesc::AcquiresInstMethodTableFromThis() {
LIMITED_METHOD_CONTRACT;
SUPPORTS_DAC;
return
IsSharedByGenericInstantiations() &&
!HasMethodInstantiation() &&
!IsStatic() &&
!GetMethodTable()->IsValueType() &&
!(GetMethodTable()->IsInterface() && !IsAbstract());
}
//*******************************************************************************
UINT MethodDesc::SizeOfArgStack()
{
WRAPPER_NO_CONTRACT;
MetaSig msig(this);
ArgIterator argit(&msig);
return argit.SizeOfArgStack();
}
UINT MethodDesc::SizeOfNativeArgStack()
{
#ifndef UNIX_AMD64_ABI
return SizeOfArgStack();
#else
WRAPPER_NO_CONTRACT;
MetaSig msig(this);
PInvokeArgIterator argit(&msig);
return argit.SizeOfArgStack();
#endif
}
#ifdef TARGET_X86
//*******************************************************************************
UINT MethodDesc::CbStackPop()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
MetaSig msig(this);
ArgIterator argit(&msig);
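// Constructors of variable-sized objects (strings) are invoked without a 'this' argument,
// so it must not be counted when computing how much the callee pops.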
bool fCtorOfVariableSizedObject = msig.HasThis() && (GetMethodTable() == g_pStringClass) && IsCtor();
if (fCtorOfVariableSizedObject)
{
msig.ClearHasThis();
}
return argit.CbStackPop();
}
#endif // TARGET_X86
#ifndef DACCESS_COMPILE
//*******************************************************************************
// Strip off the method instantiation (if present) e.g.
// C<int>.m<string> -> C<int>.m
// D.m<string> -> D.m
// Note that this also canonicalizes the owning method table
// @todo check uses and clean this up
MethodDesc* MethodDesc::StripMethodInstantiation()
{
CONTRACT(MethodDesc*)
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
POSTCONDITION(CheckPointer(RETVAL));
}
CONTRACT_END
if (!HasClassOrMethodInstantiation())
RETURN(this);
MethodTable *pMT = GetMethodTable()->GetCanonicalMethodTable();
MethodDesc *resultMD = pMT->GetParallelMethodDesc(this);
_ASSERTE(resultMD->IsGenericMethodDefinition() || !resultMD->HasMethodInstantiation());
RETURN(resultMD);
}
//*******************************************************************************
MethodDescChunk *MethodDescChunk::CreateChunk(LoaderHeap *pHeap, DWORD methodDescCount,
DWORD classification, BOOL fNonVtableSlot, BOOL fNativeCodeSlot, BOOL fComPlusCallInfo, MethodTable *pInitialMT, AllocMemTracker *pamTracker)
{
CONTRACT(MethodDescChunk *)
{
THROWS;
GC_NOTRIGGER;
INJECT_FAULT(ThrowOutOfMemory());
PRECONDITION(CheckPointer(pHeap));
PRECONDITION(CheckPointer(pInitialMT));
PRECONDITION(CheckPointer(pamTracker));
POSTCONDITION(CheckPointer(RETVAL));
}
CONTRACT_END;
SIZE_T oneSize = MethodDesc::GetBaseSize(classification);
if (fNonVtableSlot)
oneSize += sizeof(MethodDesc::NonVtableSlot);
if (fNativeCodeSlot)
oneSize += sizeof(MethodDesc::NativeCodeSlot);
#ifdef FEATURE_COMINTEROP
if (fComPlusCallInfo)
oneSize += sizeof(ComPlusCallInfo);
#else // FEATURE_COMINTEROP
_ASSERTE(!fComPlusCallInfo);
#endif // FEATURE_COMINTEROP
_ASSERTE((oneSize & MethodDesc::ALIGNMENT_MASK) == 0);
DWORD maxMethodDescsPerChunk = (DWORD)(MethodDescChunk::MaxSizeOfMethodDescs / oneSize);
if (methodDescCount == 0)
methodDescCount = maxMethodDescsPerChunk;
MethodDescChunk * pFirstChunk = NULL;
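// MethodDescs are allocated in one or more linked chunks; each chunk holds at most
// maxMethodDescsPerChunk entries, so larger requests are split across several chunks.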
do
{
DWORD count = min(methodDescCount, maxMethodDescsPerChunk);
void * pMem = pamTracker->Track(
pHeap->AllocMem(S_SIZE_T(sizeof(TADDR) + sizeof(MethodDescChunk) + oneSize * count)));
// Skip pointer to temporary entrypoints
MethodDescChunk * pChunk = (MethodDescChunk *)((BYTE*)pMem + sizeof(TADDR));
pChunk->SetSizeAndCount(oneSize * count, count);
pChunk->SetMethodTable(pInitialMT);
MethodDesc * pMD = pChunk->GetFirstMethodDesc();
for (DWORD i = 0; i < count; i++)
{
pMD->SetChunkIndex(pChunk);
pMD->SetClassification(classification);
if (fNonVtableSlot)
pMD->SetHasNonVtableSlot();
if (fNativeCodeSlot)
pMD->SetHasNativeCodeSlot();
#ifdef FEATURE_COMINTEROP
if (fComPlusCallInfo)
pMD->SetupGenericComPlusCall();
#endif // FEATURE_COMINTEROP
_ASSERTE(pMD->SizeOf() == oneSize);
pMD = (MethodDesc *)((BYTE *)pMD + oneSize);
}
pChunk->m_next = pFirstChunk;
pFirstChunk = pChunk;
methodDescCount -= count;
}
while (methodDescCount > 0);
RETURN pFirstChunk;
}
//--------------------------------------------------------------------
// Virtual Resolution on Objects
//
// Given a MethodDesc and an Object, return the target address
// and/or the target MethodDesc and/or make a call.
//
// Some of the implementation of this logic is in
// MethodTable::GetMethodDescForInterfaceMethodAndServer.
// Those functions should really be moved here.
//--------------------------------------------------------------------
//*******************************************************************************
// The following resolve virtual dispatch for the given method on the given
// object down to an actual address to call, including any
// handling of context proxies and other thunking layers.
MethodDesc* MethodDesc::ResolveGenericVirtualMethod(OBJECTREF *orThis)
{
CONTRACT(MethodDesc *)
{
THROWS;
GC_TRIGGERS;
PRECONDITION(IsVtableMethod());
PRECONDITION(HasMethodInstantiation());
PRECONDITION(!ContainsGenericVariables());
POSTCONDITION(CheckPointer(RETVAL));
POSTCONDITION(RETVAL->HasMethodInstantiation());
}
CONTRACT_END;
// Method table of target (might be instantiated)
MethodTable *pObjMT = (*orThis)->GetMethodTable();
// This is the static method descriptor describing the call.
// It is not the destination of the call, which we must compute.
MethodDesc* pStaticMD = this;
// Strip off the method instantiation if present
MethodDesc* pStaticMDWithoutGenericMethodArgs = pStaticMD->StripMethodInstantiation();
// Compute the target, though we have not yet applied the type arguments.
MethodDesc *pTargetMDBeforeGenericMethodArgs =
pStaticMD->IsInterface()
? MethodTable::GetMethodDescForInterfaceMethodAndServer(TypeHandle(pStaticMD->GetMethodTable()),
pStaticMDWithoutGenericMethodArgs,orThis)
: pObjMT->GetMethodDescForSlot(pStaticMDWithoutGenericMethodArgs->GetSlot());
pTargetMDBeforeGenericMethodArgs->CheckRestore();
// The actual destination may lie anywhere in the inheritance hierarchy
// between the static descriptor and the target object.
// So now compute where we are really going! This may be an instantiated
// class type if the generic virtual lies in a generic class.
MethodTable *pTargetMT = pTargetMDBeforeGenericMethodArgs->GetMethodTable();
// No need to find/create a new generic instantiation if the target is the
// same as the static, i.e. the virtual method has not been overridden.
if (!pTargetMT->IsSharedByGenericInstantiations() && !pTargetMT->IsValueType() &&
pTargetMDBeforeGenericMethodArgs == pStaticMDWithoutGenericMethodArgs)
RETURN(pStaticMD);
if (pTargetMT->IsSharedByGenericInstantiations())
{
pTargetMT = ClassLoader::LoadGenericInstantiationThrowing(pTargetMT->GetModule(),
pTargetMT->GetCl(),
pTargetMDBeforeGenericMethodArgs->GetExactClassInstantiation(TypeHandle(pObjMT))).GetMethodTable();
}
RETURN(MethodDesc::FindOrCreateAssociatedMethodDesc(
pTargetMDBeforeGenericMethodArgs,
pTargetMT,
(pTargetMT->IsValueType()), /* get unboxing entry point if a struct*/
pStaticMD->GetMethodInstantiation(),
FALSE /* no allowInstParam */ ));
}
//*******************************************************************************
PCODE MethodDesc::GetSingleCallableAddrOfVirtualizedCode(OBJECTREF *orThis, TypeHandle staticTH)
{
WRAPPER_NO_CONTRACT;
PRECONDITION(IsVtableMethod());
MethodTable *pObjMT = (*orThis)->GetMethodTable();
if (HasMethodInstantiation())
{
CheckRestore();
MethodDesc *pResultMD = ResolveGenericVirtualMethod(orThis);
// If we're remoting this call we can't call directly on the returned
// method desc, we need to go through a stub that guarantees we end up
// in the remoting handler. The stub we use below is normally just for
// non-virtual calls on virtual methods (that have the same problem
// where we could end up bypassing the remoting system), but it serves
// our purpose here (basically pushes our correctly instantiated,
// resolved method desc on the stack and calls the remoting code).
return pResultMD->GetSingleCallableAddrOfCode();
}
if (IsInterface())
{
MethodDesc * pTargetMD = MethodTable::GetMethodDescForInterfaceMethodAndServer(staticTH,this,orThis);
return pTargetMD->GetSingleCallableAddrOfCode();
}
return pObjMT->GetRestoredSlot(GetSlot());
}
//*******************************************************************************
// The following resolve virtual dispatch for the given method on the given
// object down to an actual address to call, including any
// handling of context proxies and other thunking layers.
PCODE MethodDesc::GetMultiCallableAddrOfVirtualizedCode(OBJECTREF *orThis, TypeHandle staticTH)
{
CONTRACT(PCODE)
{
THROWS;
GC_TRIGGERS;
PRECONDITION(IsVtableMethod());
POSTCONDITION(RETVAL != NULL);
}
CONTRACT_END;
// Method table of target (might be instantiated)
MethodTable *pObjMT = (*orThis)->GetMethodTable();
// This is the static method descriptor describing the call.
// It is not the destination of the call, which we must compute.
MethodDesc* pStaticMD = this;
MethodDesc *pTargetMD;
if (pStaticMD->HasMethodInstantiation())
{
CheckRestore();
pTargetMD = ResolveGenericVirtualMethod(orThis);
// If we're remoting this call we can't call directly on the returned
// method desc, we need to go through a stub that guarantees we end up
// in the remoting handler. The stub we use below is normally just for
// non-virtual calls on virtual methods (that have the same problem
// where we could end up bypassing the remoting system), but it serves
// our purpose here (basically pushes our correctly instantiated,
// resolved method desc on the stack and calls the remoting code).
RETURN(pTargetMD->GetMultiCallableAddrOfCode());
}
if (pStaticMD->IsInterface())
{
pTargetMD = MethodTable::GetMethodDescForInterfaceMethodAndServer(staticTH,pStaticMD,orThis);
RETURN(pTargetMD->GetMultiCallableAddrOfCode());
}
pTargetMD = pObjMT->GetMethodDescForSlot(pStaticMD->GetSlot());
RETURN (pTargetMD->GetMultiCallableAddrOfCode());
}
//*******************************************************************************
PCODE MethodDesc::GetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags /*=CORINFO_ACCESS_LDFTN*/)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM());
}
CONTRACTL_END
PCODE ret = TryGetMultiCallableAddrOfCode(accessFlags);
if (ret == NULL)
{
GCX_COOP();
// We have to allocate funcptr stub
ret = GetLoaderAllocator()->GetFuncPtrStubs()->GetFuncPtrStub(this);
}
return ret;
}
//*******************************************************************************
//
// Returns a callable entry point for a function.
// Multiple entry points could be used for a single function.
// i.e. this function is not idempotent
//
// We must ensure that GetMultiCallableAddrOfCode works
// correctly for all of the following cases:
// 1. shared generic method instantiations
// 2. unshared generic method instantiations
// 3. instance methods in shared generic classes
// 4. instance methods in unshared generic classes
// 5. static methods in shared generic classes.
// 6. static methods in unshared generic classes.
//
// For case 1 and 5 the methods are implemented using
// an instantiating stub (i.e. IsInstantiatingStub()
// should be true). These stubs pass on to
// shared-generic-code-which-requires-an-extra-type-context-parameter.
// So whenever we use LDFTN on these we need to give out
// the address of an instantiating stub.
//
// For cases 2, 3, 4 and 6 we can just use the standard technique for LdFtn:
// (for 2 we give out the address of the fake "slot" in InstantiatedMethodDescs)
// (for 3 it doesn't matter if the code is shared between instantiations
// because the instantiation context is picked up from the "this" parameter.)
PCODE MethodDesc::TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM());
}
CONTRACTL_END
// Record this method desc if required
g_IBCLogger.LogMethodDescAccess(this);
if (IsGenericMethodDefinition())
{
_ASSERTE(!"Cannot take the address of an uninstantiated generic method.");
COMPlusThrow(kInvalidProgramException);
}
if (accessFlags & CORINFO_ACCESS_LDFTN)
{
// Whenever we use LDFTN on shared-generic-code-which-requires-an-extra-parameter
// we need to give out the address of an instantiating stub. This is why we give
// out GetStableEntryPoint() for the IsInstantiatingStub() case: this is
// safe. But first we assert that we only use GetMultiCallableAddrOfCode on
// the instantiating stubs and not on the shared code itself.
_ASSERTE(!RequiresInstArg());
_ASSERTE(!IsSharedByGenericMethodInstantiations());
// No other access flags are valid with CORINFO_ACCESS_LDFTN
_ASSERTE((accessFlags & ~CORINFO_ACCESS_LDFTN) == 0);
}
// We create stable entrypoints for these upfront
if (IsWrapperStub() || IsEnCAddedMethod())
return GetStableEntryPoint();
// For EnC always just return the stable entrypoint so we can update the code
if (IsEnCMethod())
return GetStableEntryPoint();
// If the method has already been jitted, we can give out the direct address
// Note that we may have previously created a FuncPtrStubEntry, but
// GetMultiCallableAddrOfCode() does not need to be idempotent.
if (IsFCall())
{
// Call FCalls directly when possible
if (!IsInterface() && !GetMethodTable()->ContainsGenericVariables())
{
BOOL fSharedOrDynamicFCallImpl;
PCODE pFCallImpl = ECall::GetFCallImpl(this, &fSharedOrDynamicFCallImpl);
if (!fSharedOrDynamicFCallImpl)
return pFCallImpl;
// Fake ctors share one implementation that has to be wrapped by prestub
GetOrCreatePrecode();
}
}
else
{
if (IsPointingToStableNativeCode())
return GetNativeCode();
}
if (HasStableEntryPoint())
return GetStableEntryPoint();
if (IsVersionableWithVtableSlotBackpatch())
{
// Caller has to call via slot or allocate funcptr stub
return NULL;
}
// Force the creation of the precode if we would eventually get one anyway
if (MayHavePrecode())
return GetOrCreatePrecode()->GetEntryPoint();
#ifdef HAS_COMPACT_ENTRYPOINTS
// Caller has to call via slot or allocate funcptr stub
return NULL;
#else // HAS_COMPACT_ENTRYPOINTS
//
// Embed call to the temporary entrypoint into the code. It will be patched
// to point to the actual code later.
//
return GetTemporaryEntryPoint();
#endif // HAS_COMPACT_ENTRYPOINTS
}
//*******************************************************************************
PCODE MethodDesc::GetCallTarget(OBJECTREF* pThisObj, TypeHandle ownerType)
{
CONTRACTL
{
THROWS; // Resolving a generic virtual method can throw
GC_TRIGGERS;
MODE_COOPERATIVE;
}
CONTRACTL_END
PCODE pTarget;
if (IsVtableMethod() && !GetMethodTable()->IsValueType())
{
CONSISTENCY_CHECK(NULL != pThisObj);
if (ownerType.IsNull())
ownerType = GetMethodTable();
pTarget = GetSingleCallableAddrOfVirtualizedCode(pThisObj, ownerType);
}
else
{
pTarget = GetSingleCallableAddrOfCode();
}
return pTarget;
}
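//*******************************************************************************
// Map a non-virtual entry point back to its MethodDesc by looking up the owning code
// range section; handles both jitted code and precode stubs.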
MethodDesc* NonVirtualEntry2MethodDesc(PCODE entryPoint)
{
CONTRACTL {
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END
RangeSection* pRS = ExecutionManager::FindCodeRange(entryPoint, ExecutionManager::GetScanFlags());
if (pRS == NULL)
return NULL;
MethodDesc* pMD;
if (pRS->pjit->JitCodeToMethodInfo(pRS, entryPoint, &pMD, NULL))
return pMD;
if (pRS->pjit->GetStubCodeBlockKind(pRS, entryPoint) == STUB_CODE_BLOCK_PRECODE)
return MethodDesc::GetMethodDescFromStubAddr(entryPoint);
// We should never get here
_ASSERTE(!"NonVirtualEntry2MethodDesc failed for RangeSection");
return NULL;
}
//*******************************************************************************
// convert an entry point into a method desc
MethodDesc* Entry2MethodDesc(PCODE entryPoint, MethodTable *pMT)
{
CONTRACT(MethodDesc*)
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
POSTCONDITION(RETVAL->SanityCheck());
}
CONTRACT_END
MethodDesc* pMD = NonVirtualEntry2MethodDesc(entryPoint);
if (pMD != NULL)
RETURN(pMD);
pMD = VirtualCallStubManagerManager::Entry2MethodDesc(entryPoint, pMT);
if (pMD != NULL)
RETURN(pMD);
// Is it an FCALL?
pMD = ECall::MapTargetBackToMethod(entryPoint);
if (pMD != NULL)
RETURN(pMD);
// We should never get here
_ASSERTE(!"Entry2MethodDesc failed");
RETURN (NULL);
}
//*******************************************************************************
BOOL MethodDesc::IsPointingToPrestub()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
if (!HasStableEntryPoint())
{
if (IsVersionableWithVtableSlotBackpatch())
{
return GetMethodEntryPoint() == GetTemporaryEntryPoint();
}
return TRUE;
}
if (!HasPrecode())
return FALSE;
return GetPrecode()->IsPointingToPrestub();
}
//*******************************************************************************
void MethodDesc::Reset()
{
WRAPPER_NO_CONTRACT;
// This method is not thread-safe since we are updating
// different pieces of data non-atomically.
// Use this only if you can guarantee thread-safety somehow.
_ASSERTE(IsEnCMethod() || // The process is frozen by the debugger
IsDynamicMethod() || // These are used in a very restricted way
GetLoaderModule()->IsReflection()); // Rental methods
// Reset any flags relevant to the old code
ClearFlagsOnUpdate();
if (HasPrecode())
{
GetPrecode()->Reset();
}
else
{
// We should go here only for the rental methods
_ASSERTE(GetLoaderModule()->IsReflection());
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint | enum_flag2_HasPrecode, FALSE);
*GetAddrOfSlot() = GetTemporaryEntryPoint();
}
if (HasNativeCodeSlot())
{
*GetAddrOfNativeCodeSlot() = NULL;
}
_ASSERTE(!HasNativeCode());
}
//*******************************************************************************
Dictionary* MethodDesc::GetMethodDictionary()
{
WRAPPER_NO_CONTRACT;
return
(GetClassification() == mcInstantiated)
? (Dictionary*) (AsInstantiatedMethodDesc()->IMD_GetMethodDictionary())
: NULL;
}
//*******************************************************************************
DictionaryLayout* MethodDesc::GetDictionaryLayout()
{
WRAPPER_NO_CONTRACT;
return
((GetClassification() == mcInstantiated) && !IsUnboxingStub())
? AsInstantiatedMethodDesc()->IMD_GetDictionaryLayout()
: NULL;
}
#endif // !DACCESS_COMPILE
//*******************************************************************************
MethodImpl *MethodDesc::GetMethodImpl()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
PRECONDITION(HasMethodImplSlot());
SUPPORTS_DAC;
}
CONTRACTL_END
SIZE_T size = s_ClassificationSizeTable[m_wFlags & (mdcClassification | mdcHasNonVtableSlot)];
return PTR_MethodImpl(dac_cast<TADDR>(this) + size);
}
#ifndef DACCESS_COMPILE
//*******************************************************************************
BOOL MethodDesc::RequiresMethodDescCallingConvention(BOOL fEstimateForChunk /*=FALSE*/)
{
LIMITED_METHOD_CONTRACT;
// Interop marshaling is implemented using shared stubs
if (IsNDirect() || IsComPlusCall() || IsGenericComPlusCall())
return TRUE;
return FALSE;
}
//*******************************************************************************
BOOL MethodDesc::RequiresStableEntryPoint(BOOL fEstimateForChunk /*=FALSE*/)
{
LIMITED_METHOD_CONTRACT;
// Create precodes for versionable methods
if (IsVersionableWithPrecode())
return TRUE;
// Create precodes for edit and continue to make methods updateable
if (IsEnCMethod() || IsEnCAddedMethod())
return TRUE;
// Precreate precodes for LCG methods so we do not leak memory when the method descs are recycled
if (IsLCGMethod())
return TRUE;
if (fEstimateForChunk)
{
// Make a best guess based on the method table of the chunk.
if (IsInterface())
return TRUE;
}
else
{
// Wrapper stubs are stored in a generic dictionary that's not backpatched
if (IsWrapperStub())
return TRUE;
// TODO: Can we avoid early allocation of precodes for interfaces and cominterop?
if ((IsInterface() && !IsStatic() && IsVirtual()) || IsComPlusCall())
return TRUE;
}
return FALSE;
}
#endif // !DACCESS_COMPILE
//*******************************************************************************
BOOL MethodDesc::MayHaveNativeCode()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END
// The code flow of this method should roughly match the code flow of MethodDesc::DoPrestub.
switch (GetClassification())
{
case mcIL: // IsIL() case. Handled below.
break;
case mcFCall: // FCalls do not have real native code.
return FALSE;
case mcNDirect: // NDirect methods never have native code (note that an NDirect method
return FALSE; // does not appear as having native code even for stub-as-IL cases)
case mcEEImpl: // Runtime provided implementation. No native code.
return FALSE;
case mcArray: // Runtime provided implementation. No native code.
return FALSE;
case mcInstantiated: // IsIL() case. Handled below.
break;
#ifdef FEATURE_COMINTEROP
case mcComInterop: // Generated stub. No native code.
return FALSE;
#endif // FEATURE_COMINTEROP
case mcDynamic: // LCG or stub-as-il.
return TRUE;
default:
_ASSERTE(!"Unknown classification");
}
_ASSERTE(IsIL());
if (IsWrapperStub() || ContainsGenericVariables() || IsAbstract())
{
return FALSE;
}
return TRUE;
}
//*******************************************************************************
void MethodDesc::CheckRestore(ClassLoadLevel level)
{
STATIC_CONTRACT_THROWS;
STATIC_CONTRACT_GC_TRIGGERS;
STATIC_CONTRACT_FAULT;
if (!GetMethodTable()->IsFullyLoaded())
{
g_IBCLogger.LogMethodDescAccess(this);
if (GetClassification() == mcInstantiated)
{
#ifndef DACCESS_COMPILE
InstantiatedMethodDesc *pIMD = AsInstantiatedMethodDesc();
// First restore method table pointer in singleton chunk;
// it might be out-of-module
ClassLoader::EnsureLoaded(TypeHandle(GetMethodTable()), level);
g_IBCLogger.LogMethodDescWriteAccess(this);
pIMD->m_wFlags2 = pIMD->m_wFlags2 & ~InstantiatedMethodDesc::Unrestored;
if (ETW_PROVIDER_ENABLED(MICROSOFT_WINDOWS_DOTNETRUNTIME_PROVIDER))
{
ETW::MethodLog::MethodRestored(this);
}
#else // DACCESS_COMPILE
DacNotImpl();
#endif // DACCESS_COMPILE
}
else if (IsILStub()) // the only stored-sig MD type that uses ET_INTERNAL
{
ClassLoader::EnsureLoaded(TypeHandle(GetMethodTable()), level);
#ifndef DACCESS_COMPILE
if (ETW_PROVIDER_ENABLED(MICROSOFT_WINDOWS_DOTNETRUNTIME_PROVIDER))
{
ETW::MethodLog::MethodRestored(this);
}
#else // DACCESS_COMPILE
DacNotImpl();
#endif // DACCESS_COMPILE
}
else
{
ClassLoader::EnsureLoaded(TypeHandle(GetMethodTable()), level);
}
}
}
// static
MethodDesc* MethodDesc::GetMethodDescFromStubAddr(PCODE addr, BOOL fSpeculative /*=FALSE*/)
{
CONTRACT(MethodDesc *)
{
GC_NOTRIGGER;
NOTHROW;
}
CONTRACT_END;
MethodDesc * pMD = NULL;
#ifdef HAS_COMPACT_ENTRYPOINTS
if (MethodDescChunk::IsCompactEntryPointAtAddress(addr))
{
pMD = MethodDescChunk::GetMethodDescFromCompactEntryPoint(addr, fSpeculative);
RETURN(pMD);
}
#endif // HAS_COMPACT_ENTRYPOINTS
// Otherwise this must be some kind of precode
//
Precode* pPrecode = Precode::GetPrecodeFromEntryPoint(addr, fSpeculative);
PREFIX_ASSUME(fSpeculative || (pPrecode != NULL));
if (pPrecode != NULL)
{
pMD = pPrecode->GetMethodDesc(fSpeculative);
RETURN(pMD);
}
RETURN(NULL); // Not found
}
#ifdef HAS_COMPACT_ENTRYPOINTS
#if defined(TARGET_X86)
#include <pshpack1.h>
static const struct CentralJumpCode {
BYTE m_movzxEAX[3];
BYTE m_shlEAX[3];
BYTE m_addEAX[1];
MethodDesc* m_pBaseMD;
BYTE m_jmp[1];
INT32 m_rel32;
inline void Setup(CentralJumpCode* pCodeRX, MethodDesc* pMD, PCODE target, LoaderAllocator *pLoaderAllocator) {
WRAPPER_NO_CONTRACT;
m_pBaseMD = pMD;
m_rel32 = rel32UsingJumpStub(&pCodeRX->m_rel32, target, pMD, pLoaderAllocator);
}
inline BOOL CheckTarget(TADDR target) {
LIMITED_METHOD_CONTRACT;
TADDR addr = rel32Decode(PTR_HOST_MEMBER_TADDR(CentralJumpCode, this, m_rel32));
return (addr == target);
}
}
c_CentralJumpCode = {
{ 0x0F, 0xB6, 0xC0 }, // movzx eax,al
{ 0xC1, 0xE0, MethodDesc::ALIGNMENT_SHIFT }, // shl eax, MethodDesc::ALIGNMENT_SHIFT
{ 0x05 }, NULL, // add eax, pBaseMD
{ 0xE9 }, 0 // jmp PreStub
};
#include <poppack.h>
#elif defined(TARGET_ARM)
#include <pshpack1.h>
struct CentralJumpCode {
BYTE m_ldrPC[4];
BYTE m_short[2];
MethodDescChunk *m_pChunk;
PCODE m_target;
inline void Setup(PCODE target, MethodDescChunk *pChunk) {
WRAPPER_NO_CONTRACT;
m_target = target;
m_pChunk = pChunk;
}
inline BOOL CheckTarget(TADDR target) {
WRAPPER_NO_CONTRACT;
return ((TADDR)m_target == target);
}
}
c_CentralJumpCode = {
{ 0xDF, 0xF8, 0x08, 0xF0 }, // ldr pc, =pTarget
{ 0x00, 0x00 }, // short offset for alignment
0, // pChunk
0 // pTarget
};
#include <poppack.h>
#else
#error Unsupported platform
#endif
typedef DPTR(struct CentralJumpCode) PTR_CentralJumpCode;
#define TEP_CENTRAL_JUMP_SIZE sizeof(c_CentralJumpCode)
static_assert_no_msg((TEP_CENTRAL_JUMP_SIZE & 1) == 0);
#define TEP_ENTRY_SIZE 4
#ifdef TARGET_ARM
#define TEP_HALF_ENTRY_SIZE (TEP_ENTRY_SIZE / 2)
// A compact entry point on ARM consists of two Thumb instructions:
// mov r12, pc
// b CentralJumpCode
// First instruction 0x46fc
#define TEP_ENTRY_INSTR1_BYTE1 0xFC
#define TEP_ENTRY_INSTR1_BYTE2 0x46
// Mask for unconditional branch opcode
#define TEP_ENTRY_INSTR2_MASK1 0xE0
// Mask for opcode
#define TEP_ENTRY_INSTR2_MASK2 0xF8
// Bit used for ARM to identify compact entry points
#define COMPACT_ENTRY_ARM_CODE 0x2
/* static */ int MethodDescChunk::GetCompactEntryPointMaxCount ()
{
LIMITED_METHOD_DAC_CONTRACT;
return MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB / TEP_ENTRY_SIZE;
}
// Get offset from the start of current compact entry point to the CentralJumpCode
static uint16_t DecodeOffsetFromBranchToCentralJump (uint16_t instr)
{
int16_t offset = decodeUnconditionalBranchThumb ((LPBYTE) &instr);
offset += PC_REG_RELATIVE_OFFSET + TEP_HALF_ENTRY_SIZE;
_ASSERTE (offset >= TEP_ENTRY_SIZE && (offset % TEP_ENTRY_SIZE == 0));
return (uint16_t) offset;
}
#ifndef DACCESS_COMPILE
// Encode branch instruction to central jump for current compact entry point
static uint16_t EncodeBranchToCentralJump (int16_t offset)
{
_ASSERTE (offset >= 0 && (offset % TEP_ENTRY_SIZE == 0));
offset += TEP_HALF_ENTRY_SIZE - PC_REG_RELATIVE_OFFSET;
uint16_t instr;
emitUnconditionalBranchThumb ((LPBYTE) &instr, offset);
return instr;
}
#endif // DACCESS_COMPILE
#else // TARGET_ARM
#define TEP_MAX_BEFORE_INDEX (1 + (127 / TEP_ENTRY_SIZE))
#define TEP_MAX_BLOCK_INDEX (TEP_MAX_BEFORE_INDEX + (128 - TEP_CENTRAL_JUMP_SIZE) / TEP_ENTRY_SIZE)
#define TEP_FULL_BLOCK_SIZE (TEP_MAX_BLOCK_INDEX * TEP_ENTRY_SIZE + TEP_CENTRAL_JUMP_SIZE)
#endif // TARGET_ARM
BOOL MethodDescChunk::IsCompactEntryPointAtAddress(PCODE addr)
{
LIMITED_METHOD_DAC_CONTRACT;
#if defined(TARGET_X86) || defined(TARGET_AMD64)
// Compact entrypoints start at odd addresses
return (addr & 1) != 0;
#elif defined(TARGET_ARM)
// Compact entrypoints start at odd addresses (thumb) with second bit set to 1
uint8_t compactEntryPointMask = THUMB_CODE | COMPACT_ENTRY_ARM_CODE;
return (addr & compactEntryPointMask) == compactEntryPointMask;
#else
#error Unsupported platform
#endif
}
//*******************************************************************************
/* static */ MethodDesc* MethodDescChunk::GetMethodDescFromCompactEntryPoint(PCODE addr, BOOL fSpeculative /*=FALSE*/)
{
LIMITED_METHOD_CONTRACT;
#ifdef DACCESS_COMPILE
// Always use speculative checks with DAC
fSpeculative = TRUE;
#endif
// Always do consistency check in debug
if (fSpeculative INDEBUG(|| TRUE))
{
#ifdef TARGET_ARM
TADDR instrCodeAddr = PCODEToPINSTR(addr);
if (!IsCompactEntryPointAtAddress(addr) ||
*PTR_BYTE(instrCodeAddr) != TEP_ENTRY_INSTR1_BYTE1 ||
*PTR_BYTE(instrCodeAddr+1) != TEP_ENTRY_INSTR1_BYTE2)
#else // TARGET_ARM
if ((addr & 3) != 1 ||
*PTR_BYTE(addr) != X86_INSTR_MOV_AL ||
*PTR_BYTE(addr+2) != X86_INSTR_JMP_REL8)
#endif // TARGET_ARM
{
if (fSpeculative) return NULL;
_ASSERTE(!"Unexpected code in temporary entrypoint");
}
}
#ifdef TARGET_ARM
// On ARM compact entry points are thumb
_ASSERTE ((addr & THUMB_CODE) != 0);
addr = addr - THUMB_CODE;
// Get offset for CentralJumpCode from current compact entry point
PTR_UINT16 pBranchInstr = (PTR_UINT16(addr)) + 1;
uint16_t offset = DecodeOffsetFromBranchToCentralJump (*pBranchInstr);
TADDR centralJump = addr + offset;
int index = (centralJump - addr - TEP_ENTRY_SIZE) / TEP_ENTRY_SIZE;
#else // TARGET_ARM
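// The byte following MOV AL holds the method's index within the block, and the JMP rel8
// displacement locates the shared CentralJumpCode that follows the block.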
int index = *PTR_BYTE(addr+1);
TADDR centralJump = addr + 4 + *PTR_SBYTE(addr+3);
#endif // TARGET_ARM
CentralJumpCode* pCentralJumpCode = PTR_CentralJumpCode(centralJump);
// Always do consistency check in debug
if (fSpeculative INDEBUG(|| TRUE))
{
SIZE_T i;
for (i = 0; i < TEP_CENTRAL_JUMP_SIZE; i++)
{
BYTE b = ((BYTE*)&c_CentralJumpCode)[i];
if (b != 0 && b != *PTR_BYTE(centralJump+i))
{
if (fSpeculative) return NULL;
_ASSERTE(!"Unexpected code in temporary entrypoint");
}
}
#ifdef TARGET_ARM
_ASSERTE_IMPL(pCentralJumpCode->CheckTarget(GetPreStubCompactARMEntryPoint()));
#else // TARGET_ARM
_ASSERTE_IMPL(pCentralJumpCode->CheckTarget(GetPreStubEntryPoint()));
#endif // TARGET_ARM
}
#ifdef TARGET_ARM
// Go through all MethodDesc in MethodDescChunk and find the one with the required index
PTR_MethodDescChunk pChunk = *((DPTR(PTR_MethodDescChunk))(centralJump + offsetof(CentralJumpCode, m_pChunk)));
TADDR pMD = PTR_HOST_TO_TADDR (pChunk->GetFirstMethodDesc ());
_ASSERTE (index >= 0 && index < ((int) pChunk->GetCount ()));
index = ((int) pChunk->GetCount ()) - 1 - index;
SIZE_T totalSize = 0;
int curIndex = 0;
while (index != curIndex)
{
SIZE_T sizeCur = (PTR_MethodDesc (pMD))->SizeOf ();
totalSize += sizeCur;
pMD += sizeCur;
++curIndex;
}
return PTR_MethodDesc (pMD);
#else // TARGET_ARM
return PTR_MethodDesc((TADDR)pCentralJumpCode->m_pBaseMD + index * MethodDesc::ALIGNMENT);
#endif // TARGET_ARM
}
//*******************************************************************************
SIZE_T MethodDescChunk::SizeOfCompactEntryPoints(int count)
{
LIMITED_METHOD_DAC_CONTRACT;
#ifdef TARGET_ARM
return COMPACT_ENTRY_ARM_CODE + count * TEP_ENTRY_SIZE + TEP_CENTRAL_JUMP_SIZE;
#else // TARGET_ARM
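// On x86/amd64 the entry points are grouped into blocks of at most TEP_MAX_BLOCK_INDEX
// entries, each block followed by one CentralJumpCode; the extra leading byte keeps the
// entry points at odd addresses so they can be distinguished from normal code.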
int fullBlocks = count / TEP_MAX_BLOCK_INDEX;
int remainder = count % TEP_MAX_BLOCK_INDEX;
return 1 + (fullBlocks * TEP_FULL_BLOCK_SIZE) +
(remainder * TEP_ENTRY_SIZE) + ((remainder != 0) ? TEP_CENTRAL_JUMP_SIZE : 0);
#endif // TARGET_ARM
}
#ifndef DACCESS_COMPILE
TADDR MethodDescChunk::AllocateCompactEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker)
{
CONTRACTL {
THROWS;
GC_NOTRIGGER;
} CONTRACTL_END;
int count = GetCount();
SIZE_T size = SizeOfCompactEntryPoints(count);
TADDR temporaryEntryPoints = (TADDR)pamTracker->Track(pLoaderAllocator->GetPrecodeHeap()->AllocAlignedMem(size, sizeof(TADDR)));
ExecutableWriterHolder<void> temporaryEntryPointsWriterHolder((void *)temporaryEntryPoints, size);
size_t rxOffset = temporaryEntryPoints - (TADDR)temporaryEntryPointsWriterHolder.GetRW();
#ifdef TARGET_ARM
BYTE* p = (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + COMPACT_ENTRY_ARM_CODE;
int relOffset = count * TEP_ENTRY_SIZE - TEP_ENTRY_SIZE; // relative offset for the short jump
_ASSERTE (relOffset < MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB);
#else // TARGET_ARM
// make the temporary entrypoints unaligned, so they are easy to identify
BYTE* p = (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + 1;
int indexInBlock = TEP_MAX_BLOCK_INDEX; // recompute relOffset in first iteration
int relOffset = 0; // relative offset for the short jump
#endif // TARGET_ARM
MethodDesc * pBaseMD = 0; // index of the start of the block
MethodDesc * pMD = GetFirstMethodDesc();
for (int index = 0; index < count; index++)
{
#ifdef TARGET_ARM
uint8_t *pMovInstrByte1 = (uint8_t *)p;
uint8_t *pMovInstrByte2 = (uint8_t *)p+1;
uint16_t *pBranchInstr = ((uint16_t *)p)+1;
*pMovInstrByte1 = TEP_ENTRY_INSTR1_BYTE1;
*pMovInstrByte2 = TEP_ENTRY_INSTR1_BYTE2;
*pBranchInstr = EncodeBranchToCentralJump ((int16_t) relOffset);
p += TEP_ENTRY_SIZE;
#else // TARGET_ARM
if (indexInBlock == TEP_MAX_BLOCK_INDEX)
{
relOffset = (min(count - index, TEP_MAX_BEFORE_INDEX) - 1) * TEP_ENTRY_SIZE;
indexInBlock = 0;
pBaseMD = pMD;
}
*(p+0) = X86_INSTR_MOV_AL;
int methodDescIndex = pMD->GetMethodDescIndex() - pBaseMD->GetMethodDescIndex();
_ASSERTE(FitsInU1(methodDescIndex));
*(p+1) = (BYTE)methodDescIndex;
*(p+2) = X86_INSTR_JMP_REL8;
_ASSERTE(FitsInI1(relOffset));
*(p+3) = (BYTE)relOffset;
p += TEP_ENTRY_SIZE; static_assert_no_msg(TEP_ENTRY_SIZE == 4);
if (relOffset == 0)
{
CentralJumpCode* pCode = (CentralJumpCode*)p;
CentralJumpCode* pCodeRX = (CentralJumpCode*)(p + rxOffset);
memcpy(pCode, &c_CentralJumpCode, TEP_CENTRAL_JUMP_SIZE);
pCode->Setup(pCodeRX, pBaseMD, GetPreStubEntryPoint(), pLoaderAllocator);
p += TEP_CENTRAL_JUMP_SIZE;
relOffset -= TEP_CENTRAL_JUMP_SIZE;
}
indexInBlock++;
#endif // TARGET_ARM
relOffset -= TEP_ENTRY_SIZE;
pMD = (MethodDesc *)((BYTE *)pMD + pMD->SizeOf());
}
#ifdef TARGET_ARM
CentralJumpCode* pCode = (CentralJumpCode*)p;
memcpy(pCode, &c_CentralJumpCode, TEP_CENTRAL_JUMP_SIZE);
pCode->Setup (GetPreStubCompactARMEntryPoint(), this);
_ASSERTE(p + TEP_CENTRAL_JUMP_SIZE == (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + size);
#else // TARGET_ARM
_ASSERTE(p == (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + size);
#endif // TARGET_ARM
ClrFlushInstructionCache((LPVOID)temporaryEntryPoints, size);
SetHasCompactEntryPoints();
return temporaryEntryPoints;
}
#endif // !DACCESS_COMPILE
#endif // HAS_COMPACT_ENTRYPOINTS
//*******************************************************************************
PCODE MethodDescChunk::GetTemporaryEntryPoint(int index)
{
LIMITED_METHOD_CONTRACT;
#ifdef HAS_COMPACT_ENTRYPOINTS
if (HasCompactEntryPoints())
{
#ifdef TARGET_ARM
return GetTemporaryEntryPoints() + COMPACT_ENTRY_ARM_CODE + THUMB_CODE + index * TEP_ENTRY_SIZE;
#else // TARGET_ARM
int fullBlocks = index / TEP_MAX_BLOCK_INDEX;
int remainder = index % TEP_MAX_BLOCK_INDEX;
return GetTemporaryEntryPoints() + 1 + (fullBlocks * TEP_FULL_BLOCK_SIZE) +
(remainder * TEP_ENTRY_SIZE) + ((remainder >= TEP_MAX_BEFORE_INDEX) ? TEP_CENTRAL_JUMP_SIZE : 0);
#endif // TARGET_ARM
}
#endif // HAS_COMPACT_ENTRYPOINTS
return Precode::GetPrecodeForTemporaryEntryPoint(GetTemporaryEntryPoints(), index)->GetEntryPoint();
}
PCODE MethodDesc::GetTemporaryEntryPoint()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
MethodDescChunk* pChunk = GetMethodDescChunk();
int lo = 0, hi = pChunk->GetCount() - 1;
// Find the temporary entrypoint in the chunk by binary search
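// This works because MethodDescs in a chunk are laid out in increasing address order and
// their temporary entry points are indexed in the same order.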
while (lo < hi)
{
int mid = (lo + hi) / 2;
TADDR pEntryPoint = pChunk->GetTemporaryEntryPoint(mid);
MethodDesc * pMD = MethodDesc::GetMethodDescFromStubAddr(pEntryPoint);
if (PTR_HOST_TO_TADDR(this) == PTR_HOST_TO_TADDR(pMD))
return pEntryPoint;
if (PTR_HOST_TO_TADDR(this) > PTR_HOST_TO_TADDR(pMD))
lo = mid + 1;
else
hi = mid - 1;
}
_ASSERTE(lo == hi);
TADDR pEntryPoint = pChunk->GetTemporaryEntryPoint(lo);
#ifdef _DEBUG
MethodDesc * pMD = MethodDesc::GetMethodDescFromStubAddr(pEntryPoint);
_ASSERTE(PTR_HOST_TO_TADDR(this) == PTR_HOST_TO_TADDR(pMD));
#endif
return pEntryPoint;
}
#ifndef DACCESS_COMPILE
//*******************************************************************************
void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker)
{
WRAPPER_NO_CONTRACT;
GetMethodDescChunk()->EnsureTemporaryEntryPointsCreated(pLoaderAllocator, pamTracker);
PTR_PCODE pSlot = GetAddrOfSlot();
_ASSERTE(*pSlot == NULL);
*pSlot = GetTemporaryEntryPoint();
if (RequiresStableEntryPoint())
{
// The rest of the system assumes that certain methods always have stable entrypoints.
// Create them now.
GetOrCreatePrecode();
}
}
//*******************************************************************************
void MethodDescChunk::CreateTemporaryEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(GetTemporaryEntryPoints() == NULL);
TADDR temporaryEntryPoints = Precode::AllocateTemporaryEntryPoints(this, pLoaderAllocator, pamTracker);
#ifdef HAS_COMPACT_ENTRYPOINTS
// Precodes are allocated only if they provide a more compact representation or if they are required
if (temporaryEntryPoints == NULL)
{
temporaryEntryPoints = AllocateCompactEntryPoints(pLoaderAllocator, pamTracker);
}
#endif // HAS_COMPACT_ENTRYPOINTS
*(((TADDR *)this)-1) = temporaryEntryPoints;
_ASSERTE(GetTemporaryEntryPoints() != NULL);
}
//*******************************************************************************
void MethodDesc::InterlockedUpdateFlags2(BYTE bMask, BOOL fSet)
{
WRAPPER_NO_CONTRACT;
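// m_bFlags2 is a single byte located three bytes past a LONG-aligned boundary, so back up
// to that boundary and shift the mask into the byte's position for the interlocked update.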
ULONG* pLong = (ULONG*)(&m_bFlags2 - 3);
static_assert_no_msg(offsetof(MethodDesc, m_bFlags2) % sizeof(LONG) == 3);
#if BIGENDIAN
if (fSet)
FastInterlockOr(pLong, (ULONG)bMask);
else
FastInterlockAnd(pLong, ~(ULONG)bMask);
#else // !BIGENDIAN
if (fSet)
FastInterlockOr(pLong, (ULONG)bMask << (3 * 8));
else
FastInterlockAnd(pLong, ~((ULONG)bMask << (3 * 8)));
#endif // !BIGENDIAN
}
//*******************************************************************************
Precode* MethodDesc::GetOrCreatePrecode()
{
WRAPPER_NO_CONTRACT;
_ASSERTE(!IsVersionableWithVtableSlotBackpatch());
if (HasPrecode())
{
return GetPrecode();
}
PTR_PCODE pSlot = GetAddrOfSlot();
PCODE tempEntry = GetTemporaryEntryPoint();
PrecodeType requiredType = GetPrecodeType();
PrecodeType availableType = PRECODE_INVALID;
if (!GetMethodDescChunk()->HasCompactEntryPoints())
{
availableType = Precode::GetPrecodeFromEntryPoint(tempEntry)->GetType();
}
// Allocate the precode if necessary
if (requiredType != availableType)
{
// code:Precode::AllocateTemporaryEntryPoints should always create precode of the right type for dynamic methods.
// If we took this path for dynamic methods, the precode may leak since we may allocate it in domain-neutral loader heap.
_ASSERTE(!IsLCGMethod());
AllocMemTracker amt;
Precode* pPrecode = Precode::Allocate(requiredType, this, GetLoaderAllocator(), &amt);
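// Publish the precode's entry point only if the slot still holds the temporary entry point;
// if another thread won the race, the tracker releases the precode allocation.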
if (FastInterlockCompareExchangePointer(pSlot, pPrecode->GetEntryPoint(), tempEntry) == tempEntry)
amt.SuppressRelease();
}
// Set the flags atomically
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint | enum_flag2_HasPrecode, TRUE);
return Precode::GetPrecodeFromEntryPoint(*pSlot);
}
bool MethodDesc::DetermineAndSetIsEligibleForTieredCompilation()
{
WRAPPER_NO_CONTRACT;
#ifdef FEATURE_TIERED_COMPILATION
#ifndef FEATURE_CODE_VERSIONING
#error Tiered compilation requires code versioning
#endif
// Keep in-sync with MethodTableBuilder::NeedsNativeCodeSlot(bmtMDMethod * pMDMethod)
// to ensure native slots are available where needed.
if (
// Policy
g_pConfig->TieredCompilation() &&
// Functional requirement - The NativeCodeSlot is required to hold the code pointer for the default code version because
// the method's entry point slot will point to a precode or to the current code entry point
HasNativeCodeSlot() &&
// Functional requirement - These methods have no IL that could be optimized
!IsWrapperStub() &&
// Functional requirement
CodeVersionManager::IsMethodSupported(this) &&
// Policy - If QuickJit is disabled and the module does not have any pregenerated code, the method would effectively not
// be tiered currently, so make the method ineligible for tiering to avoid some unnecessary overhead
(g_pConfig->TieredCompilation_QuickJit() || GetModule()->IsReadyToRun()) &&
// Policy - Generating optimized code is not disabled
!IsJitOptimizationDisabled() &&
// Policy - Tiered compilation is not disabled by the profiler
!CORProfilerDisableTieredCompilation())
{
m_bFlags2 |= enum_flag2_IsEligibleForTieredCompilation;
_ASSERTE(IsVersionable());
return true;
}
#endif
return false;
}
#endif // !DACCESS_COMPILE
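// Returns true when optimized code must not be generated for this method: min-opts or
// debuggable code is forced, the debugger has disabled optimizations for the module, or the
// method is marked with MethodImplOptions.NoOptimization.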
bool MethodDesc::IsJitOptimizationDisabled()
{
WRAPPER_NO_CONTRACT;
return
g_pConfig->JitMinOpts() ||
#ifdef _DEBUG
g_pConfig->GenDebuggableCode() ||
#endif
CORDisableJITOptimizations(GetModule()->GetDebuggerInfoBits()) ||
(!IsNoMetadata() && IsMiNoOptimization(GetImplAttrs()));
}
#ifndef DACCESS_COMPILE
void MethodDesc::RecordAndBackpatchEntryPointSlot(
LoaderAllocator *slotLoaderAllocator, // the loader allocator from which the slot's memory is allocated
TADDR slot,
EntryPointSlots::SlotType slotType)
{
WRAPPER_NO_CONTRACT;
GCX_PREEMP();
LoaderAllocator *mdLoaderAllocator = GetLoaderAllocator();
MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCCoop slotBackpatchLockHolder;
RecordAndBackpatchEntryPointSlot_Locked(
mdLoaderAllocator,
slotLoaderAllocator,
slot,
slotType,
GetEntryPointToBackpatch_Locked());
}
// This function tries to record a slot that would contain an entry point for the method, and backpatches the slot to contain
// the method's current entry point. Once recorded, changes to the entry point due to tiering will cause the slot to be backpatched
// as necessary.
void MethodDesc::RecordAndBackpatchEntryPointSlot_Locked(
LoaderAllocator *mdLoaderAllocator,
LoaderAllocator *slotLoaderAllocator, // the loader allocator from which the slot's memory is allocated
TADDR slot,
EntryPointSlots::SlotType slotType,
PCODE currentEntryPoint)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(MethodDescBackpatchInfoTracker::IsLockOwnedByCurrentThread());
_ASSERTE(mdLoaderAllocator != nullptr);
_ASSERTE(mdLoaderAllocator == GetLoaderAllocator());
_ASSERTE(slotLoaderAllocator != nullptr);
_ASSERTE(slot != NULL);
_ASSERTE(slotType < EntryPointSlots::SlotType_Count);
_ASSERTE(MayHaveEntryPointSlotsToBackpatch());
// The specified current entry point must actually be *current* in the sense that it must have been retrieved inside the
// lock, such that a recorded slot is guaranteed to point to the entry point at the time at which it was recorded, in order
// to synchronize with backpatching in MethodDesc::BackpatchEntryPointSlots(). If a slot pointing to an older entry point
// were to be recorded due to concurrency issues, it would not get backpatched to point to the more recent, actually
// current, entry point until another entry point change, which may never happen.
_ASSERTE(currentEntryPoint == GetEntryPointToBackpatch_Locked());
MethodDescBackpatchInfoTracker *backpatchTracker = mdLoaderAllocator->GetMethodDescBackpatchInfoTracker();
backpatchTracker->AddSlotAndPatch_Locked(this, slotLoaderAllocator, slot, slotType, currentEntryPoint);
}
FORCEINLINE bool MethodDesc::TryBackpatchEntryPointSlots(
PCODE entryPoint,
bool isPrestubEntryPoint,
bool onlyFromPrestubEntryPoint)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(MayHaveEntryPointSlotsToBackpatch());
_ASSERTE(entryPoint != NULL);
_ASSERTE(isPrestubEntryPoint == (entryPoint == GetPrestubEntryPointToBackpatch()));
_ASSERTE(!isPrestubEntryPoint || !onlyFromPrestubEntryPoint);
_ASSERTE(MethodDescBackpatchInfoTracker::IsLockOwnedByCurrentThread());
LoaderAllocator *mdLoaderAllocator = GetLoaderAllocator();
MethodDescBackpatchInfoTracker *backpatchInfoTracker = mdLoaderAllocator->GetMethodDescBackpatchInfoTracker();
// Get the entry point to backpatch inside the lock to synchronize with backpatching in MethodDesc::DoBackpatch()
PCODE previousEntryPoint = GetEntryPointToBackpatch_Locked();
if (previousEntryPoint == entryPoint)
{
return true;
}
if (onlyFromPrestubEntryPoint && previousEntryPoint != GetPrestubEntryPointToBackpatch())
{
return false;
}
if (IsVersionableWithVtableSlotBackpatch())
{
// Backpatch the func ptr stub if it was created
FuncPtrStubs *funcPtrStubs = mdLoaderAllocator->GetFuncPtrStubsNoCreate();
if (funcPtrStubs != nullptr)
{
Precode *funcPtrPrecode = funcPtrStubs->Lookup(this);
if (funcPtrPrecode != nullptr)
{
if (isPrestubEntryPoint)
{
funcPtrPrecode->ResetTargetInterlocked();
}
else
{
funcPtrPrecode->SetTargetInterlocked(entryPoint, FALSE /* fOnlyRedirectFromPrestub */);
}
}
}
}
backpatchInfoTracker->Backpatch_Locked(this, entryPoint);
// Set the entry point to backpatch inside the lock to synchronize with backpatching in MethodDesc::DoBackpatch(), and set
// it last in case there are exceptions above, as setting the entry point indicates that all recorded slots have been
// backpatched
SetEntryPointToBackpatch_Locked(entryPoint);
return true;
}
void MethodDesc::TrySetInitialCodeEntryPointForVersionableMethod(
PCODE entryPoint,
bool mayHaveEntryPointSlotsToBackpatch)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(entryPoint != NULL);
_ASSERTE(IsVersionable());
_ASSERTE(mayHaveEntryPointSlotsToBackpatch == MayHaveEntryPointSlotsToBackpatch());
if (mayHaveEntryPointSlotsToBackpatch)
{
TryBackpatchEntryPointSlotsFromPrestub(entryPoint);
}
else
{
_ASSERTE(IsVersionableWithPrecode());
GetOrCreatePrecode()->SetTargetInterlocked(entryPoint, TRUE /* fOnlyRedirectFromPrestub */);
}
}
void MethodDesc::SetCodeEntryPoint(PCODE entryPoint)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(entryPoint != NULL);
if (MayHaveEntryPointSlotsToBackpatch())
{
BackpatchEntryPointSlots(entryPoint);
}
else if (IsVersionable())
{
_ASSERTE(IsVersionableWithPrecode());
GetOrCreatePrecode()->SetTargetInterlocked(entryPoint, FALSE /* fOnlyRedirectFromPrestub */);
// SetTargetInterlocked() would return false if it lost the race with another thread. That is fine, this thread
// can continue assuming it was successful, similarly to it successfully updating the target and another thread
// updating the target again shortly afterwards.
}
else if (HasPrecode())
{
GetPrecode()->SetTargetInterlocked(entryPoint);
}
else if (!HasStableEntryPoint())
{
SetStableEntryPointInterlocked(entryPoint);
}
}
void MethodDesc::ResetCodeEntryPoint()
{
WRAPPER_NO_CONTRACT;
_ASSERTE(IsVersionable());
if (MayHaveEntryPointSlotsToBackpatch())
{
BackpatchToResetEntryPointSlots();
return;
}
_ASSERTE(IsVersionableWithPrecode());
if (HasPrecode())
{
GetPrecode()->ResetTargetInterlocked();
}
}
void MethodDesc::ResetCodeEntryPointForEnC()
{
WRAPPER_NO_CONTRACT;
_ASSERTE(!IsVersionable());
_ASSERTE(!IsVersionableWithPrecode());
_ASSERTE(!MayHaveEntryPointSlotsToBackpatch());
if (HasPrecode())
{
GetPrecode()->ResetTargetInterlocked();
}
if (HasNativeCodeSlot())
{
*GetAddrOfNativeCodeSlot() = NULL;
}
}
//*******************************************************************************
BOOL MethodDesc::SetNativeCodeInterlocked(PCODE addr, PCODE pExpected /*=NULL*/)
{
CONTRACTL {
THROWS;
GC_NOTRIGGER;
} CONTRACTL_END;
_ASSERTE(!IsDefaultInterfaceMethod() || HasNativeCodeSlot());
if (HasNativeCodeSlot())
{
#ifdef TARGET_ARM
_ASSERTE(IsThumbCode(addr) || (addr==NULL));
addr &= ~THUMB_CODE;
if (pExpected != NULL)
{
_ASSERTE(IsThumbCode(pExpected));
pExpected &= ~THUMB_CODE;
}
#endif
PTR_PCODE pSlot = GetAddrOfNativeCodeSlot();
NativeCodeSlot expected;
expected = *pSlot;
return FastInterlockCompareExchangePointer(reinterpret_cast<TADDR*>(pSlot),
(TADDR&)addr, (TADDR&)expected) == (TADDR&)expected;
}
_ASSERTE(pExpected == NULL);
return SetStableEntryPointInterlocked(addr);
}
//*******************************************************************************
void MethodDesc::SetMethodEntryPoint(PCODE addr)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(addr != NULL);
// Similarly to GetMethodEntryPoint(), it is up to the caller to ensure that calls to this function are appropriately
// synchronized. Currently, the only caller synchronizes with the following lock.
_ASSERTE(MethodDescBackpatchInfoTracker::IsLockOwnedByCurrentThread());
*GetAddrOfSlot() = addr;
}
//*******************************************************************************
BOOL MethodDesc::SetStableEntryPointInterlocked(PCODE addr)
{
CONTRACTL {
THROWS;
GC_NOTRIGGER;
} CONTRACTL_END;
_ASSERTE(!HasPrecode());
_ASSERTE(!IsVersionable());
PCODE pExpected = GetTemporaryEntryPoint();
PTR_PCODE pSlot = GetAddrOfSlot();
BOOL fResult = FastInterlockCompareExchangePointer(pSlot, addr, pExpected) == pExpected;
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint, TRUE);
return fResult;
}
BOOL NDirectMethodDesc::ComputeMarshalingRequired()
{
WRAPPER_NO_CONTRACT;
return NDirect::MarshalingRequired(this);
}
/**********************************************************************************/
// Forward declare the NDirectImportWorker function - See dllimport.cpp
EXTERN_C LPVOID STDCALL NDirectImportWorker(NDirectMethodDesc*);
void *NDirectMethodDesc::ResolveAndSetNDirectTarget(_In_ NDirectMethodDesc* pMD)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
PRECONDITION(CheckPointer(pMD));
}
CONTRACTL_END
// This build conditional is here due to dllimport.cpp
// not being relevant during the crossgen build.
LPVOID targetMaybe = NDirectImportWorker(pMD);
_ASSERTE(targetMaybe != nullptr);
pMD->SetNDirectTarget(targetMaybe);
return targetMaybe;
}
BOOL NDirectMethodDesc::TryResolveNDirectTargetForNoGCTransition(_In_ MethodDesc* pMD, _Out_ void** ndirectTarget)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
PRECONDITION(CheckPointer(pMD));
PRECONDITION(CheckPointer(ndirectTarget));
}
CONTRACTL_END
if (!pMD->ShouldSuppressGCTransition())
return FALSE;
_ASSERTE(pMD->IsNDirect());
*ndirectTarget = ResolveAndSetNDirectTarget((NDirectMethodDesc*)pMD);
return TRUE;
}
//*******************************************************************************
void NDirectMethodDesc::InterlockedSetNDirectFlags(WORD wFlags)
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
}
CONTRACTL_END
// Since InterlockedCompareExchange only works on ULONGs,
// we'll have to operate on the entire ULONG. Ugh.
WORD *pFlags = &ndirect.m_wFlags;
// Make sure that the flags word is aligned on a 4 byte boundary
_ASSERTE( ( ((size_t) pFlags) & (sizeof(ULONG)-1) ) == 0);
// Ensure we won't be reading or writing outside the bounds of the NDirectMethodDesc.
_ASSERTE((BYTE*)pFlags >= (BYTE*)this);
_ASSERTE((BYTE*)pFlags+sizeof(ULONG) <= (BYTE*)(this+1));
DWORD dwMask = 0;
// Set the flags in the mask
((WORD*)&dwMask)[0] |= wFlags;
// Now, slam all 32 bits atomically.
FastInterlockOr((DWORD*)pFlags, dwMask);
}
#ifdef TARGET_WINDOWS
FARPROC NDirectMethodDesc::FindEntryPointWithMangling(NATIVE_LIBRARY_HANDLE hMod, PTR_CUTF8 entryPointName)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END;
FARPROC pFunc = GetProcAddress(hMod, entryPointName);
#if defined(TARGET_X86)
if (pFunc)
{
return pFunc;
}
if (IsStdCall())
{
EnsureStackArgumentSize();
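// Probe for the stdcall-decorated export of the form "_<name>@<stack arg bytes>",
// e.g. a 12-byte argument list would probe "_SomeEntryPoint@12" (example name only).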
DWORD probedEntrypointNameLength = (DWORD)(strlen(entryPointName) + 1); // 1 for null terminator
int dstbufsize = (int)(sizeof(char) * (probedEntrypointNameLength + 10)); // 10 for stdcall mangling
LPSTR szProbedEntrypointName = ((LPSTR)_alloca(dstbufsize + 1));
szProbedEntrypointName[0] = '_';
strcpy_s(szProbedEntrypointName + 1, dstbufsize, entryPointName);
szProbedEntrypointName[probedEntrypointNameLength] = '\0'; // Add an extra '\0'.
UINT16 numParamBytesMangle = GetStackArgumentSize();
sprintf_s(szProbedEntrypointName + probedEntrypointNameLength, dstbufsize - probedEntrypointNameLength + 1, "@%lu", (ULONG)numParamBytesMangle);
pFunc = GetProcAddress(hMod, szProbedEntrypointName);
}
#endif
return pFunc;
}
FARPROC NDirectMethodDesc::FindEntryPointWithSuffix(NATIVE_LIBRARY_HANDLE hMod, PTR_CUTF8 entryPointName, char suffix)
{
// Allocate space for a copy of the entry point name.
DWORD entryPointWithSuffixLen = (DWORD)(strlen(entryPointName) + 1); // +1 for charset decorations
int dstbufsize = (int)(sizeof(char) * (entryPointWithSuffixLen + 1)); // +1 for the null terminator
LPSTR entryPointWithSuffix = ((LPSTR)_alloca(dstbufsize));
// Copy the name so we can mangle it.
strcpy_s(entryPointWithSuffix, dstbufsize, entryPointName);
entryPointWithSuffix[entryPointWithSuffixLen] = '\0'; // Null terminator
entryPointWithSuffix[entryPointWithSuffixLen - 1] = suffix; // Charset suffix
// Look for entry point with the suffix based on charset
return FindEntryPointWithMangling(hMod, entryPointWithSuffix);
}
#endif
//*******************************************************************************
LPVOID NDirectMethodDesc::FindEntryPoint(NATIVE_LIBRARY_HANDLE hMod)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END;
char const * funcName = GetEntrypointName();
#ifndef TARGET_WINDOWS
return reinterpret_cast<LPVOID>(PAL_GetProcAddressDirect(hMod, funcName));
#else
// Handle ordinals.
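// An entry point name of the form "#<number>" means import by ordinal, e.g. "#42" resolves ordinal 42.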
if (funcName[0] == '#')
{
long ordinal = atol(funcName + 1);
return reinterpret_cast<LPVOID>(GetProcAddress(hMod, (LPCSTR)(size_t)((UINT16)ordinal)));
}
FARPROC pFunc = NULL;
if (IsNativeNoMangled())
{
// Look for the user-provided entry point name only
pFunc = FindEntryPointWithMangling(hMod, funcName);
}
else if (IsNativeAnsi())
{
// For ANSI, look for the user-provided entry point name first.
// If that does not exist, try the charset suffix.
pFunc = FindEntryPointWithMangling(hMod, funcName);
if (pFunc == NULL)
pFunc = FindEntryPointWithSuffix(hMod, funcName, 'A');
}
else
{
// For Unicode, look for the entry point name with the charset suffix first.
// The 'W' API takes precedence over the undecorated one.
pFunc = FindEntryPointWithSuffix(hMod, funcName, 'W');
if (pFunc == NULL)
pFunc = FindEntryPointWithMangling(hMod, funcName);
}
return reinterpret_cast<LPVOID>(pFunc);
#endif
}
#if defined(TARGET_X86)
//*******************************************************************************
void NDirectMethodDesc::EnsureStackArgumentSize()
{
STANDARD_VM_CONTRACT;
if (ndirect.m_cbStackArgumentSize == 0xFFFF)
{
// Checking MarshalingRequired() computes and caches the stack argument size as a side-effect when marshalling is not required.
if (MarshalingRequired())
{
// Generating interop stub sets the stack size as side-effect in all cases
GetStubForInteropMethod(this, NDIRECTSTUB_FL_FOR_NUMPARAMBYTES);
}
}
}
#endif
//*******************************************************************************
void NDirectMethodDesc::InitEarlyBoundNDirectTarget()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
}
CONTRACTL_END
_ASSERTE(IsEarlyBound());
if (IsClassConstructorTriggeredAtLinkTime())
{
GetMethodTable()->CheckRunClassInitThrowing();
}
const void *target = GetModule()->GetInternalPInvokeTarget(GetRVA());
_ASSERTE(target != 0);
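// If the early-bound target looks like GetLastError, swap in FalseGetLastError so the
// thread's saved last-error value is returned instead (see FalseGetLastError).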
if (HeuristicDoesThisLookLikeAGetLastErrorCall((LPBYTE)target))
target = (BYTE*)FalseGetLastError;
// As long as we've set the NDirect target field we don't need to backpatch the import thunk glue.
// All NDirect calls go through the NDirect target, so if it's updated, then we won't go into
// NDirectImportThunk(). In fact, backpatching the import thunk glue leads to race conditions.
SetNDirectTarget((LPVOID)target);
}
//*******************************************************************************
BOOL MethodDesc::HasUnmanagedCallersOnlyAttribute()
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
FORBID_FAULT;
}
CONTRACTL_END;
if (IsILStub())
{
// Stubs generated for being called from native code are equivalent to
// managed methods marked with UnmanagedCallersOnly.
return AsDynamicMethodDesc()->GetILStubType() == DynamicMethodDesc::StubNativeToCLRInterop;
}
HRESULT hr = GetCustomAttribute(
WellKnownAttribute::UnmanagedCallersOnly,
nullptr,
nullptr);
if (hr != S_OK)
{
// See https://github.com/dotnet/runtime/issues/37622
hr = GetCustomAttribute(
WellKnownAttribute::NativeCallableInternal,
nullptr,
nullptr);
}
return (hr == S_OK) ? TRUE : FALSE;
}
//*******************************************************************************
BOOL MethodDesc::ShouldSuppressGCTransition()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM());
}
CONTRACTL_END;
MethodDesc* tgt = nullptr;
if (IsNDirect())
{
tgt = this;
}
else if (IsILStub())
{
// From the IL stub, determine if the actual target has been
// marked to suppress the GC transition.
PTR_DynamicMethodDesc ilStubMD = AsDynamicMethodDesc();
PTR_ILStubResolver ilStubResolver = ilStubMD->GetILStubResolver();
tgt = ilStubResolver->GetStubTargetMethodDesc();
// In the event we can't get or don't have a target, there is no way
// to determine if we should suppress the GC transition.
if (tgt == nullptr)
return FALSE;
}
else
{
return FALSE;
}
_ASSERTE(tgt != nullptr);
bool suppressGCTransition;
NDirect::GetCallingConvention_IgnoreErrors(tgt, NULL /*callConv*/, &suppressGCTransition);
return suppressGCTransition ? TRUE : FALSE;
}
#ifdef FEATURE_COMINTEROP
//*******************************************************************************
void ComPlusCallMethodDesc::InitComEventCallInfo()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM());
}
CONTRACTL_END
MethodTable *pItfMT = GetInterfaceMethodTable();
MethodDesc *pItfMD = this;
MethodTable *pSrcItfClass = NULL;
MethodTable *pEvProvClass = NULL;
// Retrieve the event provider class.
WORD cbExtraSlots = ComMethodTable::GetNumExtraSlots(pItfMT->GetComInterfaceType());
WORD itfSlotNum = (WORD) m_pComPlusCallInfo->m_cachedComSlot - cbExtraSlots;
pItfMT->GetEventInterfaceInfo(&pSrcItfClass, &pEvProvClass);
m_pComPlusCallInfo->m_pEventProviderMD = MemberLoader::FindMethodForInterfaceSlot(pEvProvClass, pItfMT, itfSlotNum);
// If we could not find the method, then the event provider does not support
// this event. This is a fatal error.
if (!m_pComPlusCallInfo->m_pEventProviderMD)
{
// Init the interface MD for error reporting.
pItfMD = (ComPlusCallMethodDesc*)pItfMT->GetMethodDescForSlot(itfSlotNum);
// Retrieve the event provider class name.
StackSString ssEvProvClassName;
pEvProvClass->_GetFullyQualifiedNameForClass(ssEvProvClassName);
// Retrieve the COM event interface class name.
StackSString ssEvItfName;
pItfMT->_GetFullyQualifiedNameForClass(ssEvItfName);
// Convert the method name to unicode.
StackSString ssMethodName(SString::Utf8, pItfMD->GetName());
// Throw the exception.
COMPlusThrow(kTypeLoadException, IDS_EE_METHOD_NOT_FOUND_ON_EV_PROV,
ssMethodName.GetUnicode(), ssEvItfName.GetUnicode(), ssEvProvClassName.GetUnicode());
}
}
#endif // FEATURE_COMINTEROP
#endif // !DACCESS_COMPILE
#ifdef DACCESS_COMPILE
//*******************************************************************************
void
MethodDesc::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
{
SUPPORTS_DAC;
if (DacHasMethodDescBeenEnumerated(this))
{
return;
}
// Save away the whole MethodDescChunk as in many
// places RecoverChunk is called on a method desc so
// the whole chunk must be available. This also
// automatically picks up any prestubs and such.
GetMethodDescChunk()->EnumMemoryRegions(flags);
if (HasPrecode())
{
GetPrecode()->EnumMemoryRegions(flags);
}
// Need to save the Debug-Info for this method so that we can see it in a debugger later.
DebugInfoManager::EnumMemoryRegionsForMethodDebugInfo(flags, this);
if (!IsNoMetadata() ||IsILStub())
{
// The assembling of the string below implicitly dumps the memory we need.
StackSString str;
TypeString::AppendMethodInternal(str, this, TypeString::FormatSignature|TypeString::FormatNamespace|TypeString::FormatFullInst);
#ifdef FEATURE_MINIMETADATA_IN_TRIAGEDUMPS
if (flags == CLRDATA_ENUM_MEM_MINI || flags == CLRDATA_ENUM_MEM_TRIAGE)
{
// we want to save just the method name, so truncate at the open parenthesis
SString::Iterator it = str.Begin();
if (str.Find(it, W('(')))
{
// ensure the symbol ends in "()" to minimize regressions
// in !analyze assuming the existence of the argument list
str.Truncate(++it);
str.Append(W(')'));
}
DacMdCacheAddEEName(dac_cast<TADDR>(this), str);
}
#endif // FEATURE_MINIMETADATA_IN_TRIAGEDUMPS
// The module path is used in the output of !clrstack and !pe if the
// module is not available when the minidump is inspected. By retrieving
// the path here, the required memory is implicitly dumped.
Module* pModule = GetModule();
if (pModule)
{
pModule->GetPath();
}
}
#ifdef FEATURE_CODE_VERSIONING
// Make sure the active IL and native code version are in triage dumps.
CodeVersionManager* pCodeVersionManager = GetCodeVersionManager();
ILCodeVersion ilVersion = pCodeVersionManager->GetActiveILCodeVersion(dac_cast<PTR_MethodDesc>(this));
if (!ilVersion.IsNull())
{
ilVersion.GetActiveNativeCodeVersion(dac_cast<PTR_MethodDesc>(this));
ilVersion.GetVersionId();
ilVersion.GetRejitState();
ilVersion.GetIL();
}
#endif
// Also, call DacValidateMD to dump the memory it needs. !clrstack calls
// DacValidateMD before it retrieves the method name. We don't expect
// DacValidateMD to fail, but if it does, ignore the failure and try to assemble the
// string anyway so that clients that don't validate the MD still work.
DacValidateMD(this);
DacSetMethodDescEnumerated(this);
}
//*******************************************************************************
void
StoredSigMethodDesc::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
{
SUPPORTS_DAC;
// 'this' already done, see below.
DacEnumMemoryRegion(GetSigRVA(), m_cSig);
}
//*******************************************************************************
void
MethodDescChunk::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
{
SUPPORTS_DAC;
DAC_CHECK_ENUM_THIS();
EMEM_OUT(("MEM: %p MethodDescChunk\n", dac_cast<TADDR>(this)));
DacEnumMemoryRegion(dac_cast<TADDR>(this), SizeOf());
PTR_MethodTable pMT = GetMethodTable();
if (pMT.IsValid())
{
pMT->EnumMemoryRegions(flags);
}
SIZE_T size;
#ifdef HAS_COMPACT_ENTRYPOINTS
if (HasCompactEntryPoints())
{
size = SizeOfCompactEntryPoints(GetCount());
}
else
#endif // HAS_COMPACT_ENTRYPOINTS
{
size = Precode::SizeOfTemporaryEntryPoints(GetTemporaryEntryPoints(), GetCount());
}
DacEnumMemoryRegion(GetTemporaryEntryPoints(), size);
MethodDesc * pMD = GetFirstMethodDesc();
MethodDesc * pOldMD = NULL;
while (pMD != NULL && pMD != pOldMD)
{
pOldMD = pMD;
EX_TRY
{
if (pMD->IsMethodImpl())
{
pMD->GetMethodImpl()->EnumMemoryRegions(flags);
}
}
EX_CATCH_RETHROW_ONLY_COR_E_OPERATIONCANCELLED
EX_TRY
{
if (pMD->HasStoredSig())
{
dac_cast<PTR_StoredSigMethodDesc>(pMD)->EnumMemoryRegions(flags);
}
// Check whether the next MethodDesc is within the bounds of the current chunks
TADDR pNext = dac_cast<TADDR>(pMD) + pMD->SizeOf();
TADDR pEnd = dac_cast<TADDR>(this) + this->SizeOf();
pMD = (pNext < pEnd) ? PTR_MethodDesc(pNext) : NULL;
}
EX_CATCH_RETHROW_ONLY_COR_E_OPERATIONCANCELLED
}
}
#endif // DACCESS_COMPILE
#ifndef DACCESS_COMPILE
//*******************************************************************************
MethodDesc *MethodDesc::GetInterfaceMD()
{
CONTRACT (MethodDesc*) {
THROWS;
GC_TRIGGERS;
INSTANCE_CHECK;
PRECONDITION(!IsInterface());
POSTCONDITION(CheckPointer(RETVAL, NULL_OK));
} CONTRACT_END;
MethodTable *pMT = GetMethodTable();
RETURN(pMT->ReverseInterfaceMDLookup(GetSlot()));
}
#endif // !DACCESS_COMPILE
PTR_LoaderAllocator MethodDesc::GetLoaderAllocator()
{
WRAPPER_NO_CONTRACT;
return GetLoaderModule()->GetLoaderAllocator();
}
#if !defined(DACCESS_COMPILE)
REFLECTMETHODREF MethodDesc::GetStubMethodInfo()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM());
MODE_COOPERATIVE;
}
CONTRACTL_END;
REFLECTMETHODREF retVal;
REFLECTMETHODREF methodRef = (REFLECTMETHODREF)AllocateObject(CoreLibBinder::GetClass(CLASS__STUBMETHODINFO));
GCPROTECT_BEGIN(methodRef);
methodRef->SetMethod(this);
LoaderAllocator *pLoaderAllocatorOfMethod = this->GetLoaderAllocator();
if (pLoaderAllocatorOfMethod->IsCollectible())
methodRef->SetKeepAlive(pLoaderAllocatorOfMethod->GetExposedObject());
retVal = methodRef;
GCPROTECT_END();
return retVal;
}
#endif // !DACCESS_COMPILE
#ifndef DACCESS_COMPILE
typedef void (*WalkValueTypeParameterFnPtr)(Module *pModule, mdToken token, Module *pDefModule, mdToken tkDefToken, SigPointer *ptr, SigTypeContext *pTypeContext, void *pData);
void MethodDesc::WalkValueTypeParameters(MethodTable *pMT, WalkValueTypeParameterFnPtr function, void *pData)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
}
CONTRACTL_END;
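// Walk the return type and every parameter in this method's signature; each value type
// argument that resolves to a type def is passed to 'function' (which may be NULL).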
uint32_t numArgs = 0;
Module *pModule = this->GetModule();
SigPointer ptr = this->GetSigPointer();
// skip over calling convention.
uint32_t callConv = 0;
IfFailThrowBF(ptr.GetCallingConvInfo(&callConv), BFA_BAD_SIGNATURE, pModule);
// If calling convention is generic, skip GenParamCount
if (callConv & IMAGE_CEE_CS_CALLCONV_GENERIC)
{
IfFailThrowBF(ptr.GetData(NULL), BFA_BAD_SIGNATURE, pModule);
}
IfFailThrowBF(ptr.GetData(&numArgs), BFA_BAD_SIGNATURE, pModule);
SigTypeContext typeContext(this, TypeHandle(pMT));
// iterate over the return type and parameters
for (DWORD j = 0; j <= numArgs; j++)
{
CorElementType type = ptr.PeekElemTypeClosed(pModule, &typeContext);
if (type != ELEMENT_TYPE_VALUETYPE)
goto moveToNextToken;
mdToken token;
Module *pTokenModule;
token = ptr.PeekValueTypeTokenClosed(pModule, &typeContext, &pTokenModule);
if (token == mdTokenNil)
goto moveToNextToken;
DWORD dwAttrType;
Module *pDefModule;
mdToken defToken;
dwAttrType = 0;
if (ClassLoader::ResolveTokenToTypeDefThrowing(pTokenModule, token, &pDefModule, &defToken))
{
if (function != NULL)
function(pModule, token, pDefModule, defToken, &ptr, &typeContext, pData);
}
moveToNextToken:
// move to next argument token
IfFailThrowBF(ptr.SkipExactlyOne(), BFA_BAD_SIGNATURE, pModule);
}
if (!HaveValueTypeParametersBeenWalked())
{
SetValueTypeParametersWalked();
}
}
PrecodeType MethodDesc::GetPrecodeType()
{
LIMITED_METHOD_CONTRACT;
PrecodeType precodeType = PRECODE_INVALID;
#ifdef HAS_FIXUP_PRECODE
if (!RequiresMethodDescCallingConvention())
{
// Use the more efficient fixup precode if possible
precodeType = PRECODE_FIXUP;
}
else
#endif // HAS_FIXUP_PRECODE
{
precodeType = PRECODE_STUB;
}
return precodeType;
}
#endif // !DACCESS_COMPILE
#ifdef FEATURE_COMINTEROP
#ifndef DACCESS_COMPILE
void ComPlusCallMethodDesc::InitRetThunk()
{
WRAPPER_NO_CONTRACT;
#ifdef TARGET_X86
if (m_pComPlusCallInfo->m_pRetThunk != NULL)
return;
// Record the fact that we are writing into the ComPlusCallMethodDesc
g_IBCLogger.LogMethodDescAccess(this);
UINT numStackBytes = CbStackPop();
LPVOID pRetThunk = ComPlusCall::GetRetThunk(numStackBytes);
FastInterlockCompareExchangePointer<void *>(&m_pComPlusCallInfo->m_pRetThunk, pRetThunk, NULL);
#endif // TARGET_X86
}
#endif //!DACCESS_COMPILE
#endif // FEATURE_COMINTEROP
#ifndef DACCESS_COMPILE
void MethodDesc::PrepareForUseAsADependencyOfANativeImageWorker()
{
STANDARD_VM_CONTRACT;
// This function ensures that a method is ready for use as a dependency of a native image
// The current requirement is only that valuetypes can be resolved to their type defs as much
// as is possible. (If the method is actually called, then this will not throw, but there
// are cases where we call this method and we are unaware if this method will actually be called
// or accessed as a native image dependency.) This explains both the contract (STANDARD_VM_CONTRACT
// - this method should be callable only when general purpose VM code can be called) and the
// TRY/CATCH below - this function should not introduce failures.
EX_TRY
{
WalkValueTypeParameters(this->GetMethodTable(), NULL, NULL);
}
EX_CATCH
{
}
EX_END_CATCH(RethrowTerminalExceptions);
_ASSERTE(HaveValueTypeParametersBeenWalked());
}
static void CheckForEquivalenceAndLoadType(Module *pModule, mdToken token, Module *pDefModule, mdToken defToken, const SigParser *ptr, SigTypeContext *pTypeContext, void *pData)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
}
CONTRACTL_END;
BOOL *pHasEquivalentParam = (BOOL *)pData;
#ifdef FEATURE_TYPEEQUIVALENCE
*pHasEquivalentParam = IsTypeDefEquivalent(defToken, pDefModule);
#else
_ASSERTE(*pHasEquivalentParam == FALSE); // Assert this is always false.
#endif // FEATURE_TYPEEQUIVALENCE
SigPointer sigPtr(*ptr);
TypeHandle th = sigPtr.GetTypeHandleThrowing(pModule, pTypeContext);
_ASSERTE(!th.IsNull());
}
void MethodDesc::PrepareForUseAsAFunctionPointer()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END;
// Since function pointers are unsafe and can enable type punning, all
// value type parameters must be loaded prior to providing a function pointer.
if (HaveValueTypeParametersBeenLoaded())
return;
BOOL fHasTypeEquivalentStructParameters = FALSE;
WalkValueTypeParameters(this->GetMethodTable(), CheckForEquivalenceAndLoadType, &fHasTypeEquivalentStructParameters);
#ifdef FEATURE_TYPEEQUIVALENCE
if (!fHasTypeEquivalentStructParameters)
SetDoesNotHaveEquivalentValuetypeParameters();
#endif // FEATURE_TYPEEQUIVALENCE
SetValueTypeParametersLoaded();
}
#endif //!DACCESS_COMPILE
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// ===========================================================================
// File: Method.CPP
//
//
// See the book of the runtime entry for overall design:
// file:../../doc/BookOfTheRuntime/ClassLoader/MethodDescDesign.doc
//
#include "common.h"
#include "excep.h"
#include "dbginterface.h"
#include "ecall.h"
#include "eeconfig.h"
#include "mlinfo.h"
#include "dllimport.h"
#include "generics.h"
#include "genericdict.h"
#include "typedesc.h"
#include "typestring.h"
#include "virtualcallstub.h"
#include "jitinterface.h"
#include "runtimehandles.h"
#include "eventtrace.h"
#include "interoputil.h"
#include "prettyprintsig.h"
#include "formattype.h"
#include "fieldmarshaler.h"
#include "versionresilienthashcode.h"
#include "typehashingalgorithms.h"
#ifdef FEATURE_COMINTEROP
#include "comcallablewrapper.h"
#include "clrtocomcall.h"
#endif
#ifdef FEATURE_MINIMETADATA_IN_TRIAGEDUMPS
GVAL_IMPL(DWORD, g_MiniMetaDataBuffMaxSize);
GVAL_IMPL(TADDR, g_MiniMetaDataBuffAddress);
#endif // FEATURE_MINIMETADATA_IN_TRIAGEDUMPS
// forward decl
bool FixupSignatureContainingInternalTypes(
DataImage * image,
PCCOR_SIGNATURE pSig,
DWORD cSig,
bool checkOnly = false);
// Alias ComPlusCallMethodDesc to regular MethodDesc to simplify definition of the size table
#ifndef FEATURE_COMINTEROP
#define ComPlusCallMethodDesc MethodDesc
#endif
// Verify that the structure sizes of our MethodDescs support proper
// aligning for atomic stub replacement.
//
static_assert_no_msg((sizeof(MethodDescChunk) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(MethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(FCallMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(NDirectMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(EEImplMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(ArrayMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(ComPlusCallMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
static_assert_no_msg((sizeof(DynamicMethodDesc) & MethodDesc::ALIGNMENT_MASK) == 0);
#define METHOD_DESC_SIZES(adjustment) \
adjustment + sizeof(MethodDesc), /* mcIL */ \
adjustment + sizeof(FCallMethodDesc), /* mcFCall */ \
adjustment + sizeof(NDirectMethodDesc), /* mcNDirect */ \
adjustment + sizeof(EEImplMethodDesc), /* mcEEImpl */ \
adjustment + sizeof(ArrayMethodDesc), /* mcArray */ \
adjustment + sizeof(InstantiatedMethodDesc), /* mcInstantiated */ \
adjustment + sizeof(ComPlusCallMethodDesc), /* mcComInterOp */ \
adjustment + sizeof(DynamicMethodDesc) /* mcDynamic */
const BYTE MethodDesc::s_ClassificationSizeTable[] = {
// This is the raw MethodDesc size for each classification, with no optional slots
METHOD_DESC_SIZES(0),
// This extended part of the table is used for faster MethodDesc size lookup.
// We index into it using the optional slot flags
METHOD_DESC_SIZES(sizeof(NonVtableSlot)),
METHOD_DESC_SIZES(sizeof(MethodImpl)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(MethodImpl)),
METHOD_DESC_SIZES(sizeof(NativeCodeSlot)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(NativeCodeSlot)),
METHOD_DESC_SIZES(sizeof(MethodImpl) + sizeof(NativeCodeSlot)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(MethodImpl) + sizeof(NativeCodeSlot)),
#ifdef FEATURE_COMINTEROP
METHOD_DESC_SIZES(sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(MethodImpl) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(MethodImpl) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(NativeCodeSlot) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(NativeCodeSlot) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(MethodImpl) + sizeof(NativeCodeSlot) + sizeof(ComPlusCallInfo)),
METHOD_DESC_SIZES(sizeof(NonVtableSlot) + sizeof(MethodImpl) + sizeof(NativeCodeSlot) + sizeof(ComPlusCallInfo))
#endif
};
#ifndef FEATURE_COMINTEROP
#undef ComPlusCallMethodDesc
#endif
class ArgIteratorBaseForPInvoke : public ArgIteratorBase
{
protected:
FORCEINLINE BOOL IsRegPassedStruct(MethodTable* pMT)
{
return pMT->GetNativeLayoutInfo()->IsNativeStructPassedInRegisters();
}
};
class PInvokeArgIterator : public ArgIteratorTemplate<ArgIteratorBaseForPInvoke>
{
public:
PInvokeArgIterator(MetaSig* pSig)
{
m_pSig = pSig;
}
};
//*******************************************************************************
SIZE_T MethodDesc::SizeOf()
{
LIMITED_METHOD_DAC_CONTRACT;
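// The classification bits and the optional-slot flags index directly into
// s_ClassificationSizeTable, so the returned size already includes any NonVtableSlot,
// MethodImpl, NativeCodeSlot (and ComPlusCallInfo) appended to this MethodDesc.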
SIZE_T size = s_ClassificationSizeTable[m_wFlags &
(mdcClassification
| mdcHasNonVtableSlot
| mdcMethodImpl
#ifdef FEATURE_COMINTEROP
| mdcHasComPlusCallInfo
#endif
| mdcHasNativeCodeSlot)];
return size;
}
/*********************************************************************/
#ifndef DACCESS_COMPILE
BOOL NDirectMethodDesc::HasDefaultDllImportSearchPathsAttribute()
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
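// The lookup is cached in the NDirect flags: kDefaultDllImportSearchPathsIsCached records that
// the attribute has been looked up, kDefaultDllImportSearchPathsStatus records whether it was found.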
if(IsDefaultDllImportSearchPathsAttributeCached())
{
return (ndirect.m_wFlags & kDefaultDllImportSearchPathsStatus) != 0;
}
BOOL attributeIsFound = GetDefaultDllImportSearchPathsAttributeValue(GetModule(),GetMemberDef(),&ndirect.m_DefaultDllImportSearchPathsAttributeValue);
if(attributeIsFound )
{
InterlockedSetNDirectFlags(kDefaultDllImportSearchPathsIsCached | kDefaultDllImportSearchPathsStatus);
}
else
{
InterlockedSetNDirectFlags(kDefaultDllImportSearchPathsIsCached);
}
return (ndirect.m_wFlags & kDefaultDllImportSearchPathsStatus) != 0;
}
#endif //!DACCESS_COMPILE
//*******************************************************************************
#ifndef DACCESS_COMPILE
VOID MethodDesc::EnsureActive()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END;
GetMethodTable()->EnsureInstanceActive();
if (HasMethodInstantiation() && !IsGenericMethodDefinition())
{
Instantiation methodInst = GetMethodInstantiation();
for (DWORD i = 0; i < methodInst.GetNumArgs(); ++i)
{
MethodTable * pMT = methodInst[i].GetMethodTable();
if (pMT)
pMT->EnsureInstanceActive();
}
}
}
#endif //!DACCESS_COMPILE
//*******************************************************************************
CHECK MethodDesc::CheckActivated()
{
WRAPPER_NO_CONTRACT;
CHECK(GetModule()->CheckActivated());
CHECK_OK;
}
//*******************************************************************************
BaseDomain *MethodDesc::GetDomain()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END
return AppDomain::GetCurrentDomain();
}
#ifndef DACCESS_COMPILE
//*******************************************************************************
LoaderAllocator * MethodDesc::GetDomainSpecificLoaderAllocator()
{
if (GetLoaderModule()->IsCollectible())
{
return GetLoaderAllocator();
}
else
{
return ::GetAppDomain()->GetLoaderAllocator();
}
}
#endif //!DACCESS_COMPILE
//*******************************************************************************
LPCUTF8 MethodDesc::GetName(USHORT slot)
{
// MethodDesc::GetDeclMethodDesc can throw.
WRAPPER_NO_CONTRACT;
MethodDesc *pDeclMD = GetDeclMethodDesc((UINT32)slot);
CONSISTENCY_CHECK(IsInterface() || !pDeclMD->IsInterface());
return pDeclMD->GetName();
}
//*******************************************************************************
LPCUTF8 MethodDesc::GetName()
{
CONTRACTL
{
if (FORBIDGC_LOADER_USE_ENABLED()) NOTHROW; else THROWS; // MethodImpl::FindMethodDesc can throw.
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}CONTRACTL_END;
g_IBCLogger.LogMethodDescAccess(this);
if (IsArray())
{
// Array classes don't have metadata tokens
return dac_cast<PTR_ArrayMethodDesc>(this)->GetMethodName();
}
else if (IsNoMetadata())
{
// LCG methods don't have metadata tokens
return dac_cast<PTR_DynamicMethodDesc>(this)->GetMethodName();
}
else
{
// Get the metadata string name for this method
LPCUTF8 result = NULL;
if (FAILED(GetMDImport()->GetNameOfMethodDef(GetMemberDef(), &result)))
{
result = NULL;
}
return(result);
}
}
#ifndef DACCESS_COMPILE
/*
* Function to get a method's name, its namespace
*/
VOID MethodDesc::GetMethodInfoNoSig(SString &namespaceOrClassName, SString &methodName)
{
static LPCWSTR pDynamicClassName = W("dynamicClass");
// namespace
if(IsDynamicMethod())
namespaceOrClassName.Append(pDynamicClassName);
else
TypeString::AppendType(namespaceOrClassName, TypeHandle(GetMethodTable()));
// name
methodName.AppendUTF8(GetName());
}
/*
* Function to get a method's name, its namespace and signature (legacy format)
*/
VOID MethodDesc::GetMethodInfo(SString &namespaceOrClassName, SString &methodName, SString &methodSignature)
{
GetMethodInfoNoSig(namespaceOrClassName, methodName);
// signature
CQuickBytes qbOut;
ULONG cSig = 0;
PCCOR_SIGNATURE pSig;
GetSig(&pSig, &cSig);
PrettyPrintSigInternalLegacy(pSig, cSig, " ", &qbOut, GetMDImport());
methodSignature.AppendUTF8((char *)qbOut.Ptr());
}
/*
* Function to get a method's name, its namespace and signature (new format)
*/
VOID MethodDesc::GetMethodInfoWithNewSig(SString &namespaceOrClassName, SString &methodName, SString &methodSignature)
{
GetMethodInfoNoSig(namespaceOrClassName, methodName);
// signature
CQuickBytes qbOut;
ULONG cSig = 0;
PCCOR_SIGNATURE pSig;
GetSig(&pSig, &cSig);
PrettyPrintSig(pSig, (DWORD)cSig, "", &qbOut, GetMDImport(), NULL);
methodSignature.AppendUTF8((char *)qbOut.Ptr());
}
/*
* Function to get a method's full name, something like
* void [mscorlib]System.StubHelpers.BSTRMarshaler::ClearNative(native int)
*/
VOID MethodDesc::GetFullMethodInfo(SString& fullMethodSigName)
{
SString namespaceOrClassName, methodName;
GetMethodInfoNoSig(namespaceOrClassName, methodName);
// signature
CQuickBytes qbOut;
ULONG cSig = 0;
PCCOR_SIGNATURE pSig;
SString methodFullName;
StackScratchBuffer namespaceNameBuffer, methodNameBuffer;
methodFullName.AppendPrintf(
(LPCUTF8)"[%s] %s::%s",
GetModule()->GetAssembly()->GetSimpleName(),
namespaceOrClassName.GetUTF8(namespaceNameBuffer),
methodName.GetUTF8(methodNameBuffer));
GetSig(&pSig, &cSig);
StackScratchBuffer buffer;
PrettyPrintSig(pSig, (DWORD)cSig, methodFullName.GetUTF8(buffer), &qbOut, GetMDImport(), NULL);
fullMethodSigName.AppendUTF8((char *)qbOut.Ptr());
}
#endif
//*******************************************************************************
BOOL MethodDesc::MightHaveName(ULONG nameHashValue)
{
LIMITED_METHOD_CONTRACT;
// We only have space for a name hash when we are using the packed slot layout
if (RequiresFullSlotNumber())
{
return TRUE;
}
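// In the packed layout, the bits of m_wSlotNumber covered by the name hash mask hold a small
// hash of the method name; compare it with the caller's hash to quickly rule out non-matches.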
WORD thisHashValue = m_wSlotNumber & enum_packedSlotLayout_NameHashMask;
// A zero value might mean no hash has ever been set
// (checking this way is better than dedicating a bit to tell us)
if (thisHashValue == 0)
{
return TRUE;
}
WORD testHashValue = (WORD) nameHashValue & enum_packedSlotLayout_NameHashMask;
return (thisHashValue == testHashValue);
}
//*******************************************************************************
void MethodDesc::GetSig(PCCOR_SIGNATURE *ppSig, DWORD *pcSig)
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END
if (HasStoredSig())
{
PTR_StoredSigMethodDesc pSMD = dac_cast<PTR_StoredSigMethodDesc>(this);
if (pSMD->HasStoredMethodSig() || GetClassification()==mcDynamic)
{
*ppSig = pSMD->GetStoredMethodSig(pcSig);
PREFIX_ASSUME(*ppSig != NULL);
return;
}
}
GetSigFromMetadata(GetMDImport(), ppSig, pcSig);
PREFIX_ASSUME(*ppSig != NULL);
}
//*******************************************************************************
// get a function signature from its metadata
// Arguments:
// input:
// importer the metadata importer to be used
// output:
// ppSig the function signature
// pcSig number of elements in the signature
void MethodDesc::GetSigFromMetadata(IMDInternalImport * importer,
PCCOR_SIGNATURE * ppSig,
DWORD * pcSig)
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END
if (FAILED(importer->GetSigOfMethodDef(GetMemberDef(), pcSig, ppSig)))
{ // Class loader already asked for signature, so this should always succeed (unless there's a
// bug or a new code path)
_ASSERTE(!"If this ever fires, then this method should return HRESULT");
*ppSig = NULL;
*pcSig = 0;
}
}
//*******************************************************************************
PCCOR_SIGNATURE MethodDesc::GetSig()
{
WRAPPER_NO_CONTRACT;
PCCOR_SIGNATURE pSig;
DWORD cSig;
GetSig(&pSig, &cSig);
PREFIX_ASSUME(pSig != NULL);
return pSig;
}
Signature MethodDesc::GetSignature()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
PCCOR_SIGNATURE pSig;
DWORD cSig;
GetSig(&pSig, &cSig);
PREFIX_ASSUME(pSig != NULL);
return Signature(pSig, cSig);
}
PCODE MethodDesc::GetMethodEntryPoint()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
SUPPORTS_DAC;
}
CONTRACTL_END;
// Similarly to SetMethodEntryPoint(), it is up to the caller to ensure that calls to this function are appropriately
// synchronized
// Keep implementations of MethodDesc::GetMethodEntryPoint and MethodDesc::GetAddrOfSlot in sync!
g_IBCLogger.LogMethodDescAccess(this);
if (HasNonVtableSlot())
{
SIZE_T size = GetBaseSize();
TADDR pSlot = dac_cast<TADDR>(this) + size;
return *PTR_PCODE(pSlot);
}
_ASSERTE(GetMethodTable()->IsCanonicalMethodTable());
return GetMethodTable_NoLogging()->GetSlot(GetSlot());
}
PTR_PCODE MethodDesc::GetAddrOfSlot()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
SUPPORTS_DAC;
}
CONTRACTL_END;
// Keep implementations of MethodDesc::GetMethodEntryPoint and MethodDesc::GetAddrOfSlot in sync!
if (HasNonVtableSlot())
{
SIZE_T size = GetBaseSize();
return PTR_PCODE(dac_cast<TADDR>(this) + size);
}
_ASSERTE(GetMethodTable()->IsCanonicalMethodTable());
return GetMethodTable()->GetSlotPtr(GetSlot());
}
//*******************************************************************************
PTR_MethodDesc MethodDesc::GetDeclMethodDesc(UINT32 slotNumber)
{
CONTRACTL {
WRAPPER(THROWS);
WRAPPER(GC_TRIGGERS);
INSTANCE_CHECK;
} CONTRACTL_END;
MethodDesc *pMDResult = this;
// If the MethodDesc is not itself a methodImpl, but it is not in its native
// slot, then someone (perhaps itself) must have overridden a methodImpl
// in a parent, which causes the method to get put into all of the methodImpl
// slots. So, the MethodDesc is implicitly a methodImpl without containing
// the data. To find the real methodImpl MethodDesc, climb the inheritance
// hierarchy checking the native slot on the way.
if ((UINT32)pMDResult->GetSlot() != slotNumber)
{
while (!pMDResult->IsMethodImpl())
{
CONSISTENCY_CHECK(CheckPointer(pMDResult->GetMethodTable()->GetParentMethodTable()));
CONSISTENCY_CHECK(slotNumber < pMDResult->GetMethodTable()->GetParentMethodTable()->GetNumVirtuals());
pMDResult = pMDResult->GetMethodTable()->GetParentMethodTable()->GetMethodDescForSlot(slotNumber);
}
{
CONSISTENCY_CHECK(pMDResult->IsMethodImpl());
MethodImpl *pImpl = pMDResult->GetMethodImpl();
pMDResult = pImpl->FindMethodDesc(slotNumber, PTR_MethodDesc(pMDResult));
}
// It is possible that a methodImpl'd slot got copied into another slot because
// of slot unification, for example:
// C1::A is methodImpled with C2::B
// C1::B is methodImpled with C2::C
// this means that through slot unification that A is tied to B and B is tied to C,
// so A is tied to C even though C does not have a methodImpl entry specifically
// relating to that slot. In this case, we recurse to the parent type and ask the
// same question again.
if (pMDResult->GetSlot() != slotNumber)
{
MethodTable * pMTOfMD = pMDResult->GetMethodTable();
CONSISTENCY_CHECK(slotNumber < pMTOfMD->GetParentMethodTable()->GetNumVirtuals());
pMDResult = pMTOfMD->GetParentMethodTable()->GetMethodDescForSlot(slotNumber);
pMDResult = pMDResult->GetDeclMethodDesc(slotNumber);
}
}
CONSISTENCY_CHECK(CheckPointer(pMDResult));
CONSISTENCY_CHECK((UINT32)pMDResult->GetSlot() == slotNumber);
return PTR_MethodDesc(pMDResult);
}
//*******************************************************************************
// Returns a hash for the method.
// The hash will be the same for the method across multiple process runs.
#ifndef DACCESS_COMPILE
COUNT_T MethodDesc::GetStableHash()
{
WRAPPER_NO_CONTRACT;
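// LCG methods and IL stubs have no stable metadata identity, so hash a synthetic class name
// together with the method name; all other methods use the version-resilient metadata hash.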
const char * className = NULL;
if (IsLCGMethod())
{
className = "DynamicClass";
}
else if (IsILStub())
{
className = ILStubResolver::GetStubClassName(this);
}
if (className == NULL)
{
return GetVersionResilientMethodHashCode(this);
}
else
{
int typeHash = ComputeNameHashCode("", className);
return typeHash ^ ComputeNameHashCode(GetName());
}
}
#endif // DACCESS_COMPILE
//*******************************************************************************
// Get the number of type parameters to a generic method
DWORD MethodDesc::GetNumGenericMethodArgs()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
CANNOT_TAKE_LOCK;
SUPPORTS_DAC;
}
CONTRACTL_END
g_IBCLogger.LogMethodDescAccess(this);
if (GetClassification() == mcInstantiated)
{
InstantiatedMethodDesc *pIMD = AsInstantiatedMethodDesc();
return pIMD->m_wNumGenericArgs;
}
else return 0;
}
//*******************************************************************************
MethodTable * MethodDesc::GetExactDeclaringType(MethodTable * ownerOrSubType)
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
MethodTable * pMT = GetMethodTable();
// Fast path for typical case.
if (ownerOrSubType == pMT)
return pMT;
// If we come here for array method, the typedef tokens inside GetMethodTableMatchingParentClass
// will match, but the types are actually from unrelated arrays, so the result would be incorrect.
_ASSERTE(!IsArray());
return ownerOrSubType->GetMethodTableMatchingParentClass(pMT);
}
//*******************************************************************************
Instantiation MethodDesc::GetExactClassInstantiation(TypeHandle possibleObjType)
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END
return (possibleObjType.IsNull()
? GetClassInstantiation()
: possibleObjType.GetInstantiationOfParentClass(GetMethodTable()));
}
//*******************************************************************************
BOOL MethodDesc::HasSameMethodDefAs(MethodDesc * pMD)
{
LIMITED_METHOD_CONTRACT;
if (this == pMD)
return TRUE;
return (GetMemberDef() == pMD->GetMemberDef()) && (GetModule() == pMD->GetModule());
}
//*******************************************************************************
BOOL MethodDesc::IsTypicalSharedInstantiation()
{
WRAPPER_NO_CONTRACT;
Instantiation classInst = GetMethodTable()->GetInstantiation();
if (!ClassLoader::IsTypicalSharedInstantiation(classInst))
return FALSE;
if (IsGenericMethodDefinition())
return FALSE;
Instantiation methodInst = GetMethodInstantiation();
if (!ClassLoader::IsTypicalSharedInstantiation(methodInst))
return FALSE;
return TRUE;
}
//*******************************************************************************
Instantiation MethodDesc::LoadMethodInstantiation()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
}
CONTRACTL_END
if (IsGenericMethodDefinition() && !IsTypicalMethodDefinition())
{
return LoadTypicalMethodDefinition()->GetMethodInstantiation();
}
else
return GetMethodInstantiation();
}
//*******************************************************************************
Module *MethodDesc::GetDefiningModuleForOpenMethod()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
}
CONTRACTL_END
Module *pModule = GetMethodTable()->GetDefiningModuleForOpenType();
if (pModule != NULL)
return pModule;
if (IsGenericMethodDefinition())
return GetModule();
Instantiation inst = GetMethodInstantiation();
for (DWORD i = 0; i < inst.GetNumArgs(); i++)
{
// Encoded types are never open
if (!inst[i].IsEncodedFixup())
{
pModule = inst[i].GetDefiningModuleForOpenType();
if (pModule != NULL)
return pModule;
}
}
return NULL;
}
//*******************************************************************************
BOOL MethodDesc::ContainsGenericVariables()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
}
CONTRACTL_END
// If this is a method of a generic type, does the type have
// non-instantiated type arguments
if (TypeHandle(GetMethodTable()).ContainsGenericVariables())
return TRUE;
if (IsGenericMethodDefinition())
return TRUE;
// If this is an instantiated generic method, check whether any of the type arguments contain generic variables
if (GetNumGenericMethodArgs() != 0)
{
Instantiation methodInst = GetMethodInstantiation();
for (DWORD i = 0; i < methodInst.GetNumArgs(); i++)
{
if (methodInst[i].ContainsGenericVariables())
return TRUE;
}
}
return FALSE;
}
//*******************************************************************************
BOOL MethodDesc::IsTightlyBoundToMethodTable()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
// Anything with the real vtable slot is tightly bound
if (!HasNonVtableSlot())
return TRUE;
// All instantiations of generic methods are stored in the InstMethHashTable.
if (HasMethodInstantiation())
{
if (IsGenericMethodDefinition())
return TRUE;
else
return FALSE;
}
// Wrapper stubs are stored in the InstMethHashTable, e.g. for static methods in generic classes
if (IsWrapperStub())
return FALSE;
return TRUE;
}
#ifndef DACCESS_COMPILE
//*******************************************************************************
// Update flags in a thread safe manner.
WORD MethodDesc::InterlockedUpdateFlags(WORD wMask, BOOL fSet)
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
WORD wOldState = m_wFlags;
DWORD dwMask = wMask;
// We need to make this operation atomic (multiple threads can play with the flags field at the same time). But the flags field
// is a word and we only have interlock operations over dwords. So we round down the flags field address to the nearest aligned
// dword (along with the intended bitfield mask). Note that we make the assumption that the flags word is aligned itself, so we
// only have two possibilities: the field already lies on a dword boundary or it's precisely one word out.
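// For example, on a little-endian machine where m_wFlags occupies the upper half of its
// containing dword, the 16-bit mask is shifted left by 16 below so the interlocked or/and
// only touches the flag word's bits.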
DWORD* pdwFlags = (DWORD*)((ULONG_PTR)&m_wFlags - (offsetof(MethodDesc, m_wFlags) & 0x3));
#ifdef _PREFAST_
#pragma warning(push)
#pragma warning(disable:6326) // "Suppress PREFast warning about comparing two constants"
#endif // _PREFAST_
#if BIGENDIAN
if ((offsetof(MethodDesc, m_wFlags) & 0x3) == 0) {
#else // !BIGENDIAN
if ((offsetof(MethodDesc, m_wFlags) & 0x3) != 0) {
#endif // !BIGENDIAN
static_assert_no_msg(sizeof(m_wFlags) == 2);
dwMask <<= 16;
}
#ifdef _PREFAST_
#pragma warning(pop)
#endif
g_IBCLogger.LogMethodDescWriteAccess(this);
if (fSet)
FastInterlockOr(pdwFlags, dwMask);
else
FastInterlockAnd(pdwFlags, ~dwMask);
return wOldState;
}
WORD MethodDesc::InterlockedUpdateFlags3(WORD wMask, BOOL fSet)
{
LIMITED_METHOD_CONTRACT;
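// Same dword-aligned interlocked update trick as InterlockedUpdateFlags, applied to
// m_wFlags3AndTokenRemainder.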
WORD wOldState = m_wFlags3AndTokenRemainder;
DWORD dwMask = wMask;
// We need to make this operation atomic (multiple threads can play with the flags field at the same time). But the flags field
// is a word and we only have interlock operations over dwords. So we round down the flags field address to the nearest aligned
// dword (along with the intended bitfield mask). Note that we make the assumption that the flags word is aligned itself, so we
// only have two possibilities: the field already lies on a dword boundary or it's precisely one word out.
DWORD* pdwFlags = (DWORD*)((ULONG_PTR)&m_wFlags3AndTokenRemainder - (offsetof(MethodDesc, m_wFlags3AndTokenRemainder) & 0x3));
#ifdef _PREFAST_
#pragma warning(push)
#pragma warning(disable:6326) // "Suppress PREFast warning about comparing two constants"
#endif // _PREFAST_
#if BIGENDIAN
if ((offsetof(MethodDesc, m_wFlags3AndTokenRemainder) & 0x3) == 0) {
#else // !BIGENDIAN
if ((offsetof(MethodDesc, m_wFlags3AndTokenRemainder) & 0x3) != 0) {
#endif // !BIGENDIAN
static_assert_no_msg(sizeof(m_wFlags3AndTokenRemainder) == 2);
dwMask <<= 16;
}
#ifdef _PREFAST_
#pragma warning(pop)
#endif
g_IBCLogger.LogMethodDescWriteAccess(this);
if (fSet)
FastInterlockOr(pdwFlags, dwMask);
else
FastInterlockAnd(pdwFlags, ~dwMask);
return wOldState;
}
#endif // !DACCESS_COMPILE
//*******************************************************************************
// Returns the address of the native code.
//
// Methods which have no native code are either implemented by stubs or not jitted yet.
// For example, NDirectMethodDescs have no native code. They are treated as
// implemented by stubs. On WIN64, these stubs are IL stubs, which DO have native code.
//
// This function returns null if the method has no native code.
PCODE MethodDesc::GetNativeCode()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
_ASSERTE(!IsDefaultInterfaceMethod() || HasNativeCodeSlot());
g_IBCLogger.LogMethodDescAccess(this);
if (HasNativeCodeSlot())
{
// When the profiler is enabled, the profiler may ask to rejit a method even though
// we have ngen code for this MethodDesc. (See MethodDesc::DoPrestub).
// This means that *GetAddrOfNativeCodeSlot()
// is not stable. It can turn from non-zero to zero.
PCODE pCode = *GetAddrOfNativeCodeSlot();
#ifdef TARGET_ARM
if (pCode != NULL)
pCode |= THUMB_CODE;
#endif
return pCode;
}
if (!HasStableEntryPoint() || HasPrecode())
return NULL;
return GetStableEntryPoint();
}
//*******************************************************************************
PTR_PCODE MethodDesc::GetAddrOfNativeCodeSlot()
{
WRAPPER_NO_CONTRACT;
_ASSERTE(HasNativeCodeSlot());
SIZE_T size = s_ClassificationSizeTable[m_wFlags & (mdcClassification | mdcHasNonVtableSlot | mdcMethodImpl)];
return (PTR_PCODE)(dac_cast<TADDR>(this) + size);
}
//*******************************************************************************
BOOL MethodDesc::IsVoid()
{
WRAPPER_NO_CONTRACT;
MetaSig sig(this);
return sig.IsReturnTypeVoid();
}
//*******************************************************************************
BOOL MethodDesc::HasRetBuffArg()
{
WRAPPER_NO_CONTRACT;
MetaSig sig(this);
ArgIterator argit(&sig);
return argit.HasRetBuffArg();
}
//*******************************************************************************
// This returns the offset of the IL.
// The offset is relative to the base of the IL image.
ULONG MethodDesc::GetRVA()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
SUPPORTS_DAC;
}
CONTRACTL_END
if (IsRuntimeSupplied())
{
return 0;
}
// Methods without metadata don't have an RVA. Examples are IL stubs and LCG methods.
if (IsNoMetadata())
{
return 0;
}
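// Only query metadata when the method token has a non-nil RID (the low 24 bits of the token).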
if (GetMemberDef() & 0x00FFFFFF)
{
Module *pModule = GetModule();
PREFIX_ASSUME(pModule != NULL);
DWORD dwDescrOffset;
DWORD dwImplFlags;
if (FAILED(pModule->GetMDImport()->GetMethodImplProps(GetMemberDef(), &dwDescrOffset, &dwImplFlags)))
{ // Class loader already asked for MethodImpls, so this should always succeed (unless there's a
// bug or a new code path)
_ASSERTE(!"If this ever fires, then this method should return HRESULT");
return 0;
}
BAD_FORMAT_NOTHROW_ASSERT(IsNDirect() || IsMiIL(dwImplFlags) || IsMiOPTIL(dwImplFlags) || dwDescrOffset == 0);
return dwDescrOffset;
}
return 0;
}
//*******************************************************************************
BOOL MethodDesc::IsVarArg()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
SUPPORTS_DAC;
Signature signature = GetSignature();
_ASSERTE(!signature.IsEmpty());
return MetaSig::IsVarArg(signature);
}
//*******************************************************************************
COR_ILMETHOD* MethodDesc::GetILHeader(BOOL fAllowOverrides /*=FALSE*/)
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
PRECONDITION(IsIL());
PRECONDITION(!IsUnboxingStub());
}
CONTRACTL_END
Module *pModule = GetModule();
// Always pickup 'permanent' overrides like reflection emit, EnC, etc.
// but only grab temporary overrides (like profiler rewrites) if asked to
TADDR pIL = pModule->GetDynamicIL(GetMemberDef(), fAllowOverrides);
if (pIL == NULL)
{
pIL = pModule->GetIL(GetRVA());
}
#ifdef _DEBUG_IMPL
if (pIL != NULL)
{
//
// This is a convenient place to verify that COR_ILMETHOD_DECODER::GetOnDiskSize is in sync
// with our private DACized copy in PEDecoder::ComputeILMethodSize
//
COR_ILMETHOD_DECODER header((COR_ILMETHOD *)pIL);
SIZE_T size1 = header.GetOnDiskSize((COR_ILMETHOD *)pIL);
SIZE_T size2 = PEDecoder::ComputeILMethodSize(pIL);
_ASSERTE(size1 == size2);
}
#endif
#ifdef DACCESS_COMPILE
return (pIL != NULL) ? DacGetIlMethod(pIL) : NULL;
#else // !DACCESS_COMPILE
return PTR_COR_ILMETHOD(pIL);
#endif // !DACCESS_COMPILE
}
//*******************************************************************************
ReturnKind MethodDesc::ParseReturnKindFromSig(INDEBUG(bool supportStringConstructors))
{
CONTRACTL
{
if (FORBIDGC_LOADER_USE_ENABLED()) NOTHROW; else THROWS;
GC_NOTRIGGER;
FORBID_FAULT;
}
CONTRACTL_END
ENABLE_FORBID_GC_LOADER_USE_IN_THIS_SCOPE();
TypeHandle thValueType;
MetaSig sig(this);
CorElementType et = sig.GetReturnTypeNormalized(&thValueType);
switch (et)
{
case ELEMENT_TYPE_STRING:
case ELEMENT_TYPE_CLASS:
case ELEMENT_TYPE_SZARRAY:
case ELEMENT_TYPE_ARRAY:
case ELEMENT_TYPE_OBJECT:
case ELEMENT_TYPE_VAR:
return RT_Object;
#ifdef ENREGISTERED_RETURNTYPE_INTEGER_MAXSIZE
case ELEMENT_TYPE_VALUETYPE:
// We return value types in registers if they fit in ENREGISTERED_RETURNTYPE_MAXSIZE
// These valuetypes could contain gc refs.
{
ArgIterator argit(&sig);
if (!argit.HasRetBuffArg())
{
// the type must already be loaded
_ASSERTE(!thValueType.IsNull());
if (!thValueType.IsTypeDesc())
{
MethodTable * pReturnTypeMT = thValueType.AsMethodTable();
#ifdef UNIX_AMD64_ABI
if (pReturnTypeMT->IsRegPassedStruct())
{
// The Multi-reg return case using the classhandle is only implemented for AMD64 SystemV ABI.
// On other platforms, multi-reg return is not supported with GcInfo v1.
// So, the relevant information must be obtained from the GcInfo tables (which requires version2).
EEClass* eeClass = pReturnTypeMT->GetClass();
ReturnKind regKinds[2] = { RT_Unset, RT_Unset };
int orefCount = 0;
for (int i = 0; i < 2; i++)
{
if (eeClass->GetEightByteClassification(i) == SystemVClassificationTypeIntegerReference)
{
regKinds[i] = RT_Object;
}
else if (eeClass->GetEightByteClassification(i) == SystemVClassificationTypeIntegerByRef)
{
regKinds[i] = RT_ByRef;
}
else
{
regKinds[i] = RT_Scalar;
}
}
ReturnKind structReturnKind = GetStructReturnKind(regKinds[0], regKinds[1]);
return structReturnKind;
}
#endif // UNIX_AMD64_ABI
if (pReturnTypeMT->ContainsPointers() || pReturnTypeMT->IsByRefLike())
{
if (pReturnTypeMT->GetNumInstanceFields() == 1)
{
_ASSERTE(pReturnTypeMT->GetNumInstanceFieldBytes() == sizeof(void*));
// Note: we can't distinguish RT_Object from RT_ByRef, the caller has to tolerate that.
return RT_Object;
}
else
{
// Multi reg return case with pointers, can't restore the actual kind.
return RT_Illegal;
}
}
}
}
}
break;
#endif // ENREGISTERED_RETURNTYPE_INTEGER_MAXSIZE
#ifdef _DEBUG
case ELEMENT_TYPE_VOID:
// String constructors return objects. We should not have any ecall string
// constructors, except when called from GC coverage code (which is only
// done under debug). We will therefore optimize the retail version of this
// method to not support string constructors.
if (IsCtor() && GetMethodTable()->HasComponentSize())
{
_ASSERTE(supportStringConstructors);
return RT_Object;
}
break;
#endif // _DEBUG
case ELEMENT_TYPE_BYREF:
return RT_ByRef;
default:
break;
}
return RT_Scalar;
}
ReturnKind MethodDesc::GetReturnKind(INDEBUG(bool supportStringConstructors))
{
// For simplicity, we don't hijack in funclets, but if you ever change that,
// be sure to choose the OnHijack... callback type to match that of the FUNCLET
// not the main method (it would probably be Scalar).
ENABLE_FORBID_GC_LOADER_USE_IN_THIS_SCOPE();
// Mark that we are performing a stackwalker like operation on the current thread.
// This is necessary to allow the signature parsing functions to work without triggering any loads
StackWalkerWalkingThreadHolder threadStackWalking(GetThread());
#ifdef TARGET_X86
MetaSig msig(this);
if (msig.HasFPReturn())
{
// Figuring out whether the function returns FP or not is hard to do
// on-the-fly, so we use a different callback helper on x86 where this
// piece of information is needed in order to perform the right save &
// restore of the return value around the call to OnHijackScalarWorker.
return RT_Float;
}
#endif // TARGET_X86
return ParseReturnKindFromSig(INDEBUG(supportStringConstructors));
}
#ifdef FEATURE_COMINTEROP
#ifndef DACCESS_COMPILE
//*******************************************************************************
LONG MethodDesc::GetComDispid()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END
ULONG dispid = -1;
HRESULT hr = GetMDImport()->GetDispIdOfMemberDef(
GetMemberDef(), // The member for which to get props.
&dispid // return dispid.
);
if (FAILED(hr))
return -1;
return (LONG)dispid;
}
//*******************************************************************************
WORD MethodDesc::GetComSlot()
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
FORBID_FAULT;
}
CONTRACTL_END
MethodTable * pMT = GetMethodTable();
_ASSERTE(pMT->IsInterface());
// COM slots are biased from MethodTable slots depending on interface type
WORD numExtraSlots = ComMethodTable::GetNumExtraSlots(pMT->GetComInterfaceType());
// Normal interfaces are laid out the same way as in the MethodTable, while
// sparse interfaces need to go through an extra layer of mapping.
WORD slot;
if (pMT->IsSparseForCOMInterop())
slot = numExtraSlots + pMT->GetClass()->GetSparseCOMInteropVTableMap()->LookupVTSlot(GetSlot());
else
slot = numExtraSlots + GetSlot();
return slot;
}
#endif // !DACCESS_COMPILE
#endif // FEATURE_COMINTEROP
//*******************************************************************************
DWORD MethodDesc::GetAttrs() const
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END
if (IsArray())
return dac_cast<PTR_ArrayMethodDesc>(this)->GetAttrs();
else if (IsNoMetadata())
return dac_cast<PTR_DynamicMethodDesc>(this)->GetAttrs();
DWORD dwAttributes;
if (FAILED(GetMDImport()->GetMethodDefProps(GetMemberDef(), &dwAttributes)))
{ // Class loader already asked for attributes, so this should always succeed (unless there's a
// bug or a new code path)
_ASSERTE(!"If this ever fires, then this method should return HRESULT");
return 0;
}
return dwAttributes;
}
//*******************************************************************************
DWORD MethodDesc::GetImplAttrs()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END
DWORD props;
if (FAILED(GetMDImport()->GetMethodImplProps(GetMemberDef(), NULL, &props)))
{ // Class loader already asked for MethodImpls, so this should always succeed (unless there's a
// bug or a new code path)
_ASSERTE(!"If this ever fires, then this method should return HRESULT");
return 0;
}
return props;
}
//*******************************************************************************
Module* MethodDesc::GetLoaderModule()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
if (HasMethodInstantiation() && !IsGenericMethodDefinition())
{
Module *retVal = ClassLoader::ComputeLoaderModule(GetMethodTable(),
GetMemberDef(),
GetMethodInstantiation());
return retVal;
}
else
{
return GetMethodTable()->GetLoaderModule();
}
}
//*******************************************************************************
Module *MethodDesc::GetModule() const
{
STATIC_CONTRACT_NOTHROW;
STATIC_CONTRACT_GC_NOTRIGGER;
STATIC_CONTRACT_FORBID_FAULT;
SUPPORTS_DAC;
g_IBCLogger.LogMethodDescAccess(this);
Module *pModule = GetModule_NoLogging();
return pModule;
}
//*******************************************************************************
Module *MethodDesc::GetModule_NoLogging() const
{
STATIC_CONTRACT_NOTHROW;
STATIC_CONTRACT_GC_NOTRIGGER;
STATIC_CONTRACT_FORBID_FAULT;
SUPPORTS_DAC;
MethodTable* pMT = GetMethodDescChunk()->GetMethodTable();
return pMT->GetModule();
}
//*******************************************************************************
// Is this an instantiating stub for generics? This does not include those
// BoxedEntryPointStubs which call an instantiating stub.
BOOL MethodDesc::IsInstantiatingStub()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
return
(GetClassification() == mcInstantiated)
&& !IsUnboxingStub()
&& AsInstantiatedMethodDesc()->IMD_IsWrapperStubWithInstantiations();
}
//*******************************************************************************
BOOL MethodDesc::IsWrapperStub()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
return (IsUnboxingStub() || IsInstantiatingStub());
}
#ifndef DACCESS_COMPILE
//*******************************************************************************
MethodDesc *MethodDesc::GetWrappedMethodDesc()
{
WRAPPER_NO_CONTRACT;
_ASSERTE(IsWrapperStub());
if (IsUnboxingStub())
{
return this->GetMethodTable()->GetUnboxedEntryPointMD(this);
}
if (IsInstantiatingStub())
{
MethodDesc *pRet = AsInstantiatedMethodDesc()->IMD_GetWrappedMethodDesc();
#ifdef _DEBUG
MethodDesc *pAltMD =
MethodDesc::FindOrCreateAssociatedMethodDesc(this,
this->GetMethodTable(),
FALSE, /* no unboxing entrypoint */
this->GetMethodInstantiation(),
TRUE /* get shared code */ );
_ASSERTE(pAltMD == pRet);
#endif // _DEBUG
return pRet;
}
return NULL;
}
MethodDesc *MethodDesc::GetExistingWrappedMethodDesc()
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
_ASSERTE(IsWrapperStub());
if (IsUnboxingStub())
{
return this->GetMethodTable()->GetExistingUnboxedEntryPointMD(this);
}
if (IsInstantiatingStub())
{
MethodDesc *pRet = AsInstantiatedMethodDesc()->IMD_GetWrappedMethodDesc();
return pRet;
}
return NULL;
}
#endif // !DACCESS_COMPILE
//*******************************************************************************
BOOL MethodDesc::IsSharedByGenericInstantiations()
{
LIMITED_METHOD_DAC_CONTRACT;
if (IsWrapperStub())
return FALSE;
else if (GetMethodTable()->IsSharedByGenericInstantiations())
return TRUE;
else return IsSharedByGenericMethodInstantiations();
}
//*******************************************************************************
BOOL MethodDesc::IsSharedByGenericMethodInstantiations()
{
LIMITED_METHOD_DAC_CONTRACT;
if (GetClassification() == mcInstantiated)
return AsInstantiatedMethodDesc()->IMD_IsSharedByGenericMethodInstantiations();
else return FALSE;
}
//*******************************************************************************
// Does this method require an extra MethodTable argument for instantiation information?
// This is the case for
// * per-inst static methods in shared-code instantiated generic classes (e.g. static void MyClass<string>::m())
// - there is no this pointer providing generic dictionary info
// * shared-code instance methods in instantiated generic structs (e.g. void MyValueType<string>::m())
// - the unboxed 'this' pointer in value-type instance methods doesn't have a MethodTable pointer by definition
// * shared instance and default interface methods called via interface dispatch (e.g. IFoo<string>.Foo calling into IFoo<object>::Foo())
// - the 'this' pointer is ambiguous as it can implement more than one IFoo<T>
BOOL MethodDesc::RequiresInstMethodTableArg()
{
LIMITED_METHOD_DAC_CONTRACT;
return
IsSharedByGenericInstantiations() &&
!HasMethodInstantiation() &&
(IsStatic() || GetMethodTable()->IsValueType() || (GetMethodTable()->IsInterface() && !IsAbstract()));
}
//*******************************************************************************
// Does this method require an extra InstantiatedMethodDesc argument for instantiation information?
// This is the case for
// * shared-code instantiated generic methods
BOOL MethodDesc::RequiresInstMethodDescArg()
{
LIMITED_METHOD_DAC_CONTRACT;
return IsSharedByGenericInstantiations() &&
HasMethodInstantiation();
}
//*******************************************************************************
// Does this method require any kind of extra argument for instantiation information?
BOOL MethodDesc::RequiresInstArg()
{
LIMITED_METHOD_DAC_CONTRACT;
BOOL fRet = IsSharedByGenericInstantiations() &&
(HasMethodInstantiation() || IsStatic() || GetMethodTable()->IsValueType() || (GetMethodTable()->IsInterface() && !IsAbstract()));
_ASSERT(fRet == (RequiresInstMethodTableArg() || RequiresInstMethodDescArg()));
return fRet;
}
//*******************************************************************************
BOOL MethodDesc::IsRuntimeMethodHandle()
{
WRAPPER_NO_CONTRACT;
// <TODO> Refine this check further for BoxedEntryPointStubs </TODO>
return (!HasMethodInstantiation() || !IsSharedByGenericMethodInstantiations());
}
//*******************************************************************************
// Strip off method and class instantiation if present e.g.
// C1<int>.m1<string> -> C1.m1
// C1<int>.m2 -> C1.m2
// C2.m2<int> -> C2.m2
// C2.m2 -> C2.m2
MethodDesc* MethodDesc::LoadTypicalMethodDefinition()
{
CONTRACT(MethodDesc*)
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
POSTCONDITION(CheckPointer(RETVAL));
POSTCONDITION(RETVAL->IsTypicalMethodDefinition());
}
CONTRACT_END
#ifndef DACCESS_COMPILE
if (HasClassOrMethodInstantiation())
{
MethodTable *pMT = GetMethodTable();
if (!pMT->IsTypicalTypeDefinition())
pMT = ClassLoader::LoadTypeDefThrowing(pMT->GetModule(),
pMT->GetCl(),
ClassLoader::ThrowIfNotFound,
ClassLoader::PermitUninstDefOrRef).GetMethodTable();
CONSISTENCY_CHECK(TypeHandle(pMT).CheckFullyLoaded());
MethodDesc *resultMD = pMT->GetParallelMethodDesc(this);
PREFIX_ASSUME(resultMD != NULL);
resultMD->CheckRestore();
RETURN (resultMD);
}
else
#endif // !DACCESS_COMPILE
RETURN(this);
}
//*******************************************************************************
BOOL MethodDesc::IsTypicalMethodDefinition() const
{
LIMITED_METHOD_CONTRACT;
if (HasMethodInstantiation() && !IsGenericMethodDefinition())
return FALSE;
if (HasClassInstantiation() && !GetMethodTable()->IsGenericTypeDefinition())
return FALSE;
return TRUE;
}
//*******************************************************************************
BOOL MethodDesc::AcquiresInstMethodTableFromThis() {
LIMITED_METHOD_CONTRACT;
SUPPORTS_DAC;
return
IsSharedByGenericInstantiations() &&
!HasMethodInstantiation() &&
!IsStatic() &&
!GetMethodTable()->IsValueType() &&
!(GetMethodTable()->IsInterface() && !IsAbstract());
}
//*******************************************************************************
UINT MethodDesc::SizeOfArgStack()
{
WRAPPER_NO_CONTRACT;
MetaSig msig(this);
ArgIterator argit(&msig);
return argit.SizeOfArgStack();
}
UINT MethodDesc::SizeOfNativeArgStack()
{
#ifndef UNIX_AMD64_ABI
return SizeOfArgStack();
#else
WRAPPER_NO_CONTRACT;
MetaSig msig(this);
PInvokeArgIterator argit(&msig);
return argit.SizeOfArgStack();
#endif
}
#ifdef TARGET_X86
//*******************************************************************************
UINT MethodDesc::CbStackPop()
{
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
MetaSig msig(this);
ArgIterator argit(&msig);
bool fCtorOfVariableSizedObject = msig.HasThis() && (GetMethodTable() == g_pStringClass) && IsCtor();
if (fCtorOfVariableSizedObject)
{
msig.ClearHasThis();
}
return argit.CbStackPop();
}
#endif // TARGET_X86
#ifndef DACCESS_COMPILE
//*******************************************************************************
// Strip off the method instantiation (if present) e.g.
// C<int>.m<string> -> C<int>.m
// D.m<string> -> D.m
// Note that this also canonicalizes the owning method table
// @todo check uses and clean this up
MethodDesc* MethodDesc::StripMethodInstantiation()
{
CONTRACT(MethodDesc*)
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
POSTCONDITION(CheckPointer(RETVAL));
}
CONTRACT_END
if (!HasClassOrMethodInstantiation())
RETURN(this);
MethodTable *pMT = GetMethodTable()->GetCanonicalMethodTable();
MethodDesc *resultMD = pMT->GetParallelMethodDesc(this);
_ASSERTE(resultMD->IsGenericMethodDefinition() || !resultMD->HasMethodInstantiation());
RETURN(resultMD);
}
//*******************************************************************************
MethodDescChunk *MethodDescChunk::CreateChunk(LoaderHeap *pHeap, DWORD methodDescCount,
DWORD classification, BOOL fNonVtableSlot, BOOL fNativeCodeSlot, BOOL fComPlusCallInfo, MethodTable *pInitialMT, AllocMemTracker *pamTracker)
{
CONTRACT(MethodDescChunk *)
{
THROWS;
GC_NOTRIGGER;
INJECT_FAULT(ThrowOutOfMemory());
PRECONDITION(CheckPointer(pHeap));
PRECONDITION(CheckPointer(pInitialMT));
PRECONDITION(CheckPointer(pamTracker));
POSTCONDITION(CheckPointer(RETVAL));
}
CONTRACT_END;
SIZE_T oneSize = MethodDesc::GetBaseSize(classification);
if (fNonVtableSlot)
oneSize += sizeof(MethodDesc::NonVtableSlot);
if (fNativeCodeSlot)
oneSize += sizeof(MethodDesc::NativeCodeSlot);
#ifdef FEATURE_COMINTEROP
if (fComPlusCallInfo)
oneSize += sizeof(ComPlusCallInfo);
#else // FEATURE_COMINTEROP
_ASSERTE(!fComPlusCallInfo);
#endif // FEATURE_COMINTEROP
_ASSERTE((oneSize & MethodDesc::ALIGNMENT_MASK) == 0);
DWORD maxMethodDescsPerChunk = (DWORD)(MethodDescChunk::MaxSizeOfMethodDescs / oneSize);
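// A requested count of zero means: allocate a single chunk filled to capacity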
if (methodDescCount == 0)
methodDescCount = maxMethodDescsPerChunk;
MethodDescChunk * pFirstChunk = NULL;
do
{
DWORD count = min(methodDescCount, maxMethodDescsPerChunk);
void * pMem = pamTracker->Track(
pHeap->AllocMem(S_SIZE_T(sizeof(TADDR) + sizeof(MethodDescChunk) + oneSize * count)));
// Skip pointer to temporary entrypoints
MethodDescChunk * pChunk = (MethodDescChunk *)((BYTE*)pMem + sizeof(TADDR));
pChunk->SetSizeAndCount(oneSize * count, count);
pChunk->SetMethodTable(pInitialMT);
MethodDesc * pMD = pChunk->GetFirstMethodDesc();
for (DWORD i = 0; i < count; i++)
{
pMD->SetChunkIndex(pChunk);
pMD->SetClassification(classification);
if (fNonVtableSlot)
pMD->SetHasNonVtableSlot();
if (fNativeCodeSlot)
pMD->SetHasNativeCodeSlot();
#ifdef FEATURE_COMINTEROP
if (fComPlusCallInfo)
pMD->SetupGenericComPlusCall();
#endif // FEATURE_COMINTEROP
_ASSERTE(pMD->SizeOf() == oneSize);
pMD = (MethodDesc *)((BYTE *)pMD + oneSize);
}
pChunk->m_next = pFirstChunk;
pFirstChunk = pChunk;
methodDescCount -= count;
}
while (methodDescCount > 0);
RETURN pFirstChunk;
}
//--------------------------------------------------------------------
// Virtual Resolution on Objects
//
// Given a MethodDesc and an Object, return the target address
// and/or the target MethodDesc and/or make a call.
//
// Some of the implementation of this logic is in
// MethodTable::GetMethodDescForInterfaceMethodAndServer.
// Those functions should really be moved here.
//--------------------------------------------------------------------
//*******************************************************************************
// The following resolves virtual dispatch for the given method on the given
// object down to an actual address to call, including any
// handling of context proxies and other thunking layers.
MethodDesc* MethodDesc::ResolveGenericVirtualMethod(OBJECTREF *orThis)
{
CONTRACT(MethodDesc *)
{
THROWS;
GC_TRIGGERS;
PRECONDITION(IsVtableMethod());
PRECONDITION(HasMethodInstantiation());
PRECONDITION(!ContainsGenericVariables());
POSTCONDITION(CheckPointer(RETVAL));
POSTCONDITION(RETVAL->HasMethodInstantiation());
}
CONTRACT_END;
// Method table of target (might be instantiated)
MethodTable *pObjMT = (*orThis)->GetMethodTable();
// This is the static method descriptor describing the call.
// It is not the destination of the call, which we must compute.
MethodDesc* pStaticMD = this;
// Strip off the method instantiation if present
MethodDesc* pStaticMDWithoutGenericMethodArgs = pStaticMD->StripMethodInstantiation();
// Compute the target, though we have not yet applied the type arguments.
MethodDesc *pTargetMDBeforeGenericMethodArgs =
pStaticMD->IsInterface()
? MethodTable::GetMethodDescForInterfaceMethodAndServer(TypeHandle(pStaticMD->GetMethodTable()),
pStaticMDWithoutGenericMethodArgs,orThis)
: pObjMT->GetMethodDescForSlot(pStaticMDWithoutGenericMethodArgs->GetSlot());
pTargetMDBeforeGenericMethodArgs->CheckRestore();
// The actual destination may lie anywhere in the inheritance hierarchy
// between the static descriptor and the target object.
// So now compute where we are really going! This may be an instantiated
// class type if the generic virtual lies in a generic class.
MethodTable *pTargetMT = pTargetMDBeforeGenericMethodArgs->GetMethodTable();
// No need to find/create a new generic instantiation if the target is the
// same as the static, i.e. the virtual method has not been overridden.
if (!pTargetMT->IsSharedByGenericInstantiations() && !pTargetMT->IsValueType() &&
pTargetMDBeforeGenericMethodArgs == pStaticMDWithoutGenericMethodArgs)
RETURN(pStaticMD);
if (pTargetMT->IsSharedByGenericInstantiations())
{
pTargetMT = ClassLoader::LoadGenericInstantiationThrowing(pTargetMT->GetModule(),
pTargetMT->GetCl(),
pTargetMDBeforeGenericMethodArgs->GetExactClassInstantiation(TypeHandle(pObjMT))).GetMethodTable();
}
RETURN(MethodDesc::FindOrCreateAssociatedMethodDesc(
pTargetMDBeforeGenericMethodArgs,
pTargetMT,
(pTargetMT->IsValueType()), /* get unboxing entry point if a struct*/
pStaticMD->GetMethodInstantiation(),
FALSE /* no allowInstParam */ ));
}
//*******************************************************************************
PCODE MethodDesc::GetSingleCallableAddrOfVirtualizedCode(OBJECTREF *orThis, TypeHandle staticTH)
{
WRAPPER_NO_CONTRACT;
PRECONDITION(IsVtableMethod());
MethodTable *pObjMT = (*orThis)->GetMethodTable();
if (HasMethodInstantiation())
{
CheckRestore();
MethodDesc *pResultMD = ResolveGenericVirtualMethod(orThis);
// If we're remoting this call we can't call directly on the returned
// method desc, we need to go through a stub that guarantees we end up
// in the remoting handler. The stub we use below is normally just for
// non-virtual calls on virtual methods (that have the same problem
// where we could end up bypassing the remoting system), but it serves
// our purpose here (basically pushes our correctly instantiated,
// resolved method desc on the stack and calls the remoting code).
return pResultMD->GetSingleCallableAddrOfCode();
}
if (IsInterface())
{
MethodDesc * pTargetMD = MethodTable::GetMethodDescForInterfaceMethodAndServer(staticTH,this,orThis);
return pTargetMD->GetSingleCallableAddrOfCode();
}
return pObjMT->GetRestoredSlot(GetSlot());
}
//*******************************************************************************
// The following resolves virtual dispatch for the given method on the given
// object down to an actual address to call, including any
// handling of context proxies and other thunking layers.
PCODE MethodDesc::GetMultiCallableAddrOfVirtualizedCode(OBJECTREF *orThis, TypeHandle staticTH)
{
CONTRACT(PCODE)
{
THROWS;
GC_TRIGGERS;
PRECONDITION(IsVtableMethod());
POSTCONDITION(RETVAL != NULL);
}
CONTRACT_END;
// Method table of target (might be instantiated)
MethodTable *pObjMT = (*orThis)->GetMethodTable();
// This is the static method descriptor describing the call.
// It is not the destination of the call, which we must compute.
MethodDesc* pStaticMD = this;
MethodDesc *pTargetMD;
if (pStaticMD->HasMethodInstantiation())
{
CheckRestore();
pTargetMD = ResolveGenericVirtualMethod(orThis);
// If we're remoting this call we can't call directly on the returned
// method desc, we need to go through a stub that guarantees we end up
// in the remoting handler. The stub we use below is normally just for
// non-virtual calls on virtual methods (that have the same problem
// where we could end up bypassing the remoting system), but it serves
// our purpose here (basically pushes our correctly instantiated,
// resolved method desc on the stack and calls the remoting code).
RETURN(pTargetMD->GetMultiCallableAddrOfCode());
}
if (pStaticMD->IsInterface())
{
pTargetMD = MethodTable::GetMethodDescForInterfaceMethodAndServer(staticTH,pStaticMD,orThis);
RETURN(pTargetMD->GetMultiCallableAddrOfCode());
}
pTargetMD = pObjMT->GetMethodDescForSlot(pStaticMD->GetSlot());
RETURN (pTargetMD->GetMultiCallableAddrOfCode());
}
//*******************************************************************************
PCODE MethodDesc::GetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags /*=CORINFO_ACCESS_LDFTN*/)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM());
}
CONTRACTL_END
PCODE ret = TryGetMultiCallableAddrOfCode(accessFlags);
if (ret == NULL)
{
GCX_COOP();
// We have to allocate funcptr stub
ret = GetLoaderAllocator()->GetFuncPtrStubs()->GetFuncPtrStub(this);
}
return ret;
}
//*******************************************************************************
//
// Returns a callable entry point for a function.
// Multiple entry points could be used for a single function.
// i.e. this function is not idempotent
//
// We must ensure that GetMultiCallableAddrOfCode works
// correctly for all of the following cases:
// 1. shared generic method instantiations
// 2. unshared generic method instantiations
// 3. instance methods in shared generic classes
// 4. instance methods in unshared generic classes
// 5. static methods in shared generic classes.
// 6. static methods in unshared generic classes.
//
// For case 1 and 5 the methods are implemented using
// an instantiating stub (i.e. IsInstantiatingStub()
// should be true). These stubs pass on to
// shared-generic-code-which-requires-an-extra-type-context-parameter.
// So whenever we use LDFTN on these we need to give out
// the address of an instantiating stub.
//
// For cases 2, 3, 4 and 6 we can just use the standard technique for LdFtn:
// (for 2 we give out the address of the fake "slot" in InstantiatedMethodDescs)
// (for 3 it doesn't matter if the code is shared between instantiations
// because the instantiation context is picked up from the "this" parameter.)
PCODE MethodDesc::TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM());
}
CONTRACTL_END
// Record this method desc if required
g_IBCLogger.LogMethodDescAccess(this);
if (IsGenericMethodDefinition())
{
_ASSERTE(!"Cannot take the address of an uninstantiated generic method.");
COMPlusThrow(kInvalidProgramException);
}
if (accessFlags & CORINFO_ACCESS_LDFTN)
{
// Whenever we use LDFTN on shared-generic-code-which-requires-an-extra-parameter
// we need to give out the address of an instantiating stub. This is why we give
// out GetStableEntryPoint() for the IsInstantiatingStub() case: this is
// safe. But first we assert that we only use GetMultiCallableAddrOfCode on
// the instantiating stubs and not on the shared code itself.
_ASSERTE(!RequiresInstArg());
_ASSERTE(!IsSharedByGenericMethodInstantiations());
// No other access flags are valid with CORINFO_ACCESS_LDFTN
_ASSERTE((accessFlags & ~CORINFO_ACCESS_LDFTN) == 0);
}
// We create stable entrypoints for these upfront
if (IsWrapperStub() || IsEnCAddedMethod())
return GetStableEntryPoint();
// For EnC always just return the stable entrypoint so we can update the code
if (IsEnCMethod())
return GetStableEntryPoint();
// If the method has already been jitted, we can give out the direct address
// Note that we may have previously created a FuncPtrStubEntry, but
// GetMultiCallableAddrOfCode() does not need to be idempotent.
if (IsFCall())
{
// Call FCalls directly when possible
if (!IsInterface() && !GetMethodTable()->ContainsGenericVariables())
{
BOOL fSharedOrDynamicFCallImpl;
PCODE pFCallImpl = ECall::GetFCallImpl(this, &fSharedOrDynamicFCallImpl);
if (!fSharedOrDynamicFCallImpl)
return pFCallImpl;
// Fake ctors share one implementation that has to be wrapped by prestub
GetOrCreatePrecode();
}
}
else
{
if (IsPointingToStableNativeCode())
return GetNativeCode();
}
if (HasStableEntryPoint())
return GetStableEntryPoint();
if (IsVersionableWithVtableSlotBackpatch())
{
// Caller has to call via slot or allocate funcptr stub
return NULL;
}
// Force the creation of the precode if we would eventually get one anyway
if (MayHavePrecode())
return GetOrCreatePrecode()->GetEntryPoint();
#ifdef HAS_COMPACT_ENTRYPOINTS
// Caller has to call via slot or allocate funcptr stub
return NULL;
#else // HAS_COMPACT_ENTRYPOINTS
//
// Embed call to the temporary entrypoint into the code. It will be patched
// to point to the actual code later.
//
return GetTemporaryEntryPoint();
#endif // HAS_COMPACT_ENTRYPOINTS
}
//*******************************************************************************
PCODE MethodDesc::GetCallTarget(OBJECTREF* pThisObj, TypeHandle ownerType)
{
CONTRACTL
{
THROWS; // Resolving a generic virtual method can throw
GC_TRIGGERS;
MODE_COOPERATIVE;
}
CONTRACTL_END
PCODE pTarget;
if (IsVtableMethod() && !GetMethodTable()->IsValueType())
{
CONSISTENCY_CHECK(NULL != pThisObj);
if (ownerType.IsNull())
ownerType = GetMethodTable();
pTarget = GetSingleCallableAddrOfVirtualizedCode(pThisObj, ownerType);
}
else
{
pTarget = GetSingleCallableAddrOfCode();
}
return pTarget;
}
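//*******************************************************************************
// Map a non-virtual entry point (jitted code or a precode stub) back to its MethodDesc.
// Returns NULL if the address does not fall within a known code range.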
MethodDesc* NonVirtualEntry2MethodDesc(PCODE entryPoint)
{
CONTRACTL {
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END
RangeSection* pRS = ExecutionManager::FindCodeRange(entryPoint, ExecutionManager::GetScanFlags());
if (pRS == NULL)
return NULL;
MethodDesc* pMD;
if (pRS->pjit->JitCodeToMethodInfo(pRS, entryPoint, &pMD, NULL))
return pMD;
if (pRS->pjit->GetStubCodeBlockKind(pRS, entryPoint) == STUB_CODE_BLOCK_PRECODE)
return MethodDesc::GetMethodDescFromStubAddr(entryPoint);
// We should never get here
_ASSERTE(!"NonVirtualEntry2MethodDesc failed for RangeSection");
return NULL;
}
//*******************************************************************************
// convert an entry point into a method desc
MethodDesc* Entry2MethodDesc(PCODE entryPoint, MethodTable *pMT)
{
CONTRACT(MethodDesc*)
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
POSTCONDITION(RETVAL->SanityCheck());
}
CONTRACT_END
MethodDesc* pMD = NonVirtualEntry2MethodDesc(entryPoint);
if (pMD != NULL)
RETURN(pMD);
pMD = VirtualCallStubManagerManager::Entry2MethodDesc(entryPoint, pMT);
if (pMD != NULL)
RETURN(pMD);
// Is it an FCALL?
pMD = ECall::MapTargetBackToMethod(entryPoint);
if (pMD != NULL)
RETURN(pMD);
// We should never get here
_ASSERTE(!"Entry2MethodDesc failed");
RETURN (NULL);
}
//*******************************************************************************
BOOL MethodDesc::IsPointingToPrestub()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
if (!HasStableEntryPoint())
{
if (IsVersionableWithVtableSlotBackpatch())
{
return GetMethodEntryPoint() == GetTemporaryEntryPoint();
}
return TRUE;
}
if (!HasPrecode())
return FALSE;
return GetPrecode()->IsPointingToPrestub();
}
//*******************************************************************************
void MethodDesc::Reset()
{
WRAPPER_NO_CONTRACT;
// This method is not thread-safe since we are updating
// different pieces of data non-atomically.
// Use this only if you can guarantee thread-safety somehow.
_ASSERTE(IsEnCMethod() || // The process is frozen by the debugger
IsDynamicMethod() || // These are used in a very restricted way
GetLoaderModule()->IsReflection()); // Rental methods
// Reset any flags relevant to the old code
ClearFlagsOnUpdate();
if (HasPrecode())
{
GetPrecode()->Reset();
}
else
{
// We should get here only for rental methods
_ASSERTE(GetLoaderModule()->IsReflection());
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint | enum_flag2_HasPrecode, FALSE);
*GetAddrOfSlot() = GetTemporaryEntryPoint();
}
if (HasNativeCodeSlot())
{
*GetAddrOfNativeCodeSlot() = NULL;
}
_ASSERTE(!HasNativeCode());
}
//*******************************************************************************
Dictionary* MethodDesc::GetMethodDictionary()
{
WRAPPER_NO_CONTRACT;
return
(GetClassification() == mcInstantiated)
? (Dictionary*) (AsInstantiatedMethodDesc()->IMD_GetMethodDictionary())
: NULL;
}
//*******************************************************************************
DictionaryLayout* MethodDesc::GetDictionaryLayout()
{
WRAPPER_NO_CONTRACT;
return
((GetClassification() == mcInstantiated) && !IsUnboxingStub())
? AsInstantiatedMethodDesc()->IMD_GetDictionaryLayout()
: NULL;
}
#endif // !DACCESS_COMPILE
//*******************************************************************************
MethodImpl *MethodDesc::GetMethodImpl()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
FORBID_FAULT;
PRECONDITION(HasMethodImplSlot());
SUPPORTS_DAC;
}
CONTRACTL_END
SIZE_T size = s_ClassificationSizeTable[m_wFlags & (mdcClassification | mdcHasNonVtableSlot)];
return PTR_MethodImpl(dac_cast<TADDR>(this) + size);
}
#ifndef DACCESS_COMPILE
//*******************************************************************************
BOOL MethodDesc::RequiresMethodDescCallingConvention(BOOL fEstimateForChunk /*=FALSE*/)
{
LIMITED_METHOD_CONTRACT;
// Interop marshaling is implemented using shared stubs
if (IsNDirect() || IsComPlusCall() || IsGenericComPlusCall())
return TRUE;
return FALSE;
}
//*******************************************************************************
BOOL MethodDesc::RequiresStableEntryPoint(BOOL fEstimateForChunk /*=FALSE*/)
{
LIMITED_METHOD_CONTRACT;
// Create precodes for versionable methods
if (IsVersionableWithPrecode())
return TRUE;
// Create precodes for edit and continue to make methods updateable
if (IsEnCMethod() || IsEnCAddedMethod())
return TRUE;
// Precreate precodes for LCG methods so we do not leak memory when the method descs are recycled
if (IsLCGMethod())
return TRUE;
if (fEstimateForChunk)
{
// Make a best guess based on the method table of the chunk.
if (IsInterface())
return TRUE;
}
else
{
// Wrapper stubs are stored in the generic dictionary, which is not backpatched
if (IsWrapperStub())
return TRUE;
// TODO: Can we avoid early allocation of precodes for interfaces and cominterop?
if ((IsInterface() && !IsStatic() && IsVirtual()) || IsComPlusCall())
return TRUE;
}
return FALSE;
}
#endif // !DACCESS_COMPILE
//*******************************************************************************
BOOL MethodDesc::MayHaveNativeCode()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END
// The code flow of this method should roughly match the code flow of MethodDesc::DoPrestub.
switch (GetClassification())
{
case mcIL: // IsIL() case. Handled below.
break;
case mcFCall: // FCalls do not have real native code.
return FALSE;
case mcNDirect: // NDirect methods never have native code (note that an NDirect method
return FALSE; // does not appear as having native code even for stubs-as-IL)
case mcEEImpl: // Runtime provided implementation. No native code.
return FALSE;
case mcArray: // Runtime provided implementation. No native code.
return FALSE;
case mcInstantiated: // IsIL() case. Handled below.
break;
#ifdef FEATURE_COMINTEROP
case mcComInterop: // Generated stub. No native code.
return FALSE;
#endif // FEATURE_COMINTEROP
case mcDynamic: // LCG or stub-as-il.
return TRUE;
default:
_ASSERTE(!"Unknown classification");
}
_ASSERTE(IsIL());
if (IsWrapperStub() || ContainsGenericVariables() || IsAbstract())
{
return FALSE;
}
return TRUE;
}
//*******************************************************************************
void MethodDesc::CheckRestore(ClassLoadLevel level)
{
STATIC_CONTRACT_THROWS;
STATIC_CONTRACT_GC_TRIGGERS;
STATIC_CONTRACT_FAULT;
if (!GetMethodTable()->IsFullyLoaded())
{
g_IBCLogger.LogMethodDescAccess(this);
if (GetClassification() == mcInstantiated)
{
#ifndef DACCESS_COMPILE
InstantiatedMethodDesc *pIMD = AsInstantiatedMethodDesc();
// First restore method table pointer in singleton chunk;
// it might be out-of-module
ClassLoader::EnsureLoaded(TypeHandle(GetMethodTable()), level);
g_IBCLogger.LogMethodDescWriteAccess(this);
pIMD->m_wFlags2 = pIMD->m_wFlags2 & ~InstantiatedMethodDesc::Unrestored;
if (ETW_PROVIDER_ENABLED(MICROSOFT_WINDOWS_DOTNETRUNTIME_PROVIDER))
{
ETW::MethodLog::MethodRestored(this);
}
#else // DACCESS_COMPILE
DacNotImpl();
#endif // DACCESS_COMPILE
}
else if (IsILStub()) // the only stored-sig MD type that uses ET_INTERNAL
{
ClassLoader::EnsureLoaded(TypeHandle(GetMethodTable()), level);
#ifndef DACCESS_COMPILE
if (ETW_PROVIDER_ENABLED(MICROSOFT_WINDOWS_DOTNETRUNTIME_PROVIDER))
{
ETW::MethodLog::MethodRestored(this);
}
#else // DACCESS_COMPILE
DacNotImpl();
#endif // DACCESS_COMPILE
}
else
{
ClassLoader::EnsureLoaded(TypeHandle(GetMethodTable()), level);
}
}
}
// static
MethodDesc* MethodDesc::GetMethodDescFromStubAddr(PCODE addr, BOOL fSpeculative /*=FALSE*/)
{
CONTRACT(MethodDesc *)
{
GC_NOTRIGGER;
NOTHROW;
}
CONTRACT_END;
MethodDesc * pMD = NULL;
#ifdef HAS_COMPACT_ENTRYPOINTS
if (MethodDescChunk::IsCompactEntryPointAtAddress(addr))
{
pMD = MethodDescChunk::GetMethodDescFromCompactEntryPoint(addr, fSpeculative);
RETURN(pMD);
}
#endif // HAS_COMPACT_ENTRYPOINTS
// Otherwise this must be some kind of precode
//
Precode* pPrecode = Precode::GetPrecodeFromEntryPoint(addr, fSpeculative);
PREFIX_ASSUME(fSpeculative || (pPrecode != NULL));
if (pPrecode != NULL)
{
pMD = pPrecode->GetMethodDesc(fSpeculative);
RETURN(pMD);
}
RETURN(NULL); // Not found
}
#ifdef HAS_COMPACT_ENTRYPOINTS
#if defined(TARGET_X86)
#include <pshpack1.h>
static const struct CentralJumpCode {
BYTE m_movzxEAX[3];
BYTE m_shlEAX[3];
BYTE m_addEAX[1];
MethodDesc* m_pBaseMD;
BYTE m_jmp[1];
INT32 m_rel32;
inline void Setup(CentralJumpCode* pCodeRX, MethodDesc* pMD, PCODE target, LoaderAllocator *pLoaderAllocator) {
WRAPPER_NO_CONTRACT;
m_pBaseMD = pMD;
m_rel32 = rel32UsingJumpStub(&pCodeRX->m_rel32, target, pMD, pLoaderAllocator);
}
inline BOOL CheckTarget(TADDR target) {
LIMITED_METHOD_CONTRACT;
TADDR addr = rel32Decode(PTR_HOST_MEMBER_TADDR(CentralJumpCode, this, m_rel32));
return (addr == target);
}
}
c_CentralJumpCode = {
{ 0x0F, 0xB6, 0xC0 }, // movzx eax,al
{ 0xC1, 0xE0, MethodDesc::ALIGNMENT_SHIFT }, // shl eax, MethodDesc::ALIGNMENT_SHIFT
{ 0x05 }, NULL, // add eax, pBaseMD
{ 0xE9 }, 0 // jmp PreStub
};
#include <poppack.h>
#elif defined(TARGET_ARM)
#include <pshpack1.h>
struct CentralJumpCode {
BYTE m_ldrPC[4];
BYTE m_short[2];
MethodDescChunk *m_pChunk;
PCODE m_target;
inline void Setup(PCODE target, MethodDescChunk *pChunk) {
WRAPPER_NO_CONTRACT;
m_target = target;
m_pChunk = pChunk;
}
inline BOOL CheckTarget(TADDR target) {
WRAPPER_NO_CONTRACT;
return ((TADDR)m_target == target);
}
}
c_CentralJumpCode = {
{ 0xDF, 0xF8, 0x08, 0xF0 }, // ldr pc, =pTarget
{ 0x00, 0x00 }, // short offset for alignment
0, // pChunk
0 // pTarget
};
#include <poppack.h>
#else
#error Unsupported platform
#endif
typedef DPTR(struct CentralJumpCode) PTR_CentralJumpCode;
#define TEP_CENTRAL_JUMP_SIZE sizeof(c_CentralJumpCode)
static_assert_no_msg((TEP_CENTRAL_JUMP_SIZE & 1) == 0);
#define TEP_ENTRY_SIZE 4
#ifdef TARGET_ARM
#define TEP_HALF_ENTRY_SIZE (TEP_ENTRY_SIZE / 2)
// Compact entry point on arm consists of two thumb instructions:
// mov r12, pc
// b CentralJumpCode
// First instruction 0x46fc
#define TEP_ENTRY_INSTR1_BYTE1 0xFC
#define TEP_ENTRY_INSTR1_BYTE2 0x46
// Mask for unconditional branch opcode
#define TEP_ENTRY_INSTR2_MASK1 0xE0
// Mask for opcode
#define TEP_ENTRY_INSTR2_MASK2 0xF8
// Bit used for ARM to identify compact entry points
#define COMPACT_ENTRY_ARM_CODE 0x2
/* static */ int MethodDescChunk::GetCompactEntryPointMaxCount ()
{
LIMITED_METHOD_DAC_CONTRACT;
return MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB / TEP_ENTRY_SIZE;
}
// Get the offset from the start of the current compact entry point to the CentralJumpCode
static uint16_t DecodeOffsetFromBranchToCentralJump (uint16_t instr)
{
int16_t offset = decodeUnconditionalBranchThumb ((LPBYTE) &instr);
offset += PC_REG_RELATIVE_OFFSET + TEP_HALF_ENTRY_SIZE;
_ASSERTE (offset >= TEP_ENTRY_SIZE && (offset % TEP_ENTRY_SIZE == 0));
return (uint16_t) offset;
}
#ifndef DACCESS_COMPILE
// Encode branch instruction to central jump for current compact entry point
static uint16_t EncodeBranchToCentralJump (int16_t offset)
{
_ASSERTE (offset >= 0 && (offset % TEP_ENTRY_SIZE == 0));
offset += TEP_HALF_ENTRY_SIZE - PC_REG_RELATIVE_OFFSET;
uint16_t instr;
emitUnconditionalBranchThumb ((LPBYTE) &instr, offset);
return instr;
}
#endif // DACCESS_COMPILE
#else // TARGET_ARM
#define TEP_MAX_BEFORE_INDEX (1 + (127 / TEP_ENTRY_SIZE))
#define TEP_MAX_BLOCK_INDEX (TEP_MAX_BEFORE_INDEX + (128 - TEP_CENTRAL_JUMP_SIZE) / TEP_ENTRY_SIZE)
#define TEP_FULL_BLOCK_SIZE (TEP_MAX_BLOCK_INDEX * TEP_ENTRY_SIZE + TEP_CENTRAL_JUMP_SIZE)
#endif // TARGET_ARM
BOOL MethodDescChunk::IsCompactEntryPointAtAddress(PCODE addr)
{
LIMITED_METHOD_DAC_CONTRACT;
#if defined(TARGET_X86) || defined(TARGET_AMD64)
// Compact entrypoints start at odd addresses
return (addr & 1) != 0;
#elif defined(TARGET_ARM)
// Compact entrypoints start at odd addresses (thumb) with second bit set to 1
uint8_t compactEntryPointMask = THUMB_CODE | COMPACT_ENTRY_ARM_CODE;
return (addr & compactEntryPointMask) == compactEntryPointMask;
#else
#error Unsupported platform
#endif
}
//*******************************************************************************
/* static */ MethodDesc* MethodDescChunk::GetMethodDescFromCompactEntryPoint(PCODE addr, BOOL fSpeculative /*=FALSE*/)
{
LIMITED_METHOD_CONTRACT;
#ifdef DACCESS_COMPILE
// Always use speculative checks with DAC
fSpeculative = TRUE;
#endif
// Always do consistency check in debug
if (fSpeculative INDEBUG(|| TRUE))
{
#ifdef TARGET_ARM
TADDR instrCodeAddr = PCODEToPINSTR(addr);
if (!IsCompactEntryPointAtAddress(addr) ||
*PTR_BYTE(instrCodeAddr) != TEP_ENTRY_INSTR1_BYTE1 ||
*PTR_BYTE(instrCodeAddr+1) != TEP_ENTRY_INSTR1_BYTE2)
#else // TARGET_ARM
if ((addr & 3) != 1 ||
*PTR_BYTE(addr) != X86_INSTR_MOV_AL ||
*PTR_BYTE(addr+2) != X86_INSTR_JMP_REL8)
#endif // TARGET_ARM
{
if (fSpeculative) return NULL;
_ASSERTE(!"Unexpected code in temporary entrypoint");
}
}
#ifdef TARGET_ARM
// On ARM compact entry points are thumb
_ASSERTE ((addr & THUMB_CODE) != 0);
addr = addr - THUMB_CODE;
// Get offset for CentralJumpCode from current compact entry point
PTR_UINT16 pBranchInstr = (PTR_UINT16(addr)) + 1;
uint16_t offset = DecodeOffsetFromBranchToCentralJump (*pBranchInstr);
TADDR centralJump = addr + offset;
int index = (centralJump - addr - TEP_ENTRY_SIZE) / TEP_ENTRY_SIZE;
#else // TARGET_ARM
int index = *PTR_BYTE(addr+1);
TADDR centralJump = addr + 4 + *PTR_SBYTE(addr+3);
#endif // TARGET_ARM
CentralJumpCode* pCentralJumpCode = PTR_CentralJumpCode(centralJump);
// Always do consistency check in debug
if (fSpeculative INDEBUG(|| TRUE))
{
SIZE_T i;
for (i = 0; i < TEP_CENTRAL_JUMP_SIZE; i++)
{
BYTE b = ((BYTE*)&c_CentralJumpCode)[i];
if (b != 0 && b != *PTR_BYTE(centralJump+i))
{
if (fSpeculative) return NULL;
_ASSERTE(!"Unexpected code in temporary entrypoint");
}
}
#ifdef TARGET_ARM
_ASSERTE_IMPL(pCentralJumpCode->CheckTarget(GetPreStubCompactARMEntryPoint()));
#else // TARGET_ARM
_ASSERTE_IMPL(pCentralJumpCode->CheckTarget(GetPreStubEntryPoint()));
#endif // TARGET_ARM
}
#ifdef TARGET_ARM
// Go through all MethodDescs in the MethodDescChunk and find the one with the required index
PTR_MethodDescChunk pChunk = *((DPTR(PTR_MethodDescChunk))(centralJump + offsetof(CentralJumpCode, m_pChunk)));
TADDR pMD = PTR_HOST_TO_TADDR (pChunk->GetFirstMethodDesc ());
_ASSERTE (index >= 0 && index < ((int) pChunk->GetCount ()));
index = ((int) pChunk->GetCount ()) - 1 - index;
SIZE_T totalSize = 0;
int curIndex = 0;
while (index != curIndex)
{
SIZE_T sizeCur = (PTR_MethodDesc (pMD))->SizeOf ();
totalSize += sizeCur;
pMD += sizeCur;
++curIndex;
}
return PTR_MethodDesc (pMD);
#else // TARGET_ARM
return PTR_MethodDesc((TADDR)pCentralJumpCode->m_pBaseMD + index * MethodDesc::ALIGNMENT);
#endif // TARGET_ARM
}
//*******************************************************************************
SIZE_T MethodDescChunk::SizeOfCompactEntryPoints(int count)
{
LIMITED_METHOD_DAC_CONTRACT;
#ifdef TARGET_ARM
return COMPACT_ENTRY_ARM_CODE + count * TEP_ENTRY_SIZE + TEP_CENTRAL_JUMP_SIZE;
#else // TARGET_ARM
int fullBlocks = count / TEP_MAX_BLOCK_INDEX;
int remainder = count % TEP_MAX_BLOCK_INDEX;
return 1 + (fullBlocks * TEP_FULL_BLOCK_SIZE) +
(remainder * TEP_ENTRY_SIZE) + ((remainder != 0) ? TEP_CENTRAL_JUMP_SIZE : 0);
#endif // TARGET_ARM
}
#ifndef DACCESS_COMPILE
TADDR MethodDescChunk::AllocateCompactEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker)
{
CONTRACTL {
THROWS;
GC_NOTRIGGER;
} CONTRACTL_END;
int count = GetCount();
SIZE_T size = SizeOfCompactEntryPoints(count);
TADDR temporaryEntryPoints = (TADDR)pamTracker->Track(pLoaderAllocator->GetPrecodeHeap()->AllocAlignedMem(size, sizeof(TADDR)));
ExecutableWriterHolder<void> temporaryEntryPointsWriterHolder((void *)temporaryEntryPoints, size);
size_t rxOffset = temporaryEntryPoints - (TADDR)temporaryEntryPointsWriterHolder.GetRW();
#ifdef TARGET_ARM
BYTE* p = (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + COMPACT_ENTRY_ARM_CODE;
int relOffset = count * TEP_ENTRY_SIZE - TEP_ENTRY_SIZE; // relative offset for the short jump
_ASSERTE (relOffset < MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB);
#else // TARGET_ARM
// make the temporary entrypoints unaligned, so they are easy to identify
BYTE* p = (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + 1;
int indexInBlock = TEP_MAX_BLOCK_INDEX; // recompute relOffset in first iteration
int relOffset = 0; // relative offset for the short jump
#endif // TARGET_ARM
MethodDesc * pBaseMD = 0; // first MethodDesc of the current block
MethodDesc * pMD = GetFirstMethodDesc();
for (int index = 0; index < count; index++)
{
#ifdef TARGET_ARM
uint8_t *pMovInstrByte1 = (uint8_t *)p;
uint8_t *pMovInstrByte2 = (uint8_t *)p+1;
uint16_t *pBranchInstr = ((uint16_t *)p)+1;
*pMovInstrByte1 = TEP_ENTRY_INSTR1_BYTE1;
*pMovInstrByte2 = TEP_ENTRY_INSTR1_BYTE2;
*pBranchInstr = EncodeBranchToCentralJump ((int16_t) relOffset);
p += TEP_ENTRY_SIZE;
#else // TARGET_ARM
if (indexInBlock == TEP_MAX_BLOCK_INDEX)
{
relOffset = (min(count - index, TEP_MAX_BEFORE_INDEX) - 1) * TEP_ENTRY_SIZE;
indexInBlock = 0;
pBaseMD = pMD;
}
*(p+0) = X86_INSTR_MOV_AL;
int methodDescIndex = pMD->GetMethodDescIndex() - pBaseMD->GetMethodDescIndex();
_ASSERTE(FitsInU1(methodDescIndex));
*(p+1) = (BYTE)methodDescIndex;
*(p+2) = X86_INSTR_JMP_REL8;
_ASSERTE(FitsInI1(relOffset));
*(p+3) = (BYTE)relOffset;
p += TEP_ENTRY_SIZE; static_assert_no_msg(TEP_ENTRY_SIZE == 4);
if (relOffset == 0)
{
CentralJumpCode* pCode = (CentralJumpCode*)p;
CentralJumpCode* pCodeRX = (CentralJumpCode*)(p + rxOffset);
memcpy(pCode, &c_CentralJumpCode, TEP_CENTRAL_JUMP_SIZE);
pCode->Setup(pCodeRX, pBaseMD, GetPreStubEntryPoint(), pLoaderAllocator);
p += TEP_CENTRAL_JUMP_SIZE;
relOffset -= TEP_CENTRAL_JUMP_SIZE;
}
indexInBlock++;
#endif // TARGET_ARM
relOffset -= TEP_ENTRY_SIZE;
pMD = (MethodDesc *)((BYTE *)pMD + pMD->SizeOf());
}
#ifdef TARGET_ARM
CentralJumpCode* pCode = (CentralJumpCode*)p;
memcpy(pCode, &c_CentralJumpCode, TEP_CENTRAL_JUMP_SIZE);
pCode->Setup (GetPreStubCompactARMEntryPoint(), this);
_ASSERTE(p + TEP_CENTRAL_JUMP_SIZE == (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + size);
#else // TARGET_ARM
_ASSERTE(p == (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + size);
#endif // TARGET_ARM
ClrFlushInstructionCache((LPVOID)temporaryEntryPoints, size);
SetHasCompactEntryPoints();
return temporaryEntryPoints;
}
#endif // !DACCESS_COMPILE
#endif // HAS_COMPACT_ENTRYPOINTS
//*******************************************************************************
PCODE MethodDescChunk::GetTemporaryEntryPoint(int index)
{
LIMITED_METHOD_CONTRACT;
#ifdef HAS_COMPACT_ENTRYPOINTS
if (HasCompactEntryPoints())
{
#ifdef TARGET_ARM
return GetTemporaryEntryPoints() + COMPACT_ENTRY_ARM_CODE + THUMB_CODE + index * TEP_ENTRY_SIZE;
#else // TARGET_ARM
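// Compute the address of the index-th compact entrypoint: skip whole blocks first,
// then entries within the last block, plus the central jump code if the entry lies past it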
int fullBlocks = index / TEP_MAX_BLOCK_INDEX;
int remainder = index % TEP_MAX_BLOCK_INDEX;
return GetTemporaryEntryPoints() + 1 + (fullBlocks * TEP_FULL_BLOCK_SIZE) +
(remainder * TEP_ENTRY_SIZE) + ((remainder >= TEP_MAX_BEFORE_INDEX) ? TEP_CENTRAL_JUMP_SIZE : 0);
#endif // TARGET_ARM
}
#endif // HAS_COMPACT_ENTRYPOINTS
return Precode::GetPrecodeForTemporaryEntryPoint(GetTemporaryEntryPoints(), index)->GetEntryPoint();
}
PCODE MethodDesc::GetTemporaryEntryPoint()
{
CONTRACTL
{
NOTHROW;
GC_NOTRIGGER;
MODE_ANY;
}
CONTRACTL_END;
MethodDescChunk* pChunk = GetMethodDescChunk();
int lo = 0, hi = pChunk->GetCount() - 1;
// Find the temporary entrypoint in the chunk by binary search
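// (MethodDescs in a chunk are laid out in the same order as their temporary entrypoints,
// so comparing MethodDesc addresses gives a valid ordering for the search)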
while (lo < hi)
{
int mid = (lo + hi) / 2;
TADDR pEntryPoint = pChunk->GetTemporaryEntryPoint(mid);
MethodDesc * pMD = MethodDesc::GetMethodDescFromStubAddr(pEntryPoint);
if (PTR_HOST_TO_TADDR(this) == PTR_HOST_TO_TADDR(pMD))
return pEntryPoint;
if (PTR_HOST_TO_TADDR(this) > PTR_HOST_TO_TADDR(pMD))
lo = mid + 1;
else
hi = mid - 1;
}
_ASSERTE(lo == hi);
TADDR pEntryPoint = pChunk->GetTemporaryEntryPoint(lo);
#ifdef _DEBUG
MethodDesc * pMD = MethodDesc::GetMethodDescFromStubAddr(pEntryPoint);
_ASSERTE(PTR_HOST_TO_TADDR(this) == PTR_HOST_TO_TADDR(pMD));
#endif
return pEntryPoint;
}
#ifndef DACCESS_COMPILE
//*******************************************************************************
void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker)
{
WRAPPER_NO_CONTRACT;
GetMethodDescChunk()->EnsureTemporaryEntryPointsCreated(pLoaderAllocator, pamTracker);
PTR_PCODE pSlot = GetAddrOfSlot();
_ASSERTE(*pSlot == NULL);
*pSlot = GetTemporaryEntryPoint();
if (RequiresStableEntryPoint())
{
// The rest of the system assumes that certain methods always have stable entrypoints.
// Create them now.
GetOrCreatePrecode();
}
}
//*******************************************************************************
void MethodDescChunk::CreateTemporaryEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(GetTemporaryEntryPoints() == NULL);
TADDR temporaryEntryPoints = Precode::AllocateTemporaryEntryPoints(this, pLoaderAllocator, pamTracker);
#ifdef HAS_COMPACT_ENTRYPOINTS
// Precodes are allocated only if they provide a more compact representation or if they are required
if (temporaryEntryPoints == NULL)
{
temporaryEntryPoints = AllocateCompactEntryPoints(pLoaderAllocator, pamTracker);
}
#endif // HAS_COMPACT_ENTRYPOINTS
*(((TADDR *)this)-1) = temporaryEntryPoints;
_ASSERTE(GetTemporaryEntryPoints() != NULL);
}
//*******************************************************************************
void MethodDesc::InterlockedUpdateFlags2(BYTE bMask, BOOL fSet)
{
WRAPPER_NO_CONTRACT;
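// m_bFlags2 is the last byte of an aligned ULONG (see the static_assert below), so the
// interlocked update is done on the containing ULONG, shifting the mask to the byte's
// position on little-endian platforms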
ULONG* pLong = (ULONG*)(&m_bFlags2 - 3);
static_assert_no_msg(offsetof(MethodDesc, m_bFlags2) % sizeof(LONG) == 3);
#if BIGENDIAN
if (fSet)
FastInterlockOr(pLong, (ULONG)bMask);
else
FastInterlockAnd(pLong, ~(ULONG)bMask);
#else // !BIGENDIAN
if (fSet)
FastInterlockOr(pLong, (ULONG)bMask << (3 * 8));
else
FastInterlockAnd(pLong, ~((ULONG)bMask << (3 * 8)));
#endif // !BIGENDIAN
}
//*******************************************************************************
Precode* MethodDesc::GetOrCreatePrecode()
{
WRAPPER_NO_CONTRACT;
_ASSERTE(!IsVersionableWithVtableSlotBackpatch());
if (HasPrecode())
{
return GetPrecode();
}
PTR_PCODE pSlot = GetAddrOfSlot();
PCODE tempEntry = GetTemporaryEntryPoint();
PrecodeType requiredType = GetPrecodeType();
PrecodeType availableType = PRECODE_INVALID;
if (!GetMethodDescChunk()->HasCompactEntryPoints())
{
availableType = Precode::GetPrecodeFromEntryPoint(tempEntry)->GetType();
}
// Allocate the precode if necessary
if (requiredType != availableType)
{
// code:Precode::AllocateTemporaryEntryPoints should always create precode of the right type for dynamic methods.
// If we took this path for dynamic methods, the precode may leak since we may allocate it in domain-neutral loader heap.
_ASSERTE(!IsLCGMethod());
AllocMemTracker amt;
Precode* pPrecode = Precode::Allocate(requiredType, this, GetLoaderAllocator(), &amt);
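// Publish the precode only if no other thread updated the slot first; if we lose the
// race, the AllocMemTracker backs out the allocation when it goes out of scope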
if (FastInterlockCompareExchangePointer(pSlot, pPrecode->GetEntryPoint(), tempEntry) == tempEntry)
amt.SuppressRelease();
}
// Set the flags atomically
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint | enum_flag2_HasPrecode, TRUE);
return Precode::GetPrecodeFromEntryPoint(*pSlot);
}
bool MethodDesc::DetermineAndSetIsEligibleForTieredCompilation()
{
WRAPPER_NO_CONTRACT;
#ifdef FEATURE_TIERED_COMPILATION
#ifndef FEATURE_CODE_VERSIONING
#error Tiered compilation requires code versioning
#endif
// Keep in-sync with MethodTableBuilder::NeedsNativeCodeSlot(bmtMDMethod * pMDMethod)
// to ensure native slots are available where needed.
if (
// Policy
g_pConfig->TieredCompilation() &&
// Functional requirement - The NativeCodeSlot is required to hold the code pointer for the default code version because
// the method's entry point slot will point to a precode or to the current code entry point
HasNativeCodeSlot() &&
// Functional requirement - These methods have no IL that could be optimized
!IsWrapperStub() &&
// Functional requirement
CodeVersionManager::IsMethodSupported(this) &&
// Policy - If QuickJit is disabled and the module does not have any pregenerated code, the method would effectively not
// be tiered currently, so make the method ineligible for tiering to avoid some unnecessary overhead
(g_pConfig->TieredCompilation_QuickJit() || GetModule()->IsReadyToRun()) &&
// Policy - Generating optimized code is not disabled
!IsJitOptimizationDisabled() &&
// Policy - Tiered compilation is not disabled by the profiler
!CORProfilerDisableTieredCompilation())
{
m_bFlags2 |= enum_flag2_IsEligibleForTieredCompilation;
_ASSERTE(IsVersionable());
return true;
}
#endif
return false;
}
#endif // !DACCESS_COMPILE
bool MethodDesc::IsJitOptimizationDisabled()
{
WRAPPER_NO_CONTRACT;
return
g_pConfig->JitMinOpts() ||
#ifdef _DEBUG
g_pConfig->GenDebuggableCode() ||
#endif
CORDisableJITOptimizations(GetModule()->GetDebuggerInfoBits()) ||
(!IsNoMetadata() && IsMiNoOptimization(GetImplAttrs()));
}
#ifndef DACCESS_COMPILE
void MethodDesc::RecordAndBackpatchEntryPointSlot(
LoaderAllocator *slotLoaderAllocator, // the loader allocator from which the slot's memory is allocated
TADDR slot,
EntryPointSlots::SlotType slotType)
{
WRAPPER_NO_CONTRACT;
GCX_PREEMP();
LoaderAllocator *mdLoaderAllocator = GetLoaderAllocator();
MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCCoop slotBackpatchLockHolder;
RecordAndBackpatchEntryPointSlot_Locked(
mdLoaderAllocator,
slotLoaderAllocator,
slot,
slotType,
GetEntryPointToBackpatch_Locked());
}
// This function tries to record a slot that would contain an entry point for the method, and backpatches the slot to contain
// method's current entry point. Once recorded, changes to the entry point due to tiering will cause the slot to be backpatched
// as necessary.
void MethodDesc::RecordAndBackpatchEntryPointSlot_Locked(
LoaderAllocator *mdLoaderAllocator,
LoaderAllocator *slotLoaderAllocator, // the loader allocator from which the slot's memory is allocated
TADDR slot,
EntryPointSlots::SlotType slotType,
PCODE currentEntryPoint)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(MethodDescBackpatchInfoTracker::IsLockOwnedByCurrentThread());
_ASSERTE(mdLoaderAllocator != nullptr);
_ASSERTE(mdLoaderAllocator == GetLoaderAllocator());
_ASSERTE(slotLoaderAllocator != nullptr);
_ASSERTE(slot != NULL);
_ASSERTE(slotType < EntryPointSlots::SlotType_Count);
_ASSERTE(MayHaveEntryPointSlotsToBackpatch());
// The specified current entry point must actually be *current* in the sense that it must have been retrieved inside the
// lock, such that a recorded slot is guaranteed to point to the entry point at the time at which it was recorded, in order
// to synchronize with backpatching in MethodDesc::BackpatchEntryPointSlots(). If a slot pointing to an older entry point
// were to be recorded due to concurrency issues, it would not get backpatched to point to the more recent, actually
// current, entry point until another entry point change, which may never happen.
_ASSERTE(currentEntryPoint == GetEntryPointToBackpatch_Locked());
MethodDescBackpatchInfoTracker *backpatchTracker = mdLoaderAllocator->GetMethodDescBackpatchInfoTracker();
backpatchTracker->AddSlotAndPatch_Locked(this, slotLoaderAllocator, slot, slotType, currentEntryPoint);
}
FORCEINLINE bool MethodDesc::TryBackpatchEntryPointSlots(
PCODE entryPoint,
bool isPrestubEntryPoint,
bool onlyFromPrestubEntryPoint)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(MayHaveEntryPointSlotsToBackpatch());
_ASSERTE(entryPoint != NULL);
_ASSERTE(isPrestubEntryPoint == (entryPoint == GetPrestubEntryPointToBackpatch()));
_ASSERTE(!isPrestubEntryPoint || !onlyFromPrestubEntryPoint);
_ASSERTE(MethodDescBackpatchInfoTracker::IsLockOwnedByCurrentThread());
LoaderAllocator *mdLoaderAllocator = GetLoaderAllocator();
MethodDescBackpatchInfoTracker *backpatchInfoTracker = mdLoaderAllocator->GetMethodDescBackpatchInfoTracker();
// Get the entry point to backpatch inside the lock to synchronize with backpatching in MethodDesc::DoBackpatch()
PCODE previousEntryPoint = GetEntryPointToBackpatch_Locked();
if (previousEntryPoint == entryPoint)
{
return true;
}
if (onlyFromPrestubEntryPoint && previousEntryPoint != GetPrestubEntryPointToBackpatch())
{
return false;
}
if (IsVersionableWithVtableSlotBackpatch())
{
// Backpatch the func ptr stub if it was created
FuncPtrStubs *funcPtrStubs = mdLoaderAllocator->GetFuncPtrStubsNoCreate();
if (funcPtrStubs != nullptr)
{
Precode *funcPtrPrecode = funcPtrStubs->Lookup(this);
if (funcPtrPrecode != nullptr)
{
if (isPrestubEntryPoint)
{
funcPtrPrecode->ResetTargetInterlocked();
}
else
{
funcPtrPrecode->SetTargetInterlocked(entryPoint, FALSE /* fOnlyRedirectFromPrestub */);
}
}
}
}
backpatchInfoTracker->Backpatch_Locked(this, entryPoint);
// Set the entry point to backpatch inside the lock to synchronize with backpatching in MethodDesc::DoBackpatch(), and set
// it last in case there are exceptions above, as setting the entry point indicates that all recorded slots have been
// backpatched
SetEntryPointToBackpatch_Locked(entryPoint);
return true;
}
void MethodDesc::TrySetInitialCodeEntryPointForVersionableMethod(
PCODE entryPoint,
bool mayHaveEntryPointSlotsToBackpatch)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(entryPoint != NULL);
_ASSERTE(IsVersionable());
_ASSERTE(mayHaveEntryPointSlotsToBackpatch == MayHaveEntryPointSlotsToBackpatch());
if (mayHaveEntryPointSlotsToBackpatch)
{
TryBackpatchEntryPointSlotsFromPrestub(entryPoint);
}
else
{
_ASSERTE(IsVersionableWithPrecode());
GetOrCreatePrecode()->SetTargetInterlocked(entryPoint, TRUE /* fOnlyRedirectFromPrestub */);
}
}
void MethodDesc::SetCodeEntryPoint(PCODE entryPoint)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(entryPoint != NULL);
if (MayHaveEntryPointSlotsToBackpatch())
{
BackpatchEntryPointSlots(entryPoint);
}
else if (IsVersionable())
{
_ASSERTE(IsVersionableWithPrecode());
GetOrCreatePrecode()->SetTargetInterlocked(entryPoint, FALSE /* fOnlyRedirectFromPrestub */);
// SetTargetInterlocked() would return false if it lost the race with another thread. That is fine, this thread
// can continue assuming it was successful, similarly to it successfully updating the target and another thread
// updating the target again shortly afterwards.
}
else if (HasPrecode())
{
GetPrecode()->SetTargetInterlocked(entryPoint);
}
else if (!HasStableEntryPoint())
{
SetStableEntryPointInterlocked(entryPoint);
}
}
void MethodDesc::ResetCodeEntryPoint()
{
WRAPPER_NO_CONTRACT;
_ASSERTE(IsVersionable());
if (MayHaveEntryPointSlotsToBackpatch())
{
BackpatchToResetEntryPointSlots();
return;
}
_ASSERTE(IsVersionableWithPrecode());
if (HasPrecode())
{
GetPrecode()->ResetTargetInterlocked();
}
}
void MethodDesc::ResetCodeEntryPointForEnC()
{
WRAPPER_NO_CONTRACT;
_ASSERTE(!IsVersionable());
_ASSERTE(!IsVersionableWithPrecode());
_ASSERTE(!MayHaveEntryPointSlotsToBackpatch());
if (HasPrecode())
{
GetPrecode()->ResetTargetInterlocked();
}
if (HasNativeCodeSlot())
{
*GetAddrOfNativeCodeSlot() = NULL;
}
}
//*******************************************************************************
BOOL MethodDesc::SetNativeCodeInterlocked(PCODE addr, PCODE pExpected /*=NULL*/)
{
CONTRACTL {
THROWS;
GC_NOTRIGGER;
} CONTRACTL_END;
_ASSERTE(!IsDefaultInterfaceMethod() || HasNativeCodeSlot());
if (HasNativeCodeSlot())
{
#ifdef TARGET_ARM
_ASSERTE(IsThumbCode(addr) || (addr==NULL));
addr &= ~THUMB_CODE;
if (pExpected != NULL)
{
_ASSERTE(IsThumbCode(pExpected));
pExpected &= ~THUMB_CODE;
}
#endif
PTR_PCODE pSlot = GetAddrOfNativeCodeSlot();
NativeCodeSlot expected;
expected = *pSlot;
return FastInterlockCompareExchangePointer(reinterpret_cast<TADDR*>(pSlot),
(TADDR&)addr, (TADDR&)expected) == (TADDR&)expected;
}
_ASSERTE(pExpected == NULL);
return SetStableEntryPointInterlocked(addr);
}
//*******************************************************************************
void MethodDesc::SetMethodEntryPoint(PCODE addr)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(addr != NULL);
// Similarly to GetMethodEntryPoint(), it is up to the caller to ensure that calls to this function are appropriately
// synchronized. Currently, the only caller synchronizes with the following lock.
_ASSERTE(MethodDescBackpatchInfoTracker::IsLockOwnedByCurrentThread());
*GetAddrOfSlot() = addr;
}
//*******************************************************************************
BOOL MethodDesc::SetStableEntryPointInterlocked(PCODE addr)
{
CONTRACTL {
THROWS;
GC_NOTRIGGER;
} CONTRACTL_END;
_ASSERTE(!HasPrecode());
_ASSERTE(!IsVersionable());
PCODE pExpected = GetTemporaryEntryPoint();
PTR_PCODE pSlot = GetAddrOfSlot();
BOOL fResult = FastInterlockCompareExchangePointer(pSlot, addr, pExpected) == pExpected;
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint, TRUE);
return fResult;
}
BOOL NDirectMethodDesc::ComputeMarshalingRequired()
{
WRAPPER_NO_CONTRACT;
return NDirect::MarshalingRequired(this);
}
/**********************************************************************************/
// Forward declare the NDirectImportWorker function - See dllimport.cpp
EXTERN_C LPVOID STDCALL NDirectImportWorker(NDirectMethodDesc*);
void *NDirectMethodDesc::ResolveAndSetNDirectTarget(_In_ NDirectMethodDesc* pMD)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
PRECONDITION(CheckPointer(pMD));
}
CONTRACTL_END
// This build conditional is here due to dllimport.cpp
// not being relevant during the crossgen build.
LPVOID targetMaybe = NDirectImportWorker(pMD);
_ASSERTE(targetMaybe != nullptr);
pMD->SetNDirectTarget(targetMaybe);
return targetMaybe;
}
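// Returns FALSE unless the method is a P/Invoke marked to suppress the GC transition; in that case the native
// target is resolved, cached on the MethodDesc, and returned through 'ndirectTarget'.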
BOOL NDirectMethodDesc::TryResolveNDirectTargetForNoGCTransition(_In_ MethodDesc* pMD, _Out_ void** ndirectTarget)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
PRECONDITION(CheckPointer(pMD));
PRECONDITION(CheckPointer(ndirectTarget));
}
CONTRACTL_END
if (!pMD->ShouldSuppressGCTransition())
return FALSE;
_ASSERTE(pMD->IsNDirect());
*ndirectTarget = ResolveAndSetNDirectTarget((NDirectMethodDesc*)pMD);
return TRUE;
}
//*******************************************************************************
void NDirectMethodDesc::InterlockedSetNDirectFlags(WORD wFlags)
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
}
CONTRACTL_END
// Since InterlockedCompareExchange only works on ULONGs,
// we'll have to operate on the entire ULONG. Ugh.
WORD *pFlags = &ndirect.m_wFlags;
// Make sure that m_flags is aligned on a 4 byte boundary
_ASSERTE( ( ((size_t) pFlags) & (sizeof(ULONG)-1) ) == 0);
// Ensure we won't be reading or writing outside the bounds of the NDirectMethodDesc.
_ASSERTE((BYTE*)pFlags >= (BYTE*)this);
_ASSERTE((BYTE*)pFlags+sizeof(ULONG) <= (BYTE*)(this+1));
DWORD dwMask = 0;
// Set the flags in the mask
((WORD*)&dwMask)[0] |= wFlags;
// Now, slam all 32 bits atomically.
FastInterlockOr((DWORD*)pFlags, dwMask);
}
#ifdef TARGET_WINDOWS
FARPROC NDirectMethodDesc::FindEntryPointWithMangling(NATIVE_LIBRARY_HANDLE hMod, PTR_CUTF8 entryPointName)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END;
FARPROC pFunc = GetProcAddress(hMod, entryPointName);
#if defined(TARGET_X86)
if (pFunc)
{
return pFunc;
}
if (IsStdCall())
{
EnsureStackArgumentSize();
DWORD probedEntrypointNameLength = (DWORD)(strlen(entryPointName) + 1); // 1 for null terminator
int dstbufsize = (int)(sizeof(char) * (probedEntrypointNameLength + 10)); // 10 for stdcall mangling
LPSTR szProbedEntrypointName = ((LPSTR)_alloca(dstbufsize + 1));
szProbedEntrypointName[0] = '_';
strcpy_s(szProbedEntrypointName + 1, dstbufsize, entryPointName);
szProbedEntrypointName[probedEntrypointNameLength] = '\0'; // Add an extra '\0'.
UINT16 numParamBytesMangle = GetStackArgumentSize();
sprintf_s(szProbedEntrypointName + probedEntrypointNameLength, dstbufsize - probedEntrypointNameLength + 1, "@%lu", (ULONG)numParamBytesMangle);
pFunc = GetProcAddress(hMod, szProbedEntrypointName);
}
#endif
return pFunc;
}
FARPROC NDirectMethodDesc::FindEntryPointWithSuffix(NATIVE_LIBRARY_HANDLE hMod, PTR_CUTF8 entryPointName, char suffix)
{
// Allocate space for a copy of the entry point name.
DWORD entryPointWithSuffixLen = (DWORD)(strlen(entryPointName) + 1); // +1 for charset decorations
int dstbufsize = (int)(sizeof(char) * (entryPointWithSuffixLen + 1)); // +1 for the null terminator
LPSTR entryPointWithSuffix = ((LPSTR)_alloca(dstbufsize));
// Copy the name so we can mangle it.
strcpy_s(entryPointWithSuffix, dstbufsize, entryPointName);
entryPointWithSuffix[entryPointWithSuffixLen] = '\0'; // Null terminator
entryPointWithSuffix[entryPointWithSuffixLen - 1] = suffix; // Charset suffix
// Look for entry point with the suffix based on charset
return FindEntryPointWithMangling(hMod, entryPointWithSuffix);
}
#endif
//*******************************************************************************
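// Locates the native entry point for this P/Invoke in the given module. On Windows this supports ordinal imports
// ("#n"), exact-name lookup (with x86 stdcall mangling), and ANSI/Unicode probing via 'A'/'W' suffixes depending
// on the method's charset.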
LPVOID NDirectMethodDesc::FindEntryPoint(NATIVE_LIBRARY_HANDLE hMod)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END;
char const * funcName = GetEntrypointName();
#ifndef TARGET_WINDOWS
return reinterpret_cast<LPVOID>(PAL_GetProcAddressDirect(hMod, funcName));
#else
// Handle ordinals.
if (funcName[0] == '#')
{
long ordinal = atol(funcName + 1);
return reinterpret_cast<LPVOID>(GetProcAddress(hMod, (LPCSTR)(size_t)((UINT16)ordinal)));
}
FARPROC pFunc = NULL;
if (IsNativeNoMangled())
{
// Look for the user-provided entry point name only
pFunc = FindEntryPointWithMangling(hMod, funcName);
}
else if (IsNativeAnsi())
{
// For ANSI, look for the user-provided entry point name first.
// If that does not exist, try the charset suffix.
pFunc = FindEntryPointWithMangling(hMod, funcName);
if (pFunc == NULL)
pFunc = FindEntryPointWithSuffix(hMod, funcName, 'A');
}
else
{
// For Unicode, look for the entry point name with the charset suffix first.
// The 'W' API takes precedence over the undecorated one.
pFunc = FindEntryPointWithSuffix(hMod, funcName, 'W');
if (pFunc == NULL)
pFunc = FindEntryPointWithMangling(hMod, funcName);
}
return reinterpret_cast<LPVOID>(pFunc);
#endif
}
#if defined(TARGET_X86)
//*******************************************************************************
void NDirectMethodDesc::EnsureStackArgumentSize()
{
STANDARD_VM_CONTRACT;
if (ndirect.m_cbStackArgumentSize == 0xFFFF)
{
// The MarshalingRequired() check sets the stack size as a side effect when marshalling is not required.
if (MarshalingRequired())
{
// Generating the interop stub sets the stack size as a side effect in all cases
GetStubForInteropMethod(this, NDIRECTSTUB_FL_FOR_NUMPARAMBYTES);
}
}
}
#endif
//*******************************************************************************
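// Resolves the target of an early-bound P/Invoke from the module's import information, running the class
// constructor first when it is triggered at link time, and redirecting suspected GetLastError imports to
// FalseGetLastError before caching the target.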
void NDirectMethodDesc::InitEarlyBoundNDirectTarget()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
}
CONTRACTL_END
_ASSERTE(IsEarlyBound());
if (IsClassConstructorTriggeredAtLinkTime())
{
GetMethodTable()->CheckRunClassInitThrowing();
}
const void *target = GetModule()->GetInternalPInvokeTarget(GetRVA());
_ASSERTE(target != 0);
if (HeuristicDoesThisLookLikeAGetLastErrorCall((LPBYTE)target))
target = (BYTE*)FalseGetLastError;
// As long as we've set the NDirect target field we don't need to backpatch the import thunk glue.
// All NDirect calls go through the NDirect target, so once it's updated we won't go into
// NDirectImportThunk(). In fact, backpatching the import thunk glue leads to race conditions.
SetNDirectTarget((LPVOID)target);
}
//*******************************************************************************
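// Returns TRUE if this method is marked with UnmanagedCallersOnly (or the older NativeCallableInternal attribute).
// Native-to-managed interop IL stubs are treated as equivalent to such methods.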
BOOL MethodDesc::HasUnmanagedCallersOnlyAttribute()
{
CONTRACTL
{
THROWS;
GC_NOTRIGGER;
FORBID_FAULT;
}
CONTRACTL_END;
if (IsILStub())
{
// Stubs generated for being called from native code are equivalent to
// managed methods marked with UnmanagedCallersOnly.
return AsDynamicMethodDesc()->GetILStubType() == DynamicMethodDesc::StubNativeToCLRInterop;
}
HRESULT hr = GetCustomAttribute(
WellKnownAttribute::UnmanagedCallersOnly,
nullptr,
nullptr);
if (hr != S_OK)
{
// See https://github.com/dotnet/runtime/issues/37622
hr = GetCustomAttribute(
WellKnownAttribute::NativeCallableInternal,
nullptr,
nullptr);
}
return (hr == S_OK) ? TRUE : FALSE;
}
//*******************************************************************************
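// Determines whether the GC transition should be suppressed when calling this method. P/Invokes are inspected
// directly; for IL stubs the stub's target method is inspected; all other methods return FALSE.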
BOOL MethodDesc::ShouldSuppressGCTransition()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM());
}
CONTRACTL_END;
MethodDesc* tgt = nullptr;
if (IsNDirect())
{
tgt = this;
}
else if (IsILStub())
{
// From the IL stub, determine if the actual target has been
// marked to suppress the GC transition.
PTR_DynamicMethodDesc ilStubMD = AsDynamicMethodDesc();
PTR_ILStubResolver ilStubResolver = ilStubMD->GetILStubResolver();
tgt = ilStubResolver->GetStubTargetMethodDesc();
// In the event we can't get or don't have a target, there is no way
// to determine if we should suppress the GC transition.
if (tgt == nullptr)
return FALSE;
}
else
{
return FALSE;
}
_ASSERTE(tgt != nullptr);
bool suppressGCTransition;
NDirect::GetCallingConvention_IgnoreErrors(tgt, NULL /*callConv*/, &suppressGCTransition);
return suppressGCTransition ? TRUE : FALSE;
}
#ifdef FEATURE_COMINTEROP
//*******************************************************************************
void ComPlusCallMethodDesc::InitComEventCallInfo()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM());
}
CONTRACTL_END
MethodTable *pItfMT = GetInterfaceMethodTable();
MethodDesc *pItfMD = this;
MethodTable *pSrcItfClass = NULL;
MethodTable *pEvProvClass = NULL;
// Retrieve the event provider class.
WORD cbExtraSlots = ComMethodTable::GetNumExtraSlots(pItfMT->GetComInterfaceType());
WORD itfSlotNum = (WORD) m_pComPlusCallInfo->m_cachedComSlot - cbExtraSlots;
pItfMT->GetEventInterfaceInfo(&pSrcItfClass, &pEvProvClass);
m_pComPlusCallInfo->m_pEventProviderMD = MemberLoader::FindMethodForInterfaceSlot(pEvProvClass, pItfMT, itfSlotNum);
// If we could not find the method, then the event provider does not support
// this event. This is a fatal error.
if (!m_pComPlusCallInfo->m_pEventProviderMD)
{
// Init the interface MD for error reporting.
pItfMD = (ComPlusCallMethodDesc*)pItfMT->GetMethodDescForSlot(itfSlotNum);
// Retrieve the event provider class name.
StackSString ssEvProvClassName;
pEvProvClass->_GetFullyQualifiedNameForClass(ssEvProvClassName);
// Retrieve the COM event interface class name.
StackSString ssEvItfName;
pItfMT->_GetFullyQualifiedNameForClass(ssEvItfName);
// Convert the method name to unicode.
StackSString ssMethodName(SString::Utf8, pItfMD->GetName());
// Throw the exception.
COMPlusThrow(kTypeLoadException, IDS_EE_METHOD_NOT_FOUND_ON_EV_PROV,
ssMethodName.GetUnicode(), ssEvItfName.GetUnicode(), ssEvProvClassName.GetUnicode());
}
}
#endif // FEATURE_COMINTEROP
#endif // !DACCESS_COMPILE
#ifdef DACCESS_COMPILE
//*******************************************************************************
void
MethodDesc::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
{
SUPPORTS_DAC;
if (DacHasMethodDescBeenEnumerated(this))
{
return;
}
// Save away the whole MethodDescChunk as in many
// places RecoverChunk is called on a method desc so
// the whole chunk must be available. This also
// automatically picks up any prestubs and such.
GetMethodDescChunk()->EnumMemoryRegions(flags);
if (HasPrecode())
{
GetPrecode()->EnumMemoryRegions(flags);
}
// Need to save the Debug-Info for this method so that we can see it in a debugger later.
DebugInfoManager::EnumMemoryRegionsForMethodDebugInfo(flags, this);
if (!IsNoMetadata() || IsILStub())
{
// The assembling of the string below implicitly dumps the memory we need.
StackSString str;
TypeString::AppendMethodInternal(str, this, TypeString::FormatSignature|TypeString::FormatNamespace|TypeString::FormatFullInst);
#ifdef FEATURE_MINIMETADATA_IN_TRIAGEDUMPS
if (flags == CLRDATA_ENUM_MEM_MINI || flags == CLRDATA_ENUM_MEM_TRIAGE)
{
// we want to save just the method name, so truncate at the open parenthesis
SString::Iterator it = str.Begin();
if (str.Find(it, W('(')))
{
// ensure the symbol ends in "()" to minimize regressions
// in !analyze assuming the existence of the argument list
str.Truncate(++it);
str.Append(W(')'));
}
DacMdCacheAddEEName(dac_cast<TADDR>(this), str);
}
#endif // FEATURE_MINIMETADATA_IN_TRIAGEDUMPS
// The module path is used in the output of !clrstack and !pe if the
// module is not available when the minidump is inspected. By retrieving
// the path here, the required memory is implicitly dumped.
Module* pModule = GetModule();
if (pModule)
{
pModule->GetPath();
}
}
#ifdef FEATURE_CODE_VERSIONING
// Make sure the active IL and native code version are in triage dumps.
CodeVersionManager* pCodeVersionManager = GetCodeVersionManager();
ILCodeVersion ilVersion = pCodeVersionManager->GetActiveILCodeVersion(dac_cast<PTR_MethodDesc>(this));
if (!ilVersion.IsNull())
{
ilVersion.GetActiveNativeCodeVersion(dac_cast<PTR_MethodDesc>(this));
ilVersion.GetVersionId();
ilVersion.GetRejitState();
ilVersion.GetIL();
}
#endif
// Also, call DacValidateMD to dump the memory it needs. !clrstack calls
// DacValidateMD before it retrieves the method name. We don't expect
// DacValidateMD to fail, but if it does, ignore the failure and try to assemble the
// string anyway so that clients that don't validate the MD still work.
DacValidateMD(this);
DacSetMethodDescEnumerated(this);
}
//*******************************************************************************
void
StoredSigMethodDesc::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
{
SUPPORTS_DAC;
// 'this' already done, see below.
DacEnumMemoryRegion(GetSigRVA(), m_cSig);
}
//*******************************************************************************
void
MethodDescChunk::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
{
SUPPORTS_DAC;
DAC_CHECK_ENUM_THIS();
EMEM_OUT(("MEM: %p MethodDescChunk\n", dac_cast<TADDR>(this)));
DacEnumMemoryRegion(dac_cast<TADDR>(this), SizeOf());
PTR_MethodTable pMT = GetMethodTable();
if (pMT.IsValid())
{
pMT->EnumMemoryRegions(flags);
}
SIZE_T size;
#ifdef HAS_COMPACT_ENTRYPOINTS
if (HasCompactEntryPoints())
{
size = SizeOfCompactEntryPoints(GetCount());
}
else
#endif // HAS_COMPACT_ENTRYPOINTS
{
size = Precode::SizeOfTemporaryEntryPoints(GetTemporaryEntryPoints(), GetCount());
}
DacEnumMemoryRegion(GetTemporaryEntryPoints(), size);
MethodDesc * pMD = GetFirstMethodDesc();
MethodDesc * pOldMD = NULL;
while (pMD != NULL && pMD != pOldMD)
{
pOldMD = pMD;
EX_TRY
{
if (pMD->IsMethodImpl())
{
pMD->GetMethodImpl()->EnumMemoryRegions(flags);
}
}
EX_CATCH_RETHROW_ONLY_COR_E_OPERATIONCANCELLED
EX_TRY
{
if (pMD->HasStoredSig())
{
dac_cast<PTR_StoredSigMethodDesc>(pMD)->EnumMemoryRegions(flags);
}
// Check whether the next MethodDesc is within the bounds of the current chunk
TADDR pNext = dac_cast<TADDR>(pMD) + pMD->SizeOf();
TADDR pEnd = dac_cast<TADDR>(this) + this->SizeOf();
pMD = (pNext < pEnd) ? PTR_MethodDesc(pNext) : NULL;
}
EX_CATCH_RETHROW_ONLY_COR_E_OPERATIONCANCELLED
}
}
#endif // DACCESS_COMPILE
#ifndef DACCESS_COMPILE
//*******************************************************************************
MethodDesc *MethodDesc::GetInterfaceMD()
{
CONTRACT (MethodDesc*) {
THROWS;
GC_TRIGGERS;
INSTANCE_CHECK;
PRECONDITION(!IsInterface());
POSTCONDITION(CheckPointer(RETVAL, NULL_OK));
} CONTRACT_END;
MethodTable *pMT = GetMethodTable();
RETURN(pMT->ReverseInterfaceMDLookup(GetSlot()));
}
#endif // !DACCESS_COMPILE
PTR_LoaderAllocator MethodDesc::GetLoaderAllocator()
{
WRAPPER_NO_CONTRACT;
return GetLoaderModule()->GetLoaderAllocator();
}
#if !defined(DACCESS_COMPILE)
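// Creates a reflection StubMethodInfo object wrapping this MethodDesc, adding a keep-alive reference to the
// exposed LoaderAllocator object when the method lives in a collectible allocator.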
REFLECTMETHODREF MethodDesc::GetStubMethodInfo()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM());
MODE_COOPERATIVE;
}
CONTRACTL_END;
REFLECTMETHODREF retVal;
REFLECTMETHODREF methodRef = (REFLECTMETHODREF)AllocateObject(CoreLibBinder::GetClass(CLASS__STUBMETHODINFO));
GCPROTECT_BEGIN(methodRef);
methodRef->SetMethod(this);
LoaderAllocator *pLoaderAllocatorOfMethod = this->GetLoaderAllocator();
if (pLoaderAllocatorOfMethod->IsCollectible())
methodRef->SetKeepAlive(pLoaderAllocatorOfMethod->GetExposedObject());
retVal = methodRef;
GCPROTECT_END();
return retVal;
}
#endif // !DACCESS_COMPILE
#ifndef DACCESS_COMPILE
typedef void (*WalkValueTypeParameterFnPtr)(Module *pModule, mdToken token, Module *pDefModule, mdToken tkDefToken, SigPointer *ptr, SigTypeContext *pTypeContext, void *pData);
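// Walks the return type and all parameters of this method's signature; for each value type that can be resolved
// to its type def, invokes 'function' (when non-NULL) with the token information, then marks the method as walked.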
void MethodDesc::WalkValueTypeParameters(MethodTable *pMT, WalkValueTypeParameterFnPtr function, void *pData)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
}
CONTRACTL_END;
uint32_t numArgs = 0;
Module *pModule = this->GetModule();
SigPointer ptr = this->GetSigPointer();
// skip over calling convention.
uint32_t callConv = 0;
IfFailThrowBF(ptr.GetCallingConvInfo(&callConv), BFA_BAD_SIGNATURE, pModule);
// If calling convention is generic, skip GenParamCount
if (callConv & IMAGE_CEE_CS_CALLCONV_GENERIC)
{
IfFailThrowBF(ptr.GetData(NULL), BFA_BAD_SIGNATURE, pModule);
}
IfFailThrowBF(ptr.GetData(&numArgs), BFA_BAD_SIGNATURE, pModule);
SigTypeContext typeContext(this, TypeHandle(pMT));
// iterate over the return type and parameters
for (DWORD j = 0; j <= numArgs; j++)
{
CorElementType type = ptr.PeekElemTypeClosed(pModule, &typeContext);
if (type != ELEMENT_TYPE_VALUETYPE)
goto moveToNextToken;
mdToken token;
Module *pTokenModule;
token = ptr.PeekValueTypeTokenClosed(pModule, &typeContext, &pTokenModule);
if (token == mdTokenNil)
goto moveToNextToken;
DWORD dwAttrType;
Module *pDefModule;
mdToken defToken;
dwAttrType = 0;
if (ClassLoader::ResolveTokenToTypeDefThrowing(pTokenModule, token, &pDefModule, &defToken))
{
if (function != NULL)
function(pModule, token, pDefModule, defToken, &ptr, &typeContext, pData);
}
moveToNextToken:
// move to next argument token
IfFailThrowBF(ptr.SkipExactlyOne(), BFA_BAD_SIGNATURE, pModule);
}
if (!HaveValueTypeParametersBeenWalked())
{
SetValueTypeParametersWalked();
}
}
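// Selects the precode type to use for this method: the fixup precode when it is available and the method does not
// require the MethodDesc calling convention, otherwise the stub precode.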
PrecodeType MethodDesc::GetPrecodeType()
{
LIMITED_METHOD_CONTRACT;
PrecodeType precodeType = PRECODE_INVALID;
#ifdef HAS_FIXUP_PRECODE
if (!RequiresMethodDescCallingConvention())
{
// Use the more efficient fixup precode if possible
precodeType = PRECODE_FIXUP;
}
else
#endif // HAS_FIXUP_PRECODE
{
precodeType = PRECODE_STUB;
}
return precodeType;
}
#endif // !DACCESS_COMPILE
#ifdef FEATURE_COMINTEROP
#ifndef DACCESS_COMPILE
void ComPlusCallMethodDesc::InitRetThunk()
{
WRAPPER_NO_CONTRACT;
#ifdef TARGET_X86
if (m_pComPlusCallInfo->m_pRetThunk != NULL)
return;
// Record the fact that we are writing into the ComPlusCallMethodDesc
g_IBCLogger.LogMethodDescAccess(this);
UINT numStackBytes = CbStackPop();
LPVOID pRetThunk = ComPlusCall::GetRetThunk(numStackBytes);
FastInterlockCompareExchangePointer<void *>(&m_pComPlusCallInfo->m_pRetThunk, pRetThunk, NULL);
#endif // TARGET_X86
}
#endif //!DACCESS_COMPILE
#endif // FEATURE_COMINTEROP
#ifndef DACCESS_COMPILE
void MethodDesc::PrepareForUseAsADependencyOfANativeImageWorker()
{
STANDARD_VM_CONTRACT;
// This function ensures that a method is ready for use as a dependency of a native image.
// The current requirement is only that value types can be resolved to their type defs as much
// as is possible. (If the method is actually called, then this will not throw, but there
// are cases where we call this method and we are unaware if this method will actually be called
// or accessed as a native image dependency.) This explains the contract (STANDARD_VM_CONTRACT)
// as well as the TRY/CATCH:
// - This method should be callable only when general purpose VM code can be called.
// - This function should not introduce failures.
EX_TRY
{
WalkValueTypeParameters(this->GetMethodTable(), NULL, NULL);
}
EX_CATCH
{
}
EX_END_CATCH(RethrowTerminalExceptions);
_ASSERTE(HaveValueTypeParametersBeenWalked());
}
static void CheckForEquivalenceAndLoadType(Module *pModule, mdToken token, Module *pDefModule, mdToken defToken, const SigParser *ptr, SigTypeContext *pTypeContext, void *pData)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
}
CONTRACTL_END;
BOOL *pHasEquivalentParam = (BOOL *)pData;
#ifdef FEATURE_TYPEEQUIVALENCE
*pHasEquivalentParam = IsTypeDefEquivalent(defToken, pDefModule);
#else
_ASSERTE(*pHasEquivalentParam == FALSE); // Assert this is always false.
#endif // FEATURE_TYPEEQUIVALENCE
SigPointer sigPtr(*ptr);
TypeHandle th = sigPtr.GetTypeHandleThrowing(pModule, pTypeContext);
_ASSERTE(!th.IsNull());
}
void MethodDesc::PrepareForUseAsAFunctionPointer()
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END;
// Since function pointers are unsafe and can enable type punning, all
// value type parameters must be loaded prior to providing a function pointer.
if (HaveValueTypeParametersBeenLoaded())
return;
BOOL fHasTypeEquivalentStructParameters = FALSE;
WalkValueTypeParameters(this->GetMethodTable(), CheckForEquivalenceAndLoadType, &fHasTypeEquivalentStructParameters);
#ifdef FEATURE_TYPEEQUIVALENCE
if (!fHasTypeEquivalentStructParameters)
SetDoesNotHaveEquivalentValuetypeParameters();
#endif // FEATURE_TYPEEQUIVALENCE
SetValueTypeParametersLoaded();
}
#endif //!DACCESS_COMPILE
| -1 |
./src/tests/JIT/Methodical/casts/ilseq/commonBase.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
internal class Base { };
internal class Sibling1 : Base { };
internal class Sibling2 : Base { };
internal static class Repro
{
private static int Bug(object o)
{
Base b = o as Sibling1;
if (b == null)
{
b = o as Sibling2;
}
// At this point b is either null, Sibling1, or Sibling2
if (b != null)
{
// But the bug makes us think it is only Sibling1 here (since we've eliminated null)
if (b is Sibling2)
{
Console.WriteLine("Pass");
return 100;
}
else
{
Console.WriteLine("b is {0}", b.GetType().ToString());
Console.WriteLine("b is Sibling1 = {0}", b is Sibling1);
Console.WriteLine("b is Sibling2 = {0}", b is Sibling2);
Console.WriteLine("Fail");
return 9;
}
}
Console.WriteLine("bad");
return 0;
}
private static int Main()
{
return Bug(new Sibling2());
}
}
| -1 |
./src/libraries/System.Private.DataContractSerialization/src/System/Runtime/Serialization/SchemaHelper.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Xml;
using System.Xml.Schema;
using System.Collections;
using System.Collections.Generic;
namespace System.Runtime.Serialization
{
internal static class SchemaHelper
{
internal static bool NamespacesEqual(string? ns1, string? ns2)
{
if (ns1 == null || ns1.Length == 0)
return (ns2 == null || ns2.Length == 0);
else
return ns1 == ns2;
}
internal static XmlSchemaType? GetSchemaType(XmlSchemaSet schemas, XmlQualifiedName typeQName, out XmlSchema? outSchema)
{
outSchema = null;
ICollection currentSchemas = schemas.Schemas();
string ns = typeQName.Namespace;
foreach (XmlSchema schema in currentSchemas)
{
if (NamespacesEqual(ns, schema.TargetNamespace))
{
outSchema = schema;
foreach (XmlSchemaObject schemaObj in schema.Items)
{
XmlSchemaType? schemaType = schemaObj as XmlSchemaType;
if (schemaType != null && schemaType.Name == typeQName.Name)
{
return schemaType;
}
}
}
}
return null;
}
internal static XmlSchemaElement? GetSchemaElement(XmlSchemaSet schemas, XmlQualifiedName elementQName, out XmlSchema? outSchema)
{
outSchema = null;
ICollection currentSchemas = schemas.Schemas();
string ns = elementQName.Namespace;
foreach (XmlSchema schema in currentSchemas)
{
if (NamespacesEqual(ns, schema.TargetNamespace))
{
outSchema = schema;
foreach (XmlSchemaObject schemaObj in schema.Items)
{
XmlSchemaElement? schemaElement = schemaObj as XmlSchemaElement;
if (schemaElement != null && schemaElement.Name == elementQName.Name)
{
return schemaElement;
}
}
}
}
return null;
}
internal static XmlSchema GetSchema(string ns, XmlSchemaSet schemas)
{
if (ns == null) { ns = string.Empty; }
ICollection currentSchemas = schemas.Schemas();
foreach (XmlSchema schema in currentSchemas)
{
if ((schema.TargetNamespace == null && ns.Length == 0) || ns.Equals(schema.TargetNamespace))
{
return schema;
}
}
return CreateSchema(ns, schemas);
}
private static XmlSchema CreateSchema(string ns, XmlSchemaSet schemas)
{
XmlSchema schema = new XmlSchema();
schema.ElementFormDefault = XmlSchemaForm.Qualified;
if (ns.Length > 0)
{
schema.TargetNamespace = ns;
schema.Namespaces.Add(Globals.TnsPrefix, ns);
}
schemas.Add(schema);
return schema;
}
internal static void AddElementForm(XmlSchemaElement element, XmlSchema schema)
{
if (schema.ElementFormDefault != XmlSchemaForm.Qualified)
{
element.Form = XmlSchemaForm.Qualified;
}
}
internal static void AddSchemaImport(string ns, XmlSchema schema)
{
if (SchemaHelper.NamespacesEqual(ns, schema.TargetNamespace) || SchemaHelper.NamespacesEqual(ns, Globals.SchemaNamespace) || SchemaHelper.NamespacesEqual(ns, Globals.SchemaInstanceNamespace))
return;
foreach (object item in schema.Includes)
{
if (item is XmlSchemaImport)
{
if (SchemaHelper.NamespacesEqual(ns, ((XmlSchemaImport)item).Namespace))
return;
}
}
XmlSchemaImport import = new XmlSchemaImport();
if (ns != null && ns.Length > 0)
import.Namespace = ns;
schema.Includes.Add(import);
}
}
}
| -1 |
./src/tests/GC/LargeMemory/Allocation/finalizertest.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Runtime.CompilerServices;
// a large object that resurrects itself
public sealed class LargeObject2 {
private byte[][] data;
public const long MB = 1024*1024;
public LargeObject2(uint sizeInMB)
{
data = new byte[sizeInMB][];
for (int i=0; i<sizeInMB; i++) {
data[i] = new byte[MB];
}
}
~LargeObject2() {
FinalizerTest.LO2 = this;
}
}
// allocates a large object on the finalizer thread
public sealed class FinalizerObject {
uint size = 0;
public FinalizerObject(uint sizeInMB)
{
size = sizeInMB;
}
~FinalizerObject() {
LargeObject lo =null;
try {
lo = new LargeObject(size);
} catch (OutOfMemoryException) {
Console.WriteLine("OOM");
return;
} catch (Exception e) {
Console.WriteLine("Unexpected Exception");
Console.WriteLine(e.ToString());
return;
}
if (lo!=null)
FinalizerTest.ObjectSize = lo.Size;
GC.KeepAlive(lo);
}
}
public sealed class FinalizerTest {
public static LargeObject2 LO2 = null;
public static long ObjectSize = 0;
public LargeObject2 TempObject;
private uint size = 0;
private int numTests = 0;
public FinalizerTest(uint size) {
this.size = size;
}
[MethodImplAttribute(MethodImplOptions.NoInlining)]
public void CreateLargeObject() {
TempObject = new LargeObject2(size);
}
[MethodImplAttribute(MethodImplOptions.NoInlining)]
public void DestroyLargeObject() {
TempObject = null;
}
bool ressurectionTest() {
numTests++;
try {
CreateLargeObject();
DestroyLargeObject();
} catch (OutOfMemoryException) {
Console.WriteLine("Large Memory Machine required");
return false;
} catch (Exception e) {
Console.WriteLine("Unexpected Exception");
Console.WriteLine(e.ToString());
return false;
}
GC.Collect();
GC.WaitForPendingFinalizers();
GC.Collect();
if (LO2 != null) {
Console.WriteLine("ressurectionTest passed");
LO2 = null;
return true;
}
Console.WriteLine("ressurectionTest failed");
return false;
}
bool allocateInFinalizerTest() {
numTests++;
try {
new FinalizerObject(size);
} catch (OutOfMemoryException) {
Console.WriteLine("Large Memory Machine required");
return false;
} catch (Exception e) {
Console.WriteLine("Unexpected Exception");
Console.WriteLine(e.ToString());
return false;
}
GC.Collect();
GC.WaitForPendingFinalizers();
GC.Collect();
if (ObjectSize == size*LargeObject.MB) {
Console.WriteLine("allocateInFinalizerTest passed");
return true;
}
Console.WriteLine("{0} {1}", ObjectSize, size*LargeObject.MB);
Console.WriteLine("allocateInFinalizerTest failed");
return false;
}
public bool RunTests() {
int numPassed = 0;
if (allocateInFinalizerTest() ) {
numPassed++;
}
if (ressurectionTest() ) {
numPassed++;
}
return (numTests==numPassed);
}
public static int Main(string[] args) {
FinalizerTest test = new FinalizerTest(MemCheck.ParseSizeMBAndLimitByAvailableMem(args));
if (test.RunTests()) {
Console.WriteLine("Test passed");
return 100;
}
Console.WriteLine("Test failed");
return 0;
}
}
| -1 |
./src/tests/profiler/unittest/releaseondetach.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.IO;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading;
namespace Profiler.Tests
{
class ReleaseOnShutdown
{
private static readonly Guid ReleaseOnShutdownGuid = new Guid("B8C47A29-9C1D-4EEA-ABA0-8E8B3E3B792E");
[DllImport("Profiler")]
private static extern void PassCallbackToProfiler(ProfilerCallback callback);
public unsafe static int RunTest(string[] args)
{
string profilerName;
if (TestLibrary.Utilities.IsWindows)
{
profilerName = "Profiler.dll";
}
else if (TestLibrary.Utilities.IsLinux)
{
profilerName = "libProfiler.so";
}
else
{
profilerName = "libProfiler.dylib";
}
string rootPath = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);
string profilerPath = Path.Combine(rootPath, profilerName);
ManualResetEvent _profilerDone = new ManualResetEvent(false);
Console.WriteLine($"Attaching profiler {profilerPath} to self.");
ProfilerControlHelpers.AttachProfilerToSelf(ReleaseOnShutdownGuid, profilerPath);
PassCallbackToProfiler(() => _profilerDone.Set());
if (!_profilerDone.WaitOne(TimeSpan.FromMinutes(5)))
{
Console.WriteLine("Profiler did not set the callback, test will fail.");
}
return 100;
}
public static int Main(string[] args)
{
if (args.Length > 0 && args[0].Equals("RunTest", StringComparison.OrdinalIgnoreCase))
{
return RunTest(args);
}
return ProfilerTestRunner.Run(profileePath: System.Reflection.Assembly.GetExecutingAssembly().Location,
testName: "UnitTestReleaseOnShutdown",
profilerClsid: ReleaseOnShutdownGuid,
profileeOptions: ProfileeOptions.NoStartupAttach);
}
}
}
| -1 |
./src/libraries/System.Linq.Expressions/tests/Block/SharedBlockTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Linq;
namespace System.Linq.Expressions.Tests
{
public abstract class SharedBlockTests
{
private static IEnumerable<object> ObjectAssignableConstantValues()
{
yield return new object();
yield return "Hello";
yield return new Uri("http://example.net/");
}
private static IEnumerable<object> ConstantValues()
{
yield return 42;
yield return 42L;
yield return DateTime.MinValue;
foreach (object obj in ObjectAssignableConstantValues())
yield return obj;
}
public static IEnumerable<object[]> ConstantValueData()
{
return ConstantValues().Select(i => new object[] { i });
}
public static IEnumerable<object[]> ConstantValuesAndSizes()
{
return
from size in Enumerable.Range(1, 6)
from value in ConstantValues()
select new object[] { value, size };
}
public static IEnumerable<object[]> ObjectAssignableConstantValuesAndSizes()
{
return
from size in Enumerable.Range(1, 6)
from value in ObjectAssignableConstantValues()
select new object[] { value, size };
}
public static IEnumerable<object[]> BlockSizes()
{
return Enumerable.Range(1, 6).Select(i => new object[] { i });
}
protected static IEnumerable<Expression> PadBlock(int padCount, Expression tailExpression)
{
while (padCount-- != 0) yield return Expression.Empty();
yield return tailExpression;
}
protected class TestVistor : ExpressionVisitor
{
protected override Expression VisitDefault(DefaultExpression node)
{
return Expression.Default(node.Type);
}
protected override Expression VisitConstant(ConstantExpression node)
{
return Expression.Constant(node.Value, node.Type);
}
protected override Expression VisitParameter(ParameterExpression node)
{
return Expression.Parameter(node.Type.IsByRef ? node.Type.MakeByRefType() : node.Type, node.Name);
}
}
private static class Unreadable<T>
{
public static T WriteOnly
{
set { }
}
}
protected static Expression UnreadableExpression
{
get
{
return Expression.Property(null, typeof(Unreadable<int>), "WriteOnly");
}
}
}
}
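The data sources above are designed to feed xUnit theories. A minimal sketch of how a derived test class might consume them — the class name and test body here are illustrative, not taken from the repository:

    using Xunit;

    namespace System.Linq.Expressions.Tests
    {
        public class BlockDataSourceUsageExample : SharedBlockTests
        {
            [Theory]
            [MemberData(nameof(ConstantValuesAndSizes))]
            public void PaddedBlockKeepsTailAsResult(object value, int blockSize)
            {
                // PadBlock yields (blockSize - 1) empty expressions followed by the tail,
                // so the resulting block has blockSize expressions and the tail is its result.
                ConstantExpression tail = Expression.Constant(value);
                BlockExpression block = Expression.Block(PadBlock(blockSize - 1, tail));

                Assert.Equal(blockSize, block.Expressions.Count);
                Assert.Same(tail, block.Result);
            }
        }
    }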
|
./src/tests/JIT/HardwareIntrinsics/X86/Avx1/Permute.Single.1.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void PermuteSingle1()
{
var test = new ImmUnaryOpTest__PermuteSingle1();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class ImmUnaryOpTest__PermuteSingle1
{
private struct TestStruct
{
public Vector256<Single> _fld;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref testStruct._fld), ref Unsafe.As<Single, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
return testStruct;
}
public void RunStructFldScenario(ImmUnaryOpTest__PermuteSingle1 testClass)
{
var result = Avx.Permute(_fld, 1);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld, testClass._dataTable.outArrayPtr);
}
}
private static readonly int LargestVectorSize = 32;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<Single>>() / sizeof(Single);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<Single>>() / sizeof(Single);
private static Single[] _data = new Single[Op1ElementCount];
private static Vector256<Single> _clsVar;
private Vector256<Single> _fld;
private SimpleUnaryOpTest__DataTable<Single, Single> _dataTable;
static ImmUnaryOpTest__PermuteSingle1()
{
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _clsVar), ref Unsafe.As<Single, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
}
public ImmUnaryOpTest__PermuteSingle1()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _fld), ref Unsafe.As<Single, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetSingle(); }
_dataTable = new SimpleUnaryOpTest__DataTable<Single, Single>(_data, new Single[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Avx.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Avx.Permute(
Unsafe.Read<Vector256<Single>>(_dataTable.inArrayPtr),
1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Avx.Permute(
Avx.LoadVector256((Single*)(_dataTable.inArrayPtr)),
1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Avx.Permute(
Avx.LoadAlignedVector256((Single*)(_dataTable.inArrayPtr)),
1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Avx).GetMethod(nameof(Avx.Permute), new Type[] { typeof(Vector256<Single>), typeof(byte) })
.Invoke(null, new object[] {
Unsafe.Read<Vector256<Single>>(_dataTable.inArrayPtr),
(byte)1
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Avx).GetMethod(nameof(Avx.Permute), new Type[] { typeof(Vector256<Single>), typeof(byte) })
.Invoke(null, new object[] {
Avx.LoadVector256((Single*)(_dataTable.inArrayPtr)),
(byte)1
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Avx).GetMethod(nameof(Avx.Permute), new Type[] { typeof(Vector256<Single>), typeof(byte) })
.Invoke(null, new object[] {
Avx.LoadAlignedVector256((Single*)(_dataTable.inArrayPtr)),
(byte)1
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Avx.Permute(
_clsVar,
1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var firstOp = Unsafe.Read<Vector256<Single>>(_dataTable.inArrayPtr);
var result = Avx.Permute(firstOp, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var firstOp = Avx.LoadVector256((Single*)(_dataTable.inArrayPtr));
var result = Avx.Permute(firstOp, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var firstOp = Avx.LoadAlignedVector256((Single*)(_dataTable.inArrayPtr));
var result = Avx.Permute(firstOp, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new ImmUnaryOpTest__PermuteSingle1();
var result = Avx.Permute(test._fld, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Avx.Permute(_fld, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Avx.Permute(test._fld, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector256<Single> firstOp, void* result, [CallerMemberName] string method = "")
{
Single[] inArray = new Single[Op1ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray[0]), firstOp);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<Single>>());
ValidateResult(inArray, outArray, method);
}
private void ValidateResult(void* firstOp, void* result, [CallerMemberName] string method = "")
{
Single[] inArray = new Single[Op1ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(firstOp), (uint)Unsafe.SizeOf<Vector256<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<Single>>());
ValidateResult(inArray, outArray, method);
}
private void ValidateResult(Single[] firstOp, Single[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (BitConverter.SingleToInt32Bits(result[0]) != BitConverter.SingleToInt32Bits(firstOp[1]))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (BitConverter.SingleToInt32Bits(result[4]) != BitConverter.SingleToInt32Bits(firstOp[5]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Avx)}.{nameof(Avx.Permute)}<Single>(Vector256<Single><9>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
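For readers of the validation logic above, a scalar reference model of what Avx.Permute computes for Vector256<float> may help. This is a sketch based on the VPERMILPS semantics (per-128-bit-lane selection, two control bits per destination element); the helper name is made up and is not part of the generated test:

    static float[] PermuteReference(float[] source, byte control)
    {
        // source has 8 elements; each destination element picks one of the 4 elements
        // from its own 128-bit lane, selected by a 2-bit field of the control byte.
        var result = new float[8];
        for (int i = 0; i < 8; i++)
        {
            int laneBase = (i / 4) * 4;                    // 0 for elements 0-3, 4 for elements 4-7
            int selector = (control >> (2 * (i % 4))) & 0b11;
            result[i] = source[laneBase + selector];
        }
        return result;
    }

    // With control == 1 this yields { s[1], s[0], s[0], s[0], s[5], s[4], s[4], s[4] },
    // which is why the test compares result[0] against firstOp[1] and result[4] against firstOp[5].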
|
./src/libraries/System.Private.CoreLib/src/System/Runtime/CompilerServices/AsyncTaskMethodBuilderT.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
namespace System.Runtime.CompilerServices
{
/// <summary>
/// Provides a builder for asynchronous methods that return <see cref="System.Threading.Tasks.Task{TResult}"/>.
/// This type is intended for compiler use only.
/// </summary>
/// <remarks>
/// AsyncTaskMethodBuilder{TResult} is a value type, and thus it is copied by value.
/// Prior to being copied, one of its Task, SetResult, or SetException members must be accessed,
/// or else the copies may end up building distinct Task instances.
/// </remarks>
public struct AsyncTaskMethodBuilder<TResult>
{
/// <summary>The lazily-initialized built task.</summary>
private Task<TResult>? m_task; // Debugger depends on the exact name of this field.
/// <summary>Initializes a new <see cref="AsyncTaskMethodBuilder"/>.</summary>
/// <returns>The initialized <see cref="AsyncTaskMethodBuilder"/>.</returns>
public static AsyncTaskMethodBuilder<TResult> Create() => default;
/// <summary>Initiates the builder's execution with the associated state machine.</summary>
/// <typeparam name="TStateMachine">Specifies the type of the state machine.</typeparam>
/// <param name="stateMachine">The state machine instance, passed by reference.</param>
[DebuggerStepThrough]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Start<TStateMachine>(ref TStateMachine stateMachine) where TStateMachine : IAsyncStateMachine =>
AsyncMethodBuilderCore.Start(ref stateMachine);
/// <summary>Associates the builder with the state machine it represents.</summary>
/// <param name="stateMachine">The heap-allocated state machine object.</param>
/// <exception cref="System.ArgumentNullException">The <paramref name="stateMachine"/> argument was null (Nothing in Visual Basic).</exception>
/// <exception cref="System.InvalidOperationException">The builder is incorrectly initialized.</exception>
public void SetStateMachine(IAsyncStateMachine stateMachine) =>
AsyncMethodBuilderCore.SetStateMachine(stateMachine, m_task);
/// <summary>
/// Schedules the specified state machine to be pushed forward when the specified awaiter completes.
/// </summary>
/// <typeparam name="TAwaiter">Specifies the type of the awaiter.</typeparam>
/// <typeparam name="TStateMachine">Specifies the type of the state machine.</typeparam>
/// <param name="awaiter">The awaiter.</param>
/// <param name="stateMachine">The state machine.</param>
public void AwaitOnCompleted<TAwaiter, TStateMachine>(
ref TAwaiter awaiter, ref TStateMachine stateMachine)
where TAwaiter : INotifyCompletion
where TStateMachine : IAsyncStateMachine =>
AwaitOnCompleted(ref awaiter, ref stateMachine, ref m_task);
internal static void AwaitOnCompleted<TAwaiter, TStateMachine>(
ref TAwaiter awaiter, ref TStateMachine stateMachine, ref Task<TResult>? taskField)
where TAwaiter : INotifyCompletion
where TStateMachine : IAsyncStateMachine
{
try
{
awaiter.OnCompleted(GetStateMachineBox(ref stateMachine, ref taskField).MoveNextAction);
}
catch (Exception e)
{
System.Threading.Tasks.Task.ThrowAsync(e, targetContext: null);
}
}
/// <summary>
/// Schedules the specified state machine to be pushed forward when the specified awaiter completes.
/// </summary>
/// <typeparam name="TAwaiter">Specifies the type of the awaiter.</typeparam>
/// <typeparam name="TStateMachine">Specifies the type of the state machine.</typeparam>
/// <param name="awaiter">The awaiter.</param>
/// <param name="stateMachine">The state machine.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(
ref TAwaiter awaiter, ref TStateMachine stateMachine)
where TAwaiter : ICriticalNotifyCompletion
where TStateMachine : IAsyncStateMachine =>
AwaitUnsafeOnCompleted(ref awaiter, ref stateMachine, ref m_task);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(
ref TAwaiter awaiter, ref TStateMachine stateMachine, [NotNull] ref Task<TResult>? taskField)
where TAwaiter : ICriticalNotifyCompletion
where TStateMachine : IAsyncStateMachine
{
IAsyncStateMachineBox box = GetStateMachineBox(ref stateMachine, ref taskField);
AwaitUnsafeOnCompleted(ref awaiter, box);
}
[MethodImpl(MethodImplOptions.AggressiveOptimization)] // workaround boxing allocations in Tier0: https://github.com/dotnet/runtime/issues/9120
internal static void AwaitUnsafeOnCompleted<TAwaiter>(
ref TAwaiter awaiter, IAsyncStateMachineBox box)
where TAwaiter : ICriticalNotifyCompletion
{
// The null tests here ensure that the jit can optimize away the interface
// tests when TAwaiter is a ref type.
if ((null != (object?)default(TAwaiter)) && (awaiter is ITaskAwaiter))
{
ref TaskAwaiter ta = ref Unsafe.As<TAwaiter, TaskAwaiter>(ref awaiter); // relies on TaskAwaiter/TaskAwaiter<T> having the same layout
TaskAwaiter.UnsafeOnCompletedInternal(ta.m_task, box, continueOnCapturedContext: true);
}
else if ((null != (object?)default(TAwaiter)) && (awaiter is IConfiguredTaskAwaiter))
{
ref ConfiguredTaskAwaitable.ConfiguredTaskAwaiter ta = ref Unsafe.As<TAwaiter, ConfiguredTaskAwaitable.ConfiguredTaskAwaiter>(ref awaiter);
TaskAwaiter.UnsafeOnCompletedInternal(ta.m_task, box, ta.m_continueOnCapturedContext);
}
else if ((null != (object?)default(TAwaiter)) && (awaiter is IStateMachineBoxAwareAwaiter))
{
try
{
((IStateMachineBoxAwareAwaiter)awaiter).AwaitUnsafeOnCompleted(box);
}
catch (Exception e)
{
// Whereas with Task the code that hooks up and invokes the continuation is all local to corelib,
// with ValueTaskAwaiter we may be calling out to an arbitrary implementation of IValueTaskSource
// wrapped in the ValueTask, and as such we protect against errant exceptions that may emerge.
// We don't want such exceptions propagating back into the async method, which can't handle
// exceptions well at that location in the state machine, especially if the exception may occur
// after the ValueTaskAwaiter already successfully hooked up the callback, in which case it's possible
// two different flows of execution could end up happening in the same async method call.
System.Threading.Tasks.Task.ThrowAsync(e, targetContext: null);
}
}
else
{
// The awaiter isn't specially known. Fall back to doing a normal await.
try
{
awaiter.UnsafeOnCompleted(box.MoveNextAction);
}
catch (Exception e)
{
System.Threading.Tasks.Task.ThrowAsync(e, targetContext: null);
}
}
}
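// Illustration (not part of the original builder): the "(null != (object?)default(TAwaiter))"
// tests above exist so that, when TAwaiter is a reference type, default(TAwaiter) is a
// compile-time null and the JIT can fold the whole condition to false, removing both the
// branch and the interface check. A standalone sketch of the same trick, with a
// hypothetical method name:
//
//     static bool IsDisposableValueType<T>(T value) =>
//         (null != (object?)default(T))   // always false for reference types; folded away by the JIT
//         && value is IDisposable;        // evaluated only when T is a value type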
/// <summary>Gets the "boxed" state machine object.</summary>
/// <typeparam name="TStateMachine">Specifies the type of the async state machine.</typeparam>
/// <param name="stateMachine">The state machine.</param>
/// <param name="taskField">The reference to the Task field storing the Task instance.</param>
/// <returns>The "boxed" state machine.</returns>
private static IAsyncStateMachineBox GetStateMachineBox<TStateMachine>(
ref TStateMachine stateMachine,
[NotNull] ref Task<TResult>? taskField)
where TStateMachine : IAsyncStateMachine
{
ExecutionContext? currentContext = ExecutionContext.Capture();
// Check first for the most common case: not the first yield in an async method.
// In this case, the first yield will have already "boxed" the state machine in
// a strongly-typed manner into an AsyncStateMachineBox. It will already contain
// the state machine as well as a MoveNextDelegate and a context. The only thing
// we might need to do is update the context if that's changed since it was stored.
if (taskField is AsyncStateMachineBox<TStateMachine> stronglyTypedBox)
{
if (stronglyTypedBox.Context != currentContext)
{
stronglyTypedBox.Context = currentContext;
}
return stronglyTypedBox;
}
// The least common case: we have a weakly-typed box. This results if the debugger
// or some other use of reflection accesses a property like ObjectIdForDebugger or a
// method like SetNotificationForWaitCompletion prior to the first await happening. In
// such situations, we need to get an object to represent the builder, but we don't yet
// know the type of the state machine, and thus can't use TStateMachine. Instead, we
// use the IAsyncStateMachine interface, which all TStateMachines implement. This will
// result in a boxing allocation when storing the TStateMachine if it's a struct, but
// this only happens in active debugging scenarios where such performance impact doesn't
// matter.
if (taskField is AsyncStateMachineBox<IAsyncStateMachine> weaklyTypedBox)
{
// If this is the first await, we won't yet have a state machine, so store it.
if (weaklyTypedBox.StateMachine == null)
{
Debugger.NotifyOfCrossThreadDependency(); // same explanation as with usage below
weaklyTypedBox.StateMachine = stateMachine;
}
// Update the context. This only happens with a debugger, so no need to spend
// extra IL checking for equality before doing the assignment.
weaklyTypedBox.Context = currentContext;
return weaklyTypedBox;
}
// Alert a listening debugger that we can't make forward progress unless it slips threads.
// If we don't do this, and a method that uses "await foo;" is invoked through funceval,
// we could end up hooking up a callback to push forward the async method's state machine,
// the debugger would then abort the funceval after it takes too long, and then continuing
// execution could result in another callback being hooked up. At that point we have
// multiple callbacks registered to push the state machine, which could result in bad behavior.
Debugger.NotifyOfCrossThreadDependency();
// At this point, taskField should really be null, in which case we want to create the box.
// However, in a variety of debugger-related (erroneous) situations, it might be non-null,
// e.g. if the Task property is examined in a Watch window, forcing it to be lazily-initialized
// as a Task<TResult> rather than as an AsyncStateMachineBox. The worst that happens in such
// cases is we lose the ability to properly step in the debugger, as the debugger uses that
// object's identity to track this specific builder/state machine. As such, we proceed to
// overwrite whatever's there anyway, even if it's non-null.
#if CORERT
// DebugFinalizableAsyncStateMachineBox looks like a small type, but it actually is not because
// it will have a copy of all the slots from its parent. It will add another hundred(s) bytes
// per each async method in CoreRT / ProjectN binaries without adding much value. Avoid
// generating this extra code until a better solution is implemented.
var box = new AsyncStateMachineBox<TStateMachine>();
#else
AsyncStateMachineBox<TStateMachine> box = AsyncMethodBuilderCore.TrackAsyncMethodCompletion ?
CreateDebugFinalizableAsyncStateMachineBox<TStateMachine>() :
new AsyncStateMachineBox<TStateMachine>();
#endif
taskField = box; // important: this must be done before storing stateMachine into box.StateMachine!
box.StateMachine = stateMachine;
box.Context = currentContext;
// Log the creation of the state machine box object / task for this async method.
if (TplEventSource.Log.IsEnabled())
{
TplEventSource.Log.TraceOperationBegin(box.Id, "Async: " + stateMachine.GetType().Name, 0);
}
// And if async debugging is enabled, track the task.
if (System.Threading.Tasks.Task.s_asyncDebuggingEnabled)
{
System.Threading.Tasks.Task.AddToActiveTasks(box);
}
return box;
}
#if !CORERT
// Avoid forcing the JIT to build DebugFinalizableAsyncStateMachineBox<TStateMachine> unless it's actually needed.
[MethodImpl(MethodImplOptions.NoInlining)]
private static AsyncStateMachineBox<TStateMachine> CreateDebugFinalizableAsyncStateMachineBox<TStateMachine>()
where TStateMachine : IAsyncStateMachine =>
new DebugFinalizableAsyncStateMachineBox<TStateMachine>();
/// <summary>
/// Provides an async state machine box with a finalizer that will fire an EventSource
/// event about the state machine if it's being finalized without having been completed.
/// </summary>
/// <typeparam name="TStateMachine">Specifies the type of the state machine.</typeparam>
private sealed class DebugFinalizableAsyncStateMachineBox<TStateMachine> : // SOS DumpAsync command depends on this name
AsyncStateMachineBox<TStateMachine>
where TStateMachine : IAsyncStateMachine
{
~DebugFinalizableAsyncStateMachineBox()
{
// If the state machine is being finalized, something went wrong during its processing,
// e.g. it awaited something that got collected without itself having been completed.
// Fire an event with details about the state machine to help with debugging.
if (!IsCompleted) // double-check it's not completed, just to help minimize false positives
{
TplEventSource.Log.IncompleteAsyncMethod(this);
}
}
}
#endif
/// <summary>A strongly-typed box for Task-based async state machines.</summary>
/// <typeparam name="TStateMachine">Specifies the type of the state machine.</typeparam>
private class AsyncStateMachineBox<TStateMachine> : // SOS DumpAsync command depends on this name
Task<TResult>, IAsyncStateMachineBox
where TStateMachine : IAsyncStateMachine
{
/// <summary>Delegate used to invoke on an ExecutionContext when passed an instance of this box type.</summary>
private static readonly ContextCallback s_callback = ExecutionContextCallback;
// Used to initialize s_callback above. We don't use a lambda for this on purpose: a lambda would
// introduce a new generic type behind the scenes that comes with a hefty size penalty in AOT builds.
private static void ExecutionContextCallback(object? s)
{
Debug.Assert(s is AsyncStateMachineBox<TStateMachine>);
// Only used privately to pass directly to EC.Run
Unsafe.As<AsyncStateMachineBox<TStateMachine>>(s).StateMachine!.MoveNext();
}
/// <summary>A delegate to the <see cref="MoveNext()"/> method.</summary>
private Action? _moveNextAction;
/// <summary>The state machine itself.</summary>
public TStateMachine? StateMachine; // mutable struct; do not make this readonly. SOS DumpAsync command depends on this name.
/// <summary>Captured ExecutionContext with which to invoke <see cref="MoveNextAction"/>; may be null.</summary>
public ExecutionContext? Context;
/// <summary>A delegate to the <see cref="MoveNext()"/> method.</summary>
public Action MoveNextAction => _moveNextAction ??= new Action(MoveNext);
internal sealed override void ExecuteFromThreadPool(Thread threadPoolThread) => MoveNext(threadPoolThread);
/// <summary>Calls MoveNext on <see cref="StateMachine"/></summary>
public void MoveNext() => MoveNext(threadPoolThread: null);
private void MoveNext(Thread? threadPoolThread)
{
Debug.Assert(!IsCompleted);
bool loggingOn = TplEventSource.Log.IsEnabled();
if (loggingOn)
{
TplEventSource.Log.TraceSynchronousWorkBegin(this.Id, CausalitySynchronousWork.Execution);
}
ExecutionContext? context = Context;
if (context == null)
{
Debug.Assert(StateMachine != null);
StateMachine.MoveNext();
}
else
{
if (threadPoolThread is null)
{
ExecutionContext.RunInternal(context, s_callback, this);
}
else
{
ExecutionContext.RunFromThreadPoolDispatchLoop(threadPoolThread, context, s_callback, this);
}
}
if (IsCompleted)
{
ClearStateUponCompletion();
}
if (loggingOn)
{
TplEventSource.Log.TraceSynchronousWorkEnd(CausalitySynchronousWork.Execution);
}
}
/// <summary>Clears out all state associated with a completed box.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void ClearStateUponCompletion()
{
Debug.Assert(IsCompleted);
// This logic may be invoked multiple times on the same instance and needs to be robust against that.
// If async debugging is enabled, remove the task from tracking.
if (s_asyncDebuggingEnabled)
{
RemoveFromActiveTasks(this);
}
// Clear out state now that the async method has completed.
// This avoids keeping arbitrary state referenced by lifted locals
// if this Task / state machine box is held onto.
StateMachine = default;
Context = default;
#if !CORERT
// In case this is a state machine box with a finalizer, suppress its finalization
// as it's now complete. We only need the finalizer to run if the box is collected
// without having been completed.
if (AsyncMethodBuilderCore.TrackAsyncMethodCompletion)
{
GC.SuppressFinalize(this);
}
#endif
}
/// <summary>Gets the state machine as a boxed object. This should only be used for debugging purposes.</summary>
IAsyncStateMachine IAsyncStateMachineBox.GetStateMachineObject() => StateMachine!; // likely boxes, only use for debugging
}
/// <summary>Gets the <see cref="System.Threading.Tasks.Task{TResult}"/> for this builder.</summary>
/// <returns>The <see cref="System.Threading.Tasks.Task{TResult}"/> representing the builder's asynchronous operation.</returns>
public Task<TResult> Task
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
get => m_task ?? InitializeTaskAsPromise();
}
/// <summary>
/// Initializes the task, which must not yet be initialized. Used only when the Task is being forced into
/// existence when no state machine is needed, e.g. when the builder is being synchronously completed with
/// an exception, when the builder is being used out of the context of an async method, etc.
/// </summary>
[MethodImpl(MethodImplOptions.NoInlining)]
private Task<TResult> InitializeTaskAsPromise()
{
Debug.Assert(m_task == null);
return m_task = new Task<TResult>();
}
internal static Task<TResult> CreateWeaklyTypedStateMachineBox()
{
#if CORERT
// DebugFinalizableAsyncStateMachineBox looks like a small type, but it actually is not because
// it will have a copy of all the slots from its parent. It will add another hundred(s) bytes
// per each async method in CoreRT / ProjectN binaries without adding much value. Avoid
// generating this extra code until a better solution is implemented.
return new AsyncStateMachineBox<IAsyncStateMachine>();
#else
return AsyncMethodBuilderCore.TrackAsyncMethodCompletion ?
CreateDebugFinalizableAsyncStateMachineBox<IAsyncStateMachine>() :
new AsyncStateMachineBox<IAsyncStateMachine>();
#endif
}
/// <summary>
/// Completes the <see cref="System.Threading.Tasks.Task{TResult}"/> in the
/// <see cref="System.Threading.Tasks.TaskStatus">RanToCompletion</see> state with the specified result.
/// </summary>
/// <param name="result">The result to use to complete the task.</param>
/// <exception cref="System.InvalidOperationException">The task has already completed.</exception>
public void SetResult(TResult result)
{
// Get the currently stored task, which will be non-null if get_Task has already been accessed.
// If there isn't one, get a task and store it.
if (m_task is null)
{
m_task = Threading.Tasks.Task.FromResult(result);
}
else
{
// Slow path: complete the existing task.
SetExistingTaskResult(m_task, result);
}
}
/// <summary>Completes the already initialized task with the specified result.</summary>
/// <param name="result">The result to use to complete the task.</param>
/// <param name="task">The task to complete.</param>
internal static void SetExistingTaskResult(Task<TResult> task, TResult? result)
{
Debug.Assert(task != null, "Expected non-null task");
if (TplEventSource.Log.IsEnabled())
{
TplEventSource.Log.TraceOperationEnd(task.Id, AsyncCausalityStatus.Completed);
}
if (!task.TrySetResult(result))
{
ThrowHelper.ThrowInvalidOperationException(ExceptionResource.TaskT_TransitionToFinal_AlreadyCompleted);
}
}
/// <summary>
/// Completes the <see cref="System.Threading.Tasks.Task{TResult}"/> in the
/// <see cref="System.Threading.Tasks.TaskStatus">Faulted</see> state with the specified exception.
/// </summary>
/// <param name="exception">The <see cref="System.Exception"/> to use to fault the task.</param>
/// <exception cref="System.ArgumentNullException">The <paramref name="exception"/> argument is null (Nothing in Visual Basic).</exception>
/// <exception cref="System.InvalidOperationException">The task has already completed.</exception>
public void SetException(Exception exception) => SetException(exception, ref m_task);
internal static void SetException(Exception exception, ref Task<TResult>? taskField)
{
if (exception == null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.exception);
}
// Get the task, forcing initialization if it hasn't already been initialized.
Task<TResult> task = (taskField ??= new Task<TResult>());
// If the exception represents cancellation, cancel the task. Otherwise, fault the task.
bool successfullySet = exception is OperationCanceledException oce ?
task.TrySetCanceled(oce.CancellationToken, oce) :
task.TrySetException(exception);
// Unlike with TaskCompletionSource, we do not need to spin here until the task is completed,
// since AsyncTaskMethodBuilder.SetException should not be immediately followed by any code
// that depends on the task having completely completed. Moreover, with correct usage,
// SetResult or SetException should only be called once, so the Try* methods should always
// return true, so no spinning would be necessary anyway (the spinning in TCS is only relevant
// if another thread completes the task first).
if (!successfullySet)
{
ThrowHelper.ThrowInvalidOperationException(ExceptionResource.TaskT_TransitionToFinal_AlreadyCompleted);
}
}
/// <summary>
/// Called by the debugger to request notification when the first wait operation
/// (await, Wait, Result, etc.) on this builder's task completes.
/// </summary>
/// <param name="enabled">
/// true to enable notification; false to disable a previously set notification.
/// </param>
/// <remarks>
/// This should only be invoked from within an asynchronous method,
/// and only by the debugger.
/// </remarks>
internal void SetNotificationForWaitCompletion(bool enabled) =>
SetNotificationForWaitCompletion(enabled, ref m_task);
internal static void SetNotificationForWaitCompletion(bool enabled, [NotNull] ref Task<TResult>? taskField)
{
// Get the task (forcing initialization if not already initialized), and set debug notification
(taskField ??= CreateWeaklyTypedStateMachineBox()).SetNotificationForWaitCompletion(enabled);
// NOTE: It's important that the debugger use builder.SetNotificationForWaitCompletion
// rather than builder.Task.SetNotificationForWaitCompletion. Even though the latter will
// lazily-initialize the task as well, it'll initialize it to a Task<T> (which is important
// to minimize size for cases where an ATMB is used directly by user code to avoid the
// allocation overhead of a TaskCompletionSource). If that's done prior to the first await,
// the GetMoveNextDelegate code, which needs an AsyncStateMachineBox, will end up creating
// a new box and overwriting the previously created task. That'll change the object identity
// of the task being used for wait completion notification, and no notification will
// ever arrive, breaking step-out behavior when stepping out before the first yielding await.
}
/// <summary>
/// Gets an object that may be used to uniquely identify this builder to the debugger.
/// </summary>
/// <remarks>
/// This property lazily instantiates the ID in a non-thread-safe manner.
/// It must only be used by the debugger and for tracing purposes, and only in a single-threaded manner
/// when no other threads are in the middle of accessing this or other members that lazily initialize the task.
/// </remarks>
internal object ObjectIdForDebugger => m_task ??= CreateWeaklyTypedStateMachineBox();
}
}
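// Editorial sketch (not part of the original file): the SetException path above distinguishes
// cancellation from failure. A minimal illustration of the observable difference, using a
// hypothetical helper method name CompleteWithCancellation:
//
// static Task<int> CompleteWithCancellation()
// {
//     AsyncTaskMethodBuilder<int> builder = AsyncTaskMethodBuilder<int>.Create();
//     Task<int> task = builder.Task; // force the promise-style task into existence
//     builder.SetException(new OperationCanceledException());
//     return task; // task.Status is TaskStatus.Canceled, not TaskStatus.Faulted
// }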
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
namespace System.Runtime.CompilerServices
{
/// <summary>
/// Provides a builder for asynchronous methods that return <see cref="System.Threading.Tasks.Task{TResult}"/>.
/// This type is intended for compiler use only.
/// </summary>
/// <remarks>
/// AsyncTaskMethodBuilder{TResult} is a value type, and thus it is copied by value.
/// Prior to being copied, one of its Task, SetResult, or SetException members must be accessed,
/// or else the copies may end up building distinct Task instances.
/// </remarks>
public struct AsyncTaskMethodBuilder<TResult>
{
/// <summary>The lazily-initialized built task.</summary>
private Task<TResult>? m_task; // Debugger depends on the exact name of this field.
/// <summary>Initializes a new <see cref="AsyncTaskMethodBuilder"/>.</summary>
/// <returns>The initialized <see cref="AsyncTaskMethodBuilder"/>.</returns>
public static AsyncTaskMethodBuilder<TResult> Create() => default;
/// <summary>Initiates the builder's execution with the associated state machine.</summary>
/// <typeparam name="TStateMachine">Specifies the type of the state machine.</typeparam>
/// <param name="stateMachine">The state machine instance, passed by reference.</param>
[DebuggerStepThrough]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Start<TStateMachine>(ref TStateMachine stateMachine) where TStateMachine : IAsyncStateMachine =>
AsyncMethodBuilderCore.Start(ref stateMachine);
/// <summary>Associates the builder with the state machine it represents.</summary>
/// <param name="stateMachine">The heap-allocated state machine object.</param>
/// <exception cref="System.ArgumentNullException">The <paramref name="stateMachine"/> argument was null (Nothing in Visual Basic).</exception>
/// <exception cref="System.InvalidOperationException">The builder is incorrectly initialized.</exception>
public void SetStateMachine(IAsyncStateMachine stateMachine) =>
AsyncMethodBuilderCore.SetStateMachine(stateMachine, m_task);
/// <summary>
/// Schedules the specified state machine to be pushed forward when the specified awaiter completes.
/// </summary>
/// <typeparam name="TAwaiter">Specifies the type of the awaiter.</typeparam>
/// <typeparam name="TStateMachine">Specifies the type of the state machine.</typeparam>
/// <param name="awaiter">The awaiter.</param>
/// <param name="stateMachine">The state machine.</param>
public void AwaitOnCompleted<TAwaiter, TStateMachine>(
ref TAwaiter awaiter, ref TStateMachine stateMachine)
where TAwaiter : INotifyCompletion
where TStateMachine : IAsyncStateMachine =>
AwaitOnCompleted(ref awaiter, ref stateMachine, ref m_task);
internal static void AwaitOnCompleted<TAwaiter, TStateMachine>(
ref TAwaiter awaiter, ref TStateMachine stateMachine, ref Task<TResult>? taskField)
where TAwaiter : INotifyCompletion
where TStateMachine : IAsyncStateMachine
{
try
{
awaiter.OnCompleted(GetStateMachineBox(ref stateMachine, ref taskField).MoveNextAction);
}
catch (Exception e)
{
System.Threading.Tasks.Task.ThrowAsync(e, targetContext: null);
}
}
/// <summary>
/// Schedules the specified state machine to be pushed forward when the specified awaiter completes.
/// </summary>
/// <typeparam name="TAwaiter">Specifies the type of the awaiter.</typeparam>
/// <typeparam name="TStateMachine">Specifies the type of the state machine.</typeparam>
/// <param name="awaiter">The awaiter.</param>
/// <param name="stateMachine">The state machine.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(
ref TAwaiter awaiter, ref TStateMachine stateMachine)
where TAwaiter : ICriticalNotifyCompletion
where TStateMachine : IAsyncStateMachine =>
AwaitUnsafeOnCompleted(ref awaiter, ref stateMachine, ref m_task);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(
ref TAwaiter awaiter, ref TStateMachine stateMachine, [NotNull] ref Task<TResult>? taskField)
where TAwaiter : ICriticalNotifyCompletion
where TStateMachine : IAsyncStateMachine
{
IAsyncStateMachineBox box = GetStateMachineBox(ref stateMachine, ref taskField);
AwaitUnsafeOnCompleted(ref awaiter, box);
}
[MethodImpl(MethodImplOptions.AggressiveOptimization)] // workaround boxing allocations in Tier0: https://github.com/dotnet/runtime/issues/9120
internal static void AwaitUnsafeOnCompleted<TAwaiter>(
ref TAwaiter awaiter, IAsyncStateMachineBox box)
where TAwaiter : ICriticalNotifyCompletion
{
// The null tests here ensure that the jit can optimize away the interface
// tests when TAwaiter is a ref type.
if ((null != (object?)default(TAwaiter)) && (awaiter is ITaskAwaiter))
{
ref TaskAwaiter ta = ref Unsafe.As<TAwaiter, TaskAwaiter>(ref awaiter); // relies on TaskAwaiter/TaskAwaiter<T> having the same layout
TaskAwaiter.UnsafeOnCompletedInternal(ta.m_task, box, continueOnCapturedContext: true);
}
else if ((null != (object?)default(TAwaiter)) && (awaiter is IConfiguredTaskAwaiter))
{
ref ConfiguredTaskAwaitable.ConfiguredTaskAwaiter ta = ref Unsafe.As<TAwaiter, ConfiguredTaskAwaitable.ConfiguredTaskAwaiter>(ref awaiter);
TaskAwaiter.UnsafeOnCompletedInternal(ta.m_task, box, ta.m_continueOnCapturedContext);
}
else if ((null != (object?)default(TAwaiter)) && (awaiter is IStateMachineBoxAwareAwaiter))
{
try
{
((IStateMachineBoxAwareAwaiter)awaiter).AwaitUnsafeOnCompleted(box);
}
catch (Exception e)
{
// Whereas with Task the code that hooks up and invokes the continuation is all local to corelib,
// with ValueTaskAwaiter we may be calling out to an arbitrary implementation of IValueTaskSource
// wrapped in the ValueTask, and as such we protect against errant exceptions that may emerge.
// We don't want such exceptions propagating back into the async method, which can't handle
// exceptions well at that location in the state machine, especially if the exception may occur
// after the ValueTaskAwaiter already successfully hooked up the callback, in which case it's possible
// two different flows of execution could end up happening in the same async method call.
System.Threading.Tasks.Task.ThrowAsync(e, targetContext: null);
}
}
else
{
// The awaiter isn't specially known. Fall back to doing a normal await.
try
{
awaiter.UnsafeOnCompleted(box.MoveNextAction);
}
catch (Exception e)
{
System.Threading.Tasks.Task.ThrowAsync(e, targetContext: null);
}
}
}
/// <summary>Gets the "boxed" state machine object.</summary>
/// <typeparam name="TStateMachine">Specifies the type of the async state machine.</typeparam>
/// <param name="stateMachine">The state machine.</param>
/// <param name="taskField">The reference to the Task field storing the Task instance.</param>
/// <returns>The "boxed" state machine.</returns>
private static IAsyncStateMachineBox GetStateMachineBox<TStateMachine>(
ref TStateMachine stateMachine,
[NotNull] ref Task<TResult>? taskField)
where TStateMachine : IAsyncStateMachine
{
ExecutionContext? currentContext = ExecutionContext.Capture();
// Check first for the most common case: not the first yield in an async method.
// In this case, the first yield will have already "boxed" the state machine in
// a strongly-typed manner into an AsyncStateMachineBox. It will already contain
// the state machine as well as a MoveNextDelegate and a context. The only thing
// we might need to do is update the context if that's changed since it was stored.
if (taskField is AsyncStateMachineBox<TStateMachine> stronglyTypedBox)
{
if (stronglyTypedBox.Context != currentContext)
{
stronglyTypedBox.Context = currentContext;
}
return stronglyTypedBox;
}
// The least common case: we have a weakly-typed box. This results if the debugger
// or some other use of reflection accesses a property like ObjectIdForDebugger or a
// method like SetNotificationForWaitCompletion prior to the first await happening. In
// such situations, we need to get an object to represent the builder, but we don't yet
// know the type of the state machine, and thus can't use TStateMachine. Instead, we
// use the IAsyncStateMachine interface, which all TStateMachines implement. This will
// result in a boxing allocation when storing the TStateMachine if it's a struct, but
// this only happens in active debugging scenarios where such performance impact doesn't
// matter.
if (taskField is AsyncStateMachineBox<IAsyncStateMachine> weaklyTypedBox)
{
// If this is the first await, we won't yet have a state machine, so store it.
if (weaklyTypedBox.StateMachine == null)
{
Debugger.NotifyOfCrossThreadDependency(); // same explanation as with usage below
weaklyTypedBox.StateMachine = stateMachine;
}
// Update the context. This only happens with a debugger, so no need to spend
// extra IL checking for equality before doing the assignment.
weaklyTypedBox.Context = currentContext;
return weaklyTypedBox;
}
// Alert a listening debugger that we can't make forward progress unless it slips threads.
// If we don't do this, and a method that uses "await foo;" is invoked through funceval,
// we could end up hooking up a callback to push forward the async method's state machine,
// the debugger would then abort the funceval after it takes too long, and then continuing
// execution could result in another callback being hooked up. At that point we have
// multiple callbacks registered to push the state machine, which could result in bad behavior.
Debugger.NotifyOfCrossThreadDependency();
// At this point, taskField should really be null, in which case we want to create the box.
// However, in a variety of debugger-related (erroneous) situations, it might be non-null,
// e.g. if the Task property is examined in a Watch window, forcing it to be lazily-initialized
// as a Task<TResult> rather than as an AsyncStateMachineBox. The worst that happens in such
// cases is we lose the ability to properly step in the debugger, as the debugger uses that
// object's identity to track this specific builder/state machine. As such, we proceed to
// overwrite whatever's there anyway, even if it's non-null.
#if CORERT
// DebugFinalizableAsyncStateMachineBox looks like a small type, but it actually is not because
// it will have a copy of all the slots from its parent. It will add hundreds of bytes
// for each async method in CoreRT / ProjectN binaries without adding much value. Avoid
// generating this extra code until a better solution is implemented.
var box = new AsyncStateMachineBox<TStateMachine>();
#else
AsyncStateMachineBox<TStateMachine> box = AsyncMethodBuilderCore.TrackAsyncMethodCompletion ?
CreateDebugFinalizableAsyncStateMachineBox<TStateMachine>() :
new AsyncStateMachineBox<TStateMachine>();
#endif
taskField = box; // important: this must be done before storing stateMachine into box.StateMachine!
box.StateMachine = stateMachine;
box.Context = currentContext;
// Log the creation of the state machine box object / task for this async method.
if (TplEventSource.Log.IsEnabled())
{
TplEventSource.Log.TraceOperationBegin(box.Id, "Async: " + stateMachine.GetType().Name, 0);
}
// And if async debugging is enabled, track the task.
if (System.Threading.Tasks.Task.s_asyncDebuggingEnabled)
{
System.Threading.Tasks.Task.AddToActiveTasks(box);
}
return box;
}
#if !CORERT
// Avoid forcing the JIT to build DebugFinalizableAsyncStateMachineBox<TStateMachine> unless it's actually needed.
[MethodImpl(MethodImplOptions.NoInlining)]
private static AsyncStateMachineBox<TStateMachine> CreateDebugFinalizableAsyncStateMachineBox<TStateMachine>()
where TStateMachine : IAsyncStateMachine =>
new DebugFinalizableAsyncStateMachineBox<TStateMachine>();
/// <summary>
/// Provides an async state machine box with a finalizer that will fire an EventSource
/// event about the state machine if it's being finalized without having been completed.
/// </summary>
/// <typeparam name="TStateMachine">Specifies the type of the state machine.</typeparam>
private sealed class DebugFinalizableAsyncStateMachineBox<TStateMachine> : // SOS DumpAsync command depends on this name
AsyncStateMachineBox<TStateMachine>
where TStateMachine : IAsyncStateMachine
{
~DebugFinalizableAsyncStateMachineBox()
{
// If the state machine is being finalized, something went wrong during its processing,
// e.g. it awaited something that got collected without itself having been completed.
// Fire an event with details about the state machine to help with debugging.
if (!IsCompleted) // double-check it's not completed, just to help minimize false positives
{
TplEventSource.Log.IncompleteAsyncMethod(this);
}
}
}
#endif
/// <summary>A strongly-typed box for Task-based async state machines.</summary>
/// <typeparam name="TStateMachine">Specifies the type of the state machine.</typeparam>
private class AsyncStateMachineBox<TStateMachine> : // SOS DumpAsync command depends on this name
Task<TResult>, IAsyncStateMachineBox
where TStateMachine : IAsyncStateMachine
{
/// <summary>Delegate used to invoke on an ExecutionContext when passed an instance of this box type.</summary>
private static readonly ContextCallback s_callback = ExecutionContextCallback;
// Used to initialize s_callback above. We don't use a lambda for this on purpose: a lambda would
// introduce a new generic type behind the scenes that comes with a hefty size penalty in AOT builds.
private static void ExecutionContextCallback(object? s)
{
Debug.Assert(s is AsyncStateMachineBox<TStateMachine>);
// Only used privately to pass directly to EC.Run
Unsafe.As<AsyncStateMachineBox<TStateMachine>>(s).StateMachine!.MoveNext();
}
/// <summary>A delegate to the <see cref="MoveNext()"/> method.</summary>
private Action? _moveNextAction;
/// <summary>The state machine itself.</summary>
public TStateMachine? StateMachine; // mutable struct; do not make this readonly. SOS DumpAsync command depends on this name.
/// <summary>Captured ExecutionContext with which to invoke <see cref="MoveNextAction"/>; may be null.</summary>
public ExecutionContext? Context;
/// <summary>A delegate to the <see cref="MoveNext()"/> method.</summary>
public Action MoveNextAction => _moveNextAction ??= new Action(MoveNext);
internal sealed override void ExecuteFromThreadPool(Thread threadPoolThread) => MoveNext(threadPoolThread);
/// <summary>Calls MoveNext on <see cref="StateMachine"/></summary>
public void MoveNext() => MoveNext(threadPoolThread: null);
private void MoveNext(Thread? threadPoolThread)
{
Debug.Assert(!IsCompleted);
bool loggingOn = TplEventSource.Log.IsEnabled();
if (loggingOn)
{
TplEventSource.Log.TraceSynchronousWorkBegin(this.Id, CausalitySynchronousWork.Execution);
}
ExecutionContext? context = Context;
if (context == null)
{
Debug.Assert(StateMachine != null);
StateMachine.MoveNext();
}
else
{
if (threadPoolThread is null)
{
ExecutionContext.RunInternal(context, s_callback, this);
}
else
{
ExecutionContext.RunFromThreadPoolDispatchLoop(threadPoolThread, context, s_callback, this);
}
}
if (IsCompleted)
{
ClearStateUponCompletion();
}
if (loggingOn)
{
TplEventSource.Log.TraceSynchronousWorkEnd(CausalitySynchronousWork.Execution);
}
}
/// <summary>Clears out all state associated with a completed box.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void ClearStateUponCompletion()
{
Debug.Assert(IsCompleted);
// This logic may be invoked multiple times on the same instance and needs to be robust against that.
// If async debugging is enabled, remove the task from tracking.
if (s_asyncDebuggingEnabled)
{
RemoveFromActiveTasks(this);
}
// Clear out state now that the async method has completed.
// This avoids keeping arbitrary state referenced by lifted locals
// if this Task / state machine box is held onto.
StateMachine = default;
Context = default;
#if !CORERT
// In case this is a state machine box with a finalizer, suppress its finalization
// as it's now complete. We only need the finalizer to run if the box is collected
// without having been completed.
if (AsyncMethodBuilderCore.TrackAsyncMethodCompletion)
{
GC.SuppressFinalize(this);
}
#endif
}
/// <summary>Gets the state machine as a boxed object. This should only be used for debugging purposes.</summary>
IAsyncStateMachine IAsyncStateMachineBox.GetStateMachineObject() => StateMachine!; // likely boxes, only use for debugging
}
/// <summary>Gets the <see cref="System.Threading.Tasks.Task{TResult}"/> for this builder.</summary>
/// <returns>The <see cref="System.Threading.Tasks.Task{TResult}"/> representing the builder's asynchronous operation.</returns>
public Task<TResult> Task
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
get => m_task ?? InitializeTaskAsPromise();
}
/// <summary>
/// Initializes the task, which must not yet be initialized. Used only when the Task is being forced into
/// existence when no state machine is needed, e.g. when the builder is being synchronously completed with
/// an exception, when the builder is being used out of the context of an async method, etc.
/// </summary>
[MethodImpl(MethodImplOptions.NoInlining)]
private Task<TResult> InitializeTaskAsPromise()
{
Debug.Assert(m_task == null);
return m_task = new Task<TResult>();
}
internal static Task<TResult> CreateWeaklyTypedStateMachineBox()
{
#if CORERT
// DebugFinalizableAsyncStateMachineBox looks like a small type, but it actually is not because
// it will have a copy of all the slots from its parent. It will add hundreds of bytes
// for each async method in CoreRT / ProjectN binaries without adding much value. Avoid
// generating this extra code until a better solution is implemented.
return new AsyncStateMachineBox<IAsyncStateMachine>();
#else
return AsyncMethodBuilderCore.TrackAsyncMethodCompletion ?
CreateDebugFinalizableAsyncStateMachineBox<IAsyncStateMachine>() :
new AsyncStateMachineBox<IAsyncStateMachine>();
#endif
}
/// <summary>
/// Completes the <see cref="System.Threading.Tasks.Task{TResult}"/> in the
/// <see cref="System.Threading.Tasks.TaskStatus">RanToCompletion</see> state with the specified result.
/// </summary>
/// <param name="result">The result to use to complete the task.</param>
/// <exception cref="System.InvalidOperationException">The task has already completed.</exception>
public void SetResult(TResult result)
{
// Get the currently stored task, which will be non-null if get_Task has already been accessed.
// If there isn't one, get a task and store it.
if (m_task is null)
{
m_task = Threading.Tasks.Task.FromResult(result);
}
else
{
// Slow path: complete the existing task.
SetExistingTaskResult(m_task, result);
}
}
/// <summary>Completes the already initialized task with the specified result.</summary>
/// <param name="result">The result to use to complete the task.</param>
/// <param name="task">The task to complete.</param>
internal static void SetExistingTaskResult(Task<TResult> task, TResult? result)
{
Debug.Assert(task != null, "Expected non-null task");
if (TplEventSource.Log.IsEnabled())
{
TplEventSource.Log.TraceOperationEnd(task.Id, AsyncCausalityStatus.Completed);
}
if (!task.TrySetResult(result))
{
ThrowHelper.ThrowInvalidOperationException(ExceptionResource.TaskT_TransitionToFinal_AlreadyCompleted);
}
}
/// <summary>
/// Completes the <see cref="System.Threading.Tasks.Task{TResult}"/> in the
/// <see cref="System.Threading.Tasks.TaskStatus">Faulted</see> state with the specified exception.
/// </summary>
/// <param name="exception">The <see cref="System.Exception"/> to use to fault the task.</param>
/// <exception cref="System.ArgumentNullException">The <paramref name="exception"/> argument is null (Nothing in Visual Basic).</exception>
/// <exception cref="System.InvalidOperationException">The task has already completed.</exception>
public void SetException(Exception exception) => SetException(exception, ref m_task);
internal static void SetException(Exception exception, ref Task<TResult>? taskField)
{
if (exception == null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.exception);
}
// Get the task, forcing initialization if it hasn't already been initialized.
Task<TResult> task = (taskField ??= new Task<TResult>());
// If the exception represents cancellation, cancel the task. Otherwise, fault the task.
bool successfullySet = exception is OperationCanceledException oce ?
task.TrySetCanceled(oce.CancellationToken, oce) :
task.TrySetException(exception);
// Unlike with TaskCompletionSource, we do not need to spin here until the task is completed,
// since AsyncTaskMethodBuilder.SetException should not be immediately followed by any code
// that depends on the task having completely completed. Moreover, with correct usage,
// SetResult or SetException should only be called once, so the Try* methods should always
// return true, so no spinning would be necessary anyway (the spinning in TCS is only relevant
// if another thread completes the task first).
if (!successfullySet)
{
ThrowHelper.ThrowInvalidOperationException(ExceptionResource.TaskT_TransitionToFinal_AlreadyCompleted);
}
}
/// <summary>
/// Called by the debugger to request notification when the first wait operation
/// (await, Wait, Result, etc.) on this builder's task completes.
/// </summary>
/// <param name="enabled">
/// true to enable notification; false to disable a previously set notification.
/// </param>
/// <remarks>
/// This should only be invoked from within an asynchronous method,
/// and only by the debugger.
/// </remarks>
internal void SetNotificationForWaitCompletion(bool enabled) =>
SetNotificationForWaitCompletion(enabled, ref m_task);
internal static void SetNotificationForWaitCompletion(bool enabled, [NotNull] ref Task<TResult>? taskField)
{
// Get the task (forcing initialization if not already initialized), and set debug notification
(taskField ??= CreateWeaklyTypedStateMachineBox()).SetNotificationForWaitCompletion(enabled);
// NOTE: It's important that the debugger use builder.SetNotificationForWaitCompletion
// rather than builder.Task.SetNotificationForWaitCompletion. Even though the latter will
// lazily-initialize the task as well, it'll initialize it to a Task<T> (which is important
// to minimize size for cases where an ATMB is used directly by user code to avoid the
// allocation overhead of a TaskCompletionSource). If that's done prior to the first await,
// the GetMoveNextDelegate code, which needs an AsyncStateMachineBox, will end up creating
// a new box and overwriting the previously created task. That'll change the object identity
// of the task being used for wait completion notification, and no notification will
// ever arrive, breaking step-out behavior when stepping out before the first yielding await.
}
/// <summary>
/// Gets an object that may be used to uniquely identify this builder to the debugger.
/// </summary>
/// <remarks>
/// This property lazily instantiates the ID in a non-thread-safe manner.
/// It must only be used by the debugger and for tracing purposes, and only in a single-threaded manner
/// when no other threads are in the middle of accessing this or other members that lazily initialize the task.
/// </remarks>
internal object ObjectIdForDebugger => m_task ??= CreateWeaklyTypedStateMachineBox();
}
}
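// Editorial sketch (not part of the original file): the remarks on AsyncTaskMethodBuilder<TResult>
// note that one of Task, SetResult, or SetException must be accessed before the struct is copied,
// or the copies may end up building distinct Task instances. A minimal manual use outside of
// compiler-generated code might look like the following; the method name ManualCompletion is
// purely illustrative:
//
// static Task<int> ManualCompletion()
// {
//     AsyncTaskMethodBuilder<int> builder = AsyncTaskMethodBuilder<int>.Create();
//     Task<int> task = builder.Task;   // forces the task into existence before any copying
//     builder.SetResult(42);           // completes that same task instance
//     return task;
// }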
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Security.Cryptography/src/System/Security/Cryptography/AppleCCCryptor.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using Internal.Cryptography;
namespace System.Security.Cryptography
{
internal sealed class AppleCCCryptor : BasicSymmetricCipher
{
private readonly bool _encrypting;
private AppleCCCryptorLite _cryptor;
// The Reset operation is not supported on a stream cipher
private readonly bool _supportsReset;
private Interop.AppleCrypto.PAL_SymmetricAlgorithm _algorithm;
private CipherMode _cipherMode;
private byte[] _key;
private int _feedbackSizeInBytes;
public AppleCCCryptor(
Interop.AppleCrypto.PAL_SymmetricAlgorithm algorithm,
CipherMode cipherMode,
int blockSizeInBytes,
byte[] key,
byte[]? iv,
bool encrypting,
int feedbackSizeInBytes,
int paddingSizeInBytes)
: base(cipherMode.GetCipherIv(iv), blockSizeInBytes, paddingSizeInBytes)
{
_encrypting = encrypting;
// CFB is a streaming cipher; calling CCCryptorReset is not implemented (and is effectively a no-op)
_supportsReset = cipherMode != CipherMode.CFB;
_algorithm = algorithm;
_cipherMode = cipherMode;
_key = key;
_feedbackSizeInBytes = feedbackSizeInBytes;
OpenCryptor();
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
_cryptor?.Dispose();
_cryptor = null!;
}
base.Dispose(disposing);
}
public override int Transform(ReadOnlySpan<byte> input, Span<byte> output)
{
Debug.Assert(input.Length > 0);
Debug.Assert((input.Length % PaddingSizeInBytes) == 0);
return _cryptor.Transform(input, output);
}
public override int TransformFinal(ReadOnlySpan<byte> input, Span<byte> output)
{
int written = _cryptor.TransformFinal(input, output);
Reset();
return written;
}
[MemberNotNull(nameof(_cryptor))]
private unsafe void OpenCryptor()
{
_cryptor = new AppleCCCryptorLite(
_algorithm,
_cipherMode,
BlockSizeInBytes,
_key,
IV,
_encrypting,
_feedbackSizeInBytes,
PaddingSizeInBytes);
}
private unsafe void Reset()
{
if (!_supportsReset)
{
// When reset is not supported,
// dispose and reopen the cryptor.
_cryptor?.Dispose();
OpenCryptor();
}
else
{
_cryptor.Reset(IV);
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using Internal.Cryptography;
namespace System.Security.Cryptography
{
internal sealed class AppleCCCryptor : BasicSymmetricCipher
{
private readonly bool _encrypting;
private AppleCCCryptorLite _cryptor;
// The Reset operation is not supported on a stream cipher
private readonly bool _supportsReset;
private Interop.AppleCrypto.PAL_SymmetricAlgorithm _algorithm;
private CipherMode _cipherMode;
private byte[] _key;
private int _feedbackSizeInBytes;
public AppleCCCryptor(
Interop.AppleCrypto.PAL_SymmetricAlgorithm algorithm,
CipherMode cipherMode,
int blockSizeInBytes,
byte[] key,
byte[]? iv,
bool encrypting,
int feedbackSizeInBytes,
int paddingSizeInBytes)
: base(cipherMode.GetCipherIv(iv), blockSizeInBytes, paddingSizeInBytes)
{
_encrypting = encrypting;
// CFB is a streaming cipher; calling CCCryptorReset is not implemented (and is effectively a no-op)
_supportsReset = cipherMode != CipherMode.CFB;
_algorithm = algorithm;
_cipherMode = cipherMode;
_key = key;
_feedbackSizeInBytes = feedbackSizeInBytes;
OpenCryptor();
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
_cryptor?.Dispose();
_cryptor = null!;
}
base.Dispose(disposing);
}
public override int Transform(ReadOnlySpan<byte> input, Span<byte> output)
{
Debug.Assert(input.Length > 0);
Debug.Assert((input.Length % PaddingSizeInBytes) == 0);
return _cryptor.Transform(input, output);
}
public override int TransformFinal(ReadOnlySpan<byte> input, Span<byte> output)
{
int written = _cryptor.TransformFinal(input, output);
Reset();
return written;
}
[MemberNotNull(nameof(_cryptor))]
private unsafe void OpenCryptor()
{
_cryptor = new AppleCCCryptorLite(
_algorithm,
_cipherMode,
BlockSizeInBytes,
_key,
IV,
_encrypting,
_feedbackSizeInBytes,
PaddingSizeInBytes);
}
private unsafe void Reset()
{
if (!_supportsReset)
{
// When reset is not supported,
// dispose and reopen the cryptor.
_cryptor?.Dispose();
OpenCryptor();
}
else
{
_cryptor.Reset(IV);
}
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/Interop/PInvoke/Miscellaneous/MultipleAssembliesWithSamePInvoke/MAWSPINative.cpp
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
#include <platformdefines.h>
extern "C" DLL_EXPORT int STDMETHODCALLTYPE GetInt()
{
return 24;
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
#include <platformdefines.h>
extern "C" DLL_EXPORT int STDMETHODCALLTYPE GetInt()
{
return 24;
}
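// Editorial note (not part of the original file): the managed test presumably binds to this export
// with a P/Invoke declaration along the lines of
//   [DllImport("MAWSPINative")] static extern int GetInt();
// The exact library name string used by the test project is an assumption here.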
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Data.Odbc/src/System/Data/Odbc/OdbcParameterCollection.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.ComponentModel;
using System.Data.Common;
namespace System.Data.Odbc
{
[Editor("Microsoft.VSDesigner.Data.Design.DBParametersEditor, Microsoft.VSDesigner, Version=10.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a",
"System.Drawing.Design.UITypeEditor, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
public sealed partial class OdbcParameterCollection : DbParameterCollection
{
private bool _rebindCollection; // The collection needs to be (re)bound
private static readonly Type s_itemType = typeof(OdbcParameter);
internal OdbcParameterCollection() : base()
{
}
internal bool RebindCollection
{
get { return _rebindCollection; }
set { _rebindCollection = value; }
}
[
Browsable(false),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)
]
public new OdbcParameter this[int index]
{
get
{
return (OdbcParameter)GetParameter(index);
}
set
{
SetParameter(index, value);
}
}
[
Browsable(false),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)
]
public new OdbcParameter this[string parameterName]
{
get
{
return (OdbcParameter)GetParameter(parameterName);
}
set
{
SetParameter(parameterName, value);
}
}
public OdbcParameter Add(OdbcParameter value)
{
// MDAC 59206
Add((object)value);
return value;
}
[EditorBrowsableAttribute(EditorBrowsableState.Never)]
[ObsoleteAttribute("Add(String parameterName, Object value) has been deprecated. Use AddWithValue(String parameterName, Object value) instead.")]
public OdbcParameter Add(string? parameterName, object? value)
{
// MDAC 59206
return Add(new OdbcParameter(parameterName, value));
}
public OdbcParameter AddWithValue(string? parameterName, object? value)
{
// MDAC 79027
return Add(new OdbcParameter(parameterName, value));
}
public OdbcParameter Add(string? parameterName, OdbcType odbcType)
{
return Add(new OdbcParameter(parameterName, odbcType));
}
public OdbcParameter Add(string? parameterName, OdbcType odbcType, int size)
{
return Add(new OdbcParameter(parameterName, odbcType, size));
}
public OdbcParameter Add(string? parameterName, OdbcType odbcType, int size, string? sourceColumn)
{
return Add(new OdbcParameter(parameterName, odbcType, size, sourceColumn));
}
public void AddRange(OdbcParameter[] values)
{
// V1.2.3300
AddRange((Array)values);
}
// Walks through the collection and binds each parameter
//
internal void Bind(OdbcCommand command, CMDWrapper cmdWrapper, CNativeBuffer parameterBuffer)
{
for (int i = 0; i < Count; ++i)
{
this[i].Bind(cmdWrapper.StatementHandle!, command, checked((short)(i + 1)), parameterBuffer, true);
}
_rebindCollection = false;
}
internal int CalcParameterBufferSize(OdbcCommand command)
{
// Calculate the size of the buffer we need
int parameterBufferSize = 0;
for (int i = 0; i < Count; ++i)
{
if (_rebindCollection)
{
this[i].HasChanged = true;
}
this[i].PrepareForBind(command, (short)(i + 1), ref parameterBufferSize);
parameterBufferSize = (parameterBufferSize + (IntPtr.Size - 1)) & ~(IntPtr.Size - 1); // align buffer;
}
return parameterBufferSize;
}
// Walks through the collection and clears the parameters
//
internal void ClearBindings()
{
for (int i = 0; i < Count; ++i)
{
this[i].ClearBinding();
}
}
public override bool Contains(string value)
{ // WebData 97349
return (-1 != IndexOf(value));
}
public bool Contains(OdbcParameter value)
{
return (-1 != IndexOf(value));
}
public void CopyTo(OdbcParameter[] array, int index)
{
CopyTo((Array)array, index);
}
private void OnChange()
{
_rebindCollection = true;
}
internal void GetOutputValues(CMDWrapper cmdWrapper)
{
// MDAC 88542 - we will not read out the parameters if the collection has changed
if (!_rebindCollection)
{
CNativeBuffer parameterBuffer = cmdWrapper._nativeParameterBuffer!;
for (int i = 0; i < Count; ++i)
{
this[i].GetOutputValue(parameterBuffer);
}
}
}
public int IndexOf(OdbcParameter value)
{
return IndexOf((object)value);
}
public void Insert(int index, OdbcParameter value)
{
Insert(index, (object)value);
}
public void Remove(OdbcParameter value)
{
Remove((object)value);
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.ComponentModel;
using System.Data.Common;
namespace System.Data.Odbc
{
[Editor("Microsoft.VSDesigner.Data.Design.DBParametersEditor, Microsoft.VSDesigner, Version=10.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a",
"System.Drawing.Design.UITypeEditor, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
public sealed partial class OdbcParameterCollection : DbParameterCollection
{
private bool _rebindCollection; // The collection needs to be (re)bound
private static readonly Type s_itemType = typeof(OdbcParameter);
internal OdbcParameterCollection() : base()
{
}
internal bool RebindCollection
{
get { return _rebindCollection; }
set { _rebindCollection = value; }
}
[
Browsable(false),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)
]
public new OdbcParameter this[int index]
{
get
{
return (OdbcParameter)GetParameter(index);
}
set
{
SetParameter(index, value);
}
}
[
Browsable(false),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)
]
public new OdbcParameter this[string parameterName]
{
get
{
return (OdbcParameter)GetParameter(parameterName);
}
set
{
SetParameter(parameterName, value);
}
}
public OdbcParameter Add(OdbcParameter value)
{
// MDAC 59206
Add((object)value);
return value;
}
[EditorBrowsableAttribute(EditorBrowsableState.Never)]
[ObsoleteAttribute("Add(String parameterName, Object value) has been deprecated. Use AddWithValue(String parameterName, Object value) instead.")]
public OdbcParameter Add(string? parameterName, object? value)
{
// MDAC 59206
return Add(new OdbcParameter(parameterName, value));
}
public OdbcParameter AddWithValue(string? parameterName, object? value)
{
// MDAC 79027
return Add(new OdbcParameter(parameterName, value));
}
public OdbcParameter Add(string? parameterName, OdbcType odbcType)
{
return Add(new OdbcParameter(parameterName, odbcType));
}
public OdbcParameter Add(string? parameterName, OdbcType odbcType, int size)
{
return Add(new OdbcParameter(parameterName, odbcType, size));
}
public OdbcParameter Add(string? parameterName, OdbcType odbcType, int size, string? sourceColumn)
{
return Add(new OdbcParameter(parameterName, odbcType, size, sourceColumn));
}
public void AddRange(OdbcParameter[] values)
{
// V1.2.3300
AddRange((Array)values);
}
// Walks through the collection and binds each parameter
//
internal void Bind(OdbcCommand command, CMDWrapper cmdWrapper, CNativeBuffer parameterBuffer)
{
for (int i = 0; i < Count; ++i)
{
this[i].Bind(cmdWrapper.StatementHandle!, command, checked((short)(i + 1)), parameterBuffer, true);
}
_rebindCollection = false;
}
internal int CalcParameterBufferSize(OdbcCommand command)
{
// Calculate the size of the buffer we need
int parameterBufferSize = 0;
for (int i = 0; i < Count; ++i)
{
if (_rebindCollection)
{
this[i].HasChanged = true;
}
this[i].PrepareForBind(command, (short)(i + 1), ref parameterBufferSize);
parameterBufferSize = (parameterBufferSize + (IntPtr.Size - 1)) & ~(IntPtr.Size - 1); // align buffer;
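// Editorial note (not part of the original source): the expression above rounds the running size
// up to the next multiple of IntPtr.Size. For example, with IntPtr.Size == 8 and a size of 13,
// (13 + 7) & ~7 == 16; a size that is already a multiple of 8, such as 16, is left unchanged.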
}
return parameterBufferSize;
}
// Walks through the collection and clears the parameters
//
internal void ClearBindings()
{
for (int i = 0; i < Count; ++i)
{
this[i].ClearBinding();
}
}
public override bool Contains(string value)
{ // WebData 97349
return (-1 != IndexOf(value));
}
public bool Contains(OdbcParameter value)
{
return (-1 != IndexOf(value));
}
public void CopyTo(OdbcParameter[] array, int index)
{
CopyTo((Array)array, index);
}
private void OnChange()
{
_rebindCollection = true;
}
internal void GetOutputValues(CMDWrapper cmdWrapper)
{
// MDAC 88542 - we will not read out the parameters if the collection has changed
if (!_rebindCollection)
{
CNativeBuffer parameterBuffer = cmdWrapper._nativeParameterBuffer!;
for (int i = 0; i < Count; ++i)
{
this[i].GetOutputValue(parameterBuffer);
}
}
}
public int IndexOf(OdbcParameter value)
{
return IndexOf((object)value);
}
public void Insert(int index, OdbcParameter value)
{
Insert(index, (object)value);
}
public void Remove(OdbcParameter value)
{
Remove((object)value);
}
}
}
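// Editorial sketch (not part of the original file): consumer code typically adds parameters through
// the strongly-typed helpers above, e.g. (the SQL text and names are illustrative only):
//
// OdbcCommand command = new OdbcCommand("SELECT * FROM Orders WHERE Id = ?", connection);
// command.Parameters.AddWithValue("@id", 42);
//
// ODBC binds parameters positionally to the '?' placeholders, so the parameter name mainly serves
// as a key for looking the parameter up within the collection.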
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/Common/src/Interop/Unix/System.Security.Cryptography.Native/Interop.LookupFriendlyNameByOid.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Runtime.InteropServices;
internal static partial class Interop
{
internal static partial class Crypto
{
[LibraryImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_LookupFriendlyNameByOid", StringMarshalling = StringMarshalling.Utf8)]
internal static partial int LookupFriendlyNameByOid(string oidValue, ref IntPtr friendlyNamePtr);
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Runtime.InteropServices;
internal static partial class Interop
{
internal static partial class Crypto
{
[LibraryImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_LookupFriendlyNameByOid", StringMarshalling = StringMarshalling.Utf8)]
internal static partial int LookupFriendlyNameByOid(string oidValue, ref IntPtr friendlyNamePtr);
}
}
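// Editorial sketch (not part of the original file): a caller might invoke the import roughly as
//   IntPtr friendlyName = IntPtr.Zero;
//   int found = Interop.Crypto.LookupFriendlyNameByOid("1.2.840.113549.1.1.11", ref friendlyName);
// The OID value is only an example, and the meaning of the return value and of the returned
// pointer is defined by the native CryptoNative_LookupFriendlyNameByOid shim, not assumed here.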
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs`; I just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs`; I just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/jit64/valuetypes/nullable/castclass/generics/castclass-generics026.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// <Area> Nullable - CastClass </Area>
// <Title> Nullable type with castclass expr </Title>
// <Description>
// checking type of EmptyStructGen<int> using cast expr
// </Description>
// <RelatedBugs> </RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
using System.Runtime.InteropServices;
using System;
internal class NullableTest
{
private static bool BoxUnboxToNQ<T>(T o)
{
return Helper.Compare((EmptyStructGen<int>)(ValueType)(object)o, Helper.Create(default(EmptyStructGen<int>)));
}
private static bool BoxUnboxToQ<T>(T o)
{
return Helper.Compare((EmptyStructGen<int>?)(ValueType)(object)o, Helper.Create(default(EmptyStructGen<int>)));
}
private static int Main()
{
EmptyStructGen<int>? s = Helper.Create(default(EmptyStructGen<int>));
if (BoxUnboxToNQ(s) && BoxUnboxToQ(s))
return ExitCode.Passed;
else
return ExitCode.Failed;
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// <Area> Nullable - CastClass </Area>
// <Title> Nullable type with castclass expr </Title>
// <Description>
// checking type of EmptyStructGen<int> using cast expr
// </Description>
// <RelatedBugs> </RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
using System.Runtime.InteropServices;
using System;
internal class NullableTest
{
private static bool BoxUnboxToNQ<T>(T o)
{
return Helper.Compare((EmptyStructGen<int>)(ValueType)(object)o, Helper.Create(default(EmptyStructGen<int>)));
}
private static bool BoxUnboxToQ<T>(T o)
{
return Helper.Compare((EmptyStructGen<int>?)(ValueType)(object)o, Helper.Create(default(EmptyStructGen<int>)));
}
private static int Main()
{
EmptyStructGen<int>? s = Helper.Create(default(EmptyStructGen<int>));
if (BoxUnboxToNQ(s) && BoxUnboxToQ(s))
return ExitCode.Passed;
else
return ExitCode.Failed;
}
}
| -1 |
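The description also mentions moving setup into shared test state so that expensive work (copying files around) happens once per test class instead of once per test. In xUnit that is typically expressed with IClassFixture<T>; the sketch below uses a hypothetical fixture, not the PR's actual SharedTestState types:

```csharp
using System;
using System.IO;
using Xunit;

// Constructed once before the first test in the class runs; Dispose runs after the last one.
public class SharedTestStateSketch : IDisposable
{
    public string DotNetRoot { get; }

    public SharedTestStateSketch()
    {
        // Expensive one-time setup, e.g. laying out a fake .NET install to resolve against.
        DotNetRoot = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
        Directory.CreateDirectory(DotNetRoot);
    }

    public void Dispose() => Directory.Delete(DotNetRoot, recursive: true);
}

public class SdkLookupSketch : IClassFixture<SharedTestStateSketch>
{
    private readonly SharedTestStateSketch _sharedState;

    // xUnit injects the single shared instance into every test class constructor.
    public SdkLookupSketch(SharedTestStateSketch sharedState) => _sharedState = sharedState;

    [Fact]
    public void UsesSharedLayout() => Assert.True(Directory.Exists(_sharedState.DotNetRoot));
}
```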
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs`; I just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs`; I just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Runtime.Numerics/tests/BigInteger/log02.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace System.Numerics.Tests
{
public class log02Test
{
private static int s_samples = 10;
private static Random s_random = new Random(100);
[Fact]
public static void RunLogTests()
{
byte[] tempByteArray1 = new byte[0];
// Log Method - Large BigIntegers
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomByteArray(s_random);
VerifyLogString(Print(tempByteArray1) + "uLog");
}
// Log Method - Small BigIntegers
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomByteArray(s_random, 2);
VerifyLogString(Print(tempByteArray1) + "uLog");
}
// Log Method - zero
VerifyLogString("0 uLog");
// Log Method - -1
VerifyLogString("-1 uLog");
// Log Method - 1
VerifyLogString("1 uLog");
// Log Method - Int32.MinValue
VerifyLogString(Int32.MinValue.ToString() + " uLog");
// Log Method - Int32.MinValue-1
VerifyLogString(Int32.MinValue.ToString() + " -1 b+ uLog");
// Log Method - Int32.MinValue+1
VerifyLogString(Int32.MinValue.ToString() + " 1 b+ uLog");
// Log Method - Int32.MaxValue
VerifyLogString(Int32.MaxValue.ToString() + " uLog");
// Log Method - Int32.MaxValue-1
VerifyLogString(Int32.MaxValue.ToString() + " -1 b+ uLog");
// Log Method - Int32.MaxValue+1
VerifyLogString(Int32.MaxValue.ToString() + " 1 b+ uLog");
// Log Method - Int64.MinValue
VerifyLogString(Int64.MinValue.ToString() + " uLog");
// Log Method - Int64.MinValue-1
VerifyLogString(Int64.MinValue.ToString() + " -1 b+ uLog");
// Log Method - Int64.MinValue+1
VerifyLogString(Int64.MinValue.ToString() + " 1 b+ uLog");
// Log Method - Int64.MaxValue
VerifyLogString(Int64.MaxValue.ToString() + " uLog");
// Log Method - Int64.MaxValue-1
VerifyLogString(Int64.MaxValue.ToString() + " -1 b+ uLog");
// Log Method - Int64.MaxValue+1
VerifyLogString(Int64.MaxValue.ToString() + " 1 b+ uLog");
}
private static void VerifyLogString(string opstring)
{
StackCalc sc = new StackCalc(opstring);
while (sc.DoNextOperation())
{
Assert.Equal(sc.snCalc.Peek().ToString(), sc.myCalc.Peek().ToString());
}
}
private static byte[] GetRandomByteArray(Random random)
{
return GetRandomByteArray(random, random.Next(0, 1024));
}
private static byte[] GetRandomByteArray(Random random, int size)
{
return MyBigIntImp.GetRandomByteArray(random, size);
}
private static string Print(byte[] bytes)
{
return MyBigIntImp.Print(bytes);
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace System.Numerics.Tests
{
public class log02Test
{
private static int s_samples = 10;
private static Random s_random = new Random(100);
[Fact]
public static void RunLogTests()
{
byte[] tempByteArray1 = new byte[0];
// Log Method - Large BigIntegers
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomByteArray(s_random);
VerifyLogString(Print(tempByteArray1) + "uLog");
}
// Log Method - Small BigIntegers
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomByteArray(s_random, 2);
VerifyLogString(Print(tempByteArray1) + "uLog");
}
// Log Method - zero
VerifyLogString("0 uLog");
// Log Method - -1
VerifyLogString("-1 uLog");
// Log Method - 1
VerifyLogString("1 uLog");
// Log Method - Int32.MinValue
VerifyLogString(Int32.MinValue.ToString() + " uLog");
// Log Method - Int32.MinValue-1
VerifyLogString(Int32.MinValue.ToString() + " -1 b+ uLog");
// Log Method - Int32.MinValue+1
VerifyLogString(Int32.MinValue.ToString() + " 1 b+ uLog");
// Log Method - Int32.MaxValue
VerifyLogString(Int32.MaxValue.ToString() + " uLog");
// Log Method - Int32.MaxValue-1
VerifyLogString(Int32.MaxValue.ToString() + " -1 b+ uLog");
// Log Method - Int32.MaxValue+1
VerifyLogString(Int32.MaxValue.ToString() + " 1 b+ uLog");
// Log Method - Int64.MinValue
VerifyLogString(Int64.MinValue.ToString() + " uLog");
// Log Method - Int64.MinValue-1
VerifyLogString(Int64.MinValue.ToString() + " -1 b+ uLog");
// Log Method - Int64.MinValue+1
VerifyLogString(Int64.MinValue.ToString() + " 1 b+ uLog");
// Log Method - Int64.MaxValue
VerifyLogString(Int64.MaxValue.ToString() + " uLog");
// Log Method - Int64.MaxValue-1
VerifyLogString(Int64.MaxValue.ToString() + " -1 b+ uLog");
// Log Method - Int64.MaxValue+1
VerifyLogString(Int64.MaxValue.ToString() + " 1 b+ uLog");
}
private static void VerifyLogString(string opstring)
{
StackCalc sc = new StackCalc(opstring);
while (sc.DoNextOperation())
{
Assert.Equal(sc.snCalc.Peek().ToString(), sc.myCalc.Peek().ToString());
}
}
private static byte[] GetRandomByteArray(Random random)
{
return GetRandomByteArray(random, random.Next(0, 1024));
}
private static byte[] GetRandomByteArray(Random random, int size)
{
return MyBigIntImp.GetRandomByteArray(random, size);
}
private static string Print(byte[] bytes)
{
return MyBigIntImp.Print(bytes);
}
}
}
| -1 |
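When a test case is too rich for [InlineData] (objects, long expected strings), xUnit's [MemberData] feeds a [Theory] from a data method, which is the pattern visible in the Atom10FeedFormatterTests row further down. A small self-contained sketch with hypothetical data:

```csharp
using System;
using System.Collections.Generic;
using Xunit;

public class VersionParsingSketch
{
    // Each yielded array becomes one test case; complex objects are fine here,
    // unlike [InlineData], which is limited to compile-time constants.
    public static IEnumerable<object[]> VersionCases()
    {
        yield return new object[] { "6.0.3", 6, 0, 3 };
        yield return new object[] { "3.1.22", 3, 1, 22 };
    }

    [Theory]
    [MemberData(nameof(VersionCases))]
    public void ParsesVersionComponents(string text, int major, int minor, int patch)
    {
        Version version = Version.Parse(text);
        Assert.Equal(major, version.Major);
        Assert.Equal(minor, version.Minor);
        Assert.Equal(patch, version.Build);
    }
}
```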
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs`; I just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs`; I just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/Performance/CodeQuality/Math/Functions/Single/CoshSingle.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
namespace Functions
{
public static partial class MathTests
{
// Tests MathF.Cosh(float) over 5000 iterations for the domain -1, +1
private const float coshSingleDelta = 0.0004f;
private const float coshSingleExpectedResult = 5876.02588f;
public static void CoshSingleTest()
{
var result = 0.0f; var value = -1.0f;
for (var iteration = 0; iteration < iterations; iteration++)
{
value += coshSingleDelta;
result += MathF.Cosh(value);
}
var diff = MathF.Abs(coshSingleExpectedResult - result);
if (diff > singleEpsilon)
{
throw new Exception($"Expected Result {coshSingleExpectedResult,10:g9}; Actual Result {result,10:g9}");
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
namespace Functions
{
public static partial class MathTests
{
// Tests MathF.Cosh(float) over 5000 iterations for the domain -1, +1
private const float coshSingleDelta = 0.0004f;
private const float coshSingleExpectedResult = 5876.02588f;
public static void CoshSingleTest()
{
var result = 0.0f; var value = -1.0f;
for (var iteration = 0; iteration < iterations; iteration++)
{
value += coshSingleDelta;
result += MathF.Cosh(value);
}
var diff = MathF.Abs(coshSingleExpectedResult - result);
if (diff > singleEpsilon)
{
throw new Exception($"Expected Result {coshSingleExpectedResult,10:g9}; Actual Result {result,10:g9}");
}
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs`; I just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept `MultiLevelSDKLookup.cs`; I just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove a single test case (in spirit), with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant, as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.ServiceModel.Syndication/tests/System/ServiceModel/Syndication/Atom10FeedFormatterTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.Serialization;
using System.Text;
using System.Xml;
using System.Xml.Linq;
using System.Xml.Serialization;
using Xunit;
namespace System.ServiceModel.Syndication.Tests
{
public partial class Atom10FeedFormatterTests
{
[Fact]
public void Ctor_Default()
{
var formatter = new Formatter();
Assert.Null(formatter.Feed);
Assert.Equal(typeof(SyndicationFeed), formatter.FeedTypeEntryPoint);
Assert.True(formatter.PreserveAttributeExtensions);
Assert.True(formatter.PreserveElementExtensions);
Assert.Equal("Atom10", formatter.Version);
}
[Fact]
public void Ctor_GenericDefault()
{
var formatter = new GenericFormatter<SyndicationFeed>();
Assert.Null(formatter.Feed);
Assert.Equal(typeof(SyndicationFeed), formatter.FeedTypeEntryPoint);
Assert.True(formatter.PreserveAttributeExtensions);
Assert.True(formatter.PreserveElementExtensions);
Assert.Equal("Atom10", formatter.Version);
}
[Fact]
public void Ctor_SyndicationFeed()
{
var feed = new SyndicationFeed();
var formatter = new Formatter(feed);
Assert.Same(feed, formatter.Feed);
Assert.Equal(typeof(SyndicationFeed), formatter.FeedTypeEntryPoint);
Assert.True(formatter.PreserveAttributeExtensions);
Assert.True(formatter.PreserveElementExtensions);
Assert.Equal("Atom10", formatter.Version);
}
[Fact]
public void Ctor_GenericSyndicationFeed()
{
var feed = new SyndicationFeed();
var formatter = new GenericFormatter<SyndicationFeed>(feed);
Assert.Same(feed, formatter.Feed);
Assert.Equal(typeof(SyndicationFeed), formatter.FeedTypeEntryPoint);
Assert.True(formatter.PreserveAttributeExtensions);
Assert.True(formatter.PreserveElementExtensions);
Assert.Equal("Atom10", formatter.Version);
}
[Fact]
public void Ctor_NullFeedToWrite_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("feedToWrite", () => new Atom10FeedFormatter((SyndicationFeed)null));
AssertExtensions.Throws<ArgumentNullException>("feedToWrite", () => new Atom10FeedFormatter<SyndicationFeed>(null));
}
[Theory]
[InlineData(typeof(SyndicationFeed))]
[InlineData(typeof(SyndicationFeedSubclass))]
public void Ctor_Type(Type feedTypeToCreate)
{
var formatter = new Formatter(feedTypeToCreate);
Assert.Null(formatter.Feed);
Assert.Equal(feedTypeToCreate, formatter.FeedTypeEntryPoint);
Assert.True(formatter.PreserveAttributeExtensions);
Assert.True(formatter.PreserveElementExtensions);
Assert.Equal("Atom10", formatter.Version);
}
[Fact]
public void Ctor_NullFeedTypeToCreate_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("feedTypeToCreate", () => new Atom10FeedFormatter((Type)null));
}
[Fact]
public void Ctor_InvalidFeedTypeToCreate_ThrowsArgumentException()
{
AssertExtensions.Throws<ArgumentException>("feedTypeToCreate", () => new Atom10FeedFormatter(typeof(int)));
}
[Fact]
public void GetSchema_Invoke_ReturnsNull()
{
IXmlSerializable formatter = new Atom10FeedFormatter();
Assert.Null(formatter.GetSchema());
}
public static IEnumerable<object[]> WriteTo_TestData()
{
// Full item.
SyndicationPerson CreatePerson(string prefix)
{
var person = new SyndicationPerson();
person.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name1"), null);
person.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name2", prefix + "_namespace"), "");
person.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name3", prefix + "_namespace"), prefix + "_value");
person.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name4", "xmlns"), "");
person.ElementExtensions.Add(new ExtensionObject { Value = 10 });
person.Email = prefix + "_email";
person.Name = prefix + "_name";
person.Uri = prefix + "_uri";
return person;
}
TextSyndicationContent CreateContent(string prefix)
{
var content = new TextSyndicationContent(prefix + "_title", TextSyndicationContentKind.Html);
content.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name1"), null);
content.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name2", prefix + "_namespace"), "");
content.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name3", prefix + "_namespace"), prefix + "_value");
content.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name4", "xmlns"), "");
return content;
}
SyndicationCategory CreateCategory(string prefix)
{
var category = new SyndicationCategory();
category.AttributeExtensions.Add(new XmlQualifiedName(prefix + "category_name1"), null);
category.AttributeExtensions.Add(new XmlQualifiedName(prefix + "category_name2", prefix + "category_namespace"), "");
category.AttributeExtensions.Add(new XmlQualifiedName(prefix + "category_name3", prefix + "category_namespace"), prefix + "category_value");
category.AttributeExtensions.Add(new XmlQualifiedName(prefix + "category_name4", "xmlns"), "");
category.ElementExtensions.Add(new ExtensionObject { Value = 10 });
category.Label = prefix + "category_label";
category.Name = prefix + "category_name";
category.Scheme = prefix + "category_scheme";
return category;
}
SyndicationLink CreateLink(string prefix)
{
var link = new SyndicationLink();
link.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name1"), null);
link.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name2", prefix + "_namespace"), "");
link.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name3", prefix + "_namespace"), prefix + "_value");
link.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name4", "xmlns"), "");
link.BaseUri = new Uri("http://" + prefix + "_url.com");
link.ElementExtensions.Add(new ExtensionObject { Value = 10 });
link.Length = 10;
link.MediaType = prefix + "_mediaType";
link.RelationshipType = prefix + "_relationshipType";
link.Title = prefix + "_title";
link.Uri = new Uri("http://" + prefix +"_uri.com");
return link;
}
var attributeSyndicationCategory = new SyndicationCategory
{
Name = "name",
Label = "label",
Scheme = "scheme"
};
attributeSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("term"), "term_value");
attributeSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("label"), "label_value");
attributeSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("scheme"), "scheme_value");
var attributeSyndicationLink = new SyndicationLink
{
RelationshipType = "link_relationshipType",
MediaType = "link_mediaType",
Title = "link_title",
Length = 10,
Uri = new Uri("http://link_uri.com")
};
attributeSyndicationLink.AttributeExtensions.Add(new XmlQualifiedName("rel"), "rel_value");
attributeSyndicationLink.AttributeExtensions.Add(new XmlQualifiedName("type"), "type_value");
attributeSyndicationLink.AttributeExtensions.Add(new XmlQualifiedName("title"), "title_value");
attributeSyndicationLink.AttributeExtensions.Add(new XmlQualifiedName("length"), "100");
attributeSyndicationLink.AttributeExtensions.Add(new XmlQualifiedName("href"), "href_value");
var fullSyndicationItem = new SyndicationItem();
fullSyndicationItem.AttributeExtensions.Add(new XmlQualifiedName("item_name1"), null);
fullSyndicationItem.AttributeExtensions.Add(new XmlQualifiedName("item_name2", "item_namespace"), "");
fullSyndicationItem.AttributeExtensions.Add(new XmlQualifiedName("item_name3", "item_namespace"), "item_value");
fullSyndicationItem.AttributeExtensions.Add(new XmlQualifiedName("item_name4", "xmlns"), "");
fullSyndicationItem.Authors.Add(new SyndicationPerson());
fullSyndicationItem.Authors.Add(CreatePerson("author"));
fullSyndicationItem.BaseUri = new Uri("http://microsoft/relative");
fullSyndicationItem.Categories.Add(new SyndicationCategory());
fullSyndicationItem.Categories.Add(CreateCategory(""));
fullSyndicationItem.Categories.Add(attributeSyndicationCategory);
fullSyndicationItem.Content = CreateContent("content");
fullSyndicationItem.Contributors.Add(new SyndicationPerson());
fullSyndicationItem.Contributors.Add(CreatePerson("contributor"));
fullSyndicationItem.Copyright = CreateContent("copyright");
fullSyndicationItem.ElementExtensions.Add(new ExtensionObject { Value = 10 });
fullSyndicationItem.Id = "id";
fullSyndicationItem.LastUpdatedTime = DateTimeOffset.MinValue.AddTicks(100);
fullSyndicationItem.Links.Add(new SyndicationLink());
fullSyndicationItem.Links.Add(CreateLink("link"));
fullSyndicationItem.Links.Add(attributeSyndicationLink);
fullSyndicationItem.PublishDate = DateTimeOffset.MinValue.AddTicks(200);
fullSyndicationItem.Summary = CreateContent("summary");
fullSyndicationItem.Title = CreateContent("title");
var fullSyndicationFeed = new SyndicationFeed();
fullSyndicationFeed.AttributeExtensions.Add(new XmlQualifiedName("feed_name1"), null);
fullSyndicationFeed.AttributeExtensions.Add(new XmlQualifiedName("feed_name2", "feed_namespace"), "");
fullSyndicationFeed.AttributeExtensions.Add(new XmlQualifiedName("feed_name3", "feed_namespace"), "feed_value");
fullSyndicationFeed.AttributeExtensions.Add(new XmlQualifiedName("feed_name4", "xmlns"), "");
fullSyndicationFeed.Authors.Add(new SyndicationPerson());
fullSyndicationFeed.Authors.Add(CreatePerson("feedauthor"));
fullSyndicationFeed.BaseUri = new Uri("http://microsoft.com");
fullSyndicationFeed.Categories.Add(new SyndicationCategory());
fullSyndicationFeed.Categories.Add(CreateCategory("feed"));
fullSyndicationItem.Categories.Add(attributeSyndicationCategory);
fullSyndicationFeed.Contributors.Add(new SyndicationPerson());
fullSyndicationFeed.Contributors.Add(CreatePerson("feedauthor_"));
fullSyndicationFeed.Copyright = CreateContent("feedcopyright");
fullSyndicationFeed.Description = CreateContent("feeddescription");
fullSyndicationFeed.ElementExtensions.Add(new ExtensionObject { Value = 10 });
fullSyndicationFeed.Generator = "generator";
fullSyndicationFeed.Id = "id";
fullSyndicationFeed.ImageUrl = new Uri("http://imageurl.com");
fullSyndicationFeed.Items = new SyndicationItem[] { new SyndicationItem() { Id = "id", LastUpdatedTime = DateTimeOffset.MinValue.AddTicks(1) }, fullSyndicationItem };
fullSyndicationFeed.Language = "language";
fullSyndicationFeed.LastUpdatedTime = DateTimeOffset.MinValue.AddYears(1);
fullSyndicationFeed.Links.Add(new SyndicationLink());
fullSyndicationFeed.Links.Add(CreateLink("syndicationlink"));
fullSyndicationFeed.Links.Add(attributeSyndicationLink);
fullSyndicationFeed.Title = CreateContent("feedtitle");
yield return new object[]
{
fullSyndicationFeed,
@"<feed xml:lang=""language"" xml:base=""http://microsoft.com/"" feed_name1="""" d1p1:feed_name2="""" d1p1:feed_name3=""feed_value"" d1p2:feed_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""feed_namespace"" xmlns=""http://www.w3.org/2005/Atom"">
<title type=""html"" feedtitle_name1="""" d2p1:feedtitle_name2="""" d2p1:feedtitle_name3=""feedtitle_value"" d1p2:feedtitle_name4="""" xmlns:d2p1=""feedtitle_namespace"">feedtitle_title</title>
<subtitle type=""html"" feeddescription_name1="""" d2p1:feeddescription_name2="""" d2p1:feeddescription_name3=""feeddescription_value"" d1p2:feeddescription_name4="""" xmlns:d2p1=""feeddescription_namespace"">feeddescription_title</subtitle>
<id>id</id>
<rights type=""html"" feedcopyright_name1="""" d2p1:feedcopyright_name2="""" d2p1:feedcopyright_name3=""feedcopyright_value"" d1p2:feedcopyright_name4="""" xmlns:d2p1=""feedcopyright_namespace"">feedcopyright_title</rights>
<updated>0002-01-01T00:00:00Z</updated>
<category term="""" />
<category feedcategory_name1="""" d2p1:feedcategory_name2="""" d2p1:feedcategory_name3=""feedcategory_value"" d1p2:feedcategory_name4="""" term=""feedcategory_name"" label=""feedcategory_label"" scheme=""feedcategory_scheme"" xmlns:d2p1=""feedcategory_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<logo>http://imageurl.com/</logo>
<author />
<author feedauthor_name1="""" d2p1:feedauthor_name2="""" d2p1:feedauthor_name3=""feedauthor_value"" d1p2:feedauthor_name4="""" xmlns:d2p1=""feedauthor_namespace"">
<name>feedauthor_name</name>
<uri>feedauthor_uri</uri>
<email>feedauthor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor feedauthor__name1="""" d2p1:feedauthor__name2="""" d2p1:feedauthor__name3=""feedauthor__value"" d1p2:feedauthor__name4="""" xmlns:d2p1=""feedauthor__namespace"">
<name>feedauthor__name</name>
<uri>feedauthor__uri</uri>
<email>feedauthor__email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<generator>generator</generator>
<link href="""" />
<link xml:base=""http://syndicationlink_url.com/"" syndicationlink_name1="""" d2p1:syndicationlink_name2="""" d2p1:syndicationlink_name3=""syndicationlink_value"" d1p2:syndicationlink_name4="""" rel=""syndicationlink_relationshipType"" type=""syndicationlink_mediaType"" title=""syndicationlink_title"" length=""10"" href=""http://syndicationlink_uri.com/"" xmlns:d2p1=""syndicationlink_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<link rel=""rel_value"" type=""type_value"" title=""title_value"" length=""100"" href=""href_value"" />
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
<entry>
<id>id</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>
<entry xml:base=""http://microsoft/relative"" item_name1="""" d2p1:item_name2="""" d2p1:item_name3=""item_value"" d1p2:item_name4="""" xmlns:d2p1=""item_namespace"">
<id>id</id>
<title type=""html"" title_name1="""" d3p1:title_name2="""" d3p1:title_name3=""title_value"" d1p2:title_name4="""" xmlns:d3p1=""title_namespace"">title_title</title>
<summary type=""html"" summary_name1="""" d3p1:summary_name2="""" d3p1:summary_name3=""summary_value"" d1p2:summary_name4="""" xmlns:d3p1=""summary_namespace"">summary_title</summary>
<published>0001-01-01T00:00:00Z</published>
<updated>0001-01-01T00:00:00Z</updated>
<author />
<author author_name1="""" d3p1:author_name2="""" d3p1:author_name3=""author_value"" d1p2:author_name4="""" xmlns:d3p1=""author_namespace"">
<name>author_name</name>
<uri>author_uri</uri>
<email>author_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor contributor_name1="""" d3p1:contributor_name2="""" d3p1:contributor_name3=""contributor_value"" d1p2:contributor_name4="""" xmlns:d3p1=""contributor_namespace"">
<name>contributor_name</name>
<uri>contributor_uri</uri>
<email>contributor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<link href="""" />
<link xml:base=""http://link_url.com/"" link_name1="""" d3p1:link_name2="""" d3p1:link_name3=""link_value"" d1p2:link_name4="""" rel=""link_relationshipType"" type=""link_mediaType"" title=""link_title"" length=""10"" href=""http://link_uri.com/"" xmlns:d3p1=""link_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<link rel=""rel_value"" type=""type_value"" title=""title_value"" length=""100"" href=""href_value"" />
<category term="""" />
<category category_name1="""" d3p1:category_name2="""" d3p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d3p1=""category_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<category term=""term_value"" label=""label_value"" scheme=""scheme_value"" />
<category term=""term_value"" label=""label_value"" scheme=""scheme_value"" />
<content type=""html"" content_name1="""" d3p1:content_name2="""" d3p1:content_name3=""content_value"" d1p2:content_name4="""" xmlns:d3p1=""content_namespace"">content_title</content>
<rights type=""html"" copyright_name1="""" d3p1:copyright_name2="""" d3p1:copyright_name3=""copyright_value"" d1p2:copyright_name4="""" xmlns:d3p1=""copyright_namespace"">copyright_title</rights>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</entry>
</feed>"
};
}
[Theory]
[MemberData(nameof(WriteTo_TestData))]
public void Write_HasFeed_SerializesExpected(SyndicationFeed feed, string expected)
{
var formatter = new Atom10FeedFormatter(feed);
CompareHelper.AssertEqualWriteOutput(expected, writer => formatter.WriteTo(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer => feed.SaveAsAtom10(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer =>
{
writer.WriteStartElement("feed", "http://www.w3.org/2005/Atom");
((IXmlSerializable)formatter).WriteXml(writer);
writer.WriteEndElement();
});
var genericFormatter = new Atom10FeedFormatter<SyndicationFeed>(feed);
CompareHelper.AssertEqualWriteOutput(expected, writer => formatter.WriteTo(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer => feed.SaveAsAtom10(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer =>
{
writer.WriteStartElement("feed", "http://www.w3.org/2005/Atom");
((IXmlSerializable)genericFormatter).WriteXml(writer);
writer.WriteEndElement();
});
}
[Fact]
public void Write_EmptyFeed_SerializesExpected()
{
var formatter = new Atom10FeedFormatter(new SyndicationFeed());
var stringBuilder = new StringBuilder();
using (XmlWriter writer = XmlWriter.Create(stringBuilder))
{
formatter.WriteTo(writer);
}
using (var stringReader = new StringReader(stringBuilder.ToString()))
{
XElement element = XElement.Load(stringReader);
Assert.Equal("feed", element.Name.LocalName);
Assert.Equal("http://www.w3.org/2005/Atom", element.Attribute("xmlns").Value);
XElement[] elements = element.Elements().ToArray();
Assert.Equal(3, elements.Length);
Assert.Equal("title", elements[0].Name.LocalName);
Assert.Equal("text", elements[0].Attribute("type").Value);
Assert.Empty(elements[0].Value);
Assert.Equal("id", elements[1].Name.LocalName);
Assert.StartsWith("uuid:", elements[1].Value);
Assert.Equal("updated", elements[2].Name.LocalName);
DateTimeOffset now = DateTimeOffset.UtcNow;
Assert.True(now >= DateTimeOffset.ParseExact(elements[2].Value, "yyyy-MM-ddTHH:mm:ssZ", CultureInfo.InvariantCulture));
}
}
[Fact]
public void WriteTo_NullWriter_ThrowsArgumentNullException()
{
var formatter = new Atom10FeedFormatter(new SyndicationFeed());
AssertExtensions.Throws<ArgumentNullException>("writer", () => formatter.WriteTo(null));
}
[Fact]
public void WriteTo_NoItem_ThrowsInvalidOperationException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Atom10FeedFormatter();
Assert.Throws<InvalidOperationException>(() => formatter.WriteTo(writer));
}
}
[Fact]
public void WriteXml_NullWriter_ThrowsArgumentNullException()
{
IXmlSerializable formatter = new Atom10FeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("writer", () => formatter.WriteXml(null));
}
[Fact]
public void WriteXml_NoItem_ThrowsInvalidOperationException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
IXmlSerializable formatter = new Atom10FeedFormatter();
Assert.Throws<InvalidOperationException>(() => formatter.WriteXml(writer));
}
}
public static IEnumerable<object[]> FeedBaseUri_TestData()
{
yield return new object[] { null };
yield return new object[] { new Uri("http://microsoft.com") };
yield return new object[] { new Uri("/relative", UriKind.Relative) };
}
[Theory]
[MemberData(nameof(FeedBaseUri_TestData))]
public void WriteItem_Invoke_Success(Uri feedBaseUri)
{
var formatter = new Formatter();
var item = new SyndicationItem() { Id = "id", LastUpdatedTime = DateTimeOffset.MinValue.AddTicks(1) };
CompareHelper.AssertEqualWriteOutput(
@"<entry xmlns=""http://www.w3.org/2005/Atom"">
<id>id</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>", writer => formatter.WriteItemEntryPoint(writer, item, feedBaseUri));
}
[Fact]
public void WriteItem_NullWriter_ThrowsNullReferenceException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
Assert.Throws<NullReferenceException>(() => formatter.WriteItemEntryPoint(null, new SyndicationItem(), new Uri("http://microsoft.com")));
}
}
[Fact]
public void WriteItem_NullItem_ThrowsNullReferenceException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
Assert.Throws<NullReferenceException>(() => formatter.WriteItemEntryPoint(writer, null, new Uri("http://microsoft.com")));
}
}
[Theory]
[MemberData(nameof(FeedBaseUri_TestData))]
public void WriteItems_Invoke_Success(Uri feedBaseUri)
{
var formatter = new Formatter();
var items = new SyndicationItem[]
{
new SyndicationItem() { Id = "id1", LastUpdatedTime = DateTimeOffset.MinValue.AddTicks(1) },
new SyndicationItem() { Id = "id2", LastUpdatedTime = DateTimeOffset.MinValue.AddTicks(1) }
};
CompareHelper.AssertEqualWriteOutput(
@"<entry xmlns=""http://www.w3.org/2005/Atom"">
<id>id1</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>
<entry xmlns=""http://www.w3.org/2005/Atom"">
<id>id2</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>", writer => formatter.WriteItemsEntryPoint(writer, items, feedBaseUri));
}
[Fact]
public void WriteItems_NullItems_Nop()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
formatter.WriteItemsEntryPoint(writer, null, new Uri("http://microsoft.com"));
formatter.WriteItemsEntryPoint(null, null, new Uri("http://microsoft.com"));
}
}
[Fact]
public void WriteItems_EmptyItems_Nop()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
formatter.WriteItemsEntryPoint(writer, new SyndicationItem[0], new Uri("http://microsoft.com"));
formatter.WriteItemsEntryPoint(null, new SyndicationItem[0], new Uri("http://microsoft.com"));
}
}
[Fact]
public void WriteItems_NullWriter_ThrowsNullReferenceException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
var items = new SyndicationItem[] { new SyndicationItem() };
Assert.Throws<NullReferenceException>(() => formatter.WriteItemsEntryPoint(null, items, new Uri("http://microsoft.com")));
}
}
[Fact]
public void WriteItems_NullItemInItems_ThrowsNullReferenceException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
var items = new SyndicationItem[] { null };
Assert.Throws<NullReferenceException>(() => formatter.WriteItemsEntryPoint(writer, items, new Uri("http://microsoft.com")));
}
}
public static IEnumerable<object[]> CanRead_TestData()
{
yield return new object[] { @"<feed />", false };
yield return new object[] { @"<feed xmlns=""different"" />", false };
yield return new object[] { @"<different xmlns=""http://www.w3.org/2005/Atom"" />", false };
yield return new object[] { @"<feed xmlns=""http://www.w3.org/2005/Atom"" />", true };
yield return new object[] { @"<feed xmlns=""http://www.w3.org/2005/Atom""></feed>", true };
}
[Theory]
[MemberData(nameof(CanRead_TestData))]
public void CanRead_ValidReader_ReturnsExpected(string xmlString, bool expected)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter();
Assert.Equal(expected, formatter.CanRead(reader));
}
}
[Fact]
public void CanRead_NullReader_ThrowsArgumentNullException()
{
var formatter = new Atom10FeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.CanRead(null));
}
[Theory]
[InlineData(true, true)]
[InlineData(false, false)]
public void Read_FullItem_ReturnsExpected(bool preserveAttributeExtensions, bool preserveElementExtensions)
{
VerifyRead(
@"<feed xml:lang=""language"" xml:base=""http://microsoft.com/"" feed_name1="""" d1p1:feed_name2="""" d1p1:feed_name3=""feed_value"" d1p2:feed_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""feed_namespace"" xmlns=""http://www.w3.org/2005/Atom"">
<title type=""html"" feedtitle_name1="""" d2p1:feedtitle_name2="""" d2p1:feedtitle_name3=""feedtitle_value"" d1p2:feedtitle_name4="""" xmlns:d2p1=""feedtitle_namespace"">feedtitle_title</title>
<subtitle type=""html"" feeddescription_name1="""" d2p1:feeddescription_name2="""" d2p1:feeddescription_name3=""feeddescription_value"" d1p2:feeddescription_name4="""" xmlns:d2p1=""feeddescription_namespace"">feeddescription_title</subtitle>
<id>id</id>
<rights type=""html"" feedcopyright_name1="""" d2p1:feedcopyright_name2="""" d2p1:feedcopyright_name3=""feedcopyright_value"" d1p2:feedcopyright_name4="""" xmlns:d2p1=""feedcopyright_namespace"">feedcopyright_title</rights>
<updated>0002-01-01T00:00:00Z</updated>
<category />
<category></category>
<category term="""" />
<category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<logo>http://imageurl.com/</logo>
<author />
<author></author>
<author author_name1="""" d2p1:author_name2="""" d2p1:author_name3=""author_value"" d1p2:author_name4="""" xmlns:d2p1=""author_namespace"">
<name>author_name</name>
<uri>author_uri</uri>
<email>author_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor></contributor>
<contributor contributor_name1="""" d2p1:contributor_name2="""" d2p1:contributor_name3=""contributor_value"" d1p2:contributor_name4="""" xmlns:d2p1=""contributor_namespace"">
<name>contributor_name</name>
<uri>contributor_uri</uri>
<email>contributor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<generator>generator</generator>
<link />
<link></link>
<link href="""" />
<link xml:base=""http://link_url.com/"" link_name1="""" d2p1:link_name2="""" d2p1:link_name3=""link_value"" d1p2:link_name4="""" rel=""link_relationshipType"" type=""link_mediaType"" title=""link_title"" length=""10"" href=""http://link_uri.com/"" xmlns:d2p1=""link_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
<entry>
<id>id</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>
<entry xml:base=""/relative"" item_name1="""" d1p1:item_name2="""" d1p1:item_name3=""item_value"" d1p2:item_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""item_namespace"" xmlns=""http://www.w3.org/2005/Atom"">
<id>id</id>
<title type=""html"" title_name1="""" d2p1:title_name2="""" d2p1:title_name3=""title_value"" d1p2:title_name4="""" xmlns:d2p1=""title_namespace"">title_title</title>
<summary type=""html"" summary_name1="""" d2p1:summary_name2="""" d2p1:summary_name3=""summary_value"" d1p2:summary_name4="""" xmlns:d2p1=""summary_namespace"">summary_title</summary>
<published>0001-01-01T00:00:00Z</published>
<updated>0001-01-01T00:00:00Z</updated>
<author />
<author></author>
<author author_name1="""" d2p1:author_name2="""" d2p1:author_name3=""author_value"" d1p2:author_name4="""" xmlns:d2p1=""author_namespace"">
<name>author_name</name>
<uri>author_uri</uri>
<email>author_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor></contributor>
<contributor contributor_name1="""" d2p1:contributor_name2="""" d2p1:contributor_name3=""contributor_value"" d1p2:contributor_name4="""" xmlns:d2p1=""contributor_namespace"">
<name>contributor_name</name>
<uri>contributor_uri</uri>
<email>contributor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<link />
<link></link>
<link href="""" />
<link xml:base=""http://link_url.com/"" link_name1="""" d2p1:link_name2="""" d2p1:link_name3=""link_value"" d1p2:link_name4="""" rel=""link_relationshipType"" type=""link_mediaType"" title=""link_title"" length=""10"" href=""http://link_uri.com/"" xmlns:d2p1=""link_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<category />
<category></category>
<category term="""" />
<category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<content type=""html"" content_name1="""" d2p1:content_name2="""" d2p1:content_name3=""content_value"" d1p2:content_name4="""" xmlns:d2p1=""content_namespace"">content_title</content>
<rights type=""html"" copyright_name1="""" d2p1:copyright_name2="""" d2p1:copyright_name3=""copyright_value"" d1p2:copyright_name4="""" xmlns:d2p1=""copyright_namespace"">copyright_title</rights>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</entry>
</feed>
", preserveElementExtensions, preserveElementExtensions, feed =>
{
if (preserveAttributeExtensions)
{
Assert.Equal(4, feed.AttributeExtensions.Count);
Assert.Equal("", feed.AttributeExtensions[new XmlQualifiedName("feed_name1")]);
Assert.Equal("", feed.AttributeExtensions[new XmlQualifiedName("feed_name2", "feed_namespace")]);
Assert.Equal("feed_value", feed.AttributeExtensions[new XmlQualifiedName("feed_name3", "feed_namespace")]);
Assert.Equal("", feed.AttributeExtensions[new XmlQualifiedName("feed_name4", "xmlns")]);
}
else
{
Assert.Empty(feed.AttributeExtensions);
}
Assert.Equal(3, feed.Authors.Count);
SyndicationPerson firstAuthor = feed.Authors[0];
Assert.Empty(firstAuthor.AttributeExtensions);
Assert.Empty(firstAuthor.ElementExtensions);
Assert.Null(firstAuthor.Email);
Assert.Null(firstAuthor.Name);
Assert.Null(firstAuthor.Uri);
SyndicationPerson secondAuthor = feed.Authors[1];
Assert.Empty(secondAuthor.AttributeExtensions);
Assert.Empty(secondAuthor.ElementExtensions);
Assert.Null(secondAuthor.Email);
Assert.Null(secondAuthor.Name);
Assert.Null(secondAuthor.Uri);
SyndicationPerson thirdAuthor = feed.Authors[2];
if (preserveAttributeExtensions)
{
Assert.Equal(4, thirdAuthor.AttributeExtensions.Count);
Assert.Equal("", thirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name1")]);
Assert.Equal("", thirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name2", "author_namespace")]);
Assert.Equal("author_value", thirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name3", "author_namespace")]);
Assert.Equal("", thirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name4", "xmlns")]);
}
else
{
Assert.Empty(thirdAuthor.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, thirdAuthor.ElementExtensions.Count);
Assert.Equal(10, thirdAuthor.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(thirdAuthor.ElementExtensions);
}
Assert.Equal("author_email", thirdAuthor.Email);
Assert.Equal("author_name", thirdAuthor.Name);
Assert.Equal("author_uri", thirdAuthor.Uri);
Assert.Equal(new Uri("http://microsoft.com"), feed.BaseUri);
Assert.Equal(4, feed.Categories.Count);
SyndicationCategory firstCategory = feed.Categories[0];
Assert.Empty(firstCategory.AttributeExtensions);
Assert.Empty(firstCategory.ElementExtensions);
Assert.Null(firstCategory.Name);
Assert.Null(firstCategory.Scheme);
Assert.Null(firstCategory.Label);
SyndicationCategory secondCategory = feed.Categories[1];
Assert.Empty(secondCategory.AttributeExtensions);
Assert.Empty(secondCategory.ElementExtensions);
Assert.Null(secondCategory.Name);
Assert.Null(secondCategory.Scheme);
Assert.Null(secondCategory.Label);
SyndicationCategory thirdCategory = feed.Categories[2];
Assert.Empty(thirdCategory.AttributeExtensions);
Assert.Empty(thirdCategory.ElementExtensions);
Assert.Empty(thirdCategory.Name);
Assert.Null(thirdCategory.Scheme);
Assert.Null(thirdCategory.Label);
SyndicationCategory fourthCategory = feed.Categories[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, fourthCategory.AttributeExtensions.Count);
Assert.Equal("", fourthCategory.AttributeExtensions[new XmlQualifiedName("category_name1")]);
Assert.Equal("", fourthCategory.AttributeExtensions[new XmlQualifiedName("category_name2", "category_namespace")]);
Assert.Equal("category_value", fourthCategory.AttributeExtensions[new XmlQualifiedName("category_name3", "category_namespace")]);
Assert.Equal("", fourthCategory.AttributeExtensions[new XmlQualifiedName("category_name4", "xmlns")]);
}
else
{
Assert.Empty(fourthCategory.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, fourthCategory.ElementExtensions.Count);
Assert.Equal(10, fourthCategory.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(fourthCategory.ElementExtensions);
}
Assert.Equal("category_name", fourthCategory.Name);
Assert.Equal("category_scheme", fourthCategory.Scheme);
Assert.Equal("category_label", fourthCategory.Label);
Assert.Equal(3, feed.Contributors.Count);
SyndicationPerson firstContributor = feed.Contributors[0];
Assert.Empty(firstContributor.AttributeExtensions);
Assert.Empty(firstContributor.ElementExtensions);
Assert.Null(firstContributor.Email);
Assert.Null(firstContributor.Name);
Assert.Null(firstContributor.Uri);
SyndicationPerson secondContributor = feed.Contributors[1];
Assert.Empty(secondContributor.AttributeExtensions);
Assert.Empty(secondContributor.ElementExtensions);
Assert.Null(secondContributor.Email);
Assert.Null(secondContributor.Name);
Assert.Null(secondContributor.Uri);
SyndicationPerson thirdContributor = feed.Contributors[2];
if (preserveAttributeExtensions)
{
Assert.Equal(4, thirdContributor.AttributeExtensions.Count);
Assert.Equal("", thirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name1")]);
Assert.Equal("", thirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name2", "contributor_namespace")]);
Assert.Equal("contributor_value", thirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name3", "contributor_namespace")]);
Assert.Equal("", thirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name4", "xmlns")]);
}
else
{
Assert.Empty(thirdContributor.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, thirdContributor.ElementExtensions.Count);
Assert.Equal(10, thirdContributor.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(thirdContributor.ElementExtensions);
}
Assert.Equal("contributor_email", thirdContributor.Email);
Assert.Equal("contributor_name", thirdContributor.Name);
Assert.Equal("contributor_uri", thirdContributor.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, feed.Copyright.AttributeExtensions.Count);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name1")]);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name2", "feedcopyright_namespace")]);
Assert.Equal("feedcopyright_value", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name3", "feedcopyright_namespace")]);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name4", "xmlns")]);
}
else
{
Assert.Empty(feed.Copyright.AttributeExtensions);
}
Assert.Equal("feedcopyright_title", feed.Copyright.Text);
Assert.Equal("html", feed.Copyright.Type);
Assert.Equal("generator", feed.Generator);
if (preserveElementExtensions)
{
Assert.Equal(1, feed.ElementExtensions.Count);
Assert.Equal(10, feed.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(feed.ElementExtensions);
}
Assert.Equal("id", feed.Id);
Assert.Equal(new Uri("http://imageurl.com/"), feed.ImageUrl);
SyndicationItem[] items = feed.Items.ToArray();
Assert.Equal(2, items.Length);
SyndicationItem item = items[1];
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.AttributeExtensions.Count);
Assert.Equal("", item.AttributeExtensions[new XmlQualifiedName("item_name1")]);
Assert.Equal("", item.AttributeExtensions[new XmlQualifiedName("item_name2", "item_namespace")]);
Assert.Equal("item_value", item.AttributeExtensions[new XmlQualifiedName("item_name3", "item_namespace")]);
Assert.Equal("", item.AttributeExtensions[new XmlQualifiedName("item_name4", "xmlns")]);
}
else
{
Assert.Empty(item.AttributeExtensions);
}
Assert.Equal(3, item.Authors.Count);
SyndicationPerson itemFirstAuthor = item.Authors[0];
Assert.Empty(itemFirstAuthor.AttributeExtensions);
Assert.Empty(itemFirstAuthor.ElementExtensions);
Assert.Null(itemFirstAuthor.Email);
Assert.Null(itemFirstAuthor.Name);
Assert.Null(itemFirstAuthor.Uri);
SyndicationPerson itemSecondAuthor = item.Authors[1];
Assert.Empty(itemSecondAuthor.AttributeExtensions);
Assert.Empty(itemSecondAuthor.ElementExtensions);
Assert.Null(itemSecondAuthor.Email);
Assert.Null(itemSecondAuthor.Name);
Assert.Null(itemSecondAuthor.Uri);
SyndicationPerson itemThirdAuthor = item.Authors[2];
if (preserveAttributeExtensions)
{
Assert.Equal(4, itemThirdAuthor.AttributeExtensions.Count);
Assert.Equal("", itemThirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name1")]);
Assert.Equal("", itemThirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name2", "author_namespace")]);
Assert.Equal("author_value", itemThirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name3", "author_namespace")]);
Assert.Equal("", itemThirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name4", "xmlns")]);
}
else
{
Assert.Empty(itemThirdAuthor.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, itemThirdAuthor.ElementExtensions.Count);
Assert.Equal(10, itemThirdAuthor.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(itemThirdAuthor.ElementExtensions);
}
Assert.Equal("author_email", itemThirdAuthor.Email);
Assert.Equal("author_name", itemThirdAuthor.Name);
Assert.Equal("author_uri", itemThirdAuthor.Uri);
Assert.Equal(new Uri("http://microsoft.com/relative"), item.BaseUri);
Assert.Equal(4, item.Categories.Count);
SyndicationCategory itemFirstCategory = item.Categories[0];
Assert.Empty(itemFirstCategory.AttributeExtensions);
Assert.Empty(itemFirstCategory.ElementExtensions);
Assert.Null(itemFirstCategory.Name);
Assert.Null(itemFirstCategory.Scheme);
Assert.Null(itemFirstCategory.Label);
SyndicationCategory itemSecondCategory = item.Categories[1];
Assert.Empty(itemSecondCategory.AttributeExtensions);
Assert.Empty(itemSecondCategory.ElementExtensions);
Assert.Null(itemSecondCategory.Name);
Assert.Null(itemSecondCategory.Scheme);
Assert.Null(itemSecondCategory.Label);
SyndicationCategory itemThirdCategory = item.Categories[2];
Assert.Empty(itemThirdCategory.AttributeExtensions);
Assert.Empty(itemThirdCategory.ElementExtensions);
Assert.Empty(itemThirdCategory.Name);
Assert.Null(itemThirdCategory.Scheme);
Assert.Null(itemThirdCategory.Label);
SyndicationCategory itemFourthCategory = item.Categories[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, itemFourthCategory.AttributeExtensions.Count);
Assert.Equal("", itemFourthCategory.AttributeExtensions[new XmlQualifiedName("category_name1")]);
Assert.Equal("", itemFourthCategory.AttributeExtensions[new XmlQualifiedName("category_name2", "category_namespace")]);
Assert.Equal("category_value", itemFourthCategory.AttributeExtensions[new XmlQualifiedName("category_name3", "category_namespace")]);
Assert.Equal("", itemFourthCategory.AttributeExtensions[new XmlQualifiedName("category_name4", "xmlns")]);
}
else
{
Assert.Empty(itemFourthCategory.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, itemFourthCategory.ElementExtensions.Count);
Assert.Equal(10, itemFourthCategory.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(itemFourthCategory.ElementExtensions);
}
Assert.Equal("category_name", itemFourthCategory.Name);
Assert.Equal("category_scheme", itemFourthCategory.Scheme);
Assert.Equal("category_label", itemFourthCategory.Label);
TextSyndicationContent content = Assert.IsType<TextSyndicationContent>(item.Content);
if (preserveAttributeExtensions)
{
Assert.Equal(4, content.AttributeExtensions.Count);
Assert.Equal("", content.AttributeExtensions[new XmlQualifiedName("content_name1")]);
Assert.Equal("", content.AttributeExtensions[new XmlQualifiedName("content_name2", "content_namespace")]);
Assert.Equal("content_value", content.AttributeExtensions[new XmlQualifiedName("content_name3", "content_namespace")]);
Assert.Equal("", content.AttributeExtensions[new XmlQualifiedName("content_name4", "xmlns")]);
}
else
{
Assert.Empty(content.AttributeExtensions);
}
Assert.Equal("content_title", content.Text);
Assert.Equal("html", content.Type);
Assert.Equal(3, item.Contributors.Count);
SyndicationPerson itemFirstContributor = item.Contributors[0];
Assert.Empty(itemFirstContributor.AttributeExtensions);
Assert.Empty(itemFirstContributor.ElementExtensions);
Assert.Null(itemFirstContributor.Email);
Assert.Null(itemFirstContributor.Name);
Assert.Null(itemFirstContributor.Uri);
SyndicationPerson itemSecondContributor = item.Contributors[1];
Assert.Empty(itemSecondContributor.AttributeExtensions);
Assert.Empty(itemSecondContributor.ElementExtensions);
Assert.Null(itemSecondContributor.Email);
Assert.Null(itemSecondContributor.Name);
Assert.Null(itemSecondContributor.Uri);
SyndicationPerson itemThirdContributor = item.Contributors[2];
if (preserveAttributeExtensions)
{
Assert.Equal(4, itemThirdContributor.AttributeExtensions.Count);
Assert.Equal("", itemThirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name1")]);
Assert.Equal("", itemThirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name2", "contributor_namespace")]);
Assert.Equal("contributor_value", itemThirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name3", "contributor_namespace")]);
Assert.Equal("", itemThirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name4", "xmlns")]);
}
else
{
Assert.Empty(itemThirdContributor.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, itemThirdContributor.ElementExtensions.Count);
Assert.Equal(10, itemThirdContributor.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(itemThirdContributor.ElementExtensions);
}
Assert.Equal("contributor_email", itemThirdContributor.Email);
Assert.Equal("contributor_name", itemThirdContributor.Name);
Assert.Equal("contributor_uri", itemThirdContributor.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Copyright.AttributeExtensions.Count);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name1")]);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name2", "copyright_namespace")]);
Assert.Equal("copyright_value", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name3", "copyright_namespace")]);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Copyright.AttributeExtensions);
}
Assert.Equal("copyright_title", item.Copyright.Text);
Assert.Equal("html", item.Copyright.Type);
if (preserveElementExtensions)
{
Assert.Equal(1, item.ElementExtensions.Count);
Assert.Equal(10, item.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(item.ElementExtensions);
}
Assert.Equal("id", item.Id);
Assert.Equal(DateTimeOffset.MinValue, item.LastUpdatedTime);
Assert.Equal(4, item.Links.Count);
SyndicationLink itemFirstLink = item.Links[0];
Assert.Empty(itemFirstLink.AttributeExtensions);
Assert.Empty(itemFirstLink.ElementExtensions);
Assert.Equal(0, itemFirstLink.Length);
Assert.Null(itemFirstLink.MediaType);
Assert.Null(itemFirstLink.RelationshipType);
Assert.Null(itemFirstLink.Title);
Assert.Null(itemFirstLink.Uri);
SyndicationLink itemSecondLink = item.Links[1];
Assert.Empty(itemSecondLink.AttributeExtensions);
Assert.Empty(itemSecondLink.ElementExtensions);
Assert.Equal(0, itemSecondLink.Length);
Assert.Null(itemSecondLink.MediaType);
Assert.Null(itemSecondLink.RelationshipType);
Assert.Null(itemSecondLink.Title);
Assert.Null(itemSecondLink.Uri);
SyndicationLink itemThirdLink = item.Links[2];
Assert.Empty(itemThirdLink.AttributeExtensions);
Assert.Empty(itemThirdLink.ElementExtensions);
Assert.Equal(0, itemThirdLink.Length);
Assert.Null(itemThirdLink.MediaType);
Assert.Null(itemThirdLink.RelationshipType);
Assert.Null(itemThirdLink.Title);
Assert.Empty(itemThirdLink.Uri.OriginalString);
SyndicationLink itemFourthLink = item.Links[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, itemFourthLink.AttributeExtensions.Count);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name1")]);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name2", "link_namespace")]);
Assert.Equal("link_value", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name3", "link_namespace")]);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name4", "xmlns")]);
}
else
{
Assert.Empty(itemFourthLink.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, itemFourthLink.ElementExtensions.Count);
Assert.Equal(10, itemFourthLink.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(itemFourthLink.ElementExtensions);
}
Assert.Equal(new Uri("http://link_url.com"), itemFourthLink.BaseUri);
Assert.Equal(10, itemFourthLink.Length);
Assert.Equal("link_mediaType", itemFourthLink.MediaType);
Assert.Equal("link_relationshipType", itemFourthLink.RelationshipType);
Assert.Equal("link_title", itemFourthLink.Title);
Assert.Equal(new Uri("http://link_uri.com"), itemFourthLink.Uri);
Assert.Equal(DateTimeOffset.MinValue, item.PublishDate);
Assert.Null(item.SourceFeed);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Summary.AttributeExtensions.Count);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name1")]);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name2", "summary_namespace")]);
Assert.Equal("summary_value", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name3", "summary_namespace")]);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Summary.AttributeExtensions);
}
Assert.Equal("summary_title", item.Summary.Text);
Assert.Equal("html", item.Summary.Type);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Title.AttributeExtensions.Count);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name1")]);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name2", "title_namespace")]);
Assert.Equal("title_value", item.Title.AttributeExtensions[new XmlQualifiedName("title_name3", "title_namespace")]);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Title.AttributeExtensions);
}
Assert.Equal("title_title", item.Title.Text);
Assert.Equal("html", item.Title.Type);
Assert.Equal("language", feed.Language);
Assert.Equal(DateTimeOffset.MinValue.AddYears(1), feed.LastUpdatedTime);
Assert.Equal(4, feed.Links.Count);
SyndicationLink firstLink = feed.Links[0];
Assert.Empty(firstLink.AttributeExtensions);
Assert.Empty(firstLink.ElementExtensions);
Assert.Equal(0, firstLink.Length);
Assert.Null(firstLink.MediaType);
Assert.Null(firstLink.RelationshipType);
Assert.Null(firstLink.Title);
Assert.Null(firstLink.Uri);
SyndicationLink secondLink = feed.Links[1];
Assert.Empty(secondLink.AttributeExtensions);
Assert.Empty(secondLink.ElementExtensions);
Assert.Equal(0, secondLink.Length);
Assert.Null(secondLink.MediaType);
Assert.Null(secondLink.RelationshipType);
Assert.Null(secondLink.Title);
Assert.Null(secondLink.Uri);
SyndicationLink thirdLink = feed.Links[2];
Assert.Empty(thirdLink.AttributeExtensions);
Assert.Empty(thirdLink.ElementExtensions);
Assert.Equal(0, thirdLink.Length);
Assert.Null(thirdLink.MediaType);
Assert.Null(thirdLink.RelationshipType);
Assert.Null(thirdLink.Title);
Assert.Empty(thirdLink.Uri.OriginalString);
SyndicationLink fourthLink = feed.Links[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, fourthLink.AttributeExtensions.Count);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name1")]);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name2", "link_namespace")]);
Assert.Equal("link_value", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name3", "link_namespace")]);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name4", "xmlns")]);
}
else
{
Assert.Empty(fourthLink.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, fourthLink.ElementExtensions.Count);
Assert.Equal(10, fourthLink.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(fourthLink.ElementExtensions);
}
Assert.Equal(new Uri("http://link_url.com"), fourthLink.BaseUri);
Assert.Equal(10, fourthLink.Length);
Assert.Equal("link_mediaType", fourthLink.MediaType);
Assert.Equal("link_relationshipType", fourthLink.RelationshipType);
Assert.Equal("link_title", fourthLink.Title);
Assert.Equal(new Uri("http://link_uri.com"), fourthLink.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, feed.Title.AttributeExtensions.Count);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name1")]);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name2", "feedtitle_namespace")]);
Assert.Equal("feedtitle_value", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name3", "feedtitle_namespace")]);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name4", "xmlns")]);
}
else
{
Assert.Empty(feed.Title.AttributeExtensions);
}
Assert.Equal("feedtitle_title", feed.Title.Text);
Assert.Equal("html", feed.Title.Type);
});
}
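// When the feed subclass and every child object it creates claim to handle unknown attributes and elements
// (TryParse* returning true), the formatter should not record any attribute or element extensions,
// regardless of the Preserve* settings.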
[Theory]
[InlineData(true, true)]
[InlineData(false, false)]
public void Read_TryParseTrue_ReturnsExpected(bool preserveAttributeExtensions, bool preserveElementExtensions)
{
using (var stringReader = new StringReader(
@"<feed xml:lang=""language"" xml:base=""http://microsoft.com/"" feed_name1="""" d1p1:feed_name2="""" d1p1:feed_name3=""feed_value"" d1p2:feed_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""feed_namespace"" xmlns=""http://www.w3.org/2005/Atom"">
<title type=""html"" feedtitle_name1="""" d2p1:feedtitle_name2="""" d2p1:feedtitle_name3=""feedtitle_value"" d1p2:feedtitle_name4="""" xmlns:d2p1=""feedtitle_namespace"">feedtitle_title</title>
<subtitle type=""html"" feeddescription_name1="""" d2p1:feeddescription_name2="""" d2p1:feeddescription_name3=""feeddescription_value"" d1p2:feeddescription_name4="""" xmlns:d2p1=""feeddescription_namespace"">feeddescription_title</subtitle>
<id>id</id>
<rights type=""html"" feedcopyright_name1="""" d2p1:feedcopyright_name2="""" d2p1:feedcopyright_name3=""feedcopyright_value"" d1p2:feedcopyright_name4="""" xmlns:d2p1=""feedcopyright_namespace"">feedcopyright_title</rights>
<updated>0002-01-01T00:00:00Z</updated>
<category />
<category></category>
<category term="""" />
<category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<logo>http://imageurl.com/</logo>
<author />
<author></author>
<author author_name1="""" d2p1:author_name2="""" d2p1:author_name3=""author_value"" d1p2:author_name4="""" xmlns:d2p1=""author_namespace"">
<name>author_name</name>
<uri>author_uri</uri>
<email>author_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor></contributor>
<contributor contributor_name1="""" d2p1:contributor_name2="""" d2p1:contributor_name3=""contributor_value"" d1p2:contributor_name4="""" xmlns:d2p1=""contributor_namespace"">
<name>contributor_name</name>
<uri>contributor_uri</uri>
<email>contributor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<generator>generator</generator>
<link />
<link></link>
<link href="""" />
<link xml:base=""http://link_url.com/"" link_name1="""" d2p1:link_name2="""" d2p1:link_name3=""link_value"" d1p2:link_name4="""" rel=""link_relationshipType"" type=""link_mediaType"" title=""link_title"" length=""10"" href=""http://link_uri.com/"" xmlns:d2p1=""link_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
<entry>
<id>id</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>
<entry xml:base=""/relative"" item_name1="""" d1p1:item_name2="""" d1p1:item_name3=""item_value"" d1p2:item_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""item_namespace"" xmlns=""http://www.w3.org/2005/Atom"">
<id>id</id>
<title type=""html"" title_name1="""" d2p1:title_name2="""" d2p1:title_name3=""title_value"" d1p2:title_name4="""" xmlns:d2p1=""title_namespace"">title_title</title>
<summary type=""html"" summary_name1="""" d2p1:summary_name2="""" d2p1:summary_name3=""summary_value"" d1p2:summary_name4="""" xmlns:d2p1=""summary_namespace"">summary_title</summary>
<published>0001-01-01T00:00:00Z</published>
<updated>0001-01-01T00:00:00Z</updated>
<author />
<author></author>
<author author_name1="""" d2p1:author_name2="""" d2p1:author_name3=""author_value"" d1p2:author_name4="""" xmlns:d2p1=""author_namespace"">
<name>author_name</name>
<uri>author_uri</uri>
<email>author_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor></contributor>
<contributor contributor_name1="""" d2p1:contributor_name2="""" d2p1:contributor_name3=""contributor_value"" d1p2:contributor_name4="""" xmlns:d2p1=""contributor_namespace"">
<name>contributor_name</name>
<uri>contributor_uri</uri>
<email>contributor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<link />
<link></link>
<link href="""" />
<link xml:base=""http://link_url.com/"" link_name1="""" d2p1:link_name2="""" d2p1:link_name3=""link_value"" d1p2:link_name4="""" rel=""link_relationshipType"" type=""link_mediaType"" title=""link_title"" length=""10"" href=""http://link_uri.com/"" xmlns:d2p1=""link_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<category />
<category></category>
<category term="""" />
<category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<content type=""html"" content_name1="""" d2p1:content_name2="""" d2p1:content_name3=""content_value"" d1p2:content_name4="""" xmlns:d2p1=""content_namespace"">content_title</content>
<rights type=""html"" copyright_name1="""" d2p1:copyright_name2="""" d2p1:copyright_name3=""copyright_value"" d1p2:copyright_name4="""" xmlns:d2p1=""copyright_namespace"">copyright_title</rights>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</entry>
</feed>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter<SyndicationFeedTryParseTrueSubclass>()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
SyndicationFeed feed = formatter.Feed;
Assert.Empty(feed.AttributeExtensions);
Assert.Equal(3, feed.Authors.Count);
SyndicationPerson firstAuthor = feed.Authors[0];
Assert.Empty(firstAuthor.AttributeExtensions);
Assert.Empty(firstAuthor.ElementExtensions);
Assert.Null(firstAuthor.Email);
Assert.Null(firstAuthor.Name);
Assert.Null(firstAuthor.Uri);
SyndicationPerson secondAuthor = feed.Authors[1];
Assert.Empty(secondAuthor.AttributeExtensions);
Assert.Empty(secondAuthor.ElementExtensions);
Assert.Null(secondAuthor.Email);
Assert.Null(secondAuthor.Name);
Assert.Null(secondAuthor.Uri);
SyndicationPerson thirdAuthor = feed.Authors[2];
Assert.Empty(thirdAuthor.AttributeExtensions);
Assert.Empty(thirdAuthor.ElementExtensions);
Assert.Equal("author_email", thirdAuthor.Email);
Assert.Equal("author_name", thirdAuthor.Name);
Assert.Equal("author_uri", thirdAuthor.Uri);
Assert.Equal(new Uri("http://microsoft.com"), feed.BaseUri);
Assert.Equal(4, feed.Categories.Count);
SyndicationCategory firstCategory = feed.Categories[0];
Assert.Empty(firstCategory.AttributeExtensions);
Assert.Empty(firstCategory.ElementExtensions);
Assert.Null(firstCategory.Name);
Assert.Null(firstCategory.Scheme);
Assert.Null(firstCategory.Label);
SyndicationCategory secondCategory = feed.Categories[1];
Assert.Empty(secondCategory.AttributeExtensions);
Assert.Empty(secondCategory.ElementExtensions);
Assert.Null(secondCategory.Name);
Assert.Null(secondCategory.Scheme);
Assert.Null(secondCategory.Label);
SyndicationCategory thirdCategory = feed.Categories[2];
Assert.Empty(thirdCategory.AttributeExtensions);
Assert.Empty(thirdCategory.ElementExtensions);
Assert.Empty(thirdCategory.Name);
Assert.Null(thirdCategory.Scheme);
Assert.Null(thirdCategory.Label);
SyndicationCategory fourthCategory = feed.Categories[3];
Assert.Empty(fourthCategory.AttributeExtensions);
Assert.Empty(fourthCategory.ElementExtensions);
Assert.Equal("category_name", fourthCategory.Name);
Assert.Equal("category_scheme", fourthCategory.Scheme);
Assert.Equal("category_label", fourthCategory.Label);
Assert.Equal(3, feed.Contributors.Count);
SyndicationPerson firstContributor = feed.Contributors[0];
Assert.Empty(firstContributor.AttributeExtensions);
Assert.Empty(firstContributor.ElementExtensions);
Assert.Null(firstContributor.Email);
Assert.Null(firstContributor.Name);
Assert.Null(firstContributor.Uri);
SyndicationPerson secondContributor = feed.Contributors[1];
Assert.Empty(secondContributor.AttributeExtensions);
Assert.Empty(secondContributor.ElementExtensions);
Assert.Null(secondContributor.Email);
Assert.Null(secondContributor.Name);
Assert.Null(secondContributor.Uri);
SyndicationPerson thirdContributor = feed.Contributors[2];
Assert.Empty(thirdContributor.AttributeExtensions);
Assert.Empty(thirdContributor.ElementExtensions);
Assert.Equal("contributor_email", thirdContributor.Email);
Assert.Equal("contributor_name", thirdContributor.Name);
Assert.Equal("contributor_uri", thirdContributor.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, feed.Copyright.AttributeExtensions.Count);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name1")]);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name2", "feedcopyright_namespace")]);
Assert.Equal("feedcopyright_value", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name3", "feedcopyright_namespace")]);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name4", "xmlns")]);
}
else
{
Assert.Empty(feed.Copyright.AttributeExtensions);
}
Assert.Equal("feedcopyright_title", feed.Copyright.Text);
Assert.Equal("html", feed.Copyright.Type);
Assert.Equal("generator", feed.Generator);
Assert.Empty(feed.ElementExtensions);
Assert.Equal("id", feed.Id);
Assert.Equal(new Uri("http://imageurl.com/"), feed.ImageUrl);
SyndicationItem[] items = feed.Items.ToArray();
Assert.Equal(2, items.Length);
SyndicationItem item = items[1];
Assert.Empty(item.AttributeExtensions);
Assert.Equal(3, item.Authors.Count);
SyndicationPerson itemFirstAuthor = item.Authors[0];
Assert.Empty(itemFirstAuthor.AttributeExtensions);
Assert.Empty(itemFirstAuthor.ElementExtensions);
Assert.Null(itemFirstAuthor.Email);
Assert.Null(itemFirstAuthor.Name);
Assert.Null(itemFirstAuthor.Uri);
SyndicationPerson itemSecondAuthor = item.Authors[1];
Assert.Empty(itemSecondAuthor.AttributeExtensions);
Assert.Empty(itemSecondAuthor.ElementExtensions);
Assert.Null(itemSecondAuthor.Email);
Assert.Null(itemSecondAuthor.Name);
Assert.Null(itemSecondAuthor.Uri);
SyndicationPerson itemThirdAuthor = item.Authors[2];
Assert.Empty(itemThirdAuthor.AttributeExtensions);
Assert.Empty(itemThirdAuthor.ElementExtensions);
Assert.Equal("author_email", itemThirdAuthor.Email);
Assert.Equal("author_name", itemThirdAuthor.Name);
Assert.Equal("author_uri", itemThirdAuthor.Uri);
Assert.Equal(new Uri("http://microsoft.com/relative"), item.BaseUri);
Assert.Equal(4, item.Categories.Count);
SyndicationCategory itemFirstCategory = item.Categories[0];
Assert.Empty(itemFirstCategory.AttributeExtensions);
Assert.Empty(itemFirstCategory.ElementExtensions);
Assert.Null(itemFirstCategory.Name);
Assert.Null(itemFirstCategory.Scheme);
Assert.Null(itemFirstCategory.Label);
SyndicationCategory itemSecondCategory = item.Categories[1];
Assert.Empty(itemSecondCategory.AttributeExtensions);
Assert.Empty(itemSecondCategory.ElementExtensions);
Assert.Null(itemSecondCategory.Name);
Assert.Null(itemSecondCategory.Scheme);
Assert.Null(itemSecondCategory.Label);
SyndicationCategory itemThirdCategory = item.Categories[2];
Assert.Empty(itemThirdCategory.AttributeExtensions);
Assert.Empty(itemThirdCategory.ElementExtensions);
Assert.Empty(itemThirdCategory.Name);
Assert.Null(itemThirdCategory.Scheme);
Assert.Null(itemThirdCategory.Label);
SyndicationCategory itemFourthCategory = item.Categories[3];
Assert.Empty(itemFourthCategory.AttributeExtensions);
Assert.Empty(itemFourthCategory.ElementExtensions);
Assert.Equal("category_name", itemFourthCategory.Name);
Assert.Equal("category_scheme", itemFourthCategory.Scheme);
Assert.Equal("category_label", itemFourthCategory.Label);
TextSyndicationContent content = Assert.IsType<TextSyndicationContent>(item.Content);
Assert.Empty(content.AttributeExtensions);
Assert.Equal("overriden", content.Text);
Assert.Equal("text", content.Type);
Assert.Equal(3, item.Contributors.Count);
SyndicationPerson itemFirstContributor = item.Contributors[0];
Assert.Empty(itemFirstContributor.AttributeExtensions);
Assert.Empty(itemFirstContributor.ElementExtensions);
Assert.Null(itemFirstContributor.Email);
Assert.Null(itemFirstContributor.Name);
Assert.Null(itemFirstContributor.Uri);
SyndicationPerson itemSecondContributor = item.Contributors[1];
Assert.Empty(itemSecondContributor.AttributeExtensions);
Assert.Empty(itemSecondContributor.ElementExtensions);
Assert.Null(itemSecondContributor.Email);
Assert.Null(itemSecondContributor.Name);
Assert.Null(itemSecondContributor.Uri);
SyndicationPerson itemThirdContributor = item.Contributors[2];
Assert.Empty(itemThirdContributor.AttributeExtensions);
Assert.Empty(itemThirdContributor.ElementExtensions);
Assert.Equal("contributor_email", itemThirdContributor.Email);
Assert.Equal("contributor_name", itemThirdContributor.Name);
Assert.Equal("contributor_uri", thirdContributor.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Copyright.AttributeExtensions.Count);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name1")]);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name2", "copyright_namespace")]);
Assert.Equal("copyright_value", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name3", "copyright_namespace")]);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Copyright.AttributeExtensions);
}
Assert.Equal("copyright_title", item.Copyright.Text);
Assert.Equal("html", item.Copyright.Type);
Assert.Empty(item.ElementExtensions);
Assert.Equal("id", item.Id);
Assert.Equal(DateTimeOffset.MinValue, item.LastUpdatedTime);
Assert.Equal(4, item.Links.Count);
SyndicationLink itemFirstLink = item.Links[0];
Assert.Empty(itemFirstLink.AttributeExtensions);
Assert.Empty(itemFirstLink.ElementExtensions);
Assert.Equal(0, itemFirstLink.Length);
Assert.Null(itemFirstLink.MediaType);
Assert.Null(itemFirstLink.RelationshipType);
Assert.Null(itemFirstLink.Title);
Assert.Null(itemFirstLink.Uri);
SyndicationLink itemSecondLink = item.Links[1];
Assert.Empty(itemSecondLink.AttributeExtensions);
Assert.Empty(itemSecondLink.ElementExtensions);
Assert.Equal(0, itemSecondLink.Length);
Assert.Null(itemSecondLink.MediaType);
Assert.Null(itemSecondLink.RelationshipType);
Assert.Null(itemSecondLink.Title);
Assert.Null(itemSecondLink.Uri);
SyndicationLink itemThirdLink = item.Links[2];
Assert.Empty(itemThirdLink.AttributeExtensions);
Assert.Empty(itemThirdLink.ElementExtensions);
Assert.Equal(0, itemThirdLink.Length);
Assert.Null(itemThirdLink.MediaType);
Assert.Null(itemThirdLink.RelationshipType);
Assert.Null(itemThirdLink.Title);
Assert.Empty(itemThirdLink.Uri.OriginalString);
SyndicationLink itemFourthLink = item.Links[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, itemFourthLink.AttributeExtensions.Count);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name1")]);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name2", "link_namespace")]);
Assert.Equal("link_value", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name3", "link_namespace")]);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name4", "xmlns")]);
}
else
{
Assert.Empty(itemFourthLink.AttributeExtensions);
}
Assert.Empty(itemFourthLink.ElementExtensions);
Assert.Equal(new Uri("http://link_url.com"), itemFourthLink.BaseUri);
Assert.Equal(10, itemFourthLink.Length);
Assert.Equal("link_mediaType", itemFourthLink.MediaType);
Assert.Equal("link_relationshipType", itemFourthLink.RelationshipType);
Assert.Equal("link_title", itemFourthLink.Title);
Assert.Equal(new Uri("http://link_uri.com"), itemFourthLink.Uri);
Assert.Equal(DateTimeOffset.MinValue, item.PublishDate);
Assert.Null(item.SourceFeed);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Summary.AttributeExtensions.Count);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name1")]);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name2", "summary_namespace")]);
Assert.Equal("summary_value", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name3", "summary_namespace")]);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Summary.AttributeExtensions);
}
Assert.Equal("summary_title", item.Summary.Text);
Assert.Equal("html", item.Summary.Type);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Title.AttributeExtensions.Count);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name1")]);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name2", "title_namespace")]);
Assert.Equal("title_value", item.Title.AttributeExtensions[new XmlQualifiedName("title_name3", "title_namespace")]);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Title.AttributeExtensions);
}
Assert.Equal("title_title", item.Title.Text);
Assert.Equal("html", item.Title.Type);
Assert.Equal("language", feed.Language);
Assert.Equal(DateTimeOffset.MinValue.AddYears(1), feed.LastUpdatedTime);
Assert.Equal(4, feed.Links.Count);
SyndicationLink firstLink = feed.Links[0];
Assert.Empty(firstLink.AttributeExtensions);
Assert.Empty(firstLink.ElementExtensions);
Assert.Equal(0, firstLink.Length);
Assert.Null(firstLink.MediaType);
Assert.Null(firstLink.RelationshipType);
Assert.Null(firstLink.Title);
Assert.Null(firstLink.Uri);
SyndicationLink secondLink = feed.Links[1];
Assert.Empty(secondLink.AttributeExtensions);
Assert.Empty(secondLink.ElementExtensions);
Assert.Equal(0, secondLink.Length);
Assert.Null(secondLink.MediaType);
Assert.Null(secondLink.RelationshipType);
Assert.Null(secondLink.Title);
Assert.Null(secondLink.Uri);
SyndicationLink thirdLink = feed.Links[2];
Assert.Empty(thirdLink.AttributeExtensions);
Assert.Empty(thirdLink.ElementExtensions);
Assert.Equal(0, thirdLink.Length);
Assert.Null(thirdLink.MediaType);
Assert.Null(thirdLink.RelationshipType);
Assert.Null(thirdLink.Title);
Assert.Empty(thirdLink.Uri.OriginalString);
SyndicationLink fourthLink = feed.Links[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, fourthLink.AttributeExtensions.Count);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name1")]);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name2", "link_namespace")]);
Assert.Equal("link_value", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name3", "link_namespace")]);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name4", "xmlns")]);
}
else
{
Assert.Empty(fourthLink.AttributeExtensions);
}
Assert.Empty(fourthLink.ElementExtensions);
Assert.Equal(new Uri("http://link_url.com"), fourthLink.BaseUri);
Assert.Equal(10, fourthLink.Length);
Assert.Equal("link_mediaType", fourthLink.MediaType);
Assert.Equal("link_relationshipType", fourthLink.RelationshipType);
Assert.Equal("link_title", fourthLink.Title);
Assert.Equal(new Uri("http://link_uri.com"), fourthLink.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, feed.Title.AttributeExtensions.Count);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name1")]);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name2", "feedtitle_namespace")]);
Assert.Equal("feedtitle_value", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name3", "feedtitle_namespace")]);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name4", "xmlns")]);
}
else
{
Assert.Empty(feed.Title.AttributeExtensions);
}
Assert.Equal("feedtitle_title", feed.Title.Text);
Assert.Equal("html", feed.Title.Type);
}
}
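// An empty <feed> element in the Atom namespace deserializes to a SyndicationFeed whose optional members
// all keep their default values.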
[Theory]
[InlineData(true)]
[InlineData(false)]
public void Read_EmptyFeed_ReturnsExpected(bool preserveElementExtensions)
{
VerifyRead(@"<feed xmlns=""http://www.w3.org/2005/Atom""></feed>", preserveElementExtensions, preserveElementExtensions, feed =>
{
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
});
}
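// A derived formatter can take over item parsing via ReadItems; reporting areAllItemsRead = false causes the
// remaining <entry> elements in the input to be skipped rather than parsed, so only the synthetic item appears.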
[Theory]
[InlineData(true, true)]
[InlineData(false, false)]
public void Read_CustomReadItems_ReturnsExpected(bool preserveAttributeExtensions, bool preserveElementExtensions)
{
using (var stringReader = new StringReader(@"<feed xmlns=""http://www.w3.org/2005/Atom""><entry></entry><entry></entry></feed>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new CustomAtom10FeedFormatter()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
SyndicationFeed feed = formatter.Feed;
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Single(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
}
}
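// Overrides ReadItems to supply a single synthetic item and to report that not all items were read.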
private class CustomAtom10FeedFormatter : Atom10FeedFormatter
{
protected override IEnumerable<SyndicationItem> ReadItems(XmlReader reader, SyndicationFeed feed, out bool areAllItemsRead)
{
areAllItemsRead = false;
return new SyndicationItem[] { new SyndicationItem() };
}
}
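// Runs the same verification against every supported read path: ReadFrom, IXmlSerializable.ReadXml, the
// derived-type and generic formatter variants, and (only when both extension kinds are preserved) SyndicationFeed.Load.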
private static void VerifyRead(string xmlString, bool preserveAttributeExtensions, bool preserveElementExtensions, Action<SyndicationFeed> verifyAction)
{
// ReadFrom.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
verifyAction(formatter.Feed);
}
// ReadXml.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Atom10FeedFormatter()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
((IXmlSerializable)formatter).ReadXml(reader);
verifyAction(formatter.Feed);
}
// Derived ReadFrom.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter(typeof(SyndicationFeedSubclass))
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
verifyAction(formatter.Feed);
}
// Derived ReadXml.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Atom10FeedFormatter(typeof(SyndicationFeedSubclass))
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
((IXmlSerializable)formatter).ReadXml(reader);
verifyAction(formatter.Feed);
}
// Generic ReadFrom.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter<SyndicationFeed>()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
verifyAction(formatter.Feed);
}
// Generic ReadXml.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Atom10FeedFormatter<SyndicationFeed>()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
((IXmlSerializable)formatter).ReadXml(reader);
verifyAction(formatter.Feed);
}
// Generic Derived ReadFrom.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter<SyndicationFeedSubclass>()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
verifyAction(formatter.Feed);
}
// Generic Derived ReadXml.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Atom10FeedFormatter<SyndicationFeedSubclass>()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
((IXmlSerializable)formatter).ReadXml(reader);
verifyAction(formatter.Feed);
}
if (preserveAttributeExtensions && preserveElementExtensions)
{
// Load.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
SyndicationFeed feed = SyndicationFeed.Load(reader);
verifyAction(feed);
}
// Generic Load.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
SyndicationFeed feed = SyndicationFeed.Load<SyndicationFeed>(reader);
verifyAction(feed);
}
}
}
[Fact]
public void ReadFrom_NullReader_ThrowsArgumentNullException()
{
var formatter = new Atom10FeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadFrom(null));
}
[Fact]
public void ReadFrom_NullCreatedFeed_ThrowsArgumentNullException()
{
using (var stringReader = new StringReader(@"<feed xmlns=""http://www.w3.org/2005/Atom""></feed>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new NullCreatedFeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("feed", () => formatter.ReadFrom(reader));
}
}
[Theory]
[InlineData(@"<different xmlns=""http://www.w3.org/2005/Atom""></different>")]
[InlineData(@"<feed xmlns=""different""></entry>")]
[InlineData(@"<feed></feed>")]
[InlineData(@"<feed/>")]
[InlineData(@"<feed xmlns=""http://www.w3.org/2005/Atom"" />")]
public void ReadFrom_CantRead_ThrowsXmlException(string xmlString)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter();
Assert.Throws<XmlException>(() => formatter.ReadFrom(reader));
}
}
[Theory]
[InlineData("<feed></feed>")]
[InlineData(@"<app:feed xmlns:app=""http://www.w3.org/2005/Atom""></app:feed>")]
[InlineData(@"<feed xmlns=""different""></feed>")]
[InlineData(@"<different xmlns=""http://www.w3.org/2005/Atom""></different>")]
public void ReadXml_ValidReader_Success(string xmlString)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Atom10FeedFormatter();
((IXmlSerializable)formatter).ReadXml(reader);
SyndicationFeed feed = formatter.Feed;
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
}
}
[Fact]
public void ReadXml_NullReader_ThrowsArgumentNullException()
{
IXmlSerializable formatter = new Atom10FeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadXml(null));
}
[Fact]
public void ReadXml_NullCreatedFeed_ThrowsArgumentNullException()
{
using (var stringReader = new StringReader(@"<entry xmlns=""http://www.w3.org/2005/Atom""></entry>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
IXmlSerializable formatter = new NullCreatedFeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("feed", () => formatter.ReadXml(reader));
}
}
[Theory]
[InlineData("<feed />")]
[InlineData(@"<feed xmlns:app=""http://www.w3.org/2005/Atom"" />")]
[InlineData("<different />")]
public void ReadXml_CantRead_ThrowsXmlException(string xmlString)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
IXmlSerializable formatter = new Atom10FeedFormatter();
Assert.Throws<XmlException>(() => formatter.ReadXml(reader));
}
}
[Fact]
public void ReadXml_ThrowsArgumentException_RethrowsAsXmlException()
{
var reader = new ThrowingXmlReader(new ArgumentException());
IXmlSerializable formatter = new Atom10FeedFormatter();
Assert.Throws<XmlException>(() => formatter.ReadXml(reader));
}
[Fact]
public void ReadXml_ThrowsFormatException_RethrowsAsXmlException()
{
var reader = new ThrowingXmlReader(new FormatException());
IXmlSerializable formatter = new Atom10FeedFormatter();
Assert.Throws<XmlException>(() => formatter.ReadXml(reader));
}
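// ReadItem parses a single <entry> element; the feed argument provides context only and the item is not added to it.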
[Fact]
public void ReadItem_ValidItem_ReturnsExpected()
{
using (var stringReader = new StringReader(@"<entry><id xmlns=""http://www.w3.org/2005/Atom"">id</id></entry>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Formatter();
var feed = new SyndicationFeed();
SyndicationItem item = formatter.ReadItemEntryPoint(reader, feed);
Assert.Equal("id", item.Id);
Assert.Null(item.SourceFeed);
Assert.Empty(feed.Items);
}
}
[Fact]
public void ReadItem_NullReader_ThrowsArgumentNullException()
{
var formatter = new Formatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadItemEntryPoint(null, new SyndicationFeed()));
}
[Fact]
public void ReadItem_NullFeed_ThrowsArgumentNullException()
{
using (var stringReader = new StringReader("<entry></entry>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Formatter();
AssertExtensions.Throws<ArgumentNullException>("feed", () => formatter.ReadItemEntryPoint(reader, null));
}
}
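// ReadItems consumes consecutive <entry> elements and stops at the first non-entry sibling, so only the
// first two entries are returned here even though a third follows the <unknown> element.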
[Fact]
public void ReadItems_ValidItems_ReturnsExpected()
{
using (var stringReader = new StringReader(
@"<parent>
<entry xmlns=""http://www.w3.org/2005/Atom""><id>id1</id></entry>
<entry xmlns=""http://www.w3.org/2005/Atom""><id>id2</id></entry>
<unknown></unknown>
<entry xmlns=""http://www.w3.org/2005/Atom""><id>id3</id></entry>
</parent>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
reader.ReadStartElement();
reader.MoveToElement();
var formatter = new Formatter();
var feed = new SyndicationFeed();
SyndicationItem[] items = formatter.ReadItemsEntryPoint(reader, feed, out var areAllItemsRead).ToArray();
Assert.True(areAllItemsRead);
Assert.Empty(feed.Items);
Assert.Equal(2, items.Length);
Assert.Equal("id1", items[0].Id);
Assert.Null(items[0].SourceFeed);
Assert.Equal("id2", items[1].Id);
Assert.Null(items[1].SourceFeed);
}
}
[Fact]
public void ReadItems_NullReader_ThrowsArgumentNullException()
{
var formatter = new Formatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadItemsEntryPoint(null, new SyndicationFeed(), out var areAllItemsReader));
}
[Fact]
public void ReadItems_NullFeed_ThrowsArgumentNullException()
{
using (var stringReader = new StringReader("<entry></entry>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Formatter();
AssertExtensions.Throws<ArgumentNullException>("feed", () => formatter.ReadItemsEntryPoint(reader, null, out var areAllItemsReader));
}
}
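// The <updated> value is parsed lazily: ReadFrom succeeds and the XmlException only surfaces when LastUpdatedTime is accessed.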
[Theory]
[InlineData("")]
[InlineData("invalid")]
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Custom date parsing added in .NET Core changes this behaviour")]
public void Read_InvalidLastUpdatedTime_GetThrowsXmlException(string updated)
{
using (var stringReader = new StringReader(@"<feed xmlns=""http://www.w3.org/2005/Atom""><updated>" + updated + "</updated></feed>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter();
formatter.ReadFrom(reader);
Assert.Throws<XmlException>(() => formatter.Feed.LastUpdatedTime);
}
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void PreserveAttributeExtensions_Set_GetReturnsExpected(bool preserveAttributeExtensions)
{
var formatter = new Atom10FeedFormatter() { PreserveAttributeExtensions = preserveAttributeExtensions };
Assert.Equal(preserveAttributeExtensions, formatter.PreserveAttributeExtensions);
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void PreserveElementExtensions_Set_GetReturnsExpected(bool preserveElementExtensions)
{
var formatter = new Atom10FeedFormatter() { PreserveElementExtensions = preserveElementExtensions };
Assert.Equal(preserveElementExtensions, formatter.PreserveElementExtensions);
}
[Fact]
public void CreateFeedInstance_NonGeneric_Success()
{
var formatter = new Formatter();
SyndicationFeed feed = Assert.IsType<SyndicationFeed>(formatter.CreateFeedInstanceEntryPoint());
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
var typedFormatter = new Formatter(typeof(SyndicationFeedSubclass));
feed = Assert.IsType<SyndicationFeedSubclass>(typedFormatter.CreateFeedInstanceEntryPoint());
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
}
[Fact]
public void CreateItemInstance_Generic_Success()
{
var formatter = new GenericFormatter<SyndicationFeed>();
SyndicationFeed feed = Assert.IsType<SyndicationFeed>(formatter.CreateFeedInstanceEntryPoint());
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
var typedFormatter = new GenericFormatter<SyndicationFeedSubclass>();
feed = Assert.IsType<SyndicationFeedSubclass>(typedFormatter.CreateFeedInstanceEntryPoint());
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
}
public class SyndicationFeedSubclass : SyndicationFeed { }
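// The *TryParseTrueSubclass types report success from their TryParse* overrides so tests can verify
// formatter behavior when derived types claim to handle attributes, elements, and content themselves.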
public class SyndicationFeedTryParseTrueSubclass : SyndicationFeed
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
protected override SyndicationCategory CreateCategory() => new SyndicationCategoryTryParseTrueSubclass();
protected override SyndicationItem CreateItem() => new SyndicationItemTryParseTrueSubclass();
protected override SyndicationLink CreateLink() => new SyndicationLinkTryParseTrueSubclass();
protected override SyndicationPerson CreatePerson() => new SyndicationPersonTryParseTrueSubclass();
}
public class SyndicationItemTryParseTrueSubclass : SyndicationItem
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseContent(XmlReader reader, string contentType, string version, out SyndicationContent content)
{
reader.Skip();
content = new TextSyndicationContent("overriden");
return true;
}
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
protected override SyndicationCategory CreateCategory() => new SyndicationCategoryTryParseTrueSubclass();
protected override SyndicationPerson CreatePerson() => new SyndicationPersonTryParseTrueSubclass();
protected override SyndicationLink CreateLink() => new SyndicationLinkTryParseTrueSubclass();
}
public class SyndicationCategoryTryParseTrueSubclass : SyndicationCategory
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
}
public class SyndicationPersonTryParseTrueSubclass : SyndicationPerson
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
}
public class SyndicationLinkTryParseTrueSubclass : SyndicationLink
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
}
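// Returns null from CreateFeedInstance so tests can exercise the formatter's handling of a null feed instance.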
public class NullCreatedFeedFormatter : Atom10FeedFormatter
{
protected override SyndicationFeed CreateFeedInstance() => null;
}
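// Exposes the protected Atom10FeedFormatter members via *EntryPoint wrappers for testing.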
public class Formatter : Atom10FeedFormatter
{
public Formatter() : base() { }
public Formatter(SyndicationFeed feedToWrite) : base(feedToWrite) { }
public Formatter(Type feedTypeToCreate) : base(feedTypeToCreate) { }
public Type FeedTypeEntryPoint => FeedType;
public SyndicationFeed CreateFeedInstanceEntryPoint() => CreateFeedInstance();
public void WriteItemEntryPoint(XmlWriter writer, SyndicationItem item, Uri feedBaseUri) => WriteItem(writer, item, feedBaseUri);
public void WriteItemsEntryPoint(XmlWriter writer, IEnumerable<SyndicationItem> items, Uri feedBaseUri) => WriteItems(writer, items, feedBaseUri);
public SyndicationItem ReadItemEntryPoint(XmlReader reader, SyndicationFeed feed) => ReadItem(reader, feed);
public IEnumerable<SyndicationItem> ReadItemsEntryPoint(XmlReader reader, SyndicationFeed feed, out bool areAllItemsRead)
{
return ReadItems(reader, feed, out areAllItemsRead);
}
}
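// Generic counterpart of Formatter, exposing the protected members of Atom10FeedFormatter<T>.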
public class GenericFormatter<T> : Atom10FeedFormatter<T> where T : SyndicationFeed, new()
{
public GenericFormatter() : base() { }
public GenericFormatter(T feedToWrite) : base(feedToWrite) { }
public Type FeedTypeEntryPoint => FeedType;
public SyndicationFeed CreateFeedInstanceEntryPoint() => CreateFeedInstance();
}
[DataContract]
public class ExtensionObject
{
[DataMember]
public int Value { get; set; }
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.Serialization;
using System.Text;
using System.Xml;
using System.Xml.Linq;
using System.Xml.Serialization;
using Xunit;
namespace System.ServiceModel.Syndication.Tests
{
public partial class Atom10FeedFormatterTests
{
[Fact]
public void Ctor_Default()
{
var formatter = new Formatter();
Assert.Null(formatter.Feed);
Assert.Equal(typeof(SyndicationFeed), formatter.FeedTypeEntryPoint);
Assert.True(formatter.PreserveAttributeExtensions);
Assert.True(formatter.PreserveElementExtensions);
Assert.Equal("Atom10", formatter.Version);
}
[Fact]
public void Ctor_GenericDefault()
{
var formatter = new GenericFormatter<SyndicationFeed>();
Assert.Null(formatter.Feed);
Assert.Equal(typeof(SyndicationFeed), formatter.FeedTypeEntryPoint);
Assert.True(formatter.PreserveAttributeExtensions);
Assert.True(formatter.PreserveElementExtensions);
Assert.Equal("Atom10", formatter.Version);
}
[Fact]
public void Ctor_SyndicationFeed()
{
var feed = new SyndicationFeed();
var formatter = new Formatter(feed);
Assert.Same(feed, formatter.Feed);
Assert.Equal(typeof(SyndicationFeed), formatter.FeedTypeEntryPoint);
Assert.True(formatter.PreserveAttributeExtensions);
Assert.True(formatter.PreserveElementExtensions);
Assert.Equal("Atom10", formatter.Version);
}
[Fact]
public void Ctor_GenericSyndicationFeed()
{
var feed = new SyndicationFeed();
var formatter = new GenericFormatter<SyndicationFeed>(feed);
Assert.Same(feed, formatter.Feed);
Assert.Equal(typeof(SyndicationFeed), formatter.FeedTypeEntryPoint);
Assert.True(formatter.PreserveAttributeExtensions);
Assert.True(formatter.PreserveElementExtensions);
Assert.Equal("Atom10", formatter.Version);
}
[Fact]
public void Ctor_NullFeedToWrite_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("feedToWrite", () => new Atom10FeedFormatter((SyndicationFeed)null));
AssertExtensions.Throws<ArgumentNullException>("feedToWrite", () => new Atom10FeedFormatter<SyndicationFeed>(null));
}
[Theory]
[InlineData(typeof(SyndicationFeed))]
[InlineData(typeof(SyndicationFeedSubclass))]
public void Ctor_Type(Type feedTypeToCreate)
{
var formatter = new Formatter(feedTypeToCreate);
Assert.Null(formatter.Feed);
Assert.Equal(feedTypeToCreate, formatter.FeedTypeEntryPoint);
Assert.True(formatter.PreserveAttributeExtensions);
Assert.True(formatter.PreserveElementExtensions);
Assert.Equal("Atom10", formatter.Version);
}
[Fact]
public void Ctor_NullFeedTypeToCreate_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("feedTypeToCreate", () => new Atom10FeedFormatter((Type)null));
}
[Fact]
public void Ctor_InvalidFeedTypeToCreate_ThrowsArgumentException()
{
AssertExtensions.Throws<ArgumentException>("feedTypeToCreate", () => new Atom10FeedFormatter(typeof(int)));
}
[Fact]
public void GetSchema_Invoke_ReturnsNull()
{
IXmlSerializable formatter = new Atom10FeedFormatter();
Assert.Null(formatter.GetSchema());
}
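// Produces a fully populated SyndicationFeed (attribute/element extensions, authors, categories, links, items)
// paired with the Atom 1.0 XML it is expected to serialize to.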
public static IEnumerable<object[]> WriteTo_TestData()
{
// Full item.
SyndicationPerson CreatePerson(string prefix)
{
var person = new SyndicationPerson();
person.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name1"), null);
person.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name2", prefix + "_namespace"), "");
person.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name3", prefix + "_namespace"), prefix + "_value");
person.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name4", "xmlns"), "");
person.ElementExtensions.Add(new ExtensionObject { Value = 10 });
person.Email = prefix + "_email";
person.Name = prefix + "_name";
person.Uri = prefix + "_uri";
return person;
}
TextSyndicationContent CreateContent(string prefix)
{
var content = new TextSyndicationContent(prefix + "_title", TextSyndicationContentKind.Html);
content.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name1"), null);
content.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name2", prefix + "_namespace"), "");
content.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name3", prefix + "_namespace"), prefix + "_value");
content.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name4", "xmlns"), "");
return content;
}
SyndicationCategory CreateCategory(string prefix)
{
var category = new SyndicationCategory();
category.AttributeExtensions.Add(new XmlQualifiedName(prefix + "category_name1"), null);
category.AttributeExtensions.Add(new XmlQualifiedName(prefix + "category_name2", prefix + "category_namespace"), "");
category.AttributeExtensions.Add(new XmlQualifiedName(prefix + "category_name3", prefix + "category_namespace"), prefix + "category_value");
category.AttributeExtensions.Add(new XmlQualifiedName(prefix + "category_name4", "xmlns"), "");
category.ElementExtensions.Add(new ExtensionObject { Value = 10 });
category.Label = prefix + "category_label";
category.Name = prefix + "category_name";
category.Scheme = prefix + "category_scheme";
return category;
}
SyndicationLink CreateLink(string prefix)
{
var link = new SyndicationLink();
link.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name1"), null);
link.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name2", prefix + "_namespace"), "");
link.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name3", prefix + "_namespace"), prefix + "_value");
link.AttributeExtensions.Add(new XmlQualifiedName(prefix + "_name4", "xmlns"), "");
link.BaseUri = new Uri("http://" + prefix + "_url.com");
link.ElementExtensions.Add(new ExtensionObject { Value = 10 });
link.Length = 10;
link.MediaType = prefix + "_mediaType";
link.RelationshipType = prefix + "_relationshipType";
link.Title = prefix + "_title";
link.Uri = new Uri("http://" + prefix + "_uri.com");
return link;
}
var attributeSyndicationCategory = new SyndicationCategory
{
Name = "name",
Label = "label",
Scheme = "scheme"
};
attributeSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("term"), "term_value");
attributeSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("label"), "label_value");
attributeSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("scheme"), "scheme_value");
var attributeSyndicationLink = new SyndicationLink
{
RelationshipType = "link_relationshipType",
MediaType = "link_mediaType",
Title = "link_title",
Length = 10,
Uri = new Uri("http://link_uri.com")
};
attributeSyndicationLink.AttributeExtensions.Add(new XmlQualifiedName("rel"), "rel_value");
attributeSyndicationLink.AttributeExtensions.Add(new XmlQualifiedName("type"), "type_value");
attributeSyndicationLink.AttributeExtensions.Add(new XmlQualifiedName("title"), "title_value");
attributeSyndicationLink.AttributeExtensions.Add(new XmlQualifiedName("length"), "100");
attributeSyndicationLink.AttributeExtensions.Add(new XmlQualifiedName("href"), "href_value");
var fullSyndicationItem = new SyndicationItem();
fullSyndicationItem.AttributeExtensions.Add(new XmlQualifiedName("item_name1"), null);
fullSyndicationItem.AttributeExtensions.Add(new XmlQualifiedName("item_name2", "item_namespace"), "");
fullSyndicationItem.AttributeExtensions.Add(new XmlQualifiedName("item_name3", "item_namespace"), "item_value");
fullSyndicationItem.AttributeExtensions.Add(new XmlQualifiedName("item_name4", "xmlns"), "");
fullSyndicationItem.Authors.Add(new SyndicationPerson());
fullSyndicationItem.Authors.Add(CreatePerson("author"));
fullSyndicationItem.BaseUri = new Uri("http://microsoft/relative");
fullSyndicationItem.Categories.Add(new SyndicationCategory());
fullSyndicationItem.Categories.Add(CreateCategory(""));
fullSyndicationItem.Categories.Add(attributeSyndicationCategory);
fullSyndicationItem.Content = CreateContent("content");
fullSyndicationItem.Contributors.Add(new SyndicationPerson());
fullSyndicationItem.Contributors.Add(CreatePerson("contributor"));
fullSyndicationItem.Copyright = CreateContent("copyright");
fullSyndicationItem.ElementExtensions.Add(new ExtensionObject { Value = 10 });
fullSyndicationItem.Id = "id";
fullSyndicationItem.LastUpdatedTime = DateTimeOffset.MinValue.AddTicks(100);
fullSyndicationItem.Links.Add(new SyndicationLink());
fullSyndicationItem.Links.Add(CreateLink("link"));
fullSyndicationItem.Links.Add(attributeSyndicationLink);
fullSyndicationItem.PublishDate = DateTimeOffset.MinValue.AddTicks(200);
fullSyndicationItem.Summary = CreateContent("summary");
fullSyndicationItem.Title = CreateContent("title");
var fullSyndicationFeed = new SyndicationFeed();
fullSyndicationFeed.AttributeExtensions.Add(new XmlQualifiedName("feed_name1"), null);
fullSyndicationFeed.AttributeExtensions.Add(new XmlQualifiedName("feed_name2", "feed_namespace"), "");
fullSyndicationFeed.AttributeExtensions.Add(new XmlQualifiedName("feed_name3", "feed_namespace"), "feed_value");
fullSyndicationFeed.AttributeExtensions.Add(new XmlQualifiedName("feed_name4", "xmlns"), "");
fullSyndicationFeed.Authors.Add(new SyndicationPerson());
fullSyndicationFeed.Authors.Add(CreatePerson("feedauthor"));
fullSyndicationFeed.BaseUri = new Uri("http://microsoft.com");
fullSyndicationFeed.Categories.Add(new SyndicationCategory());
fullSyndicationFeed.Categories.Add(CreateCategory("feed"));
fullSyndicationItem.Categories.Add(attributeSyndicationCategory);
fullSyndicationFeed.Contributors.Add(new SyndicationPerson());
fullSyndicationFeed.Contributors.Add(CreatePerson("feedauthor_"));
fullSyndicationFeed.Copyright = CreateContent("feedcopyright");
fullSyndicationFeed.Description = CreateContent("feeddescription");
fullSyndicationFeed.ElementExtensions.Add(new ExtensionObject { Value = 10 });
fullSyndicationFeed.Generator = "generator";
fullSyndicationFeed.Id = "id";
fullSyndicationFeed.ImageUrl = new Uri("http://imageurl.com");
fullSyndicationFeed.Items = new SyndicationItem[] { new SyndicationItem() { Id = "id", LastUpdatedTime = DateTimeOffset.MinValue.AddTicks(1) }, fullSyndicationItem };
fullSyndicationFeed.Language = "language";
fullSyndicationFeed.LastUpdatedTime = DateTimeOffset.MinValue.AddYears(1);
fullSyndicationFeed.Links.Add(new SyndicationLink());
fullSyndicationFeed.Links.Add(CreateLink("syndicationlink"));
fullSyndicationFeed.Links.Add(attributeSyndicationLink);
fullSyndicationFeed.Title = CreateContent("feedtitle");
yield return new object[]
{
fullSyndicationFeed,
@"<feed xml:lang=""language"" xml:base=""http://microsoft.com/"" feed_name1="""" d1p1:feed_name2="""" d1p1:feed_name3=""feed_value"" d1p2:feed_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""feed_namespace"" xmlns=""http://www.w3.org/2005/Atom"">
<title type=""html"" feedtitle_name1="""" d2p1:feedtitle_name2="""" d2p1:feedtitle_name3=""feedtitle_value"" d1p2:feedtitle_name4="""" xmlns:d2p1=""feedtitle_namespace"">feedtitle_title</title>
<subtitle type=""html"" feeddescription_name1="""" d2p1:feeddescription_name2="""" d2p1:feeddescription_name3=""feeddescription_value"" d1p2:feeddescription_name4="""" xmlns:d2p1=""feeddescription_namespace"">feeddescription_title</subtitle>
<id>id</id>
<rights type=""html"" feedcopyright_name1="""" d2p1:feedcopyright_name2="""" d2p1:feedcopyright_name3=""feedcopyright_value"" d1p2:feedcopyright_name4="""" xmlns:d2p1=""feedcopyright_namespace"">feedcopyright_title</rights>
<updated>0002-01-01T00:00:00Z</updated>
<category term="""" />
<category feedcategory_name1="""" d2p1:feedcategory_name2="""" d2p1:feedcategory_name3=""feedcategory_value"" d1p2:feedcategory_name4="""" term=""feedcategory_name"" label=""feedcategory_label"" scheme=""feedcategory_scheme"" xmlns:d2p1=""feedcategory_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<logo>http://imageurl.com/</logo>
<author />
<author feedauthor_name1="""" d2p1:feedauthor_name2="""" d2p1:feedauthor_name3=""feedauthor_value"" d1p2:feedauthor_name4="""" xmlns:d2p1=""feedauthor_namespace"">
<name>feedauthor_name</name>
<uri>feedauthor_uri</uri>
<email>feedauthor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor feedauthor__name1="""" d2p1:feedauthor__name2="""" d2p1:feedauthor__name3=""feedauthor__value"" d1p2:feedauthor__name4="""" xmlns:d2p1=""feedauthor__namespace"">
<name>feedauthor__name</name>
<uri>feedauthor__uri</uri>
<email>feedauthor__email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<generator>generator</generator>
<link href="""" />
<link xml:base=""http://syndicationlink_url.com/"" syndicationlink_name1="""" d2p1:syndicationlink_name2="""" d2p1:syndicationlink_name3=""syndicationlink_value"" d1p2:syndicationlink_name4="""" rel=""syndicationlink_relationshipType"" type=""syndicationlink_mediaType"" title=""syndicationlink_title"" length=""10"" href=""http://syndicationlink_uri.com/"" xmlns:d2p1=""syndicationlink_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<link rel=""rel_value"" type=""type_value"" title=""title_value"" length=""100"" href=""href_value"" />
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
<entry>
<id>id</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>
<entry xml:base=""http://microsoft/relative"" item_name1="""" d2p1:item_name2="""" d2p1:item_name3=""item_value"" d1p2:item_name4="""" xmlns:d2p1=""item_namespace"">
<id>id</id>
<title type=""html"" title_name1="""" d3p1:title_name2="""" d3p1:title_name3=""title_value"" d1p2:title_name4="""" xmlns:d3p1=""title_namespace"">title_title</title>
<summary type=""html"" summary_name1="""" d3p1:summary_name2="""" d3p1:summary_name3=""summary_value"" d1p2:summary_name4="""" xmlns:d3p1=""summary_namespace"">summary_title</summary>
<published>0001-01-01T00:00:00Z</published>
<updated>0001-01-01T00:00:00Z</updated>
<author />
<author author_name1="""" d3p1:author_name2="""" d3p1:author_name3=""author_value"" d1p2:author_name4="""" xmlns:d3p1=""author_namespace"">
<name>author_name</name>
<uri>author_uri</uri>
<email>author_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor contributor_name1="""" d3p1:contributor_name2="""" d3p1:contributor_name3=""contributor_value"" d1p2:contributor_name4="""" xmlns:d3p1=""contributor_namespace"">
<name>contributor_name</name>
<uri>contributor_uri</uri>
<email>contributor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<link href="""" />
<link xml:base=""http://link_url.com/"" link_name1="""" d3p1:link_name2="""" d3p1:link_name3=""link_value"" d1p2:link_name4="""" rel=""link_relationshipType"" type=""link_mediaType"" title=""link_title"" length=""10"" href=""http://link_uri.com/"" xmlns:d3p1=""link_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<link rel=""rel_value"" type=""type_value"" title=""title_value"" length=""100"" href=""href_value"" />
<category term="""" />
<category category_name1="""" d3p1:category_name2="""" d3p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d3p1=""category_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<category term=""term_value"" label=""label_value"" scheme=""scheme_value"" />
<category term=""term_value"" label=""label_value"" scheme=""scheme_value"" />
<content type=""html"" content_name1="""" d3p1:content_name2="""" d3p1:content_name3=""content_value"" d1p2:content_name4="""" xmlns:d3p1=""content_namespace"">content_title</content>
<rights type=""html"" copyright_name1="""" d3p1:copyright_name2="""" d3p1:copyright_name3=""copyright_value"" d1p2:copyright_name4="""" xmlns:d3p1=""copyright_namespace"">copyright_title</rights>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</entry>
</feed>"
};
}
[Theory]
[MemberData(nameof(WriteTo_TestData))]
public void Write_HasFeed_SerializesExpected(SyndicationFeed feed, string expected)
{
var formatter = new Atom10FeedFormatter(feed);
CompareHelper.AssertEqualWriteOutput(expected, writer => formatter.WriteTo(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer => feed.SaveAsAtom10(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer =>
{
writer.WriteStartElement("feed", "http://www.w3.org/2005/Atom");
((IXmlSerializable)formatter).WriteXml(writer);
writer.WriteEndElement();
});
var genericFormatter = new Atom10FeedFormatter<SyndicationFeed>(feed);
CompareHelper.AssertEqualWriteOutput(expected, writer => genericFormatter.WriteTo(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer => feed.SaveAsAtom10(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer =>
{
writer.WriteStartElement("feed", "http://www.w3.org/2005/Atom");
((IXmlSerializable)genericFormatter).WriteXml(writer);
writer.WriteEndElement();
});
}
[Fact]
public void Write_EmptyFeed_SerializesExpected()
{
var formatter = new Atom10FeedFormatter(new SyndicationFeed());
var stringBuilder = new StringBuilder();
using (XmlWriter writer = XmlWriter.Create(stringBuilder))
{
formatter.WriteTo(writer);
}
using (var stringReader = new StringReader(stringBuilder.ToString()))
{
XElement element = XElement.Load(stringReader);
Assert.Equal("feed", element.Name.LocalName);
Assert.Equal("http://www.w3.org/2005/Atom", element.Attribute("xmlns").Value);
XElement[] elements = element.Elements().ToArray();
Assert.Equal(3, elements.Length);
Assert.Equal("title", elements[0].Name.LocalName);
Assert.Equal("text", elements[0].Attribute("type").Value);
Assert.Empty(elements[0].Value);
Assert.Equal("id", elements[1].Name.LocalName);
Assert.StartsWith("uuid:", elements[1].Value);
Assert.Equal("updated", elements[2].Name.LocalName);
DateTimeOffset now = DateTimeOffset.UtcNow;
Assert.True(now >= DateTimeOffset.ParseExact(elements[2].Value, "yyyy-MM-ddTHH:mm:ssZ", CultureInfo.InvariantCulture));
}
}
[Fact]
public void WriteTo_NullWriter_ThrowsArgumentNullException()
{
var formatter = new Atom10FeedFormatter(new SyndicationFeed());
AssertExtensions.Throws<ArgumentNullException>("writer", () => formatter.WriteTo(null));
}
[Fact]
public void WriteTo_NoItem_ThrowsInvalidOperationException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Atom10FeedFormatter();
Assert.Throws<InvalidOperationException>(() => formatter.WriteTo(writer));
}
}
[Fact]
public void WriteXml_NullWriter_ThrowsArgumentNullException()
{
IXmlSerializable formatter = new Atom10FeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("writer", () => formatter.WriteXml(null));
}
[Fact]
public void WriteXml_NoItem_ThrowsInvalidOperationException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
IXmlSerializable formatter = new Atom10FeedFormatter();
Assert.Throws<InvalidOperationException>(() => formatter.WriteXml(writer));
}
}
public static IEnumerable<object[]> FeedBaseUri_TestData()
{
yield return new object[] { null };
yield return new object[] { new Uri("http://microsoft.com") };
yield return new object[] { new Uri("/relative", UriKind.Relative) };
}
[Theory]
[MemberData(nameof(FeedBaseUri_TestData))]
public void WriteItem_Invoke_Success(Uri feedBaseUri)
{
var formatter = new Formatter();
var item = new SyndicationItem() { Id = "id", LastUpdatedTime = DateTimeOffset.MinValue.AddTicks(1) };
CompareHelper.AssertEqualWriteOutput(
@"<entry xmlns=""http://www.w3.org/2005/Atom"">
<id>id</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>", writer => formatter.WriteItemEntryPoint(writer, item, feedBaseUri));
}
[Fact]
public void WriteItem_NullWriter_ThrowsNullReferenceException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
Assert.Throws<NullReferenceException>(() => formatter.WriteItemEntryPoint(null, new SyndicationItem(), new Uri("http://microsoft.com")));
}
}
[Fact]
public void WriteItem_NullItem_ThrowsNullReferenceException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
Assert.Throws<NullReferenceException>(() => formatter.WriteItemEntryPoint(writer, null, new Uri("http://microsoft.com")));
}
}
[Theory]
[MemberData(nameof(FeedBaseUri_TestData))]
public void WriteItems_Invoke_Success(Uri feedBaseUri)
{
var formatter = new Formatter();
var items = new SyndicationItem[]
{
new SyndicationItem() { Id = "id1", LastUpdatedTime = DateTimeOffset.MinValue.AddTicks(1) },
new SyndicationItem() { Id = "id2", LastUpdatedTime = DateTimeOffset.MinValue.AddTicks(1) }
};
CompareHelper.AssertEqualWriteOutput(
@"<entry xmlns=""http://www.w3.org/2005/Atom"">
<id>id1</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>
<entry xmlns=""http://www.w3.org/2005/Atom"">
<id>id2</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>", writer => formatter.WriteItemsEntryPoint(writer, items, feedBaseUri));
}
[Fact]
public void WriteItems_NullItems_Nop()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
formatter.WriteItemsEntryPoint(writer, null, new Uri("http://microsoft.com"));
formatter.WriteItemsEntryPoint(null, null, new Uri("http://microsoft.com"));
}
}
[Fact]
public void WriteItems_EmptyItems_Nop()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
formatter.WriteItemsEntryPoint(writer, new SyndicationItem[0], new Uri("http://microsoft.com"));
formatter.WriteItemsEntryPoint(null, new SyndicationItem[0], new Uri("http://microsoft.com"));
}
}
[Fact]
public void WriteItems_NullWriter_ThrowsNullReferenceException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
var items = new SyndicationItem[] { new SyndicationItem() };
Assert.Throws<NullReferenceException>(() => formatter.WriteItemsEntryPoint(null, items, new Uri("http://microsoft.com")));
}
}
[Fact]
public void WriteItems_NullItemInItems_ThrowsNullReferenceException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new Formatter();
var items = new SyndicationItem[] { null };
Assert.Throws<NullReferenceException>(() => formatter.WriteItemsEntryPoint(writer, items, new Uri("http://microsoft.com")));
}
}
public static IEnumerable<object[]> CanRead_TestData()
{
yield return new object[] { @"<feed />", false };
yield return new object[] { @"<feed xmlns=""different"" />", false };
yield return new object[] { @"<different xmlns=""http://www.w3.org/2005/Atom"" />", false };
yield return new object[] { @"<feed xmlns=""http://www.w3.org/2005/Atom"" />", true };
yield return new object[] { @"<feed xmlns=""http://www.w3.org/2005/Atom""></feed>", true };
}
[Theory]
[MemberData(nameof(CanRead_TestData))]
public void CanRead_ValidReader_ReturnsExpected(string xmlString, bool expected)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter();
Assert.Equal(expected, formatter.CanRead(reader));
}
}
[Fact]
public void CanRead_NullReader_ThrowsArgumentNullException()
{
var formatter = new Atom10FeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.CanRead(null));
}
[Theory]
[InlineData(true, true)]
[InlineData(false, false)]
public void Read_FullItem_ReturnsExpected(bool preserveAttributeExtensions, bool preserveElementExtensions)
{
VerifyRead(
@"<feed xml:lang=""language"" xml:base=""http://microsoft.com/"" feed_name1="""" d1p1:feed_name2="""" d1p1:feed_name3=""feed_value"" d1p2:feed_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""feed_namespace"" xmlns=""http://www.w3.org/2005/Atom"">
<title type=""html"" feedtitle_name1="""" d2p1:feedtitle_name2="""" d2p1:feedtitle_name3=""feedtitle_value"" d1p2:feedtitle_name4="""" xmlns:d2p1=""feedtitle_namespace"">feedtitle_title</title>
<subtitle type=""html"" feeddescription_name1="""" d2p1:feeddescription_name2="""" d2p1:feeddescription_name3=""feeddescription_value"" d1p2:feeddescription_name4="""" xmlns:d2p1=""feeddescription_namespace"">feeddescription_title</subtitle>
<id>id</id>
<rights type=""html"" feedcopyright_name1="""" d2p1:feedcopyright_name2="""" d2p1:feedcopyright_name3=""feedcopyright_value"" d1p2:feedcopyright_name4="""" xmlns:d2p1=""feedcopyright_namespace"">feedcopyright_title</rights>
<updated>0002-01-01T00:00:00Z</updated>
<category />
<category></category>
<category term="""" />
<category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<logo>http://imageurl.com/</logo>
<author />
<author></author>
<author author_name1="""" d2p1:author_name2="""" d2p1:author_name3=""author_value"" d1p2:author_name4="""" xmlns:d2p1=""author_namespace"">
<name>author_name</name>
<uri>author_uri</uri>
<email>author_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor></contributor>
<contributor contributor_name1="""" d2p1:contributor_name2="""" d2p1:contributor_name3=""contributor_value"" d1p2:contributor_name4="""" xmlns:d2p1=""contributor_namespace"">
<name>contributor_name</name>
<uri>contributor_uri</uri>
<email>contributor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<generator>generator</generator>
<link />
<link></link>
<link href="""" />
<link xml:base=""http://link_url.com/"" link_name1="""" d2p1:link_name2="""" d2p1:link_name3=""link_value"" d1p2:link_name4="""" rel=""link_relationshipType"" type=""link_mediaType"" title=""link_title"" length=""10"" href=""http://link_uri.com/"" xmlns:d2p1=""link_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
<entry>
<id>id</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>
<entry xml:base=""/relative"" item_name1="""" d1p1:item_name2="""" d1p1:item_name3=""item_value"" d1p2:item_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""item_namespace"" xmlns=""http://www.w3.org/2005/Atom"">
<id>id</id>
<title type=""html"" title_name1="""" d2p1:title_name2="""" d2p1:title_name3=""title_value"" d1p2:title_name4="""" xmlns:d2p1=""title_namespace"">title_title</title>
<summary type=""html"" summary_name1="""" d2p1:summary_name2="""" d2p1:summary_name3=""summary_value"" d1p2:summary_name4="""" xmlns:d2p1=""summary_namespace"">summary_title</summary>
<published>0001-01-01T00:00:00Z</published>
<updated>0001-01-01T00:00:00Z</updated>
<author />
<author></author>
<author author_name1="""" d2p1:author_name2="""" d2p1:author_name3=""author_value"" d1p2:author_name4="""" xmlns:d2p1=""author_namespace"">
<name>author_name</name>
<uri>author_uri</uri>
<email>author_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor></contributor>
<contributor contributor_name1="""" d2p1:contributor_name2="""" d2p1:contributor_name3=""contributor_value"" d1p2:contributor_name4="""" xmlns:d2p1=""contributor_namespace"">
<name>contributor_name</name>
<uri>contributor_uri</uri>
<email>contributor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<link />
<link></link>
<link href="""" />
<link xml:base=""http://link_url.com/"" link_name1="""" d2p1:link_name2="""" d2p1:link_name3=""link_value"" d1p2:link_name4="""" rel=""link_relationshipType"" type=""link_mediaType"" title=""link_title"" length=""10"" href=""http://link_uri.com/"" xmlns:d2p1=""link_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<category />
<category></category>
<category term="""" />
<category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<content type=""html"" content_name1="""" d2p1:content_name2="""" d2p1:content_name3=""content_value"" d1p2:content_name4="""" xmlns:d2p1=""content_namespace"">content_title</content>
<rights type=""html"" copyright_name1="""" d2p1:copyright_name2="""" d2p1:copyright_name3=""copyright_value"" d1p2:copyright_name4="""" xmlns:d2p1=""copyright_namespace"">copyright_title</rights>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</entry>
</feed>
", preserveElementExtensions, preserveElementExtensions, feed =>
{
if (preserveAttributeExtensions)
{
Assert.Equal(4, feed.AttributeExtensions.Count);
Assert.Equal("", feed.AttributeExtensions[new XmlQualifiedName("feed_name1")]);
Assert.Equal("", feed.AttributeExtensions[new XmlQualifiedName("feed_name2", "feed_namespace")]);
Assert.Equal("feed_value", feed.AttributeExtensions[new XmlQualifiedName("feed_name3", "feed_namespace")]);
Assert.Equal("", feed.AttributeExtensions[new XmlQualifiedName("feed_name4", "xmlns")]);
}
else
{
Assert.Empty(feed.AttributeExtensions);
}
Assert.Equal(3, feed.Authors.Count);
SyndicationPerson firstAuthor = feed.Authors[0];
Assert.Empty(firstAuthor.AttributeExtensions);
Assert.Empty(firstAuthor.ElementExtensions);
Assert.Null(firstAuthor.Email);
Assert.Null(firstAuthor.Name);
Assert.Null(firstAuthor.Uri);
SyndicationPerson secondAuthor = feed.Authors[1];
Assert.Empty(secondAuthor.AttributeExtensions);
Assert.Empty(secondAuthor.ElementExtensions);
Assert.Null(secondAuthor.Email);
Assert.Null(secondAuthor.Name);
Assert.Null(secondAuthor.Uri);
SyndicationPerson thirdAuthor = feed.Authors[2];
if (preserveAttributeExtensions)
{
Assert.Equal(4, thirdAuthor.AttributeExtensions.Count);
Assert.Equal("", thirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name1")]);
Assert.Equal("", thirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name2", "author_namespace")]);
Assert.Equal("author_value", thirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name3", "author_namespace")]);
Assert.Equal("", thirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name4", "xmlns")]);
}
else
{
Assert.Empty(thirdAuthor.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, thirdAuthor.ElementExtensions.Count);
Assert.Equal(10, thirdAuthor.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(thirdAuthor.ElementExtensions);
}
Assert.Equal("author_email", thirdAuthor.Email);
Assert.Equal("author_name", thirdAuthor.Name);
Assert.Equal("author_uri", thirdAuthor.Uri);
Assert.Equal(new Uri("http://microsoft.com"), feed.BaseUri);
Assert.Equal(4, feed.Categories.Count);
SyndicationCategory firstCategory = feed.Categories[0];
Assert.Empty(firstCategory.AttributeExtensions);
Assert.Empty(firstCategory.ElementExtensions);
Assert.Null(firstCategory.Name);
Assert.Null(firstCategory.Scheme);
Assert.Null(firstCategory.Label);
SyndicationCategory secondCategory = feed.Categories[1];
Assert.Empty(secondCategory.AttributeExtensions);
Assert.Empty(secondCategory.ElementExtensions);
Assert.Null(secondCategory.Name);
Assert.Null(secondCategory.Scheme);
Assert.Null(secondCategory.Label);
SyndicationCategory thirdCategory = feed.Categories[2];
Assert.Empty(thirdCategory.AttributeExtensions);
Assert.Empty(thirdCategory.ElementExtensions);
Assert.Empty(thirdCategory.Name);
Assert.Null(thirdCategory.Scheme);
Assert.Null(thirdCategory.Label);
SyndicationCategory fourthCategory = feed.Categories[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, fourthCategory.AttributeExtensions.Count);
Assert.Equal("", fourthCategory.AttributeExtensions[new XmlQualifiedName("category_name1")]);
Assert.Equal("", fourthCategory.AttributeExtensions[new XmlQualifiedName("category_name2", "category_namespace")]);
Assert.Equal("category_value", fourthCategory.AttributeExtensions[new XmlQualifiedName("category_name3", "category_namespace")]);
Assert.Equal("", fourthCategory.AttributeExtensions[new XmlQualifiedName("category_name4", "xmlns")]);
}
else
{
Assert.Empty(fourthCategory.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, fourthCategory.ElementExtensions.Count);
Assert.Equal(10, fourthCategory.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(fourthCategory.ElementExtensions);
}
Assert.Equal("category_name", fourthCategory.Name);
Assert.Equal("category_scheme", fourthCategory.Scheme);
Assert.Equal("category_label", fourthCategory.Label);
Assert.Equal(3, feed.Contributors.Count);
SyndicationPerson firstContributor = feed.Contributors[0];
Assert.Empty(firstContributor.AttributeExtensions);
Assert.Empty(firstContributor.ElementExtensions);
Assert.Null(firstContributor.Email);
Assert.Null(firstContributor.Name);
Assert.Null(firstContributor.Uri);
SyndicationPerson secondContributor = feed.Contributors[1];
Assert.Empty(secondContributor.AttributeExtensions);
Assert.Empty(secondContributor.ElementExtensions);
Assert.Null(secondContributor.Email);
Assert.Null(secondContributor.Name);
Assert.Null(secondContributor.Uri);
SyndicationPerson thirdContributor = feed.Contributors[2];
if (preserveAttributeExtensions)
{
Assert.Equal(4, thirdContributor.AttributeExtensions.Count);
Assert.Equal("", thirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name1")]);
Assert.Equal("", thirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name2", "contributor_namespace")]);
Assert.Equal("contributor_value", thirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name3", "contributor_namespace")]);
Assert.Equal("", thirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name4", "xmlns")]);
}
else
{
Assert.Empty(thirdContributor.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, thirdContributor.ElementExtensions.Count);
Assert.Equal(10, thirdContributor.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(thirdContributor.ElementExtensions);
}
Assert.Equal("contributor_email", thirdContributor.Email);
Assert.Equal("contributor_name", thirdContributor.Name);
Assert.Equal("contributor_uri", thirdContributor.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, feed.Copyright.AttributeExtensions.Count);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name1")]);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name2", "feedcopyright_namespace")]);
Assert.Equal("feedcopyright_value", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name3", "feedcopyright_namespace")]);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name4", "xmlns")]);
}
else
{
Assert.Empty(feed.Copyright.AttributeExtensions);
}
Assert.Equal("feedcopyright_title", feed.Copyright.Text);
Assert.Equal("html", feed.Copyright.Type);
Assert.Equal("generator", feed.Generator);
if (preserveElementExtensions)
{
Assert.Equal(1, feed.ElementExtensions.Count);
Assert.Equal(10, feed.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(feed.ElementExtensions);
}
Assert.Equal("id", feed.Id);
Assert.Equal(new Uri("http://imageurl.com/"), feed.ImageUrl);
SyndicationItem[] items = feed.Items.ToArray();
Assert.Equal(2, items.Length);
SyndicationItem item = items[1];
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.AttributeExtensions.Count);
Assert.Equal("", item.AttributeExtensions[new XmlQualifiedName("item_name1")]);
Assert.Equal("", item.AttributeExtensions[new XmlQualifiedName("item_name2", "item_namespace")]);
Assert.Equal("item_value", item.AttributeExtensions[new XmlQualifiedName("item_name3", "item_namespace")]);
Assert.Equal("", item.AttributeExtensions[new XmlQualifiedName("item_name4", "xmlns")]);
}
else
{
Assert.Empty(item.AttributeExtensions);
}
Assert.Equal(3, item.Authors.Count);
SyndicationPerson itemFirstAuthor = item.Authors[0];
Assert.Empty(itemFirstAuthor.AttributeExtensions);
Assert.Empty(itemFirstAuthor.ElementExtensions);
Assert.Null(itemFirstAuthor.Email);
Assert.Null(itemFirstAuthor.Name);
Assert.Null(itemFirstAuthor.Uri);
SyndicationPerson itemSecondAuthor = item.Authors[1];
Assert.Empty(itemSecondAuthor.AttributeExtensions);
Assert.Empty(itemSecondAuthor.ElementExtensions);
Assert.Null(itemSecondAuthor.Email);
Assert.Null(itemSecondAuthor.Name);
Assert.Null(itemSecondAuthor.Uri);
SyndicationPerson itemThirdAuthor = item.Authors[2];
if (preserveAttributeExtensions)
{
Assert.Equal(4, itemThirdAuthor.AttributeExtensions.Count);
Assert.Equal("", itemThirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name1")]);
Assert.Equal("", itemThirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name2", "author_namespace")]);
Assert.Equal("author_value", itemThirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name3", "author_namespace")]);
Assert.Equal("", itemThirdAuthor.AttributeExtensions[new XmlQualifiedName("author_name4", "xmlns")]);
}
else
{
Assert.Empty(itemThirdAuthor.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, itemThirdAuthor.ElementExtensions.Count);
Assert.Equal(10, itemThirdAuthor.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(itemThirdAuthor.ElementExtensions);
}
Assert.Equal("author_email", itemThirdAuthor.Email);
Assert.Equal("author_name", itemThirdAuthor.Name);
Assert.Equal("author_uri", itemThirdAuthor.Uri);
Assert.Equal(new Uri("http://microsoft.com/relative"), item.BaseUri);
Assert.Equal(4, item.Categories.Count);
SyndicationCategory itemFirstCategory = item.Categories[0];
Assert.Empty(itemFirstCategory.AttributeExtensions);
Assert.Empty(itemFirstCategory.ElementExtensions);
Assert.Null(itemFirstCategory.Name);
Assert.Null(itemFirstCategory.Scheme);
Assert.Null(itemFirstCategory.Label);
SyndicationCategory itemSecondCategory = item.Categories[1];
Assert.Empty(itemSecondCategory.AttributeExtensions);
Assert.Empty(itemSecondCategory.ElementExtensions);
Assert.Null(itemSecondCategory.Name);
Assert.Null(itemSecondCategory.Scheme);
Assert.Null(itemSecondCategory.Label);
SyndicationCategory itemThirdCategory = item.Categories[2];
Assert.Empty(itemThirdCategory.AttributeExtensions);
Assert.Empty(itemThirdCategory.ElementExtensions);
Assert.Empty(itemThirdCategory.Name);
Assert.Null(itemThirdCategory.Scheme);
Assert.Null(itemThirdCategory.Label);
SyndicationCategory itemFourthCategory = item.Categories[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, itemFourthCategory.AttributeExtensions.Count);
Assert.Equal("", itemFourthCategory.AttributeExtensions[new XmlQualifiedName("category_name1")]);
Assert.Equal("", itemFourthCategory.AttributeExtensions[new XmlQualifiedName("category_name2", "category_namespace")]);
Assert.Equal("category_value", itemFourthCategory.AttributeExtensions[new XmlQualifiedName("category_name3", "category_namespace")]);
Assert.Equal("", itemFourthCategory.AttributeExtensions[new XmlQualifiedName("category_name4", "xmlns")]);
}
else
{
Assert.Empty(itemFourthCategory.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, itemFourthCategory.ElementExtensions.Count);
Assert.Equal(10, itemFourthCategory.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(itemFourthCategory.ElementExtensions);
}
Assert.Equal("category_name", itemFourthCategory.Name);
Assert.Equal("category_scheme", itemFourthCategory.Scheme);
Assert.Equal("category_label", itemFourthCategory.Label);
TextSyndicationContent content = Assert.IsType<TextSyndicationContent>(item.Content);
if (preserveAttributeExtensions)
{
Assert.Equal(4, content.AttributeExtensions.Count);
Assert.Equal("", content.AttributeExtensions[new XmlQualifiedName("content_name1")]);
Assert.Equal("", content.AttributeExtensions[new XmlQualifiedName("content_name2", "content_namespace")]);
Assert.Equal("content_value", content.AttributeExtensions[new XmlQualifiedName("content_name3", "content_namespace")]);
Assert.Equal("", content.AttributeExtensions[new XmlQualifiedName("content_name4", "xmlns")]);
}
else
{
Assert.Empty(content.AttributeExtensions);
}
Assert.Equal("content_title", content.Text);
Assert.Equal("html", content.Type);
Assert.Equal(3, item.Contributors.Count);
SyndicationPerson itemFirstContributor = item.Contributors[0];
Assert.Empty(itemFirstContributor.AttributeExtensions);
Assert.Empty(itemFirstContributor.ElementExtensions);
Assert.Null(itemFirstContributor.Email);
Assert.Null(itemFirstContributor.Name);
Assert.Null(itemFirstContributor.Uri);
SyndicationPerson itemSecondContributor = item.Contributors[1];
Assert.Empty(itemSecondContributor.AttributeExtensions);
Assert.Empty(itemSecondContributor.ElementExtensions);
Assert.Null(itemSecondContributor.Email);
Assert.Null(itemSecondContributor.Name);
Assert.Null(itemSecondContributor.Uri);
SyndicationPerson itemThirdContributor = item.Contributors[2];
if (preserveAttributeExtensions)
{
Assert.Equal(4, itemThirdContributor.AttributeExtensions.Count);
Assert.Equal("", itemThirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name1")]);
Assert.Equal("", itemThirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name2", "contributor_namespace")]);
Assert.Equal("contributor_value", itemThirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name3", "contributor_namespace")]);
Assert.Equal("", itemThirdContributor.AttributeExtensions[new XmlQualifiedName("contributor_name4", "xmlns")]);
}
else
{
Assert.Empty(itemThirdContributor.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, itemThirdContributor.ElementExtensions.Count);
Assert.Equal(10, itemThirdContributor.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(itemThirdContributor.ElementExtensions);
}
Assert.Equal("contributor_email", itemThirdContributor.Email);
Assert.Equal("contributor_name", itemThirdContributor.Name);
Assert.Equal("contributor_uri", itemThirdContributor.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Copyright.AttributeExtensions.Count);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name1")]);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name2", "copyright_namespace")]);
Assert.Equal("copyright_value", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name3", "copyright_namespace")]);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Copyright.AttributeExtensions);
}
Assert.Equal("copyright_title", item.Copyright.Text);
Assert.Equal("html", item.Copyright.Type);
if (preserveElementExtensions)
{
Assert.Equal(1, item.ElementExtensions.Count);
Assert.Equal(10, item.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(item.ElementExtensions);
}
Assert.Equal("id", item.Id);
Assert.Equal(DateTimeOffset.MinValue, item.LastUpdatedTime);
Assert.Equal(4, item.Links.Count);
SyndicationLink itemFirstLink = item.Links[0];
Assert.Empty(itemFirstLink.AttributeExtensions);
Assert.Empty(itemFirstLink.ElementExtensions);
Assert.Equal(0, itemFirstLink.Length);
Assert.Null(itemFirstLink.MediaType);
Assert.Null(itemFirstLink.RelationshipType);
Assert.Null(itemFirstLink.Title);
Assert.Null(itemFirstLink.Uri);
SyndicationLink itemSecondLink = item.Links[1];
Assert.Empty(itemSecondLink.AttributeExtensions);
Assert.Empty(itemSecondLink.ElementExtensions);
Assert.Equal(0, itemSecondLink.Length);
Assert.Null(itemSecondLink.MediaType);
Assert.Null(itemSecondLink.RelationshipType);
Assert.Null(itemSecondLink.Title);
Assert.Null(itemSecondLink.Uri);
SyndicationLink itemThirdLink = item.Links[2];
Assert.Empty(itemThirdLink.AttributeExtensions);
Assert.Empty(itemThirdLink.ElementExtensions);
Assert.Equal(0, itemThirdLink.Length);
Assert.Null(itemThirdLink.MediaType);
Assert.Null(itemThirdLink.RelationshipType);
Assert.Null(itemThirdLink.Title);
Assert.Empty(itemThirdLink.Uri.OriginalString);
SyndicationLink itemFourthLink = item.Links[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, itemFourthLink.AttributeExtensions.Count);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name1")]);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name2", "link_namespace")]);
Assert.Equal("link_value", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name3", "link_namespace")]);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name4", "xmlns")]);
}
else
{
Assert.Empty(itemFourthLink.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, itemFourthLink.ElementExtensions.Count);
Assert.Equal(10, itemFourthLink.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(itemFourthLink.ElementExtensions);
}
Assert.Equal(new Uri("http://link_url.com"), itemFourthLink.BaseUri);
Assert.Equal(10, itemFourthLink.Length);
Assert.Equal("link_mediaType", itemFourthLink.MediaType);
Assert.Equal("link_relationshipType", itemFourthLink.RelationshipType);
Assert.Equal("link_title", itemFourthLink.Title);
Assert.Equal(new Uri("http://link_uri.com"), itemFourthLink.Uri);
Assert.Equal(DateTimeOffset.MinValue, item.PublishDate);
Assert.Null(item.SourceFeed);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Summary.AttributeExtensions.Count);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name1")]);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name2", "summary_namespace")]);
Assert.Equal("summary_value", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name3", "summary_namespace")]);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Summary.AttributeExtensions);
}
Assert.Equal("summary_title", item.Summary.Text);
Assert.Equal("html", item.Summary.Type);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Title.AttributeExtensions.Count);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name1")]);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name2", "title_namespace")]);
Assert.Equal("title_value", item.Title.AttributeExtensions[new XmlQualifiedName("title_name3", "title_namespace")]);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Title.AttributeExtensions);
}
Assert.Equal("title_title", item.Title.Text);
Assert.Equal("html", item.Title.Type);
Assert.Equal("language", feed.Language);
Assert.Equal(DateTimeOffset.MinValue.AddYears(1), feed.LastUpdatedTime);
Assert.Equal(4, feed.Links.Count);
SyndicationLink firstLink = feed.Links[0];
Assert.Empty(firstLink.AttributeExtensions);
Assert.Empty(firstLink.ElementExtensions);
Assert.Equal(0, firstLink.Length);
Assert.Null(firstLink.MediaType);
Assert.Null(firstLink.RelationshipType);
Assert.Null(firstLink.Title);
Assert.Null(firstLink.Uri);
SyndicationLink secondLink = feed.Links[1];
Assert.Empty(secondLink.AttributeExtensions);
Assert.Empty(secondLink.ElementExtensions);
Assert.Equal(0, secondLink.Length);
Assert.Null(secondLink.MediaType);
Assert.Null(secondLink.RelationshipType);
Assert.Null(secondLink.Title);
Assert.Null(secondLink.Uri);
SyndicationLink thirdLink = feed.Links[2];
Assert.Empty(thirdLink.AttributeExtensions);
Assert.Empty(thirdLink.ElementExtensions);
Assert.Equal(0, thirdLink.Length);
Assert.Null(thirdLink.MediaType);
Assert.Null(thirdLink.RelationshipType);
Assert.Null(thirdLink.Title);
Assert.Empty(thirdLink.Uri.OriginalString);
SyndicationLink fourthLink = feed.Links[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, fourthLink.AttributeExtensions.Count);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name1")]);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name2", "link_namespace")]);
Assert.Equal("link_value", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name3", "link_namespace")]);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name4", "xmlns")]);
}
else
{
Assert.Empty(fourthLink.AttributeExtensions);
}
if (preserveElementExtensions)
{
Assert.Equal(1, fourthLink.ElementExtensions.Count);
Assert.Equal(10, fourthLink.ElementExtensions[0].GetObject<ExtensionObject>().Value);
}
else
{
Assert.Empty(fourthLink.ElementExtensions);
}
Assert.Equal(new Uri("http://link_url.com"), fourthLink.BaseUri);
Assert.Equal(10, fourthLink.Length);
Assert.Equal("link_mediaType", fourthLink.MediaType);
Assert.Equal("link_relationshipType", fourthLink.RelationshipType);
Assert.Equal("link_title", fourthLink.Title);
Assert.Equal(new Uri("http://link_uri.com"), fourthLink.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, feed.Title.AttributeExtensions.Count);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name1")]);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name2", "feedtitle_namespace")]);
Assert.Equal("feedtitle_value", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name3", "feedtitle_namespace")]);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Title.AttributeExtensions);
}
Assert.Equal("feedtitle_title", feed.Title.Text);
Assert.Equal("html", feed.Title.Type);
});
}
[Theory]
[InlineData(true, true)]
[InlineData(false, false)]
public void Read_TryParseTrue_ReturnsExpected(bool preserveAttributeExtensions, bool preserveElementExtensions)
{
using (var stringReader = new StringReader(
@"<feed xml:lang=""language"" xml:base=""http://microsoft.com/"" feed_name1="""" d1p1:feed_name2="""" d1p1:feed_name3=""feed_value"" d1p2:feed_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""feed_namespace"" xmlns=""http://www.w3.org/2005/Atom"">
<title type=""html"" feedtitle_name1="""" d2p1:feedtitle_name2="""" d2p1:feedtitle_name3=""feedtitle_value"" d1p2:feedtitle_name4="""" xmlns:d2p1=""feedtitle_namespace"">feedtitle_title</title>
<subtitle type=""html"" feeddescription_name1="""" d2p1:feeddescription_name2="""" d2p1:feeddescription_name3=""feeddescription_value"" d1p2:feeddescription_name4="""" xmlns:d2p1=""feeddescription_namespace"">feeddescription_title</subtitle>
<id>id</id>
<rights type=""html"" feedcopyright_name1="""" d2p1:feedcopyright_name2="""" d2p1:feedcopyright_name3=""feedcopyright_value"" d1p2:feedcopyright_name4="""" xmlns:d2p1=""feedcopyright_namespace"">feedcopyright_title</rights>
<updated>0002-01-01T00:00:00Z</updated>
<category />
<category></category>
<category term="""" />
<category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<logo>http://imageurl.com/</logo>
<author />
<author></author>
<author author_name1="""" d2p1:author_name2="""" d2p1:author_name3=""author_value"" d1p2:author_name4="""" xmlns:d2p1=""author_namespace"">
<name>author_name</name>
<uri>author_uri</uri>
<email>author_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor></contributor>
<contributor contributor_name1="""" d2p1:contributor_name2="""" d2p1:contributor_name3=""contributor_value"" d1p2:contributor_name4="""" xmlns:d2p1=""contributor_namespace"">
<name>contributor_name</name>
<uri>contributor_uri</uri>
<email>contributor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<generator>generator</generator>
<link />
<link></link>
<link href="""" />
<link xml:base=""http://link_url.com/"" link_name1="""" d2p1:link_name2="""" d2p1:link_name3=""link_value"" d1p2:link_name4="""" rel=""link_relationshipType"" type=""link_mediaType"" title=""link_title"" length=""10"" href=""http://link_uri.com/"" xmlns:d2p1=""link_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
<entry>
<id>id</id>
<title type=""text"" />
<updated>0001-01-01T00:00:00Z</updated>
</entry>
<entry xml:base=""/relative"" item_name1="""" d1p1:item_name2="""" d1p1:item_name3=""item_value"" d1p2:item_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""item_namespace"" xmlns=""http://www.w3.org/2005/Atom"">
<id>id</id>
<title type=""html"" title_name1="""" d2p1:title_name2="""" d2p1:title_name3=""title_value"" d1p2:title_name4="""" xmlns:d2p1=""title_namespace"">title_title</title>
<summary type=""html"" summary_name1="""" d2p1:summary_name2="""" d2p1:summary_name3=""summary_value"" d1p2:summary_name4="""" xmlns:d2p1=""summary_namespace"">summary_title</summary>
<published>0001-01-01T00:00:00Z</published>
<updated>0001-01-01T00:00:00Z</updated>
<author />
<author></author>
<author author_name1="""" d2p1:author_name2="""" d2p1:author_name3=""author_value"" d1p2:author_name4="""" xmlns:d2p1=""author_namespace"">
<name>author_name</name>
<uri>author_uri</uri>
<email>author_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</author>
<contributor />
<contributor></contributor>
<contributor contributor_name1="""" d2p1:contributor_name2="""" d2p1:contributor_name3=""contributor_value"" d1p2:contributor_name4="""" xmlns:d2p1=""contributor_namespace"">
<name>contributor_name</name>
<uri>contributor_uri</uri>
<email>contributor_email</email>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</contributor>
<link />
<link></link>
<link href="""" />
<link xml:base=""http://link_url.com/"" link_name1="""" d2p1:link_name2="""" d2p1:link_name3=""link_value"" d1p2:link_name4="""" rel=""link_relationshipType"" type=""link_mediaType"" title=""link_title"" length=""10"" href=""http://link_uri.com/"" xmlns:d2p1=""link_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</link>
<category />
<category></category>
<category term="""" />
<category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</category>
<content type=""html"" content_name1="""" d2p1:content_name2="""" d2p1:content_name3=""content_value"" d1p2:content_name4="""" xmlns:d2p1=""content_namespace"">content_title</content>
<rights type=""html"" copyright_name1="""" d2p1:copyright_name2="""" d2p1:copyright_name3=""copyright_value"" d1p2:copyright_name4="""" xmlns:d2p1=""copyright_namespace"">copyright_title</rights>
<Atom10FeedFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</Atom10FeedFormatterTests.ExtensionObject>
</entry>
</feed>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter<SyndicationFeedTryParseTrueSubclass>()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
SyndicationFeed feed = formatter.Feed;
Assert.Empty(feed.AttributeExtensions);
Assert.Equal(3, feed.Authors.Count);
SyndicationPerson firstAuthor = feed.Authors[0];
Assert.Empty(firstAuthor.AttributeExtensions);
Assert.Empty(firstAuthor.ElementExtensions);
Assert.Null(firstAuthor.Email);
Assert.Null(firstAuthor.Name);
Assert.Null(firstAuthor.Uri);
SyndicationPerson secondAuthor = feed.Authors[1];
Assert.Empty(secondAuthor.AttributeExtensions);
Assert.Empty(secondAuthor.ElementExtensions);
Assert.Null(secondAuthor.Email);
Assert.Null(secondAuthor.Name);
Assert.Null(secondAuthor.Uri);
SyndicationPerson thirdAuthor = feed.Authors[2];
Assert.Empty(thirdAuthor.AttributeExtensions);
Assert.Empty(thirdAuthor.ElementExtensions);
Assert.Equal("author_email", thirdAuthor.Email);
Assert.Equal("author_name", thirdAuthor.Name);
Assert.Equal("author_uri", thirdAuthor.Uri);
Assert.Equal(new Uri("http://microsoft.com"), feed.BaseUri);
Assert.Equal(4, feed.Categories.Count);
SyndicationCategory firstCategory = feed.Categories[0];
Assert.Empty(firstCategory.AttributeExtensions);
Assert.Empty(firstCategory.ElementExtensions);
Assert.Null(firstCategory.Name);
Assert.Null(firstCategory.Scheme);
Assert.Null(firstCategory.Label);
SyndicationCategory secondCategory = feed.Categories[1];
Assert.Empty(secondCategory.AttributeExtensions);
Assert.Empty(secondCategory.ElementExtensions);
Assert.Null(secondCategory.Name);
Assert.Null(secondCategory.Scheme);
Assert.Null(secondCategory.Label);
SyndicationCategory thirdCategory = feed.Categories[2];
Assert.Empty(thirdCategory.AttributeExtensions);
Assert.Empty(thirdCategory.ElementExtensions);
Assert.Empty(thirdCategory.Name);
Assert.Null(thirdCategory.Scheme);
Assert.Null(thirdCategory.Label);
SyndicationCategory fourthCategory = feed.Categories[3];
Assert.Empty(fourthCategory.AttributeExtensions);
Assert.Empty(fourthCategory.ElementExtensions);
Assert.Equal("category_name", fourthCategory.Name);
Assert.Equal("category_scheme", fourthCategory.Scheme);
Assert.Equal("category_label", fourthCategory.Label);
Assert.Equal(3, feed.Contributors.Count);
SyndicationPerson firstContributor = feed.Contributors[0];
Assert.Empty(firstContributor.AttributeExtensions);
Assert.Empty(firstContributor.ElementExtensions);
Assert.Null(firstContributor.Email);
Assert.Null(firstContributor.Name);
Assert.Null(firstContributor.Uri);
SyndicationPerson secondContributor = feed.Contributors[1];
Assert.Empty(secondContributor.AttributeExtensions);
Assert.Empty(secondContributor.ElementExtensions);
Assert.Null(secondContributor.Email);
Assert.Null(secondContributor.Name);
Assert.Null(secondContributor.Uri);
SyndicationPerson thirdContributor = feed.Contributors[2];
Assert.Empty(thirdContributor.AttributeExtensions);
Assert.Empty(thirdContributor.ElementExtensions);
Assert.Equal("contributor_email", thirdContributor.Email);
Assert.Equal("contributor_name", thirdContributor.Name);
Assert.Equal("contributor_uri", thirdContributor.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, feed.Copyright.AttributeExtensions.Count);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name1")]);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name2", "feedcopyright_namespace")]);
Assert.Equal("feedcopyright_value", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name3", "feedcopyright_namespace")]);
Assert.Equal("", feed.Copyright.AttributeExtensions[new XmlQualifiedName("feedcopyright_name4", "xmlns")]);
}
else
{
Assert.Empty(feed.Copyright.AttributeExtensions);
}
Assert.Equal("feedcopyright_title", feed.Copyright.Text);
Assert.Equal("html", feed.Copyright.Type);
Assert.Equal("generator", feed.Generator);
Assert.Empty(feed.ElementExtensions);
Assert.Equal("id", feed.Id);
Assert.Equal(new Uri("http://imageurl.com/"), feed.ImageUrl);
SyndicationItem[] items = feed.Items.ToArray();
Assert.Equal(2, items.Length);
SyndicationItem item = items[1];
Assert.Empty(item.AttributeExtensions);
Assert.Equal(3, item.Authors.Count);
SyndicationPerson itemFirstAuthor = item.Authors[0];
Assert.Empty(itemFirstAuthor.AttributeExtensions);
Assert.Empty(itemFirstAuthor.ElementExtensions);
Assert.Null(itemFirstAuthor.Email);
Assert.Null(itemFirstAuthor.Name);
Assert.Null(itemFirstAuthor.Uri);
SyndicationPerson itemSecondAuthor = item.Authors[1];
Assert.Empty(itemSecondAuthor.AttributeExtensions);
Assert.Empty(itemSecondAuthor.ElementExtensions);
Assert.Null(itemSecondAuthor.Email);
Assert.Null(itemSecondAuthor.Name);
Assert.Null(itemSecondAuthor.Uri);
SyndicationPerson itemThirdAuthor = item.Authors[2];
Assert.Empty(itemThirdAuthor.AttributeExtensions);
Assert.Empty(itemThirdAuthor.ElementExtensions);
Assert.Equal("author_email", itemThirdAuthor.Email);
Assert.Equal("author_name", itemThirdAuthor.Name);
Assert.Equal("author_uri", itemThirdAuthor.Uri);
Assert.Equal(new Uri("http://microsoft.com/relative"), item.BaseUri);
Assert.Equal(4, item.Categories.Count);
SyndicationCategory itemFirstCategory = item.Categories[0];
Assert.Empty(itemFirstCategory.AttributeExtensions);
Assert.Empty(itemFirstCategory.ElementExtensions);
Assert.Null(itemFirstCategory.Name);
Assert.Null(itemFirstCategory.Scheme);
Assert.Null(itemFirstCategory.Label);
SyndicationCategory itemSecondCategory = item.Categories[1];
Assert.Empty(itemSecondCategory.AttributeExtensions);
Assert.Empty(itemSecondCategory.ElementExtensions);
Assert.Null(itemSecondCategory.Name);
Assert.Null(itemSecondCategory.Scheme);
Assert.Null(itemSecondCategory.Label);
SyndicationCategory itemThirdCategory = item.Categories[2];
Assert.Empty(itemThirdCategory.AttributeExtensions);
Assert.Empty(itemThirdCategory.ElementExtensions);
Assert.Empty(itemThirdCategory.Name);
Assert.Null(itemThirdCategory.Scheme);
Assert.Null(itemThirdCategory.Label);
SyndicationCategory itemFourthCategory = item.Categories[3];
Assert.Empty(itemFourthCategory.AttributeExtensions);
Assert.Empty(itemFourthCategory.ElementExtensions);
Assert.Equal("category_name", itemFourthCategory.Name);
Assert.Equal("category_scheme", itemFourthCategory.Scheme);
Assert.Equal("category_label", itemFourthCategory.Label);
TextSyndicationContent content = Assert.IsType<TextSyndicationContent>(item.Content);
Assert.Empty(content.AttributeExtensions);
Assert.Equal("overriden", content.Text);
Assert.Equal("text", content.Type);
Assert.Equal(3, item.Contributors.Count);
SyndicationPerson itemFirstContributor = item.Contributors[0];
Assert.Empty(itemFirstContributor.AttributeExtensions);
Assert.Empty(itemFirstContributor.ElementExtensions);
Assert.Null(itemFirstContributor.Email);
Assert.Null(itemFirstContributor.Name);
Assert.Null(itemFirstContributor.Uri);
SyndicationPerson itemSecondContributor = item.Contributors[1];
Assert.Empty(itemSecondContributor.AttributeExtensions);
Assert.Empty(itemSecondContributor.ElementExtensions);
Assert.Null(itemSecondContributor.Email);
Assert.Null(itemSecondContributor.Name);
Assert.Null(itemSecondContributor.Uri);
SyndicationPerson itemThirdContributor = item.Contributors[2];
Assert.Empty(itemThirdContributor.AttributeExtensions);
Assert.Empty(itemThirdContributor.ElementExtensions);
Assert.Equal("contributor_email", itemThirdContributor.Email);
Assert.Equal("contributor_name", itemThirdContributor.Name);
Assert.Equal("contributor_uri", thirdContributor.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Copyright.AttributeExtensions.Count);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name1")]);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name2", "copyright_namespace")]);
Assert.Equal("copyright_value", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name3", "copyright_namespace")]);
Assert.Equal("", item.Copyright.AttributeExtensions[new XmlQualifiedName("copyright_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Copyright.AttributeExtensions);
}
Assert.Equal("copyright_title", item.Copyright.Text);
Assert.Equal("html", item.Copyright.Type);
Assert.Empty(item.ElementExtensions);
Assert.Equal("id", item.Id);
Assert.Equal(DateTimeOffset.MinValue, item.LastUpdatedTime);
Assert.Equal(4, item.Links.Count);
SyndicationLink itemFirstLink = item.Links[0];
Assert.Empty(itemFirstLink.AttributeExtensions);
Assert.Empty(itemFirstLink.ElementExtensions);
Assert.Equal(0, itemFirstLink.Length);
Assert.Null(itemFirstLink.MediaType);
Assert.Null(itemFirstLink.RelationshipType);
Assert.Null(itemFirstLink.Title);
Assert.Null(itemFirstLink.Uri);
SyndicationLink itemSecondLink = item.Links[1];
Assert.Empty(itemSecondLink.AttributeExtensions);
Assert.Empty(itemSecondLink.ElementExtensions);
Assert.Equal(0, itemSecondLink.Length);
Assert.Null(itemSecondLink.MediaType);
Assert.Null(itemSecondLink.RelationshipType);
Assert.Null(itemSecondLink.Title);
Assert.Null(itemSecondLink.Uri);
SyndicationLink itemThirdLink = item.Links[2];
Assert.Empty(itemThirdLink.AttributeExtensions);
Assert.Empty(itemThirdLink.ElementExtensions);
Assert.Equal(0, itemThirdLink.Length);
Assert.Null(itemThirdLink.MediaType);
Assert.Null(itemThirdLink.RelationshipType);
Assert.Null(itemThirdLink.Title);
Assert.Empty(itemThirdLink.Uri.OriginalString);
SyndicationLink itemFourthLink = item.Links[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, itemFourthLink.AttributeExtensions.Count);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name1")]);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name2", "link_namespace")]);
Assert.Equal("link_value", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name3", "link_namespace")]);
Assert.Equal("", itemFourthLink.AttributeExtensions[new XmlQualifiedName("link_name4", "xmlns")]);
}
else
{
Assert.Empty(itemFourthLink.AttributeExtensions);
}
Assert.Empty(itemFourthLink.ElementExtensions);
Assert.Equal(new Uri("http://link_url.com"), itemFourthLink.BaseUri);
Assert.Equal(10, itemFourthLink.Length);
Assert.Equal("link_mediaType", itemFourthLink.MediaType);
Assert.Equal("link_relationshipType", itemFourthLink.RelationshipType);
Assert.Equal("link_title", itemFourthLink.Title);
Assert.Equal(new Uri("http://link_uri.com"), itemFourthLink.Uri);
Assert.Equal(DateTimeOffset.MinValue, item.PublishDate);
Assert.Null(item.SourceFeed);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Summary.AttributeExtensions.Count);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name1")]);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name2", "summary_namespace")]);
Assert.Equal("summary_value", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name3", "summary_namespace")]);
Assert.Equal("", item.Summary.AttributeExtensions[new XmlQualifiedName("summary_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Summary.AttributeExtensions);
}
Assert.Equal("summary_title", item.Summary.Text);
Assert.Equal("html", item.Summary.Type);
if (preserveAttributeExtensions)
{
Assert.Equal(4, item.Title.AttributeExtensions.Count);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name1")]);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name2", "title_namespace")]);
Assert.Equal("title_value", item.Title.AttributeExtensions[new XmlQualifiedName("title_name3", "title_namespace")]);
Assert.Equal("", item.Title.AttributeExtensions[new XmlQualifiedName("title_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Title.AttributeExtensions);
}
Assert.Equal("title_title", item.Title.Text);
Assert.Equal("html", item.Title.Type);
Assert.Equal("language", feed.Language);
Assert.Equal(DateTimeOffset.MinValue.AddYears(1), feed.LastUpdatedTime);
Assert.Equal(4, feed.Links.Count);
SyndicationLink firstLink = feed.Links[0];
Assert.Empty(firstLink.AttributeExtensions);
Assert.Empty(firstLink.ElementExtensions);
Assert.Equal(0, firstLink.Length);
Assert.Null(firstLink.MediaType);
Assert.Null(firstLink.RelationshipType);
Assert.Null(firstLink.Title);
Assert.Null(firstLink.Uri);
SyndicationLink secondLink = feed.Links[1];
Assert.Empty(secondLink.AttributeExtensions);
Assert.Empty(secondLink.ElementExtensions);
Assert.Equal(0, secondLink.Length);
Assert.Null(secondLink.MediaType);
Assert.Null(secondLink.RelationshipType);
Assert.Null(secondLink.Title);
Assert.Null(secondLink.Uri);
SyndicationLink thirdLink = feed.Links[2];
Assert.Empty(thirdLink.AttributeExtensions);
Assert.Empty(thirdLink.ElementExtensions);
Assert.Equal(0, thirdLink.Length);
Assert.Null(thirdLink.MediaType);
Assert.Null(thirdLink.RelationshipType);
Assert.Null(thirdLink.Title);
Assert.Empty(thirdLink.Uri.OriginalString);
SyndicationLink fourthLink = feed.Links[3];
if (preserveAttributeExtensions)
{
Assert.Equal(4, fourthLink.AttributeExtensions.Count);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name1")]);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name2", "link_namespace")]);
Assert.Equal("link_value", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name3", "link_namespace")]);
Assert.Equal("", fourthLink.AttributeExtensions[new XmlQualifiedName("link_name4", "xmlns")]);
}
else
{
Assert.Empty(fourthLink.AttributeExtensions);
}
Assert.Empty(fourthLink.ElementExtensions);
Assert.Equal(new Uri("http://link_url.com"), fourthLink.BaseUri);
Assert.Equal(10, fourthLink.Length);
Assert.Equal("link_mediaType", fourthLink.MediaType);
Assert.Equal("link_relationshipType", fourthLink.RelationshipType);
Assert.Equal("link_title", fourthLink.Title);
Assert.Equal(new Uri("http://link_uri.com"), fourthLink.Uri);
if (preserveAttributeExtensions)
{
Assert.Equal(4, feed.Title.AttributeExtensions.Count);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name1")]);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name2", "feedtitle_namespace")]);
Assert.Equal("feedtitle_value", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name3", "feedtitle_namespace")]);
Assert.Equal("", feed.Title.AttributeExtensions[new XmlQualifiedName("feedtitle_name4", "xmlns")]);
}
else
{
Assert.Empty(item.Title.AttributeExtensions);
}
Assert.Equal("feedtitle_title", feed.Title.Text);
Assert.Equal("html", feed.Title.Type);
}
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void Read_EmptyItem_ReturnsExpected(bool preserveElementExtensions)
{
VerifyRead(@"<feed xmlns=""http://www.w3.org/2005/Atom""></feed>", preserveElementExtensions, preserveElementExtensions, feed =>
{
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
});
}
[Theory]
[InlineData(true, true)]
[InlineData(false, false)]
public void Read_CustomReadItems_ReturnsExpected(bool preserveAttributeExtensions, bool preserveElementExtensions)
{
using (var stringReader = new StringReader(@"<feed xmlns=""http://www.w3.org/2005/Atom""><entry></entry><entry></entry></feed>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new CustomAtom10FeedFormatter()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
SyndicationFeed feed = formatter.Feed;
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Single(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
}
}
private class CustomAtom10FeedFormatter : Atom10FeedFormatter
{
protected override IEnumerable<SyndicationItem> ReadItems(XmlReader reader, SyndicationFeed feed, out bool areAllItemsRead)
{
areAllItemsRead = false;
return new SyndicationItem[] { new SyndicationItem() };
}
}
private static void VerifyRead(string xmlString, bool preserveAttributeExtensions, bool preserveElementExtensions, Action<SyndicationFeed> verifyAction)
{
// ReadFrom.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
verifyAction(formatter.Feed);
}
// ReadXml.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Atom10FeedFormatter()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
((IXmlSerializable)formatter).ReadXml(reader);
verifyAction(formatter.Feed);
}
// Derived ReadFrom.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter(typeof(SyndicationFeedSubclass))
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
verifyAction(formatter.Feed);
}
// Derived ReadXml.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Atom10FeedFormatter(typeof(SyndicationFeedSubclass))
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
((IXmlSerializable)formatter).ReadXml(reader);
verifyAction(formatter.Feed);
}
// Generic ReadFrom.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter<SyndicationFeed>()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
verifyAction(formatter.Feed);
}
// Generic ReadXml.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Atom10FeedFormatter<SyndicationFeed>()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
((IXmlSerializable)formatter).ReadXml(reader);
verifyAction(formatter.Feed);
}
// Generic Derived ReadFrom.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter<SyndicationFeedSubclass>()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
formatter.ReadFrom(reader);
verifyAction(formatter.Feed);
}
// Generic Derived ReadXml.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Atom10FeedFormatter<SyndicationFeedSubclass>()
{
PreserveAttributeExtensions = preserveAttributeExtensions,
PreserveElementExtensions = preserveElementExtensions
};
((IXmlSerializable)formatter).ReadXml(reader);
verifyAction(formatter.Feed);
}
if (preserveAttributeExtensions && preserveElementExtensions)
{
// Load.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
SyndicationFeed feed = SyndicationFeed.Load(reader);
verifyAction(feed);
}
// Generic Load.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
SyndicationFeed feed = SyndicationFeed.Load<SyndicationFeed>(reader);
verifyAction(feed);
}
}
}
[Fact]
public void ReadFrom_NullReader_ThrowsArgumentNullException()
{
var formatter = new Atom10FeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadFrom(null));
}
[Fact]
public void ReadFrom_NullCreatedFeed_ThrowsArgumentNullException()
{
using (var stringReader = new StringReader(@"<feed xmlns=""http://www.w3.org/2005/Atom""></feed>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new NullCreatedFeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("feed", () => formatter.ReadFrom(reader));
}
}
[Theory]
[InlineData(@"<different xmlns=""http://www.w3.org/2005/Atom""></different>")]
[InlineData(@"<feed xmlns=""different""></entry>")]
[InlineData(@"<feed></feed>")]
[InlineData(@"<feed/>")]
[InlineData(@"<feed xmlns=""http://www.w3.org/2005/Atom"" />")]
public void ReadFrom_CantRead_ThrowsXmlException(string xmlString)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter();
Assert.Throws<XmlException>(() => formatter.ReadFrom(reader));
}
}
[Theory]
[InlineData("<feed></feed>")]
[InlineData(@"<app:feed xmlns:app=""http://www.w3.org/2005/Atom""></app:feed>")]
[InlineData(@"<feed xmlns=""different""></feed>")]
[InlineData(@"<different xmlns=""http://www.w3.org/2005/Atom""></different>")]
public void ReadXml_ValidReader_Success(string xmlString)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Atom10FeedFormatter();
((IXmlSerializable)formatter).ReadXml(reader);
SyndicationFeed feed = formatter.Feed;
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
}
}
[Fact]
public void ReadXml_NullReader_ThrowsArgumentNullException()
{
IXmlSerializable formatter = new Atom10FeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadXml(null));
}
[Fact]
public void ReadXml_NullCreatedFeed_ThrowsArgumentNullException()
{
using (var stringReader = new StringReader(@"<entry xmlns=""http://www.w3.org/2005/Atom""></entry>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
IXmlSerializable formatter = new NullCreatedFeedFormatter();
AssertExtensions.Throws<ArgumentNullException>("feed", () => formatter.ReadXml(reader));
}
}
[Theory]
[InlineData("<feed />")]
[InlineData(@"<feed xmlns:app=""http://www.w3.org/2005/Atom"" />")]
[InlineData("<different />")]
public void ReadXml_CantRead_ThrowsXmlException(string xmlString)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
IXmlSerializable formatter = new Atom10FeedFormatter();
Assert.Throws<XmlException>(() => formatter.ReadXml(reader));
}
}
[Fact]
public void ReadXml_ThrowsArgumentException_RethrowsAsXmlException()
{
var reader = new ThrowingXmlReader(new ArgumentException());
IXmlSerializable formatter = new Atom10FeedFormatter();
Assert.Throws<XmlException>(() => formatter.ReadXml(reader));
}
[Fact]
public void ReadXml_ThrowsFormatException_RethrowsAsXmlException()
{
var reader = new ThrowingXmlReader(new FormatException());
IXmlSerializable formatter = new Atom10FeedFormatter();
Assert.Throws<XmlException>(() => formatter.ReadXml(reader));
}
[Fact]
public void ReadItem_ValidItem_ReturnsExpected()
{
using (var stringReader = new StringReader(@"<entry><id xmlns=""http://www.w3.org/2005/Atom"">id</id></entry>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new Formatter();
var feed = new SyndicationFeed();
SyndicationItem item = formatter.ReadItemEntryPoint(reader, feed);
Assert.Equal("id", item.Id);
Assert.Null(item.SourceFeed);
Assert.Empty(feed.Items);
}
}
[Fact]
public void ReadItem_NullReader_ThrowsArgumentNullException()
{
var formatter = new Formatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadItemEntryPoint(null, new SyndicationFeed()));
}
[Fact]
public void ReadItem_NullFeed_ThrowsArgumentNullException()
{
using (var stringReader = new StringReader("<entry></entry>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Formatter();
AssertExtensions.Throws<ArgumentNullException>("feed", () => formatter.ReadItemEntryPoint(reader, null));
}
}
[Fact]
public void ReadItems_ValidItems_ReturnsExpected()
{
using (var stringReader = new StringReader(
@"<parent>
<entry xmlns=""http://www.w3.org/2005/Atom""><id>id1</id></entry>
<entry xmlns=""http://www.w3.org/2005/Atom""><id>id2</id></entry>
<unknown></unknown>
<entry xmlns=""http://www.w3.org/2005/Atom""><id>id3</id></entry>
</parent>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
reader.ReadStartElement();
reader.MoveToElement();
var formatter = new Formatter();
var feed = new SyndicationFeed();
SyndicationItem[] items = formatter.ReadItemsEntryPoint(reader, feed, out var areAllItemsRead).ToArray();
Assert.True(areAllItemsRead);
Assert.Empty(feed.Items);
Assert.Equal(2, items.Length);
Assert.Equal("id1", items[0].Id);
Assert.Null(items[0].SourceFeed);
Assert.Equal("id2", items[1].Id);
Assert.Null(items[1].SourceFeed);
}
}
[Fact]
public void ReadItems_NullReader_ThrowsArgumentNullException()
{
var formatter = new Formatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadItemsEntryPoint(null, new SyndicationFeed(), out var areAllItemsReader));
}
[Fact]
public void ReadItems_NullFeed_ThrowsArgumentNullException()
{
using (var stringReader = new StringReader("<entry></entry>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Formatter();
AssertExtensions.Throws<ArgumentNullException>("feed", () => formatter.ReadItemsEntryPoint(reader, null, out var areAllItemsReader));
}
}
[Theory]
[InlineData("")]
[InlineData("invalid")]
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Custom date parsing added in .NET Core changes this behaviour")]
public void Read_InvalidLastUpdatedTime_GetThrowsXmlException(string updated)
{
using (var stringReader = new StringReader(@"<feed xmlns=""http://www.w3.org/2005/Atom""><updated>" + updated + "</updated></feed>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new Atom10FeedFormatter();
formatter.ReadFrom(reader);
Assert.Throws<XmlException>(() => formatter.Feed.LastUpdatedTime);
}
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void PreserveAttributeExtensions_Set_GetReturnsExpected(bool preserveAttributeExtensions)
{
var formatter = new Atom10FeedFormatter() { PreserveAttributeExtensions = preserveAttributeExtensions };
Assert.Equal(preserveAttributeExtensions, formatter.PreserveAttributeExtensions);
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void PreserveElementExtensions_Set_GetReturnsExpected(bool preserveElementExtensions)
{
var formatter = new Atom10FeedFormatter() { PreserveElementExtensions = preserveElementExtensions };
Assert.Equal(preserveElementExtensions, formatter.PreserveElementExtensions);
}
[Fact]
public void CreateFeedInstance_NonGeneric_Success()
{
var formatter = new Formatter();
SyndicationFeed feed = Assert.IsType<SyndicationFeed>(formatter.CreateFeedInstanceEntryPoint());
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
var typedFormatter = new Formatter(typeof(SyndicationFeedSubclass));
feed = Assert.IsType<SyndicationFeedSubclass>(typedFormatter.CreateFeedInstanceEntryPoint());
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
}
[Fact]
public void CreateItemInstance_Generic_Success()
{
var formatter = new GenericFormatter<SyndicationFeed>();
SyndicationFeed feed = Assert.IsType<SyndicationFeed>(formatter.CreateFeedInstanceEntryPoint());
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
var typedFormatter = new GenericFormatter<SyndicationFeedSubclass>();
feed = Assert.IsType<SyndicationFeedSubclass>(typedFormatter.CreateFeedInstanceEntryPoint());
Assert.Empty(feed.AttributeExtensions);
Assert.Empty(feed.Authors);
Assert.Null(feed.BaseUri);
Assert.Empty(feed.Categories);
Assert.Empty(feed.Contributors);
Assert.Null(feed.Copyright);
Assert.Null(feed.Description);
Assert.Empty(feed.ElementExtensions);
Assert.Null(feed.Generator);
Assert.Null(feed.Id);
Assert.Null(feed.ImageUrl);
Assert.Empty(feed.Items);
Assert.Null(feed.Language);
Assert.Equal(DateTimeOffset.MinValue, feed.LastUpdatedTime);
Assert.Empty(feed.Links);
Assert.Null(feed.Title);
}
public class SyndicationFeedSubclass : SyndicationFeed { }
public class SyndicationFeedTryParseTrueSubclass : SyndicationFeed
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
protected override SyndicationCategory CreateCategory() => new SyndicationCategoryTryParseTrueSubclass();
protected override SyndicationItem CreateItem() => new SyndicationItemTryParseTrueSubclass();
protected override SyndicationLink CreateLink() => new SyndicationLinkTryParseTrueSubclass();
protected override SyndicationPerson CreatePerson() => new SyndicationPersonTryParseTrueSubclass();
}
public class SyndicationItemTryParseTrueSubclass : SyndicationItem
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseContent(XmlReader reader, string contentType, string version, out SyndicationContent content)
{
reader.Skip();
content = new TextSyndicationContent("overriden");
return true;
}
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
protected override SyndicationCategory CreateCategory() => new SyndicationCategoryTryParseTrueSubclass();
protected override SyndicationPerson CreatePerson() => new SyndicationPersonTryParseTrueSubclass();
protected override SyndicationLink CreateLink() => new SyndicationLinkTryParseTrueSubclass();
}
public class SyndicationCategoryTryParseTrueSubclass : SyndicationCategory
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
}
public class SyndicationPersonTryParseTrueSubclass : SyndicationPerson
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
}
public class SyndicationLinkTryParseTrueSubclass : SyndicationLink
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
}
public class NullCreatedFeedFormatter : Atom10FeedFormatter
{
protected override SyndicationFeed CreateFeedInstance() => null;
}
public class Formatter : Atom10FeedFormatter
{
public Formatter() : base() { }
public Formatter(SyndicationFeed feedToWrite) : base(feedToWrite) { }
public Formatter(Type feedTypeToCreate) : base(feedTypeToCreate) { }
public Type FeedTypeEntryPoint => FeedType;
public SyndicationFeed CreateFeedInstanceEntryPoint() => CreateFeedInstance();
public void WriteItemEntryPoint(XmlWriter writer, SyndicationItem item, Uri feedBaseUri) => WriteItem(writer, item, feedBaseUri);
public void WriteItemsEntryPoint(XmlWriter writer, IEnumerable<SyndicationItem> items, Uri feedBaseUri) => WriteItems(writer, items, feedBaseUri);
public SyndicationItem ReadItemEntryPoint(XmlReader reader, SyndicationFeed feed) => ReadItem(reader, feed);
public IEnumerable<SyndicationItem> ReadItemsEntryPoint(XmlReader reader, SyndicationFeed feed, out bool areAllItemsRead)
{
return ReadItems(reader, feed, out areAllItemsRead);
}
}
public class GenericFormatter<T> : Atom10FeedFormatter<T> where T : SyndicationFeed, new()
{
public GenericFormatter() : base() { }
public GenericFormatter(T feedToWrite) : base(feedToWrite) { }
public Type FeedTypeEntryPoint => FeedType;
public SyndicationFeed CreateFeedInstanceEntryPoint() => CreateFeedInstance();
}
[DataContract]
public class ExtensionObject
{
[DataMember]
public int Value { get; set; }
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
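For illustration only (not code taken from this PR): a minimal sketch of the xunit [Theory] plus shared class-fixture shape referred to above, where per-class state is built once and reused by every inline case instead of being recreated per test. All names and values here (FrameworkResolutionSketch, SharedTestState, MainHivePath, the TFM strings) are hypothetical placeholders, not the actual test assets.
using System;
using System.IO;
using Xunit;
public class FrameworkResolutionSketch : IClassFixture<FrameworkResolutionSketch.SharedTestState>
{
    private readonly SharedTestState _state;
    public FrameworkResolutionSketch(SharedTestState state) => _state = state;
    [Theory]
    [InlineData("net5.0")]
    [InlineData("net6.0")]
    [InlineData("net7.0")]
    public void Resolves_ForEachTfm(string tfm)
    {
        // The expensive setup in SharedTestState ran once for all of these cases.
        Assert.False(string.IsNullOrEmpty(tfm));
        Assert.True(Directory.Exists(_state.MainHivePath));
    }
    // Built once per test class instead of once per test case.
    public class SharedTestState : IDisposable
    {
        public string MainHivePath { get; } =
            Directory.CreateDirectory(Path.Combine(Path.GetTempPath(), Path.GetRandomFileName())).FullName;
        public void Dispose() => Directory.Delete(MainHivePath, recursive: true);
    }
}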
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Threading.AccessControl/src/System/Threading/MutexAcl.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.AccessControl;
using Microsoft.Win32.SafeHandles;
namespace System.Threading
{
public static class MutexAcl
{
/// <summary>Gets or creates <see cref="Mutex" /> instance, allowing a <see cref="MutexSecurity" /> to be optionally specified to set it during the mutex creation.</summary>
/// <param name="initiallyOwned"><see langword="true" /> to give the calling thread initial ownership of the named system mutex if the named system mutex is created as a result of this call; otherwise, <see langword="false" />.</param>
/// <param name="name">The optional name of the system mutex. If this argument is set to <see langword="null" /> or <see cref="string.Empty" />, a local mutex is created.</param>
/// <param name="createdNew">When this method returns, this argument is always set to <see langword="true" /> if a local mutex is created; that is, when <paramref name="name" /> is <see langword="null" /> or <see cref="string.Empty" />. If <paramref name="name" /> has a valid non-empty value, this argument is set to <see langword="true" /> when the system mutex is created, or it is set to <see langword="false" /> if an existing system mutex is found with that name. This parameter is passed uninitialized.</param>
/// <param name="mutexSecurity">The optional mutex access control security to apply.</param>
/// <returns>An object that represents a system mutex, if named, or a local mutex, if nameless.</returns>
/// <exception cref="ArgumentException">.NET Framework only: The length of the name exceeds the maximum limit.</exception>
/// <exception cref="WaitHandleCannotBeOpenedException">A mutex handle with system-wide <paramref name="name" /> cannot be created. A mutex handle of a different type might have the same name.</exception>
public static unsafe Mutex Create(bool initiallyOwned, string? name, out bool createdNew, MutexSecurity? mutexSecurity)
{
if (mutexSecurity == null)
{
return new Mutex(initiallyOwned, name, out createdNew);
}
uint mutexFlags = initiallyOwned ? Interop.Kernel32.CREATE_MUTEX_INITIAL_OWNER : 0;
fixed (byte* pSecurityDescriptor = mutexSecurity.GetSecurityDescriptorBinaryForm())
{
var secAttrs = new Interop.Kernel32.SECURITY_ATTRIBUTES
{
nLength = (uint)sizeof(Interop.Kernel32.SECURITY_ATTRIBUTES),
lpSecurityDescriptor = (IntPtr)pSecurityDescriptor
};
SafeWaitHandle handle = Interop.Kernel32.CreateMutexEx(
(IntPtr)(&secAttrs),
name,
mutexFlags,
(uint)MutexRights.FullControl // Equivalent to MUTEX_ALL_ACCESS
);
int errorCode = Marshal.GetLastWin32Error();
if (handle.IsInvalid)
{
handle.SetHandleAsInvalid();
if (errorCode == Interop.Errors.ERROR_FILENAME_EXCED_RANGE)
{
throw new ArgumentException(SR.Argument_WaitHandleNameTooLong, nameof(name));
}
if (errorCode == Interop.Errors.ERROR_INVALID_HANDLE)
{
throw new WaitHandleCannotBeOpenedException(SR.Format(SR.Threading_WaitHandleCannotBeOpenedException_InvalidHandle, name));
}
throw Win32Marshal.GetExceptionForWin32Error(errorCode, name);
}
createdNew = (errorCode != Interop.Errors.ERROR_ALREADY_EXISTS);
return CreateAndReplaceHandle(handle);
}
}
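// Illustrative only (not part of the original source): a hedged sketch of how the Create
// overload above might be called with an explicit MutexSecurity. The identity ("Everyone"),
// the rights, and the mutex name are assumptions made for this example, not values taken
// from this file; the call assumes the System.Threading.AccessControl surface is available.
//
//   var security = new MutexSecurity();
//   security.AddAccessRule(new MutexAccessRule(
//       "Everyone",
//       MutexRights.Synchronize | MutexRights.Modify,
//       AccessControlType.Allow));
//   using Mutex mutex = MutexAcl.Create(
//       initiallyOwned: false, @"Global\ExampleMutex", out bool createdNew, security);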
/// <summary>
/// Opens a specified named mutex, if it already exists, applying the desired access rights.
/// </summary>
/// <param name="name">The name of the mutex to be opened. If it's prefixed by "Global", it refers to a machine-wide mutex. If it's prefixed by "Local", or doesn't have a prefix, it refers to a session-wide mutex. Both prefix and name are case-sensitive.</param>
/// <param name="rights">The desired access rights to apply to the returned mutex.</param>
/// <returns>An existing named mutex.</returns>
/// <exception cref="ArgumentNullException"><paramref name="name"/> is <see langword="null" />.</exception>
/// <exception cref="ArgumentException"><paramref name="name"/> is an empty string.</exception>
/// <exception cref="WaitHandleCannotBeOpenedException">The named mutex does not exist or is invalid.</exception>
/// <exception cref="DirectoryNotFoundException">The path was not found.</exception>
/// <exception cref="IOException">A Win32 error occurred.</exception>
/// <exception cref="UnauthorizedAccessException">The named mutex exists, but the user does not have the security access required to use it.</exception>
public static Mutex OpenExisting(string name, MutexRights rights)
{
switch (OpenExistingWorker(name, rights, out Mutex? result))
{
case OpenExistingResult.NameNotFound:
throw new WaitHandleCannotBeOpenedException();
case OpenExistingResult.NameInvalid:
throw new WaitHandleCannotBeOpenedException(SR.Format(SR.Threading_WaitHandleCannotBeOpenedException_InvalidHandle, name));
case OpenExistingResult.PathNotFound:
throw new DirectoryNotFoundException(SR.Format(SR.IO_PathNotFound_Path, name));
case OpenExistingResult.Success:
default:
Debug.Assert(result != null, "result should be non-null on success");
return result;
}
}
/// <summary>
/// Tries to open a specified named mutex, if it already exists, applying the desired access rights, and returns a value that indicates whether the operation succeeded.
/// </summary>
/// <param name="name">The name of the mutex to be opened. If it's prefixed by "Global", it refers to a machine-wide mutex. If it's prefixed by "Local", or doesn't have a prefix, it refers to a session-wide mutex. Both prefix and name are case-sensitive.</param>
/// <param name="rights">The desired access rights to apply to the returned mutex.</param>
/// <param name="result">When this method returns <see langword="true" />, contains an object that represents the named mutex if the call succeeded, or <see langword="null" /> otherwise. This parameter is treated as uninitialized.</param>
/// <returns><see langword="true" /> if the named mutex was opened successfully; otherwise, <see langword="false" />.</returns>
/// <exception cref="ArgumentNullException"><paramref name="name"/> is <see langword="null" /></exception>
/// <exception cref="ArgumentException"><paramref name="name"/> is an empty string.</exception>
/// <exception cref="IOException">A Win32 error occurred.</exception>
/// <exception cref="UnauthorizedAccessException">The named mutex exists, but the user does not have the security access required to use it.</exception>
public static bool TryOpenExisting(string name, MutexRights rights, [NotNullWhen(returnValue: true)] out Mutex? result) =>
OpenExistingWorker(name, rights, out result) == OpenExistingResult.Success;
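// Illustrative only (not part of the original source): a hedged sketch of the non-throwing
// lookup pattern documented above, as opposed to OpenExisting which throws when the mutex
// is missing. The mutex name is a made-up example value; the rights chosen here are an
// assumption (Synchronize to wait, Modify to release).
//
//   if (MutexAcl.TryOpenExisting(@"Global\ExampleMutex",
//           MutexRights.Synchronize | MutexRights.Modify, out Mutex? existing))
//   {
//       using (existing)
//       {
//           existing.WaitOne();
//           // ... protected work ...
//           existing.ReleaseMutex();
//       }
//   }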
private static OpenExistingResult OpenExistingWorker(string name!!, MutexRights rights, out Mutex? result)
{
if (name.Length == 0)
{
throw new ArgumentException(SR.Argument_EmptyName, nameof(name));
}
result = null;
SafeWaitHandle existingHandle = Interop.Kernel32.OpenMutex((uint)rights, false, name);
int errorCode = Marshal.GetLastWin32Error();
if (existingHandle.IsInvalid)
{
return errorCode switch
{
Interop.Errors.ERROR_FILE_NOT_FOUND or Interop.Errors.ERROR_INVALID_NAME => OpenExistingResult.NameNotFound,
Interop.Errors.ERROR_PATH_NOT_FOUND => OpenExistingResult.PathNotFound,
Interop.Errors.ERROR_INVALID_HANDLE => OpenExistingResult.NameInvalid,
_ => throw Win32Marshal.GetExceptionForWin32Error(errorCode, name)
};
}
result = CreateAndReplaceHandle(existingHandle);
return OpenExistingResult.Success;
}
private static Mutex CreateAndReplaceHandle(SafeWaitHandle replacementHandle)
{
// The value of initiallyOwned should not matter since we are replacing the
// handle with one from an existing Mutex and disposing the old one.
// We only need to make sure that it is a valid value.
Mutex mutex = new Mutex(initiallyOwned: default);
SafeWaitHandle old = mutex.SafeWaitHandle;
mutex.SafeWaitHandle = replacementHandle;
old.Dispose();
return mutex;
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.AccessControl;
using Microsoft.Win32.SafeHandles;
namespace System.Threading
{
public static class MutexAcl
{
/// <summary>Gets or creates <see cref="Mutex" /> instance, allowing a <see cref="MutexSecurity" /> to be optionally specified to set it during the mutex creation.</summary>
/// <param name="initiallyOwned"><see langword="true" /> to give the calling thread initial ownership of the named system mutex if the named system mutex is created as a result of this call; otherwise, <see langword="false" />.</param>
/// <param name="name">The optional name of the system mutex. If this argument is set to <see langword="null" /> or <see cref="string.Empty" />, a local mutex is created.</param>
/// <param name="createdNew">When this method returns, this argument is always set to <see langword="true" /> if a local mutex is created; that is, when <paramref name="name" /> is <see langword="null" /> or <see cref="string.Empty" />. If <paramref name="name" /> has a valid non-empty value, this argument is set to <see langword="true" /> when the system mutex is created, or it is set to <see langword="false" /> if an existing system mutex is found with that name. This parameter is passed uninitialized.</param>
/// <param name="mutexSecurity">The optional mutex access control security to apply.</param>
/// <returns>An object that represents a system mutex, if named, or a local mutex, if nameless.</returns>
/// <exception cref="ArgumentException">.NET Framework only: The length of the name exceeds the maximum limit.</exception>
/// <exception cref="WaitHandleCannotBeOpenedException">A mutex handle with system-wide <paramref name="name" /> cannot be created. A mutex handle of a different type might have the same name.</exception>
public static unsafe Mutex Create(bool initiallyOwned, string? name, out bool createdNew, MutexSecurity? mutexSecurity)
{
if (mutexSecurity == null)
{
return new Mutex(initiallyOwned, name, out createdNew);
}
uint mutexFlags = initiallyOwned ? Interop.Kernel32.CREATE_MUTEX_INITIAL_OWNER : 0;
fixed (byte* pSecurityDescriptor = mutexSecurity.GetSecurityDescriptorBinaryForm())
{
var secAttrs = new Interop.Kernel32.SECURITY_ATTRIBUTES
{
nLength = (uint)sizeof(Interop.Kernel32.SECURITY_ATTRIBUTES),
lpSecurityDescriptor = (IntPtr)pSecurityDescriptor
};
SafeWaitHandle handle = Interop.Kernel32.CreateMutexEx(
(IntPtr)(&secAttrs),
name,
mutexFlags,
(uint)MutexRights.FullControl // Equivalent to MUTEX_ALL_ACCESS
);
int errorCode = Marshal.GetLastWin32Error();
if (handle.IsInvalid)
{
handle.SetHandleAsInvalid();
if (errorCode == Interop.Errors.ERROR_FILENAME_EXCED_RANGE)
{
throw new ArgumentException(SR.Argument_WaitHandleNameTooLong, nameof(name));
}
if (errorCode == Interop.Errors.ERROR_INVALID_HANDLE)
{
throw new WaitHandleCannotBeOpenedException(SR.Format(SR.Threading_WaitHandleCannotBeOpenedException_InvalidHandle, name));
}
throw Win32Marshal.GetExceptionForWin32Error(errorCode, name);
}
createdNew = (errorCode != Interop.Errors.ERROR_ALREADY_EXISTS);
return CreateAndReplaceHandle(handle);
}
}
/// <summary>
/// Opens a specified named mutex, if it already exists, applying the desired access rights.
/// </summary>
/// <param name="name">The name of the mutex to be opened. If it's prefixed by "Global", it refers to a machine-wide mutex. If it's prefixed by "Local", or doesn't have a prefix, it refers to a session-wide mutex. Both prefix and name are case-sensitive.</param>
/// <param name="rights">The desired access rights to apply to the returned mutex.</param>
/// <returns>An existing named mutex.</returns>
/// <exception cref="ArgumentNullException"><paramref name="name"/> is <see langword="null" />.</exception>
/// <exception cref="ArgumentException"><paramref name="name"/> is an empty string.</exception>
/// <exception cref="WaitHandleCannotBeOpenedException">The named mutex does not exist or is invalid.</exception>
/// <exception cref="DirectoryNotFoundException">The path was not found.</exception>
/// <exception cref="IOException">A Win32 error occurred.</exception>
/// <exception cref="UnauthorizedAccessException">The named mutex exists, but the user does not have the security access required to use it.</exception>
public static Mutex OpenExisting(string name, MutexRights rights)
{
switch (OpenExistingWorker(name, rights, out Mutex? result))
{
case OpenExistingResult.NameNotFound:
throw new WaitHandleCannotBeOpenedException();
case OpenExistingResult.NameInvalid:
throw new WaitHandleCannotBeOpenedException(SR.Format(SR.Threading_WaitHandleCannotBeOpenedException_InvalidHandle, name));
case OpenExistingResult.PathNotFound:
throw new DirectoryNotFoundException(SR.Format(SR.IO_PathNotFound_Path, name));
case OpenExistingResult.Success:
default:
Debug.Assert(result != null, "result should be non-null on success");
return result;
}
}
/// <summary>
/// Tries to open a specified named mutex, if it already exists, applying the desired access rights, and returns a value that indicates whether the operation succeeded.
/// </summary>
/// <param name="name">The name of the mutex to be opened. If it's prefixed by "Global", it refers to a machine-wide mutex. If it's prefixed by "Local", or doesn't have a prefix, it refers to a session-wide mutex. Both prefix and name are case-sensitive.</param>
/// <param name="rights">The desired access rights to apply to the returned mutex.</param>
/// <param name="result">When this method returns <see langword="true" />, contains an object that represents the named mutex if the call succeeded, or <see langword="null" /> otherwise. This parameter is treated as uninitialized.</param>
/// <returns><see langword="true" /> if the named mutex was opened successfully; otherwise, <see langword="false" />.</returns>
/// <exception cref="ArgumentNullException"><paramref name="name"/> is <see langword="null" /></exception>
/// <exception cref="ArgumentException"><paramref name="name"/> is an empty string.</exception>
/// <exception cref="IOException">A Win32 error occurred.</exception>
/// <exception cref="UnauthorizedAccessException">The named mutex exists, but the user does not have the security access required to use it.</exception>
public static bool TryOpenExisting(string name, MutexRights rights, [NotNullWhen(returnValue: true)] out Mutex? result) =>
OpenExistingWorker(name, rights, out result) == OpenExistingResult.Success;
private static OpenExistingResult OpenExistingWorker(string name!!, MutexRights rights, out Mutex? result)
{
if (name.Length == 0)
{
throw new ArgumentException(SR.Argument_EmptyName, nameof(name));
}
result = null;
SafeWaitHandle existingHandle = Interop.Kernel32.OpenMutex((uint)rights, false, name);
int errorCode = Marshal.GetLastWin32Error();
if (existingHandle.IsInvalid)
{
return errorCode switch
{
Interop.Errors.ERROR_FILE_NOT_FOUND or Interop.Errors.ERROR_INVALID_NAME => OpenExistingResult.NameNotFound,
Interop.Errors.ERROR_PATH_NOT_FOUND => OpenExistingResult.PathNotFound,
Interop.Errors.ERROR_INVALID_HANDLE => OpenExistingResult.NameInvalid,
_ => throw Win32Marshal.GetExceptionForWin32Error(errorCode, name)
};
}
result = CreateAndReplaceHandle(existingHandle);
return OpenExistingResult.Success;
}
private static Mutex CreateAndReplaceHandle(SafeWaitHandle replacementHandle)
{
// The value of initiallyOwned should not matter since we are replacing the
// handle with one from an existing Mutex, and disposing the old one
// We should only make sure that it is a valid value
Mutex mutex = new Mutex(initiallyOwned: default);
SafeWaitHandle old = mutex.SafeWaitHandle;
mutex.SafeWaitHandle = replacementHandle;
old.Dispose();
return mutex;
}
}
}
| -1 |
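For context, here is a minimal usage sketch of the MutexAcl API shown above. The mutex name "Global\ExampleMutex" and the access rules are illustrative assumptions; MutexAcl ships in the System.Threading.AccessControl package and is not part of the core Mutex type.

```csharp
using System;
using System.Security.AccessControl;
using System.Security.Principal;
using System.Threading;

class MutexAclExample
{
    static void Main()
    {
        // Grant the built-in Users group the rights needed to open and acquire the mutex.
        var security = new MutexSecurity();
        security.AddAccessRule(new MutexAccessRule(
            new SecurityIdentifier(WellKnownSidType.BuiltinUsersSid, null),
            MutexRights.Synchronize | MutexRights.Modify,
            AccessControlType.Allow));

        // Create (or open) the named mutex with the ACL applied at creation time.
        using Mutex mutex = MutexAcl.Create(
            initiallyOwned: false, "Global\\ExampleMutex", out bool createdNew, security);
        Console.WriteLine(createdNew ? "Created new mutex" : "Opened existing mutex");

        // Another component can later open it with only the rights it needs.
        if (MutexAcl.TryOpenExisting("Global\\ExampleMutex",
                MutexRights.Synchronize | MutexRights.Modify, out Mutex? opened))
        {
            using (opened)
            {
                opened.WaitOne();
                try { /* protected region */ }
                finally { opened.ReleaseMutex(); }
            }
        }
    }
}
```

The TryOpenExisting call avoids the WaitHandleCannotBeOpenedException path of OpenExisting when the mutex may not exist yet.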
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/Loader/classloader/explicitlayout/Regressions/369794/repro369794.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Runtime.InteropServices;
using System;
[StructLayout(LayoutKind.Explicit, Size = 153)]
internal struct A
{
[FieldOffset(0)]
internal bool i;
};
class Test
{
static unsafe int Main(string[] args)
{
int i = sizeof(A);
int j = Marshal.SizeOf(typeof(A));
if (i == 153 && j == 153)
{
Console.WriteLine("PASS");
return 100;
}
else
{
Console.WriteLine("FAIL: sizeof and Marshal.SizeOf should have both returned 153.");
Console.WriteLine("ACTUAL: sizeof(A) = " + i + ", Marshal.SizeOf(A) = " + j);
return 101;
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Runtime.InteropServices;
using System;
[StructLayout(LayoutKind.Explicit, Size = 153)]
internal struct A
{
[FieldOffset(0)]
internal bool i;
};
class Test
{
static unsafe int Main(string[] args)
{
int i = sizeof(A);
int j = Marshal.SizeOf(typeof(A));
if (i == 153 && j == 153)
{
Console.WriteLine("PASS");
return 100;
}
else
{
Console.WriteLine("FAIL: sizeof and Marshal.SizeOf should have both returned 153.");
Console.WriteLine("ACTUAL: sizeof(A) = " + i + ", Marshal.SizeOf(A) = " + j);
return 101;
}
}
}
| -1 |
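As a small illustrative variation on the regression test above (the struct name and the size 16 are invented for illustration), the declared Size on an explicit-layout struct wins even when the fields need less space, which is exactly what the test asserts with 153 bytes:

```csharp
using System;
using System.Runtime.InteropServices;

[StructLayout(LayoutKind.Explicit, Size = 16)]
struct Padded
{
    [FieldOffset(0)]
    public int Value;
}

class PaddedDemo
{
    static void Main()
    {
        // Marshal.SizeOf reports the declared size (16), not the 4 bytes the field occupies.
        Console.WriteLine(Marshal.SizeOf<Padded>());
    }
}
```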
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/coreclr/tools/aot/ILCompiler.TypeSystem.Tests/RuntimeDeterminedTypesTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Internal.TypeSystem;
using Xunit;
namespace TypeSystemTests
{
public class RuntimeDeterminedTypesTests
{
private TestTypeSystemContext _context;
private ModuleDesc _testModule;
private MetadataType _referenceType;
private MetadataType _otherReferenceType;
private MetadataType _structType;
private MetadataType _otherStructType;
private MetadataType _genericReferenceType;
private MetadataType _genericStructType;
private MetadataType _genericReferenceTypeWithThreeParams;
private MetadataType _genericStructTypeWithThreeParams;
public RuntimeDeterminedTypesTests()
{
_context = new TestTypeSystemContext(TargetArchitecture.Unknown);
var systemModule = _context.CreateModuleForSimpleName("CoreTestAssembly");
_context.SetSystemModule(systemModule);
_testModule = systemModule;
_referenceType = _testModule.GetType("Canonicalization", "ReferenceType");
_otherReferenceType = _testModule.GetType("Canonicalization", "OtherReferenceType");
_structType = _testModule.GetType("Canonicalization", "StructType");
_otherStructType = _testModule.GetType("Canonicalization", "OtherStructType");
_genericReferenceType = _testModule.GetType("Canonicalization", "GenericReferenceType`1");
_genericStructType = _testModule.GetType("Canonicalization", "GenericStructType`1");
_genericReferenceTypeWithThreeParams = _testModule.GetType("Canonicalization", "GenericReferenceTypeWithThreeParams`3");
_genericStructTypeWithThreeParams = _testModule.GetType("Canonicalization", "GenericStructTypeWithThreeParams`3");
}
[Fact]
public void TestReferenceTypeConversionToSharedForm()
{
var grtOverRt = _genericReferenceType.MakeInstantiatedType(_referenceType);
var grtOverOtherRt = _genericReferenceType.MakeInstantiatedType(_otherReferenceType);
var grtOverRtShared = grtOverRt.ConvertToSharedRuntimeDeterminedForm();
var grtOverOtherRtShared = grtOverOtherRt.ConvertToSharedRuntimeDeterminedForm();
// GenericReferenceType<ReferenceType> and GenericReferenceType<OtherReferenceType> have the same shared form
Assert.Same(grtOverRtShared, grtOverOtherRtShared);
// The instantiation argument of the shared form is a runtime determined type
var typeArg = grtOverRtShared.Instantiation[0];
Assert.IsType<RuntimeDeterminedType>(typeArg);
// The canonical type used in the shared runtime form is __Canon
var runtimeDeterminedType = (RuntimeDeterminedType)typeArg;
Assert.Same(_context.CanonType, runtimeDeterminedType.CanonicalType);
// The shared runtime form details type is the T from the generic definition
Assert.Same(_genericReferenceType.Instantiation[0], runtimeDeterminedType.RuntimeDeterminedDetailsType);
// Canonical form of GenericReferenceType<T__Canon> is same as canonical form of GenericReferenceType<ReferenceType>
Assert.Same(
grtOverRtShared.ConvertToCanonForm(CanonicalFormKind.Specific),
grtOverRt.ConvertToCanonForm(CanonicalFormKind.Specific));
// GenericReferenceType<ReferenceType> and GenericReferenceType<StructType[]> have the same shared form
var grtOverArray = _genericReferenceType.MakeInstantiatedType(_structType.MakeArrayType());
var grtOverArrayShared = grtOverArray.ConvertToSharedRuntimeDeterminedForm();
Assert.Same(grtOverRtShared, grtOverArrayShared);
// Converting GenericReferenceType<StructType> to shared form is a no-op
var grtOverSt = _genericReferenceType.MakeInstantiatedType(_structType);
var grtOverStShared = grtOverSt.ConvertToSharedRuntimeDeterminedForm();
Assert.Same(grtOverStShared, grtOverSt);
}
[Fact]
public void TestLargeReferenceTypeConversionToSharedForm()
{
var grtOverRtStRt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_referenceType, _structType, _referenceType);
var grtOverRtStOtherRt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_referenceType, _structType, _otherReferenceType);
var grtOverRtStRtShared = grtOverRtStRt.ConvertToSharedRuntimeDeterminedForm();
var grtOverRtStOtherRtShared = grtOverRtStOtherRt.ConvertToSharedRuntimeDeterminedForm();
// GenericReferenceTypeWithThreeParams<ReferenceType, StructType, ReferenceType>
// GenericReferenceTypeWithThreeParams<ReferenceType, StructType, OtherReferenceType>
// have the same shared runtime form.
Assert.Same(grtOverRtStRtShared, grtOverRtStOtherRtShared);
var grtOverStRtSt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_structType, _referenceType, _structType);
var grtOverStOtherRtSt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_structType, _otherReferenceType, _structType);
var grtOverStRtStShared = grtOverStRtSt.ConvertToSharedRuntimeDeterminedForm();
var grtOverStOtherRtStShared = grtOverStOtherRtSt.ConvertToSharedRuntimeDeterminedForm();
// GenericReferenceTypeWithThreeParams<StructType, ReferenceType, StructType>
// GenericReferenceTypeWithThreeParams<StructType, OtherReferenceType, StructType>
// have the same shared runtime form.
Assert.Same(grtOverStRtStShared, grtOverStOtherRtStShared);
// GenericReferenceTypeWithThreeParams<StructType, ReferenceType, StructType>
// GenericReferenceTypeWithThreeParams<StructType, ReferenceType, OtherStructType>
// have different shared runtime form.
var grtOverStRtOtherSt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_structType, _referenceType, _otherStructType);
var grtOverStRtOtherStShared = grtOverStRtOtherSt.ConvertToSharedRuntimeDeterminedForm();
Assert.NotSame(grtOverStRtStShared, grtOverStRtOtherStShared);
}
[Fact]
public void TestUniversalCanonUpgrade()
{
var gstOverUniversalCanon = _genericStructType.MakeInstantiatedType(_context.UniversalCanonType);
var grtOverRtRtStOverUniversal = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_referenceType, _referenceType, gstOverUniversalCanon);
var grtOverRtRtStOverUniversalShared = grtOverRtRtStOverUniversal.ConvertToSharedRuntimeDeterminedForm();
// Shared runtime form of
// GenericReferenceTypeWithThreeParams<ReferenceType, ReferenceType, GenericStructType<__UniversalCanon>> is
// GenericReferenceTypeWithThreeParams<T__UniversalCanon, U__UniversalCanon, V__UniversalCanon>
var arg0 = grtOverRtRtStOverUniversalShared.Instantiation[0];
Assert.IsType<RuntimeDeterminedType>(arg0);
Assert.Same(_context.UniversalCanonType, ((RuntimeDeterminedType)arg0).CanonicalType);
var arg2 = grtOverRtRtStOverUniversalShared.Instantiation[2];
Assert.IsType<RuntimeDeterminedType>(arg2);
Assert.Same(_context.UniversalCanonType, ((RuntimeDeterminedType)arg2).CanonicalType);
}
[Fact]
public void TestSignatureInstantiation()
{
var grtOverRtStRt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_referenceType, _structType, _referenceType);
var grtOverRtStRtShared = grtOverRtStRt.ConvertToSharedRuntimeDeterminedForm();
// GenericReferenceTypeWithThreeParams<T__Canon, StructType, V__Canon> substituted over
// an instantiation of <ReferenceType, StructType, OtherReferenceType> is
// GenericReferenceTypeWithThreeParams<ReferenceType, StructType, OtherReferenceType>
var grtOverRtStRtSharedInstantiated = grtOverRtStRtShared.GetNonRuntimeDeterminedTypeFromRuntimeDeterminedSubtypeViaSubstitution(
new Instantiation(_referenceType, _structType, _otherReferenceType),
Instantiation.Empty);
var grtOverRtStOtherRt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_referenceType, _structType, _otherReferenceType);
Assert.Same(grtOverRtStOtherRt, grtOverRtStRtSharedInstantiated);
}
[Fact]
public void TestInstantiationOverStructOverCanon()
{
var stOverCanon = _genericStructType.MakeInstantiatedType(_context.CanonType);
var grtOverStOverCanon = _genericReferenceType.MakeInstantiatedType(
stOverCanon);
var grtOverStOverCanonShared = grtOverStOverCanon.ConvertToSharedRuntimeDeterminedForm();
// GenericReferenceType<GenericStructType<__Canon>> converts to
// GenericReferenceType<T__GenericStructType<__Canon>>
var typeArg = grtOverStOverCanonShared.Instantiation[0];
Assert.IsType<RuntimeDeterminedType>(typeArg);
var runtimeDeterminedType = (RuntimeDeterminedType)typeArg;
Assert.Same(stOverCanon, runtimeDeterminedType.CanonicalType);
Assert.Same(_genericReferenceType.Instantiation[0], runtimeDeterminedType.RuntimeDeterminedDetailsType);
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Internal.TypeSystem;
using Xunit;
namespace TypeSystemTests
{
public class RuntimeDeterminedTypesTests
{
private TestTypeSystemContext _context;
private ModuleDesc _testModule;
private MetadataType _referenceType;
private MetadataType _otherReferenceType;
private MetadataType _structType;
private MetadataType _otherStructType;
private MetadataType _genericReferenceType;
private MetadataType _genericStructType;
private MetadataType _genericReferenceTypeWithThreeParams;
private MetadataType _genericStructTypeWithThreeParams;
public RuntimeDeterminedTypesTests()
{
_context = new TestTypeSystemContext(TargetArchitecture.Unknown);
var systemModule = _context.CreateModuleForSimpleName("CoreTestAssembly");
_context.SetSystemModule(systemModule);
_testModule = systemModule;
_referenceType = _testModule.GetType("Canonicalization", "ReferenceType");
_otherReferenceType = _testModule.GetType("Canonicalization", "OtherReferenceType");
_structType = _testModule.GetType("Canonicalization", "StructType");
_otherStructType = _testModule.GetType("Canonicalization", "OtherStructType");
_genericReferenceType = _testModule.GetType("Canonicalization", "GenericReferenceType`1");
_genericStructType = _testModule.GetType("Canonicalization", "GenericStructType`1");
_genericReferenceTypeWithThreeParams = _testModule.GetType("Canonicalization", "GenericReferenceTypeWithThreeParams`3");
_genericStructTypeWithThreeParams = _testModule.GetType("Canonicalization", "GenericStructTypeWithThreeParams`3");
}
[Fact]
public void TestReferenceTypeConversionToSharedForm()
{
var grtOverRt = _genericReferenceType.MakeInstantiatedType(_referenceType);
var grtOverOtherRt = _genericReferenceType.MakeInstantiatedType(_otherReferenceType);
var grtOverRtShared = grtOverRt.ConvertToSharedRuntimeDeterminedForm();
var grtOverOtherRtShared = grtOverOtherRt.ConvertToSharedRuntimeDeterminedForm();
// GenericReferenceType<ReferenceType> and GenericReferenceType<OtherReferenceType> have the same shared form
Assert.Same(grtOverRtShared, grtOverOtherRtShared);
// The instantiation argument of the shared form is a runtime determined type
var typeArg = grtOverRtShared.Instantiation[0];
Assert.IsType<RuntimeDeterminedType>(typeArg);
// The canonical type used in the shared runtime form is __Canon
var runtimeDeterminedType = (RuntimeDeterminedType)typeArg;
Assert.Same(_context.CanonType, runtimeDeterminedType.CanonicalType);
// The shared runtime form details type is the T from the generic definition
Assert.Same(_genericReferenceType.Instantiation[0], runtimeDeterminedType.RuntimeDeterminedDetailsType);
// Canonical form of GenericReferenceType<T__Canon> is same as canonical form of GenericReferenceType<ReferenceType>
Assert.Same(
grtOverRtShared.ConvertToCanonForm(CanonicalFormKind.Specific),
grtOverRt.ConvertToCanonForm(CanonicalFormKind.Specific));
// GenericReferenceType<ReferenceType> and GenericReferenceType<StructType[]> have the same shared form
var grtOverArray = _genericReferenceType.MakeInstantiatedType(_structType.MakeArrayType());
var grtOverArrayShared = grtOverArray.ConvertToSharedRuntimeDeterminedForm();
Assert.Same(grtOverRtShared, grtOverArrayShared);
// Converting GenericReferenceType<StructType> to shared form is a no-op
var grtOverSt = _genericReferenceType.MakeInstantiatedType(_structType);
var grtOverStShared = grtOverSt.ConvertToSharedRuntimeDeterminedForm();
Assert.Same(grtOverStShared, grtOverSt);
}
[Fact]
public void TestLargeReferenceTypeConversionToSharedForm()
{
var grtOverRtStRt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_referenceType, _structType, _referenceType);
var grtOverRtStOtherRt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_referenceType, _structType, _otherReferenceType);
var grtOverRtStRtShared = grtOverRtStRt.ConvertToSharedRuntimeDeterminedForm();
var grtOverRtStOtherRtShared = grtOverRtStOtherRt.ConvertToSharedRuntimeDeterminedForm();
// GenericReferenceTypeWithThreeParams<ReferenceType, StructType, ReferenceType>
// GenericReferenceTypeWithThreeParams<ReferenceType, StructType, OtherReferenceType>
// have the same shared runtime form.
Assert.Same(grtOverRtStRtShared, grtOverRtStOtherRtShared);
var grtOverStRtSt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_structType, _referenceType, _structType);
var grtOverStOtherRtSt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_structType, _otherReferenceType, _structType);
var grtOverStRtStShared = grtOverStRtSt.ConvertToSharedRuntimeDeterminedForm();
var grtOverStOtherRtStShared = grtOverStOtherRtSt.ConvertToSharedRuntimeDeterminedForm();
// GenericReferenceTypeWithThreeParams<StructType, ReferenceType, StructType>
// GenericReferenceTypeWithThreeParams<StructType, OtherReferenceType, StructType>
// have the same shared runtime form.
Assert.Same(grtOverStRtStShared, grtOverStOtherRtStShared);
// GenericReferenceTypeWithThreeParams<StructType, ReferenceType, StructType>
// GenericReferenceTypeWithThreeParams<StructType, ReferenceType, OtherStructType>
// have different shared runtime form.
var grtOverStRtOtherSt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_structType, _referenceType, _otherStructType);
var grtOverStRtOtherStShared = grtOverStRtOtherSt.ConvertToSharedRuntimeDeterminedForm();
Assert.NotSame(grtOverStRtStShared, grtOverStRtOtherStShared);
}
[Fact]
public void TestUniversalCanonUpgrade()
{
var gstOverUniversalCanon = _genericStructType.MakeInstantiatedType(_context.UniversalCanonType);
var grtOverRtRtStOverUniversal = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_referenceType, _referenceType, gstOverUniversalCanon);
var grtOverRtRtStOverUniversalShared = grtOverRtRtStOverUniversal.ConvertToSharedRuntimeDeterminedForm();
// Shared runtime form of
// GenericReferenceTypeWithThreeParams<ReferenceType, ReferenceType, GenericStructType<__UniversalCanon>> is
// GenericReferenceTypeWithThreeParams<T__UniversalCanon, U__UniversalCanon, V__UniversalCanon>
var arg0 = grtOverRtRtStOverUniversalShared.Instantiation[0];
Assert.IsType<RuntimeDeterminedType>(arg0);
Assert.Same(_context.UniversalCanonType, ((RuntimeDeterminedType)arg0).CanonicalType);
var arg2 = grtOverRtRtStOverUniversalShared.Instantiation[2];
Assert.IsType<RuntimeDeterminedType>(arg2);
Assert.Same(_context.UniversalCanonType, ((RuntimeDeterminedType)arg2).CanonicalType);
}
[Fact]
public void TestSignatureInstantiation()
{
var grtOverRtStRt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_referenceType, _structType, _referenceType);
var grtOverRtStRtShared = grtOverRtStRt.ConvertToSharedRuntimeDeterminedForm();
// GenericReferenceTypeWithThreeParams<T__Canon, StructType, V__Canon> substituted over
// an instantiation of <ReferenceType, StructType, OtherReferenceType> is
// GenericReferenceTypeWithThreeParams<ReferenceType, StructType, OtherReferenceType>
var grtOverRtStRtSharedInstantiated = grtOverRtStRtShared.GetNonRuntimeDeterminedTypeFromRuntimeDeterminedSubtypeViaSubstitution(
new Instantiation(_referenceType, _structType, _otherReferenceType),
Instantiation.Empty);
var grtOverRtStOtherRt = _genericReferenceTypeWithThreeParams.MakeInstantiatedType(
_referenceType, _structType, _otherReferenceType);
Assert.Same(grtOverRtStOtherRt, grtOverRtStRtSharedInstantiated);
}
[Fact]
public void TestInstantiationOverStructOverCanon()
{
var stOverCanon = _genericStructType.MakeInstantiatedType(_context.CanonType);
var grtOverStOverCanon = _genericReferenceType.MakeInstantiatedType(
stOverCanon);
var grtOverStOverCanonShared = grtOverStOverCanon.ConvertToSharedRuntimeDeterminedForm();
// GenericReferenceType<GenericStructType<__Canon>> converts to
// GenericReferenceType<T__GenericStructType<__Canon>>
var typeArg = grtOverStOverCanonShared.Instantiation[0];
Assert.IsType<RuntimeDeterminedType>(typeArg);
var runtimeDeterminedType = (RuntimeDeterminedType)typeArg;
Assert.Same(stOverCanon, runtimeDeterminedType.CanonicalType);
Assert.Same(_genericReferenceType.Instantiation[0], runtimeDeterminedType.RuntimeDeterminedDetailsType);
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/FeatureSettings.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Xml.XPath;
using ILLink.Shared;
using Internal.TypeSystem;
namespace ILCompiler
{
public static class FeatureSettings
{
public static bool ShouldProcessElement(XPathNavigator nav, IReadOnlyDictionary<string, bool> featureSwitchValues)
{
var feature = GetAttribute(nav, "feature");
if (string.IsNullOrEmpty(feature))
return true;
var value = GetAttribute(nav, "featurevalue");
if (string.IsNullOrEmpty(value))
{
//context.LogError(null, DiagnosticId.XmlFeatureDoesNotSpecifyFeatureValue, documentLocation, feature);
return false;
}
if (!bool.TryParse(value, out bool bValue))
{
//context.LogError(null, DiagnosticId.XmlUnsupportedNonBooleanValueForFeature, documentLocation, feature);
return false;
}
var isDefault = GetAttribute(nav, "featuredefault");
bool bIsDefault = false;
if (!string.IsNullOrEmpty(isDefault) && (!bool.TryParse(isDefault, out bIsDefault) || !bIsDefault))
{
//context.LogError(null, DiagnosticId.XmlDocumentLocationHasInvalidFeatureDefault, documentLocation);
return false;
}
if (!featureSwitchValues.TryGetValue(feature, out bool featureSetting))
return bIsDefault;
return bValue == featureSetting;
}
public static string GetAttribute(XPathNavigator nav, string attribute)
{
return nav.GetAttribute(attribute, String.Empty);
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Xml.XPath;
using ILLink.Shared;
using Internal.TypeSystem;
namespace ILCompiler
{
public static class FeatureSettings
{
public static bool ShouldProcessElement(XPathNavigator nav, IReadOnlyDictionary<string, bool> featureSwitchValues)
{
var feature = GetAttribute(nav, "feature");
if (string.IsNullOrEmpty(feature))
return true;
var value = GetAttribute(nav, "featurevalue");
if (string.IsNullOrEmpty(value))
{
//context.LogError(null, DiagnosticId.XmlFeatureDoesNotSpecifyFeatureValue, documentLocation, feature);
return false;
}
if (!bool.TryParse(value, out bool bValue))
{
//context.LogError(null, DiagnosticId.XmlUnsupportedNonBooleanValueForFeature, documentLocation, feature);
return false;
}
var isDefault = GetAttribute(nav, "featuredefault");
bool bIsDefault = false;
if (!string.IsNullOrEmpty(isDefault) && (!bool.TryParse(isDefault, out bIsDefault) || !bIsDefault))
{
//context.LogError(null, DiagnosticId.XmlDocumentLocationHasInvalidFeatureDefault, documentLocation);
return false;
}
if (!featureSwitchValues.TryGetValue(feature, out bool featureSetting))
return bIsDefault;
return bValue == featureSetting;
}
public static string GetAttribute(XPathNavigator nav, string attribute)
{
return nav.GetAttribute(attribute, String.Empty);
}
}
}
| -1 |
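A rough sketch of how ShouldProcessElement above is driven. The switch name MyApp.UseFastPath and the element shape are invented for illustration; real input comes from ILLink-style descriptor or substitution XML files consumed by the compiler.

```csharp
using System;
using System.Collections.Generic;
using System.IO;
using System.Xml.XPath;

class FeatureSettingsDemo
{
    static void Main()
    {
        // An element gated on a feature switch, as it might appear in a substitutions file.
        var doc = new XPathDocument(new StringReader(
            "<type fullname='Foo' feature='MyApp.UseFastPath' featurevalue='true' featuredefault='true' />"));
        XPathNavigator nav = doc.CreateNavigator();
        nav.MoveToFirstChild(); // position the navigator on the <type> element

        var switches = new Dictionary<string, bool> { ["MyApp.UseFastPath"] = true };

        // True: the supplied switch value matches featurevalue.
        Console.WriteLine(ILCompiler.FeatureSettings.ShouldProcessElement(nav, switches));

        // If the switch is not supplied at all, featuredefault='true' makes the element apply.
        Console.WriteLine(ILCompiler.FeatureSettings.ShouldProcessElement(nav, new Dictionary<string, bool>()));
    }
}
```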
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/coreclr/nativeaot/System.Private.CoreLib/src/Internal/Runtime/CompilerHelpers/LdTokenHelpers.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using Internal.Reflection.Core.NonPortable;
namespace Internal.Runtime.CompilerHelpers
{
/// <summary>
/// These methods are used to implement ldtoken instruction.
/// </summary>
internal static class LdTokenHelpers
{
private static RuntimeTypeHandle GetRuntimeTypeHandle(IntPtr pEEType)
{
return new RuntimeTypeHandle(new EETypePtr(pEEType));
}
private static unsafe RuntimeMethodHandle GetRuntimeMethodHandle(IntPtr pHandleSignature)
{
RuntimeMethodHandle returnValue;
*(IntPtr*)&returnValue = pHandleSignature;
return returnValue;
}
private static unsafe RuntimeFieldHandle GetRuntimeFieldHandle(IntPtr pHandleSignature)
{
RuntimeFieldHandle returnValue;
*(IntPtr*)&returnValue = pHandleSignature;
return returnValue;
}
private static Type GetRuntimeType(IntPtr pEEType)
{
return Type.GetTypeFromEETypePtr(new EETypePtr(pEEType));
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using Internal.Reflection.Core.NonPortable;
namespace Internal.Runtime.CompilerHelpers
{
/// <summary>
/// These methods are used to implement ldtoken instruction.
/// </summary>
internal static class LdTokenHelpers
{
private static RuntimeTypeHandle GetRuntimeTypeHandle(IntPtr pEEType)
{
return new RuntimeTypeHandle(new EETypePtr(pEEType));
}
private static unsafe RuntimeMethodHandle GetRuntimeMethodHandle(IntPtr pHandleSignature)
{
RuntimeMethodHandle returnValue;
*(IntPtr*)&returnValue = pHandleSignature;
return returnValue;
}
private static unsafe RuntimeFieldHandle GetRuntimeFieldHandle(IntPtr pHandleSignature)
{
RuntimeFieldHandle returnValue;
*(IntPtr*)&returnValue = pHandleSignature;
return returnValue;
}
private static Type GetRuntimeType(IntPtr pEEType)
{
return Type.GetTypeFromEETypePtr(new EETypePtr(pEEType));
}
}
}
| -1 |
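For orientation, the helpers above back the ldtoken instruction on NativeAOT. The snippet below is plain C# that does not call the internal helpers directly; it only shows the language-level constructs that lower to ldtoken and round-trip the resulting handles.

```csharp
using System;
using System.Reflection;

class LdTokenDemo
{
    static void Main()
    {
        // 'typeof(string)' lowers to ldtoken + Type.GetTypeFromHandle; the handle round-trips.
        RuntimeTypeHandle typeHandle = typeof(string).TypeHandle;
        Type roundTripped = Type.GetTypeFromHandle(typeHandle);
        Console.WriteLine(roundTripped == typeof(string)); // True

        // Field handles are ldtoken-produced as well (obtained via reflection here).
        RuntimeFieldHandle fieldHandle = typeof(DateTime).GetField(nameof(DateTime.MinValue)).FieldHandle;
        Console.WriteLine(FieldInfo.GetFieldFromHandle(fieldHandle).Name); // MinValue
    }
}
```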
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/Common/src/Interop/Unix/System.Native/Interop.Poll.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Runtime.InteropServices;
using Microsoft.Win32.SafeHandles;
internal static partial class Interop
{
internal static partial class Sys
{
/// <summary>
/// Polls a set of file descriptors for signals and returns what signals have been set
/// </summary>
/// <param name="pollEvents">A list of PollEvent entries</param>
/// <param name="eventCount">The number of entries in pollEvents</param>
/// <param name="timeout">The amount of time to wait; -1 for infinite, 0 for immediate return, and a positive number is the number of milliseconds</param>
/// <param name="triggered">The number of events triggered (i.e. the number of entries in pollEvents with a non-zero TriggeredEvents). May be zero in the event of a timeout.</param>
/// <returns>An error or Error.SUCCESS.</returns>
[LibraryImport(Libraries.SystemNative, EntryPoint = "SystemNative_Poll")]
internal static unsafe partial Error Poll(PollEvent* pollEvents, uint eventCount, int timeout, uint* triggered);
/// <summary>
/// Polls a File Descriptor for the passed in flags.
/// </summary>
/// <param name="fd">The descriptor to poll</param>
/// <param name="events">The events to poll for</param>
/// <param name="timeout">The amount of time to wait; -1 for infinite, 0 for immediate return, and a positive number is the number of milliseconds</param>
/// <param name="triggered">The events that were returned by the poll call. May be PollEvents.POLLNONE in the case of a timeout.</param>
/// <returns>An error or Error.SUCCESS.</returns>
internal static unsafe Error Poll(SafeHandle fd, PollEvents events, int timeout, out PollEvents triggered)
{
bool gotRef = false;
try
{
fd.DangerousAddRef(ref gotRef);
var pollEvent = new PollEvent
{
FileDescriptor = fd.DangerousGetHandle().ToInt32(),
Events = events,
};
uint unused;
Error err = Poll(&pollEvent, 1, timeout, &unused);
triggered = pollEvent.TriggeredEvents;
return err;
}
finally
{
if (gotRef)
{
fd.DangerousRelease();
}
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Runtime.InteropServices;
using Microsoft.Win32.SafeHandles;
internal static partial class Interop
{
internal static partial class Sys
{
/// <summary>
/// Polls a set of file descriptors for signals and returns what signals have been set
/// </summary>
/// <param name="pollEvents">A list of PollEvent entries</param>
/// <param name="eventCount">The number of entries in pollEvents</param>
/// <param name="timeout">The amount of time to wait; -1 for infinite, 0 for immediate return, and a positive number is the number of milliseconds</param>
/// <param name="triggered">The number of events triggered (i.e. the number of entries in pollEvents with a non-zero TriggeredEvents). May be zero in the event of a timeout.</param>
/// <returns>An error or Error.SUCCESS.</returns>
[LibraryImport(Libraries.SystemNative, EntryPoint = "SystemNative_Poll")]
internal static unsafe partial Error Poll(PollEvent* pollEvents, uint eventCount, int timeout, uint* triggered);
/// <summary>
/// Polls a File Descriptor for the passed in flags.
/// </summary>
/// <param name="fd">The descriptor to poll</param>
/// <param name="events">The events to poll for</param>
/// <param name="timeout">The amount of time to wait; -1 for infinite, 0 for immediate return, and a positive number is the number of milliseconds</param>
/// <param name="triggered">The events that were returned by the poll call. May be PollEvents.POLLNONE in the case of a timeout.</param>
/// <returns>An error or Error.SUCCESS.</returns>
internal static unsafe Error Poll(SafeHandle fd, PollEvents events, int timeout, out PollEvents triggered)
{
bool gotRef = false;
try
{
fd.DangerousAddRef(ref gotRef);
var pollEvent = new PollEvent
{
FileDescriptor = fd.DangerousGetHandle().ToInt32(),
Events = events,
};
uint unused;
Error err = Poll(&pollEvent, 1, timeout, &unused);
triggered = pollEvent.TriggeredEvents;
return err;
}
finally
{
if (gotRef)
{
fd.DangerousRelease();
}
}
}
}
}
| -1 |
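A minimal sketch of how library-internal code might use the SafeHandle overload of Poll shown above. The Interop types are internal to the libraries, and the WaitForRead helper name is invented here; this is not an existing runtime API.

```csharp
using System.IO;
using System.Runtime.InteropServices;

internal static class PollExample
{
    // Returns true when the descriptor becomes readable within timeoutMs, false on timeout.
    internal static bool WaitForRead(SafeHandle handle, int timeoutMs)
    {
        Interop.Error err = Interop.Sys.Poll(
            handle, Interop.PollEvents.POLLIN, timeoutMs, out Interop.PollEvents triggered);

        if (err != Interop.Error.SUCCESS)
            throw new IOException($"poll failed with {err}");

        return (triggered & Interop.PollEvents.POLLIN) != 0;
    }
}
```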
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Private.Xml/src/System/Xml/Schema/XmlSchemaAll.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.Xml.Schema
{
using System.Collections;
using System.Xml.Serialization;
public class XmlSchemaAll : XmlSchemaGroupBase
{
private XmlSchemaObjectCollection _items = new XmlSchemaObjectCollection();
[XmlElement("element", typeof(XmlSchemaElement))]
public override XmlSchemaObjectCollection Items
{
get { return _items; }
}
internal override bool IsEmpty
{
get { return base.IsEmpty || _items.Count == 0; }
}
internal override void SetItems(XmlSchemaObjectCollection newItems)
{
_items = newItems;
}
}
}
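// Illustrative sketch of typical use (hypothetical element names): XmlSchemaAll is the object
// model for an <xs:all> group, and Items holds the child <xs:element> declarations.
//
//   var all = new XmlSchemaAll();
//   all.Items.Add(new XmlSchemaElement { Name = "firstName", SchemaTypeName = new XmlQualifiedName("string", "http://www.w3.org/2001/XMLSchema") });
//   all.Items.Add(new XmlSchemaElement { Name = "lastName", SchemaTypeName = new XmlQualifiedName("string", "http://www.w3.org/2001/XMLSchema") });
//   var personType = new XmlSchemaComplexType { Particle = all };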
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.Xml.Schema
{
using System.Collections;
using System.Xml.Serialization;
public class XmlSchemaAll : XmlSchemaGroupBase
{
private XmlSchemaObjectCollection _items = new XmlSchemaObjectCollection();
[XmlElement("element", typeof(XmlSchemaElement))]
public override XmlSchemaObjectCollection Items
{
get { return _items; }
}
internal override bool IsEmpty
{
get { return base.IsEmpty || _items.Count == 0; }
}
internal override void SetItems(XmlSchemaObjectCollection newItems)
{
_items = newItems;
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/HardwareIntrinsics/General/Vector256/Widen.UInt16.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void WidenUInt16()
{
var test = new VectorWidenTest__WidenUInt16();
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class VectorWidenTest__WidenUInt16
{
private struct DataTable
{
private byte[] inArray1;
private byte[] outLowerArray;
private byte[] outUpperArray;
private GCHandle inHandle1;
private GCHandle outLowerHandle;
private GCHandle outUpperHandle;
private ulong alignment;
public DataTable(Byte[] inArray1, UInt16[] outLowerArray, UInt16[] outUpperArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Byte>();
int sizeOfoutLowerArray = outLowerArray.Length * Unsafe.SizeOf<UInt16>();
int sizeOfoutUpperArray = outUpperArray.Length * Unsafe.SizeOf<UInt16>();
                if ((alignment != 32 && alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfoutLowerArray || (alignment * 2) < sizeOfoutUpperArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.outLowerArray = new byte[alignment * 2];
this.outUpperArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.outLowerHandle = GCHandle.Alloc(this.outLowerArray, GCHandleType.Pinned);
this.outUpperHandle = GCHandle.Alloc(this.outUpperArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Byte, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* outLowerArrayPtr => Align((byte*)(outLowerHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void* outUpperArrayPtr => Align((byte*)(outUpperHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
outLowerHandle.Free();
outUpperHandle.Free();
}
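            // Align rounds the pinned buffer address up to the next multiple of expectedAlignment,
            // which must be a power of two for the mask to be valid. Worked example with
            // expectedAlignment = 32: buffer = 0x1003 -> 0x1003 + 31 = 0x1022 -> 0x1022 & ~31 = 0x1020,
            // the first 32-byte boundary at or above the original address.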
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector256<Byte> _fld1;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Byte>, byte>(ref testStruct._fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Byte>>());
return testStruct;
}
public void RunStructFldScenario(VectorWidenTest__WidenUInt16 testClass)
{
var result = Vector256.Widen(_fld1);
Unsafe.Write(testClass._dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(testClass._dataTable.outUpperArrayPtr, result.Upper);
testClass.ValidateResult(_fld1, testClass._dataTable.outLowerArrayPtr, testClass._dataTable.outUpperArrayPtr);
}
}
private static readonly int LargestVectorSize = 32;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<Byte>>() / sizeof(Byte);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<UInt16>>() / sizeof(UInt16);
private static Byte[] _data1 = new Byte[Op1ElementCount];
private static Vector256<Byte> _clsVar1;
private Vector256<Byte> _fld1;
private DataTable _dataTable;
static VectorWidenTest__WidenUInt16()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Byte>, byte>(ref _clsVar1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Byte>>());
}
public VectorWidenTest__WidenUInt16()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Byte>, byte>(ref _fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Byte>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
_dataTable = new DataTable(_data1, new UInt16[RetElementCount], new UInt16[RetElementCount], LargestVectorSize);
}
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Vector256.Widen(
Unsafe.Read<Vector256<Byte>>(_dataTable.inArray1Ptr)
);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var method = typeof(Vector256).GetMethod(nameof(Vector256.Widen), new Type[] {
typeof(Vector256<Byte>)
});
if (method is null)
{
method = typeof(Vector256).GetMethod(nameof(Vector256.Widen), 1, new Type[] {
typeof(Vector256<>).MakeGenericType(Type.MakeGenericMethodParameter(0))
});
}
if (method.IsGenericMethodDefinition)
{
method = method.MakeGenericMethod(typeof(UInt16));
}
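            // The lookup above first tries the concrete Widen(Vector256<Byte>) overload and, when that is
            // absent, falls back to resolving the method by arity and open generic signature via
            // Type.MakeGenericMethodParameter(0), closing the generic definition before it is invoked below.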
var result = method.Invoke(null, new object[] {
Unsafe.Read<Vector256<Byte>>(_dataTable.inArray1Ptr)
});
Unsafe.Write(_dataTable.outLowerArrayPtr, (((Vector256<UInt16> Lower, Vector256<UInt16> Upper))(result)).Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, (((Vector256<UInt16> Lower, Vector256<UInt16> Upper))(result)).Upper);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Vector256.Widen(
_clsVar1
);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(_clsVar1, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector256<Byte>>(_dataTable.inArray1Ptr);
var result = Vector256.Widen(op1);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(op1, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new VectorWidenTest__WidenUInt16();
var result = Vector256.Widen(test._fld1);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(test._fld1, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Vector256.Widen(_fld1);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(_fld1, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Vector256.Widen(test._fld1);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(test._fld1, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
private void ValidateResult(Vector256<Byte> op1, void* lowerResult, void* upperResult, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
UInt16[] outLowerArray = new UInt16[RetElementCount];
UInt16[] outUpperArray = new UInt16[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), op1);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outLowerArray[0]), ref Unsafe.AsRef<byte>(lowerResult), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outUpperArray[0]), ref Unsafe.AsRef<byte>(upperResult), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
ValidateResult(inArray1, outLowerArray, outUpperArray, method);
}
private void ValidateResult(void* op1, void* lowerResult, void* upperResult, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
UInt16[] outLowerArray = new UInt16[RetElementCount];
UInt16[] outUpperArray = new UInt16[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector256<Byte>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outLowerArray[0]), ref Unsafe.AsRef<byte>(lowerResult), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outUpperArray[0]), ref Unsafe.AsRef<byte>(upperResult), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
ValidateResult(inArray1, outLowerArray, outUpperArray, method);
}
private void ValidateResult(Byte[] firstOp, UInt16[] lowerResult, UInt16[] upperResult, [CallerMemberName] string method = "")
{
bool succeeded = true;
for (var i = 0; i < RetElementCount; i++)
{
if (lowerResult[i] != (ushort)(firstOp[i]))
{
succeeded = false;
break;
}
}
for (var i = 0; i < RetElementCount; i++)
{
if (upperResult[i] != (ushort)(firstOp[i + RetElementCount]))
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Vector256)}.{nameof(Vector256.Widen)}<UInt16>(Vector256<Byte>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
TestLibrary.TestFramework.LogInformation($" lowerResult: ({string.Join(", ", lowerResult)})");
TestLibrary.TestFramework.LogInformation($" upperResult: ({string.Join(", ", upperResult)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
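// Illustrative sketch of the operation validated above: Vector256.Widen splits a Vector256<byte>
// (32 lanes) into two Vector256<ushort> halves, Lower holding the zero-extended lanes 0..15 and
// Upper holding lanes 16..31, which is exactly the mapping the two ValidateResult loops check.
//
//   Vector256<byte> source = Vector256.Create((byte)5);
//   (Vector256<ushort> lower, Vector256<ushort> upper) = Vector256.Widen(source);
//   // every lane of lower and upper now holds the value 5 as a 16-bit element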
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void WidenUInt16()
{
var test = new VectorWidenTest__WidenUInt16();
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class VectorWidenTest__WidenUInt16
{
private struct DataTable
{
private byte[] inArray1;
private byte[] outLowerArray;
private byte[] outUpperArray;
private GCHandle inHandle1;
private GCHandle outLowerHandle;
private GCHandle outUpperHandle;
private ulong alignment;
public DataTable(Byte[] inArray1, UInt16[] outLowerArray, UInt16[] outUpperArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Byte>();
int sizeOfoutLowerArray = outLowerArray.Length * Unsafe.SizeOf<UInt16>();
int sizeOfoutUpperArray = outUpperArray.Length * Unsafe.SizeOf<UInt16>();
                if ((alignment != 32 && alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfoutLowerArray || (alignment * 2) < sizeOfoutUpperArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.outLowerArray = new byte[alignment * 2];
this.outUpperArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.outLowerHandle = GCHandle.Alloc(this.outLowerArray, GCHandleType.Pinned);
this.outUpperHandle = GCHandle.Alloc(this.outUpperArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Byte, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* outLowerArrayPtr => Align((byte*)(outLowerHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void* outUpperArrayPtr => Align((byte*)(outUpperHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
outLowerHandle.Free();
outUpperHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector256<Byte> _fld1;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Byte>, byte>(ref testStruct._fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Byte>>());
return testStruct;
}
public void RunStructFldScenario(VectorWidenTest__WidenUInt16 testClass)
{
var result = Vector256.Widen(_fld1);
Unsafe.Write(testClass._dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(testClass._dataTable.outUpperArrayPtr, result.Upper);
testClass.ValidateResult(_fld1, testClass._dataTable.outLowerArrayPtr, testClass._dataTable.outUpperArrayPtr);
}
}
private static readonly int LargestVectorSize = 32;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<Byte>>() / sizeof(Byte);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<UInt16>>() / sizeof(UInt16);
private static Byte[] _data1 = new Byte[Op1ElementCount];
private static Vector256<Byte> _clsVar1;
private Vector256<Byte> _fld1;
private DataTable _dataTable;
static VectorWidenTest__WidenUInt16()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Byte>, byte>(ref _clsVar1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Byte>>());
}
public VectorWidenTest__WidenUInt16()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Byte>, byte>(ref _fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Byte>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
_dataTable = new DataTable(_data1, new UInt16[RetElementCount], new UInt16[RetElementCount], LargestVectorSize);
}
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Vector256.Widen(
Unsafe.Read<Vector256<Byte>>(_dataTable.inArray1Ptr)
);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var method = typeof(Vector256).GetMethod(nameof(Vector256.Widen), new Type[] {
typeof(Vector256<Byte>)
});
if (method is null)
{
method = typeof(Vector256).GetMethod(nameof(Vector256.Widen), 1, new Type[] {
typeof(Vector256<>).MakeGenericType(Type.MakeGenericMethodParameter(0))
});
}
if (method.IsGenericMethodDefinition)
{
method = method.MakeGenericMethod(typeof(UInt16));
}
var result = method.Invoke(null, new object[] {
Unsafe.Read<Vector256<Byte>>(_dataTable.inArray1Ptr)
});
Unsafe.Write(_dataTable.outLowerArrayPtr, (((Vector256<UInt16> Lower, Vector256<UInt16> Upper))(result)).Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, (((Vector256<UInt16> Lower, Vector256<UInt16> Upper))(result)).Upper);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Vector256.Widen(
_clsVar1
);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(_clsVar1, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector256<Byte>>(_dataTable.inArray1Ptr);
var result = Vector256.Widen(op1);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(op1, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new VectorWidenTest__WidenUInt16();
var result = Vector256.Widen(test._fld1);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(test._fld1, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Vector256.Widen(_fld1);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(_fld1, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Vector256.Widen(test._fld1);
Unsafe.Write(_dataTable.outLowerArrayPtr, result.Lower);
Unsafe.Write(_dataTable.outUpperArrayPtr, result.Upper);
ValidateResult(test._fld1, _dataTable.outLowerArrayPtr, _dataTable.outUpperArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
private void ValidateResult(Vector256<Byte> op1, void* lowerResult, void* upperResult, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
UInt16[] outLowerArray = new UInt16[RetElementCount];
UInt16[] outUpperArray = new UInt16[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), op1);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outLowerArray[0]), ref Unsafe.AsRef<byte>(lowerResult), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outUpperArray[0]), ref Unsafe.AsRef<byte>(upperResult), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
ValidateResult(inArray1, outLowerArray, outUpperArray, method);
}
private void ValidateResult(void* op1, void* lowerResult, void* upperResult, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
UInt16[] outLowerArray = new UInt16[RetElementCount];
UInt16[] outUpperArray = new UInt16[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector256<Byte>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outLowerArray[0]), ref Unsafe.AsRef<byte>(lowerResult), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outUpperArray[0]), ref Unsafe.AsRef<byte>(upperResult), (uint)Unsafe.SizeOf<Vector256<UInt16>>());
ValidateResult(inArray1, outLowerArray, outUpperArray, method);
}
private void ValidateResult(Byte[] firstOp, UInt16[] lowerResult, UInt16[] upperResult, [CallerMemberName] string method = "")
{
bool succeeded = true;
for (var i = 0; i < RetElementCount; i++)
{
if (lowerResult[i] != (ushort)(firstOp[i]))
{
succeeded = false;
break;
}
}
for (var i = 0; i < RetElementCount; i++)
{
if (upperResult[i] != (ushort)(firstOp[i + RetElementCount]))
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Vector256)}.{nameof(Vector256.Widen)}<UInt16>(Vector256<Byte>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
TestLibrary.TestFramework.LogInformation($" lowerResult: ({string.Join(", ", lowerResult)})");
TestLibrary.TestFramework.LogInformation($" upperResult: ({string.Join(", ", upperResult)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/HardwareIntrinsics/Arm/AdvSimd.Arm64/Divide.Vector64.Single.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void Divide_Vector64_Single()
{
var test = new SimpleBinaryOpTest__Divide_Vector64_Single();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__Divide_Vector64_Single
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Single[] inArray1, Single[] inArray2, Single[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector64<Single> _fld1;
public Vector64<Single> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref testStruct._fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__Divide_Vector64_Single testClass)
{
var result = AdvSimd.Arm64.Divide(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__Divide_Vector64_Single testClass)
{
fixed (Vector64<Single>* pFld1 = &_fld1)
fixed (Vector64<Single>* pFld2 = &_fld2)
{
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 8;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static Single[] _data1 = new Single[Op1ElementCount];
private static Single[] _data2 = new Single[Op2ElementCount];
private static Vector64<Single> _clsVar1;
private static Vector64<Single> _clsVar2;
private Vector64<Single> _fld1;
private Vector64<Single> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__Divide_Vector64_Single()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
}
public SimpleBinaryOpTest__Divide_Vector64_Single()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
_dataTable = new DataTable(_data1, _data2, new Single[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.Arm64.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.Arm64.Divide(
Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd.Arm64).GetMethod(nameof(AdvSimd.Arm64.Divide), new Type[] { typeof(Vector64<Single>), typeof(Vector64<Single>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd.Arm64).GetMethod(nameof(AdvSimd.Arm64.Divide), new Type[] { typeof(Vector64<Single>), typeof(Vector64<Single>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.Arm64.Divide(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector64<Single>* pClsVar1 = &_clsVar1)
fixed (Vector64<Single>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(pClsVar1)),
AdvSimd.LoadVector64((Single*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr);
var result = AdvSimd.Arm64.Divide(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr));
var result = AdvSimd.Arm64.Divide(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__Divide_Vector64_Single();
var result = AdvSimd.Arm64.Divide(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__Divide_Vector64_Single();
fixed (Vector64<Single>* pFld1 = &test._fld1)
fixed (Vector64<Single>* pFld2 = &test._fld2)
{
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.Arm64.Divide(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector64<Single>* pFld1 = &_fld1)
fixed (Vector64<Single>* pFld2 = &_fld2)
{
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.Arm64.Divide(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(&test._fld1)),
AdvSimd.LoadVector64((Single*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector64<Single> op1, Vector64<Single> op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector64<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector64<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
for (var i = 0; i < RetElementCount; i++)
{
if (BitConverter.SingleToInt32Bits(Helpers.Divide(left[i], right[i])) != BitConverter.SingleToInt32Bits(result[i]))
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd.Arm64)}.{nameof(AdvSimd.Arm64.Divide)}<Single>(Vector64<Single>, Vector64<Single>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
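// A note on the comparison used in ValidateResult above: equality is checked through
// BitConverter.SingleToInt32Bits so the raw bit patterns are compared. A NaN produced by the
// division therefore still matches the expected NaN, and -0.0f (e.g. from 0f / -1f) is not
// conflated with +0.0f the way the == operator would conflate them. Illustrative helper
// (hypothetical, not part of the test template):
//
//   static bool BitwiseEquals(float expected, float actual) =>
//       BitConverter.SingleToInt32Bits(expected) == BitConverter.SingleToInt32Bits(actual);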
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void Divide_Vector64_Single()
{
var test = new SimpleBinaryOpTest__Divide_Vector64_Single();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__Divide_Vector64_Single
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Single[] inArray1, Single[] inArray2, Single[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector64<Single> _fld1;
public Vector64<Single> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref testStruct._fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__Divide_Vector64_Single testClass)
{
var result = AdvSimd.Arm64.Divide(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__Divide_Vector64_Single testClass)
{
fixed (Vector64<Single>* pFld1 = &_fld1)
fixed (Vector64<Single>* pFld2 = &_fld2)
{
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 8;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static Single[] _data1 = new Single[Op1ElementCount];
private static Single[] _data2 = new Single[Op2ElementCount];
private static Vector64<Single> _clsVar1;
private static Vector64<Single> _clsVar2;
private Vector64<Single> _fld1;
private Vector64<Single> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__Divide_Vector64_Single()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
}
public SimpleBinaryOpTest__Divide_Vector64_Single()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
_dataTable = new DataTable(_data1, _data2, new Single[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.Arm64.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.Arm64.Divide(
Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd.Arm64).GetMethod(nameof(AdvSimd.Arm64.Divide), new Type[] { typeof(Vector64<Single>), typeof(Vector64<Single>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd.Arm64).GetMethod(nameof(AdvSimd.Arm64.Divide), new Type[] { typeof(Vector64<Single>), typeof(Vector64<Single>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.Arm64.Divide(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector64<Single>* pClsVar1 = &_clsVar1)
fixed (Vector64<Single>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(pClsVar1)),
AdvSimd.LoadVector64((Single*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr);
var result = AdvSimd.Arm64.Divide(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr));
var result = AdvSimd.Arm64.Divide(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__Divide_Vector64_Single();
var result = AdvSimd.Arm64.Divide(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__Divide_Vector64_Single();
fixed (Vector64<Single>* pFld1 = &test._fld1)
fixed (Vector64<Single>* pFld2 = &test._fld2)
{
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.Arm64.Divide(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector64<Single>* pFld1 = &_fld1)
fixed (Vector64<Single>* pFld2 = &_fld2)
{
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.Arm64.Divide(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.Arm64.Divide(
AdvSimd.LoadVector64((Single*)(&test._fld1)),
AdvSimd.LoadVector64((Single*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector64<Single> op1, Vector64<Single> op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector64<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector64<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
for (var i = 0; i < RetElementCount; i++)
{
if (BitConverter.SingleToInt32Bits(Helpers.Divide(left[i], right[i])) != BitConverter.SingleToInt32Bits(result[i]))
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd.Arm64)}.{nameof(AdvSimd.Arm64.Divide)}<Single>(Vector64<Single>, Vector64<Single>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| -1 |
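The ValidateResult overloads above compare each expected quotient against the produced element through BitConverter.SingleToInt32Bits rather than with ==, so the check is bit-exact: NaNs with the same payload match, and +0.0f is distinguished from -0.0f. A minimal standalone sketch of that comparison idiom, separate from the generated test template:

using System;

static class BitExactComparison
{
    // Compare two Single values by raw bit pattern, mirroring the ValidateResult loop above:
    // NaNs with identical payloads compare equal, and +0.0f is distinguished from -0.0f.
    static bool BitEquals(float expected, float actual) =>
        BitConverter.SingleToInt32Bits(expected) == BitConverter.SingleToInt32Bits(actual);

    static void Main()
    {
        Console.WriteLine(BitEquals(float.NaN, float.NaN)); // True  (same bit pattern)
        Console.WriteLine(float.NaN == float.NaN);          // False (IEEE 754 comparison)
        Console.WriteLine(BitEquals(0.0f, -0.0f));          // False (different sign bit)
        Console.WriteLine(0.0f == -0.0f);                   // True
    }
}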
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/HardwareIntrinsics/General/NotSupported/Vector128BooleanGetElementMaxValue.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\General\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void Vector128BooleanGetElementMaxValue()
{
bool succeeded = false;
try
{
bool result = default(Vector128<bool>).GetElement(int.MaxValue);
}
catch (NotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"Vector128BooleanGetElementMaxValue: RunNotSupportedScenario failed to throw NotSupportedException.");
TestLibrary.TestFramework.LogInformation(string.Empty);
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\General\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void Vector128BooleanGetElementMaxValue()
{
bool succeeded = false;
try
{
bool result = default(Vector128<bool>).GetElement(int.MaxValue);
}
catch (NotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"Vector128BooleanGetElementMaxValue: RunNotSupportedScenario failed to throw NotSupportedException.");
TestLibrary.TestFramework.LogInformation(string.Empty);
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
}
| -1 |
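The PR description repeated in the metadata above centers on two test-infrastructure ideas: building expensive state once per test class instead of once per test case, and collapsing families of near-identical tests into theories. A hedged xUnit sketch of that pattern follows; every type, path, and data value in it is hypothetical and merely stands in for the real SDKLookup/FrameworkResolution fixtures, which are not part of this excerpt.

using System;
using Xunit;

// Shared state is built once per test class instead of once per test case.
public class SharedTestState : IDisposable
{
    public string DotNetRoot { get; }

    public SharedTestState()
    {
        // Expensive setup (copying files, laying out fake frameworks) would happen once here.
        DotNetRoot = "/tmp/fake-dotnet-root"; // hypothetical path
    }

    public void Dispose()
    {
        // Clean up the shared layout once all tests in the class have run.
    }
}

public class FrameworkResolutionTests : IClassFixture<SharedTestState>
{
    private readonly SharedTestState _state;

    public FrameworkResolutionTests(SharedTestState state) => _state = state;

    // One theory replaces a family of near-identical facts.
    [Theory]
    [InlineData("net5.0", "5.0.1")]
    [InlineData("net6.0", "6.0.0")]
    public void ResolvesExpectedFrameworkVersion(string tfm, string expectedVersion)
    {
        // A real test would run the host against _state.DotNetRoot here.
        Assert.False(string.IsNullOrEmpty(_state.DotNetRoot));
        Assert.False(string.IsNullOrEmpty(tfm));
        Assert.False(string.IsNullOrEmpty(expectedVersion));
    }
}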
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Private.CoreLib/src/System/Reflection/MethodInfo.Internal.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.Reflection
{
public abstract partial class MethodInfo : MethodBase
{
#if CORERT
public // Needs to be public so that Reflection.Core can see it.
#else
internal
#endif
virtual int GenericParameterCount => GetGenericArguments().Length;
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.Reflection
{
public abstract partial class MethodInfo : MethodBase
{
#if CORERT
public // Needs to be public so that Reflection.Core can see it.
#else
internal
#endif
virtual int GenericParameterCount => GetGenericArguments().Length;
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Diagnostics.DiagnosticSource/src/System/Diagnostics/ActivityListener.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
namespace System.Diagnostics
{
/// <summary>
    /// Defines the callback that can be used in <see cref="ActivityListener"/> to decide whether to create the Activity objects and with what data state.
/// </summary>
public delegate ActivitySamplingResult SampleActivity<T>(ref ActivityCreationOptions<T> options);
/// <summary>
    /// ActivityListener allows listening to the start and stop Activity events and gives the opportunity to decide whether to create the Activity for sampling scenarios.
/// </summary>
public sealed class ActivityListener : IDisposable
{
/// <summary>
/// Construct a new <see cref="ActivityListener"/> object to start listening to the <see cref="Activity"/> events.
/// </summary>
public ActivityListener()
{
}
/// <summary>
/// Set or get the callback used to listen to the <see cref="Activity"/> start event.
/// </summary>
public Action<Activity>? ActivityStarted { get; set; }
/// <summary>
/// Set or get the callback used to listen to the <see cref="Activity"/> stop event.
/// </summary>
public Action<Activity>? ActivityStopped { get; set; }
/// <summary>
        /// Set or get the callback used to decide whether to listen to <see cref="Activity"/> object events which are created using an <see cref="ActivitySource"/> object.
/// </summary>
public Func<ActivitySource, bool>? ShouldListenTo { get; set; }
/// <summary>
        /// Set or get the callback used to decide whether to allow creating <see cref="Activity"/> objects with a specific data state.
/// </summary>
public SampleActivity<string>? SampleUsingParentId { get; set; }
/// <summary>
        /// Set or get the callback used to decide whether to allow creating <see cref="Activity"/> objects with a specific data state.
/// </summary>
public SampleActivity<ActivityContext>? Sample { get; set; }
/// <summary>
/// Dispose will unregister this <see cref="ActivityListener"/> object from listening to <see cref="Activity"/> events.
/// </summary>
public void Dispose() => ActivitySource.DetachListener(this);
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
namespace System.Diagnostics
{
/// <summary>
    /// Defines the callback that can be used in <see cref="ActivityListener"/> to decide whether to create the Activity objects and with what data state.
/// </summary>
public delegate ActivitySamplingResult SampleActivity<T>(ref ActivityCreationOptions<T> options);
/// <summary>
    /// ActivityListener allows listening to the start and stop Activity events and gives the opportunity to decide whether to create the Activity for sampling scenarios.
/// </summary>
public sealed class ActivityListener : IDisposable
{
/// <summary>
/// Construct a new <see cref="ActivityListener"/> object to start listening to the <see cref="Activity"/> events.
/// </summary>
public ActivityListener()
{
}
/// <summary>
/// Set or get the callback used to listen to the <see cref="Activity"/> start event.
/// </summary>
public Action<Activity>? ActivityStarted { get; set; }
/// <summary>
/// Set or get the callback used to listen to the <see cref="Activity"/> stop event.
/// </summary>
public Action<Activity>? ActivityStopped { get; set; }
/// <summary>
        /// Set or get the callback used to decide whether to listen to <see cref="Activity"/> object events which are created using an <see cref="ActivitySource"/> object.
/// </summary>
public Func<ActivitySource, bool>? ShouldListenTo { get; set; }
/// <summary>
        /// Set or get the callback used to decide whether to allow creating <see cref="Activity"/> objects with a specific data state.
/// </summary>
public SampleActivity<string>? SampleUsingParentId { get; set; }
/// <summary>
        /// Set or get the callback used to decide whether to allow creating <see cref="Activity"/> objects with a specific data state.
/// </summary>
public SampleActivity<ActivityContext>? Sample { get; set; }
/// <summary>
/// Dispose will unregister this <see cref="ActivityListener"/> object from listening to <see cref="Activity"/> events.
/// </summary>
public void Dispose() => ActivitySource.DetachListener(this);
}
}
| -1 |
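As a usage illustration of the ActivityListener API shown above, the following sketch registers a listener against a single ActivitySource and samples all data; the source name, activity name, and tag are invented for the example.

using System;
using System.Diagnostics;

class ActivityListenerExample
{
    static void Main()
    {
        using var source = new ActivitySource("MyCompany.MyLibrary"); // hypothetical source name

        using var listener = new ActivityListener
        {
            // Only listen to the one source we care about.
            ShouldListenTo = s => s.Name == "MyCompany.MyLibrary",
            // Sample every activity with full data so StartActivity returns a non-null Activity.
            Sample = (ref ActivityCreationOptions<ActivityContext> options) => ActivitySamplingResult.AllData,
            ActivityStarted = activity => Console.WriteLine($"Started {activity.DisplayName}"),
            ActivityStopped = activity => Console.WriteLine($"Stopped {activity.DisplayName}")
        };
        ActivitySource.AddActivityListener(listener);

        using (var activity = source.StartActivity("DoWork"))
        {
            // Work happens here; the activity is non-null because the listener sampled AllData.
            activity?.SetTag("example", true);
        }
    }
}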
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/coreclr/pal/tests/palsuite/c_runtime/swprintf/test19/test19.cpp
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/*============================================================================
**
** Source: test19.c
**
** Purpose: Tests swprintf with argument specified precision
**
**
**==========================================================================*/
#include <palsuite.h>
#include "../swprintf.h"
/*
* Uses memcmp & wcslen
*/
PALTEST(c_runtime_swprintf_test19_paltest_swprintf_test19, "c_runtime/swprintf/test19/paltest_swprintf_test19")
{
int n = -1;
if (PAL_Initialize(argc, argv) != 0)
{
return FAIL;
}
DoArgumentPrecTest(convert("%.*s"), 2, (void*)convert("bar"), "bar",
convert("ba"), convert("ba"));
DoArgumentPrecTest(convert("%.*S"), 2, (void*)"bar", "bar", convert("ba"),
convert("ba"));
DoArgumentPrecTest(convert("%.*c"), 0, (void*)'a', "a", convert("a"),
convert("a"));
DoArgumentPrecTest(convert("%.*c"), 4, (void*)'a', "a", convert("a"),
convert("a"));
DoArgumentPrecTest(convert("%.*C"), 0, (void*)'a', "a", convert("a"),
convert("a"));
DoArgumentPrecTest(convert("%.*C"), 4, (void*)'a', "a", convert("a"),
convert("a"));
DoArgumentPrecTest(convert("%.*d"), 1, (void*)42, "42", convert("42"),
convert("42"));
DoArgumentPrecTest(convert("%.*d"), 3, (void*)42, "42", convert("042"),
convert("042"));
DoArgumentPrecTest(convert("%.*i"), 1, (void*)42, "42", convert("42"),
convert("42"));
DoArgumentPrecTest(convert("%.*i"), 3, (void*)42, "42", convert("042"),
convert("042"));
DoArgumentPrecTest(convert("%.*o"), 1, (void*)42, "42", convert("52"),
convert("52"));
DoArgumentPrecTest(convert("%.*o"), 3, (void*)42, "42", convert("052"),
convert("052"));
DoArgumentPrecTest(convert("%.*u"), 1, (void*)42, "42", convert("42"),
convert("42"));
DoArgumentPrecTest(convert("%.*u"), 3, (void*)42, "42", convert("042"),
convert("042"));
DoArgumentPrecTest(convert("%.*x"), 1, (void*)0x42, "0x42", convert("42"),
convert("42"));
DoArgumentPrecTest(convert("%.*x"), 3, (void*)0x42, "0x42", convert("042"),
convert("042"));
DoArgumentPrecTest(convert("%.*X"), 1, (void*)0x42, "0x42", convert("42"),
convert("42"));
DoArgumentPrecTest(convert("%.*X"), 3, (void*)0x42, "0x42", convert("042"),
convert("042"));
DoArgumentPrecDoubleTest(convert("%.*e"), 1, 2.01, convert("2.0e+000"),
convert("2.0e+000"));
DoArgumentPrecDoubleTest(convert("%.*e"), 3, 2.01, convert("2.010e+000"),
convert("2.010e+000"));
DoArgumentPrecDoubleTest(convert("%.*E"), 1, 2.01, convert("2.0E+000"),
convert("2.0E+000"));
DoArgumentPrecDoubleTest(convert("%.*E"), 3, 2.01, convert("2.010E+000"),
convert("2.010E+000"));
DoArgumentPrecDoubleTest(convert("%.*f"), 1, 2.01, convert("2.0"),
convert("2.0"));
DoArgumentPrecDoubleTest(convert("%.*f"), 3, 2.01, convert("2.010"),
convert("2.010"));
DoArgumentPrecDoubleTest(convert("%.*g"), 1, 256.01, convert("3e+002"),
convert("3e+002"));
DoArgumentPrecDoubleTest(convert("%.*g"), 3, 256.01, convert("256"),
convert("256"));
DoArgumentPrecDoubleTest(convert("%.*g"), 4, 256.01, convert("256"),
convert("256"));
DoArgumentPrecDoubleTest(convert("%.*g"), 6, 256.01, convert("256.01"),
convert("256.01"));
DoArgumentPrecDoubleTest(convert("%.*G"), 1, 256.01, convert("3E+002"),
convert("3E+002"));
DoArgumentPrecDoubleTest(convert("%.*G"), 3, 256.01, convert("256"),
convert("256"));
DoArgumentPrecDoubleTest(convert("%.*G"), 4, 256.01, convert("256"),
convert("256"));
DoArgumentPrecDoubleTest(convert("%.*G"), 6, 256.01, convert("256.01"),
convert("256.01"));
PAL_Terminate();
return PASS;
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/*============================================================================
**
** Source: test19.c
**
** Purpose: Tests swprintf with argument specified precision
**
**
**==========================================================================*/
#include <palsuite.h>
#include "../swprintf.h"
/*
* Uses memcmp & wcslen
*/
PALTEST(c_runtime_swprintf_test19_paltest_swprintf_test19, "c_runtime/swprintf/test19/paltest_swprintf_test19")
{
int n = -1;
if (PAL_Initialize(argc, argv) != 0)
{
return FAIL;
}
DoArgumentPrecTest(convert("%.*s"), 2, (void*)convert("bar"), "bar",
convert("ba"), convert("ba"));
DoArgumentPrecTest(convert("%.*S"), 2, (void*)"bar", "bar", convert("ba"),
convert("ba"));
DoArgumentPrecTest(convert("%.*c"), 0, (void*)'a', "a", convert("a"),
convert("a"));
DoArgumentPrecTest(convert("%.*c"), 4, (void*)'a', "a", convert("a"),
convert("a"));
DoArgumentPrecTest(convert("%.*C"), 0, (void*)'a', "a", convert("a"),
convert("a"));
DoArgumentPrecTest(convert("%.*C"), 4, (void*)'a', "a", convert("a"),
convert("a"));
DoArgumentPrecTest(convert("%.*d"), 1, (void*)42, "42", convert("42"),
convert("42"));
DoArgumentPrecTest(convert("%.*d"), 3, (void*)42, "42", convert("042"),
convert("042"));
DoArgumentPrecTest(convert("%.*i"), 1, (void*)42, "42", convert("42"),
convert("42"));
DoArgumentPrecTest(convert("%.*i"), 3, (void*)42, "42", convert("042"),
convert("042"));
DoArgumentPrecTest(convert("%.*o"), 1, (void*)42, "42", convert("52"),
convert("52"));
DoArgumentPrecTest(convert("%.*o"), 3, (void*)42, "42", convert("052"),
convert("052"));
DoArgumentPrecTest(convert("%.*u"), 1, (void*)42, "42", convert("42"),
convert("42"));
DoArgumentPrecTest(convert("%.*u"), 3, (void*)42, "42", convert("042"),
convert("042"));
DoArgumentPrecTest(convert("%.*x"), 1, (void*)0x42, "0x42", convert("42"),
convert("42"));
DoArgumentPrecTest(convert("%.*x"), 3, (void*)0x42, "0x42", convert("042"),
convert("042"));
DoArgumentPrecTest(convert("%.*X"), 1, (void*)0x42, "0x42", convert("42"),
convert("42"));
DoArgumentPrecTest(convert("%.*X"), 3, (void*)0x42, "0x42", convert("042"),
convert("042"));
DoArgumentPrecDoubleTest(convert("%.*e"), 1, 2.01, convert("2.0e+000"),
convert("2.0e+000"));
DoArgumentPrecDoubleTest(convert("%.*e"), 3, 2.01, convert("2.010e+000"),
convert("2.010e+000"));
DoArgumentPrecDoubleTest(convert("%.*E"), 1, 2.01, convert("2.0E+000"),
convert("2.0E+000"));
DoArgumentPrecDoubleTest(convert("%.*E"), 3, 2.01, convert("2.010E+000"),
convert("2.010E+000"));
DoArgumentPrecDoubleTest(convert("%.*f"), 1, 2.01, convert("2.0"),
convert("2.0"));
DoArgumentPrecDoubleTest(convert("%.*f"), 3, 2.01, convert("2.010"),
convert("2.010"));
DoArgumentPrecDoubleTest(convert("%.*g"), 1, 256.01, convert("3e+002"),
convert("3e+002"));
DoArgumentPrecDoubleTest(convert("%.*g"), 3, 256.01, convert("256"),
convert("256"));
DoArgumentPrecDoubleTest(convert("%.*g"), 4, 256.01, convert("256"),
convert("256"));
DoArgumentPrecDoubleTest(convert("%.*g"), 6, 256.01, convert("256.01"),
convert("256.01"));
DoArgumentPrecDoubleTest(convert("%.*G"), 1, 256.01, convert("3E+002"),
convert("3E+002"));
DoArgumentPrecDoubleTest(convert("%.*G"), 3, 256.01, convert("256"),
convert("256"));
DoArgumentPrecDoubleTest(convert("%.*G"), 4, 256.01, convert("256"),
convert("256"));
DoArgumentPrecDoubleTest(convert("%.*G"), 6, 256.01, convert("256.01"),
convert("256.01"));
PAL_Terminate();
return PASS;
}
| -1 |
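The PAL test above exercises formats such as %.*f, where the precision is supplied as a separate argument (precision 3 turns 2.01 into "2.010"). Since the other sketches in this document use C#, here is the closest C# analogue, shown only for comparison: the precision is folded into the numeric format string at run time.

using System;
using System.Globalization;

class PrecisionFromArgument
{
    // Formats a value with a precision chosen at run time,
    // roughly what "%.*f" does when the precision is passed as an argument.
    static string FormatWithPrecision(double value, int precision) =>
        value.ToString("F" + precision, CultureInfo.InvariantCulture);

    static void Main()
    {
        Console.WriteLine(FormatWithPrecision(2.01, 1)); // 2.0
        Console.WriteLine(FormatWithPrecision(2.01, 3)); // 2.010
    }
}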
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/GC/Scenarios/BinTree/thdtree.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/**
* Description:
* Mainly stresses the GC by creating n threads each manipulating its own local binary tree structure.
* Each thread in turn adds and deletes thousands of nodes from the binary tree.
*/
namespace DefaultNamespace {
using System.Threading;
using System;
using System.IO;
public class ThdTree
{
public static int Main (System.String[] Args)
{
Console.Out.WriteLine("Test should return with ExitCode 100 ...");
// sync console output Console.SetOut(TextWriter.Synchronized(Console.Out));
int iNofThread = 0;
if (Args.Length == 1)
{
if (!Int32.TryParse( Args[0], out iNofThread ))
{
iNofThread = 2;
}
}
else
{
iNofThread = 2;
}
TreeThread Mv_LLTree;
            int[] count = {10000, -5000, 3000, -6000, 0, 15000, 0, 10000, 0, 100, 100};
for (int i = 0; i < iNofThread; i++)
{
Mv_LLTree = new TreeThread(i, TreeType.Normal, count);
}
return 100;
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/**
* Description:
* Mainly stresses the GC by creating n threads each manipulating its own local binary tree structure.
* Each thread in turn adds and deletes thousands of nodes from the binary tree.
*/
namespace DefaultNamespace {
using System.Threading;
using System;
using System.IO;
public class ThdTree
{
public static int Main (System.String[] Args)
{
Console.Out.WriteLine("Test should return with ExitCode 100 ...");
// sync console output Console.SetOut(TextWriter.Synchronized(Console.Out));
int iNofThread = 0;
if (Args.Length == 1)
{
if (!Int32.TryParse( Args[0], out iNofThread ))
{
iNofThread = 2;
}
}
else
{
iNofThread = 2;
}
TreeThread Mv_LLTree;
            int[] count = {10000, -5000, 3000, -6000, 0, 15000, 0, 10000, 0, 100, 100};
for (int i = 0; i < iNofThread; i++)
{
Mv_LLTree = new TreeThread(i, TreeType.Normal, count);
}
return 100;
}
}
}
| -1 |
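TreeThread and the tree node types referenced by ThdTree are defined elsewhere in the test suite and are not included in this excerpt. A hedged, self-contained sketch of the per-thread tree churn the header comment describes, with all names and counts invented for illustration:

using System;
using System.Threading;

class Node
{
    public int Value;
    public Node Left, Right;
    public Node(int value) => Value = value;
}

class TreeChurn
{
    // Each thread repeatedly builds a small binary tree and drops it,
    // producing garbage for the collector while other threads do the same.
    static void Churn(int iterations)
    {
        var rng = new Random();
        for (int i = 0; i < iterations; i++)
        {
            Node root = new Node(rng.Next());
            for (int j = 0; j < 1000; j++)
            {
                Insert(root, rng.Next());
            }
            // root goes out of scope here; the whole tree becomes collectible.
        }
    }

    static void Insert(Node node, int value)
    {
        while (true)
        {
            if (value < node.Value)
            {
                if (node.Left == null) { node.Left = new Node(value); return; }
                node = node.Left;
            }
            else
            {
                if (node.Right == null) { node.Right = new Node(value); return; }
                node = node.Right;
            }
        }
    }

    static void Main()
    {
        var threads = new Thread[4];
        for (int i = 0; i < threads.Length; i++)
        {
            threads[i] = new Thread(() => Churn(iterations: 100));
            threads[i].Start();
        }
        foreach (var t in threads) t.Join();
        Console.WriteLine("Done");
    }
}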
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/baseservices/threading/generics/threadstart/thread11.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Threading;
interface IGen<T>
{
void Target();
T Dummy(T t);
}
struct Gen<T> : IGen<T>
{
public T Dummy(T t) { return t; }
public void Target()
{
Interlocked.Increment(ref Test_thread11.Xcounter);
}
public static void ThreadPoolTest()
{
Thread[] threads = new Thread[Test_thread11.nThreads];
IGen<T> obj = new Gen<T>();
for (int i = 0; i < Test_thread11.nThreads; i++)
{
threads[i] = new Thread(new ThreadStart(obj.Target));
threads[i].Start();
}
for (int i = 0; i < Test_thread11.nThreads; i++)
{
threads[i].Join();
}
Test_thread11.Eval(Test_thread11.Xcounter==Test_thread11.nThreads);
Test_thread11.Xcounter = 0;
}
}
public class Test_thread11
{
    public static int nThreads = 50;
public static int counter = 0;
public static int Xcounter = 0;
public static bool result = true;
public static void Eval(bool exp)
{
counter++;
if (!exp)
{
result = exp;
Console.WriteLine("Test Failed at location: " + counter);
}
}
public static int Main()
{
Gen<int>.ThreadPoolTest();
Gen<double>.ThreadPoolTest();
Gen<string>.ThreadPoolTest();
Gen<object>.ThreadPoolTest();
Gen<Guid>.ThreadPoolTest();
Gen<int[]>.ThreadPoolTest();
Gen<double[,]>.ThreadPoolTest();
Gen<string[][][]>.ThreadPoolTest();
Gen<object[,,,]>.ThreadPoolTest();
Gen<Guid[][,,,][]>.ThreadPoolTest();
if (result)
{
Console.WriteLine("Test Passed");
return 100;
}
else
{
Console.WriteLine("Test Failed");
return 1;
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Threading;
interface IGen<T>
{
void Target();
T Dummy(T t);
}
struct Gen<T> : IGen<T>
{
public T Dummy(T t) { return t; }
public void Target()
{
Interlocked.Increment(ref Test_thread11.Xcounter);
}
public static void ThreadPoolTest()
{
Thread[] threads = new Thread[Test_thread11.nThreads];
IGen<T> obj = new Gen<T>();
for (int i = 0; i < Test_thread11.nThreads; i++)
{
threads[i] = new Thread(new ThreadStart(obj.Target));
threads[i].Start();
}
for (int i = 0; i < Test_thread11.nThreads; i++)
{
threads[i].Join();
}
Test_thread11.Eval(Test_thread11.Xcounter==Test_thread11.nThreads);
Test_thread11.Xcounter = 0;
}
}
public class Test_thread11
{
    public static int nThreads = 50;
public static int counter = 0;
public static int Xcounter = 0;
public static bool result = true;
public static void Eval(bool exp)
{
counter++;
if (!exp)
{
result = exp;
Console.WriteLine("Test Failed at location: " + counter);
}
}
public static int Main()
{
Gen<int>.ThreadPoolTest();
Gen<double>.ThreadPoolTest();
Gen<string>.ThreadPoolTest();
Gen<object>.ThreadPoolTest();
Gen<Guid>.ThreadPoolTest();
Gen<int[]>.ThreadPoolTest();
Gen<double[,]>.ThreadPoolTest();
Gen<string[][][]>.ThreadPoolTest();
Gen<object[,,,]>.ThreadPoolTest();
Gen<Guid[][,,,][]>.ThreadPoolTest();
if (result)
{
Console.WriteLine("Test Passed");
return 100;
}
else
{
Console.WriteLine("Test Failed");
return 1;
}
}
}
| -1 |
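The Target implementation above bumps a shared static counter from up to fifty threads at once, which is why it goes through Interlocked.Increment rather than ++. A small standalone sketch of the difference (thread and iteration counts are arbitrary):

using System;
using System.Threading;

class InterlockedDemo
{
    static int _unsafeCounter;
    static int _safeCounter;

    static void Main()
    {
        var threads = new Thread[8];
        for (int i = 0; i < threads.Length; i++)
        {
            threads[i] = new Thread(() =>
            {
                for (int j = 0; j < 100_000; j++)
                {
                    _unsafeCounter++;                         // read-modify-write, can lose updates
                    Interlocked.Increment(ref _safeCounter);  // atomic, never loses updates
                }
            });
            threads[i].Start();
        }
        foreach (var t in threads) t.Join();

        // _safeCounter is always 800000; _unsafeCounter is typically smaller under contention.
        Console.WriteLine($"unsafe: {_unsafeCounter}, safe: {_safeCounter}");
    }
}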
./src/libraries/Microsoft.Extensions.Hosting.Systemd/src/SystemdNotifier.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Net.Sockets;
using System.Runtime.Versioning;
namespace Microsoft.Extensions.Hosting.Systemd
{
[UnsupportedOSPlatform("browser")]
public class SystemdNotifier : ISystemdNotifier
{
private const string NOTIFY_SOCKET = "NOTIFY_SOCKET";
private readonly string _socketPath;
public SystemdNotifier() :
this(GetNotifySocketPath())
{ }
// For testing
internal SystemdNotifier(string socketPath)
{
_socketPath = socketPath;
}
/// <inheritdoc />
public bool IsEnabled => _socketPath != null;
/// <inheritdoc />
public void Notify(ServiceState state)
{
if (!IsEnabled)
{
return;
}
using (var socket = new Socket(AddressFamily.Unix, SocketType.Dgram, ProtocolType.Unspecified))
{
var endPoint = new UnixDomainSocketEndPoint(_socketPath);
socket.Connect(endPoint);
// It's safe to do a non-blocking call here: messages sent here are much
// smaller than kernel buffers so we won't get blocked.
socket.Send(state.GetData());
}
}
private static string GetNotifySocketPath()
{
string socketPath = Environment.GetEnvironmentVariable(NOTIFY_SOCKET);
if (string.IsNullOrEmpty(socketPath))
{
return null;
}
// Support abstract socket paths.
if (socketPath[0] == '@')
{
socketPath = string.Create(socketPath.Length, socketPath, (buffer, state) =>
{
buffer[0] = '\0';
state.AsSpan(1).CopyTo(buffer.Slice(1));
});
}
return socketPath;
}
}
}
| -1 |
./src/libraries/System.CodeDom/src/System/CodeDom/CodeBinaryOperatorType.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.CodeDom
{
public enum CodeBinaryOperatorType
{
Add,
Subtract,
Multiply,
Divide,
Modulus,
Assign,
IdentityInequality,
IdentityEquality,
ValueEquality,
BitwiseOr,
BitwiseAnd,
BooleanOr,
BooleanAnd,
LessThan,
LessThanOrEqual,
GreaterThan,
GreaterThanOrEqual,
}
}
| -1 |
./src/tests/JIT/HardwareIntrinsics/Arm/AdvSimd/DuplicateSelectedScalarToVector128.Vector128.UInt32.2.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void DuplicateSelectedScalarToVector128_Vector128_UInt32_2()
{
var test = new ImmUnaryOpTest__DuplicateSelectedScalarToVector128_Vector128_UInt32_2();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class ImmUnaryOpTest__DuplicateSelectedScalarToVector128_Vector128_UInt32_2
{
private struct DataTable
{
private byte[] inArray;
private byte[] outArray;
private GCHandle inHandle;
private GCHandle outHandle;
private ulong alignment;
public DataTable(UInt32[] inArray, UInt32[] outArray, int alignment)
{
int sizeOfinArray = inArray.Length * Unsafe.SizeOf<UInt32>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt32>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle = GCHandle.Alloc(this.inArray, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArrayPtr), ref Unsafe.As<UInt32, byte>(ref inArray[0]), (uint)sizeOfinArray);
}
public void* inArrayPtr => Align((byte*)(inHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<UInt32> _fld;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref testStruct._fld), ref Unsafe.As<UInt32, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
return testStruct;
}
public void RunStructFldScenario(ImmUnaryOpTest__DuplicateSelectedScalarToVector128_Vector128_UInt32_2 testClass)
{
var result = AdvSimd.DuplicateSelectedScalarToVector128(_fld, 2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(ImmUnaryOpTest__DuplicateSelectedScalarToVector128_Vector128_UInt32_2 testClass)
{
fixed (Vector128<UInt32>* pFld = &_fld)
{
var result = AdvSimd.DuplicateSelectedScalarToVector128(
AdvSimd.LoadVector128((UInt32*)(pFld)),
2
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static readonly byte Imm = 2;
private static UInt32[] _data = new UInt32[Op1ElementCount];
private static Vector128<UInt32> _clsVar;
private Vector128<UInt32> _fld;
private DataTable _dataTable;
static ImmUnaryOpTest__DuplicateSelectedScalarToVector128_Vector128_UInt32_2()
{
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _clsVar), ref Unsafe.As<UInt32, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
}
public ImmUnaryOpTest__DuplicateSelectedScalarToVector128_Vector128_UInt32_2()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _fld), ref Unsafe.As<UInt32, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetUInt32(); }
_dataTable = new DataTable(_data, new UInt32[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.DuplicateSelectedScalarToVector128(
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArrayPtr),
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.DuplicateSelectedScalarToVector128(
AdvSimd.LoadVector128((UInt32*)(_dataTable.inArrayPtr)),
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.DuplicateSelectedScalarToVector128), new Type[] { typeof(Vector128<UInt32>), typeof(byte) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArrayPtr),
(byte)2
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt32>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.DuplicateSelectedScalarToVector128), new Type[] { typeof(Vector128<UInt32>), typeof(byte) })
.Invoke(null, new object[] {
AdvSimd.LoadVector128((UInt32*)(_dataTable.inArrayPtr)),
(byte)2
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt32>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.DuplicateSelectedScalarToVector128(
_clsVar,
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<UInt32>* pClsVar = &_clsVar)
{
var result = AdvSimd.DuplicateSelectedScalarToVector128(
AdvSimd.LoadVector128((UInt32*)(pClsVar)),
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var firstOp = Unsafe.Read<Vector128<UInt32>>(_dataTable.inArrayPtr);
var result = AdvSimd.DuplicateSelectedScalarToVector128(firstOp, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var firstOp = AdvSimd.LoadVector128((UInt32*)(_dataTable.inArrayPtr));
var result = AdvSimd.DuplicateSelectedScalarToVector128(firstOp, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new ImmUnaryOpTest__DuplicateSelectedScalarToVector128_Vector128_UInt32_2();
var result = AdvSimd.DuplicateSelectedScalarToVector128(test._fld, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new ImmUnaryOpTest__DuplicateSelectedScalarToVector128_Vector128_UInt32_2();
fixed (Vector128<UInt32>* pFld = &test._fld)
{
var result = AdvSimd.DuplicateSelectedScalarToVector128(
AdvSimd.LoadVector128((UInt32*)(pFld)),
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.DuplicateSelectedScalarToVector128(_fld, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<UInt32>* pFld = &_fld)
{
var result = AdvSimd.DuplicateSelectedScalarToVector128(
AdvSimd.LoadVector128((UInt32*)(pFld)),
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.DuplicateSelectedScalarToVector128(test._fld, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.DuplicateSelectedScalarToVector128(
AdvSimd.LoadVector128((UInt32*)(&test._fld)),
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<UInt32> firstOp, void* result, [CallerMemberName] string method = "")
{
UInt32[] inArray = new UInt32[Op1ElementCount];
UInt32[] outArray = new UInt32[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray[0]), firstOp);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
ValidateResult(inArray, outArray, method);
}
private void ValidateResult(void* firstOp, void* result, [CallerMemberName] string method = "")
{
UInt32[] inArray = new UInt32[Op1ElementCount];
UInt32[] outArray = new UInt32[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(firstOp), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
ValidateResult(inArray, outArray, method);
}
private void ValidateResult(UInt32[] firstOp, UInt32[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
for (var i = 0; i < RetElementCount; i++)
{
if (firstOp[Imm] != result[i])
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd)}.{nameof(AdvSimd.DuplicateSelectedScalarToVector128)}<UInt32>(Vector128<UInt32>, 2): {method} failed:");
TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| -1 |
./src/tests/JIT/jit64/regress/vsw/517867/test.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
public class Test_test
{
private int _counter = 0;
public int func(int type)
{
int rc;
try
{
}
finally
{
switch (type)
{
case 1:
rc = foo();
break;
case 2:
case 4:
break;
case 56:
case 54:
rc = foo();
break;
case 5:
case 53:
break;
default:
break;
}
rc = foo();
}
return rc;
}
public int foo()
{
return _counter++;
}
public static int Main()
{
Test_test obj = new Test_test();
int val = obj.func(1);
if (val == 1)
{
System.Console.WriteLine("PASSED");
return 100;
}
System.Console.WriteLine("FAILED");
return 1;
}
}
| -1 |
./src/tests/Loader/classloader/v1/Beta1/Layout/Matrix/cs/L-2-12-3D.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
public interface A
{
    //////////////////////////////
    // Instance Methods
    int MethPubInst();
}
public interface B
{
    int MethPubInst2();
}
| -1 |
./src/libraries/Microsoft.Extensions.Options/tests/TrimmingTests/ConfigureTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using System;
class Program
{
static int Main(string[] args)
{
ServiceCollection services = new ServiceCollection();
services.Configure<OptionsA>(o =>
{
o.OptionValue = 99;
});
services.ConfigureOptions<OptionsAPostConfigure>();
services.AddOptions<OptionsB>()
.Configure<IOptions<OptionsA>>((b, a) =>
{
b.OptionString = a.Value.OptionValue.ToString();
});
ServiceProvider provider = services.BuildServiceProvider();
OptionsA optionsA = provider.GetService<IOptions<OptionsA>>().Value;
OptionsB optionsB = provider.GetService<IOptionsMonitor<OptionsB>>().CurrentValue;
OptionsC optionsC = provider.GetService<IOptions<OptionsC>>().Value;
OptionsD optionsD = provider.GetService<IOptionsFactory<OptionsD>>().Create(string.Empty);
if (optionsA.OptionValue != 99 ||
optionsA.PostConfigureOption != 101 ||
optionsB.OptionString != "99" ||
optionsC is null ||
optionsD is null)
{
return -1;
}
return 100;
}
private class OptionsA
{
public int OptionValue { get; set; }
public int PostConfigureOption { get; set; }
}
private class OptionsAPostConfigure : IPostConfigureOptions<OptionsA>
{
public void PostConfigure(string name, OptionsA options)
{
if (name.Length != 0)
{
throw new ArgumentException("name must be empty", nameof(name));
}
options.PostConfigureOption = 101;
}
}
private class OptionsB
{
public string OptionString { get; set; }
}
// Note: OptionsC is never configured
private class OptionsC
{
public string OptionString { get; set; }
}
// Note: OptionsD is never configured
private class OptionsD
{
public string OptionString { get; set; }
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using System;
class Program
{
static int Main(string[] args)
{
ServiceCollection services = new ServiceCollection();
services.Configure<OptionsA>(o =>
{
o.OptionValue = 99;
});
services.ConfigureOptions<OptionsAPostConfigure>();
services.AddOptions<OptionsB>()
.Configure<IOptions<OptionsA>>((b, a) =>
{
b.OptionString = a.Value.OptionValue.ToString();
});
ServiceProvider provider = services.BuildServiceProvider();
OptionsA optionsA = provider.GetService<IOptions<OptionsA>>().Value;
OptionsB optionsB = provider.GetService<IOptionsMonitor<OptionsB>>().CurrentValue;
OptionsC optionsC = provider.GetService<IOptions<OptionsC>>().Value;
OptionsD optionsD = provider.GetService<IOptionsFactory<OptionsD>>().Create(string.Empty);
if (optionsA.OptionValue != 99 ||
optionsA.PostConfigureOption != 101 ||
optionsB.OptionString != "99" ||
optionsC is null ||
optionsD is null)
{
return -1;
}
return 100;
}
private class OptionsA
{
public int OptionValue { get; set; }
public int PostConfigureOption { get; set; }
}
private class OptionsAPostConfigure : IPostConfigureOptions<OptionsA>
{
public void PostConfigure(string name, OptionsA options)
{
if (name.Length != 0)
{
throw new ArgumentException("name must be empty", nameof(name));
}
options.PostConfigureOption = 101;
}
}
private class OptionsB
{
public string OptionString { get; set; }
}
// Note: OptionsC is never configured
private class OptionsC
{
public string OptionString { get; set; }
}
// Note: OptionsD is never configured
private class OptionsD
{
public string OptionString { get; set; }
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/HardwareIntrinsics/Arm/AdvSimd.Arm64/TransposeOdd.Vector64.Single.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void TransposeOdd_Vector64_Single()
{
var test = new SimpleBinaryOpTest__TransposeOdd_Vector64_Single();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__TransposeOdd_Vector64_Single
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Single[] inArray1, Single[] inArray2, Single[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector64<Single> _fld1;
public Vector64<Single> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref testStruct._fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__TransposeOdd_Vector64_Single testClass)
{
var result = AdvSimd.Arm64.TransposeOdd(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__TransposeOdd_Vector64_Single testClass)
{
fixed (Vector64<Single>* pFld1 = &_fld1)
fixed (Vector64<Single>* pFld2 = &_fld2)
{
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 8;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static Single[] _data1 = new Single[Op1ElementCount];
private static Single[] _data2 = new Single[Op2ElementCount];
private static Vector64<Single> _clsVar1;
private static Vector64<Single> _clsVar2;
private Vector64<Single> _fld1;
private Vector64<Single> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__TransposeOdd_Vector64_Single()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
}
public SimpleBinaryOpTest__TransposeOdd_Vector64_Single()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
_dataTable = new DataTable(_data1, _data2, new Single[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.Arm64.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.Arm64.TransposeOdd(
Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd.Arm64).GetMethod(nameof(AdvSimd.Arm64.TransposeOdd), new Type[] { typeof(Vector64<Single>), typeof(Vector64<Single>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd.Arm64).GetMethod(nameof(AdvSimd.Arm64.TransposeOdd), new Type[] { typeof(Vector64<Single>), typeof(Vector64<Single>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.Arm64.TransposeOdd(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector64<Single>* pClsVar1 = &_clsVar1)
fixed (Vector64<Single>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(pClsVar1)),
AdvSimd.LoadVector64((Single*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr);
var result = AdvSimd.Arm64.TransposeOdd(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr));
var result = AdvSimd.Arm64.TransposeOdd(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__TransposeOdd_Vector64_Single();
var result = AdvSimd.Arm64.TransposeOdd(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__TransposeOdd_Vector64_Single();
fixed (Vector64<Single>* pFld1 = &test._fld1)
fixed (Vector64<Single>* pFld2 = &test._fld2)
{
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.Arm64.TransposeOdd(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector64<Single>* pFld1 = &_fld1)
fixed (Vector64<Single>* pFld2 = &_fld2)
{
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.Arm64.TransposeOdd(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(&test._fld1)),
AdvSimd.LoadVector64((Single*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector64<Single> op1, Vector64<Single> op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector64<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector64<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
int index = 0;
int half = RetElementCount / 2;
for (var i = 0; i < RetElementCount; i+=2, index++)
{
if (result[index] != left[i+1] || result[++index] != right[i+1])
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd.Arm64)}.{nameof(AdvSimd.Arm64.TransposeOdd)}<Single>(Vector64<Single>, Vector64<Single>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void TransposeOdd_Vector64_Single()
{
var test = new SimpleBinaryOpTest__TransposeOdd_Vector64_Single();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__TransposeOdd_Vector64_Single
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Single[] inArray1, Single[] inArray2, Single[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector64<Single> _fld1;
public Vector64<Single> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref testStruct._fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__TransposeOdd_Vector64_Single testClass)
{
var result = AdvSimd.Arm64.TransposeOdd(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__TransposeOdd_Vector64_Single testClass)
{
fixed (Vector64<Single>* pFld1 = &_fld1)
fixed (Vector64<Single>* pFld2 = &_fld2)
{
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 8;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
private static Single[] _data1 = new Single[Op1ElementCount];
private static Single[] _data2 = new Single[Op2ElementCount];
private static Vector64<Single> _clsVar1;
private static Vector64<Single> _clsVar2;
private Vector64<Single> _fld1;
private Vector64<Single> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__TransposeOdd_Vector64_Single()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
}
public SimpleBinaryOpTest__TransposeOdd_Vector64_Single()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
_dataTable = new DataTable(_data1, _data2, new Single[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.Arm64.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.Arm64.TransposeOdd(
Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd.Arm64).GetMethod(nameof(AdvSimd.Arm64.TransposeOdd), new Type[] { typeof(Vector64<Single>), typeof(Vector64<Single>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd.Arm64).GetMethod(nameof(AdvSimd.Arm64.TransposeOdd), new Type[] { typeof(Vector64<Single>), typeof(Vector64<Single>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.Arm64.TransposeOdd(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector64<Single>* pClsVar1 = &_clsVar1)
fixed (Vector64<Single>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(pClsVar1)),
AdvSimd.LoadVector64((Single*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector64<Single>>(_dataTable.inArray2Ptr);
var result = AdvSimd.Arm64.TransposeOdd(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector64((Single*)(_dataTable.inArray2Ptr));
var result = AdvSimd.Arm64.TransposeOdd(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__TransposeOdd_Vector64_Single();
var result = AdvSimd.Arm64.TransposeOdd(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__TransposeOdd_Vector64_Single();
fixed (Vector64<Single>* pFld1 = &test._fld1)
fixed (Vector64<Single>* pFld2 = &test._fld2)
{
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.Arm64.TransposeOdd(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector64<Single>* pFld1 = &_fld1)
fixed (Vector64<Single>* pFld2 = &_fld2)
{
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(pFld1)),
AdvSimd.LoadVector64((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.Arm64.TransposeOdd(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.Arm64.TransposeOdd(
AdvSimd.LoadVector64((Single*)(&test._fld1)),
AdvSimd.LoadVector64((Single*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector64<Single> op1, Vector64<Single> op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector64<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector64<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
int index = 0;
int half = RetElementCount / 2;
for (var i = 0; i < RetElementCount; i+=2, index++)
{
if (result[index] != left[i+1] || result[++index] != right[i+1])
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd.Arm64)}.{nameof(AdvSimd.Arm64.TransposeOdd)}<Single>(Vector64<Single>, Vector64<Single>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.ServiceModel.Syndication/tests/System/ServiceModel/Syndication/AtomPub10CategoriesDocumentFormatterTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.IO;
using System.Runtime.Serialization;
using System.Xml;
using System.Xml.Serialization;
using Xunit;
namespace System.ServiceModel.Syndication.Tests
{
public class AtomPub10CategoriesDocumentFormatterTests
{
[Fact]
public void Ctor_Default()
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Null(formatter.Document);
Assert.Equal("http://www.w3.org/2007/app", formatter.Version);
}
[Fact]
public void Ctor_CategoriesDocument_Inline()
{
var document = new InlineCategoriesDocument();
var formatter = new AtomPub10CategoriesDocumentFormatter(document);
Assert.Same(document, formatter.Document);
Assert.Equal("http://www.w3.org/2007/app", formatter.Version);
}
[Fact]
public void Ctor_CategoriesDocument_Referenced()
{
var document = new ReferencedCategoriesDocument();
var formatter = new AtomPub10CategoriesDocumentFormatter(document);
Assert.Same(document, formatter.Document);
Assert.Equal("http://www.w3.org/2007/app", formatter.Version);
}
[Fact]
public void Ctor_NullDocumentToWrite_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("documentToWrite", () => new AtomPub10CategoriesDocumentFormatter(null));
}
[Theory]
[InlineData(typeof(InlineCategoriesDocument), typeof(ReferencedCategoriesDocument))]
[InlineData(typeof(InlineCategoriesDocumentSubclass), typeof(ReferencedCategoriesDocumentSubclass))]
public void Ctor_Type_Type(Type inlineDocumentType, Type referencedDocumentType)
{
var formatter = new AtomPub10CategoriesDocumentFormatter(inlineDocumentType, referencedDocumentType);
Assert.Null(formatter.Document);
Assert.Equal("http://www.w3.org/2007/app", formatter.Version);
}
[Fact]
public void Ctor_NullInlineDocumentType_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("inlineDocumentType", () => new AtomPub10CategoriesDocumentFormatter(null, typeof(ReferencedCategoriesDocument)));
}
[Fact]
public void Ctor_InvlaidInlineDocumentType_ThrowsArgumentException()
{
AssertExtensions.Throws<ArgumentException>("inlineDocumentType", () => new AtomPub10CategoriesDocumentFormatter(typeof(int), typeof(ReferencedCategoriesDocument)));
}
[Fact]
public void Ctor_NullReferencedDocumentType_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("referencedDocumentType", () => new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocument), null));
}
[Fact]
public void Ctor_InvalidReferencedDocumentType_ThrowsArgumentException()
{
AssertExtensions.Throws<ArgumentException>("referencedDocumentType", () => new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocument), typeof(int)));
}
[Fact]
public void GetSchema_Invoke_ReturnsNull()
{
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Null(formatter.GetSchema());
}
public static IEnumerable<object[]> WriteTo_TestData()
{
// Empty InlineCategoriesDocument.
yield return new object[]
{
new InlineCategoriesDocument(),
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xmlns:app=""http://www.w3.org/2007/app"" />"
};
// Full InlineCategoriesDocument
var fullSyndicationCategory = new SyndicationCategory("category_name", "category_scheme", "category_label");
fullSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("category_name1"), null);
fullSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("category_name2", "category_namespace"), "");
fullSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("category_name3", "category_namespace"), "category_value");
fullSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("category_name4", "xmlns"), "");
fullSyndicationCategory.ElementExtensions.Add(new ExtensionObject { Value = 10 });
var fullInlineCategoriesDocument = new InlineCategoriesDocument(new SyndicationCategory[]
{
new SyndicationCategory(),
fullSyndicationCategory
})
{
BaseUri = new Uri("http://inlinecategories_url.com"),
Language = "inlinecategories_Language",
IsFixed = true,
Scheme = "inlinecategories_scheme"
};
fullInlineCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("inlinecategories_name1"), null);
fullInlineCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("inlinecategories_name2", "inlinecategories_namespace"), "");
fullInlineCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("inlinecategories_name3", "inlinecategories_namespace"), "inlinecategories_value");
fullInlineCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("inlinecategories_name4", "xmlns"), "");
fullInlineCategoriesDocument.ElementExtensions.Add(new ExtensionObject { Value = 10 });
yield return new object[]
{
fullInlineCategoriesDocument,
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://inlinecategories_url.com/"" xml:lang=""inlinecategories_Language"" scheme=""inlinecategories_scheme"" fixed=""yes"" inlinecategories_name1="""" d1p1:inlinecategories_name2="""" d1p1:inlinecategories_name3=""inlinecategories_value"" d1p2:inlinecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""inlinecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<a10:category term="""" />
<a10:category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</a10:category>
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>"
};
// Empty ReferencedCategoriesDocument.
yield return new object[]
{
new ReferencedCategoriesDocument(),
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xmlns:app=""http://www.w3.org/2007/app"" />"
};
// Full ReferencedCategoriesDocument.
var fullReferenceCategoriesDocument = new ReferencedCategoriesDocument(new Uri("http://referencecategories_link.com"))
{
BaseUri = new Uri("http://referencecategories_url.com"),
Language = "referencecategories_language"
};
fullReferenceCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("referencecategories_name1"), null);
fullReferenceCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("referencecategories_name2", "referencecategories_namespace"), "");
fullReferenceCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("referencecategories_name3", "referencecategories_namespace"), "referencecategories_value");
fullReferenceCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("referencecategories_name4", "xmlns"), "");
fullReferenceCategoriesDocument.ElementExtensions.Add(new ExtensionObject { Value = 10 });
yield return new object[]
{
fullReferenceCategoriesDocument,
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://referencecategories_url.com/"" xml:lang=""referencecategories_language"" href=""http://referencecategories_link.com/"" referencecategories_name1="""" d1p1:referencecategories_name2="""" d1p1:referencecategories_name3=""referencecategories_value"" d1p2:referencecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""referencecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>"
};
}
[Theory]
[MemberData(nameof(WriteTo_TestData))]
public void WriteTo_HasDocument_SerializesExpected(CategoriesDocument document, string expected)
{
var formatter = new AtomPub10CategoriesDocumentFormatter(document);
CompareHelper.AssertEqualWriteOutput(expected, writer => formatter.WriteTo(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer => document.Save(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer =>
{
writer.WriteStartElement("app", "categories", "http://www.w3.org/2007/app");
((IXmlSerializable)formatter).WriteXml(writer);
writer.WriteEndElement();
});
}
[Fact]
public void WriteTo_NullWriter_ThrowsArgumentNullException()
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
AssertExtensions.Throws<ArgumentNullException>("writer", () => formatter.WriteTo(null));
}
[Fact]
public void WriteTo_NoDocument_ThrowsInvalidOperationException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Throws<InvalidOperationException>(() => formatter.WriteTo(writer));
}
}
[Fact]
public void WriteXml_NullWriter_ThrowsArgumentNullException()
{
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
AssertExtensions.Throws<ArgumentNullException>("writer", () => formatter.WriteXml(null));
}
[Fact]
public void WriteXml_NoDocument_ThrowsInvalidOperationException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Throws<InvalidOperationException>(() => formatter.WriteXml(writer));
}
}
public static IEnumerable<object[]> CanRead_TestData()
{
yield return new object[] { @"<categories />", false };
yield return new object[] { @"<app:different xmlns:app=""http://www.w3.org/2007/app"">", false };
yield return new object[] { @"<app:categories xmlns:app=""http://www.w3.org/2007/app"" />", true };
}
[Theory]
[MemberData(nameof(CanRead_TestData))]
public void CanRead_ValidReader_ReturnsExpected(string xmlString, bool expected)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Equal(expected, formatter.CanRead(reader));
}
}
[Fact]
public void CanRead_NullReader_ThrowsArgumentNullException()
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.CanRead(null));
}
[Fact]
public void ReadFrom_InlineCategoriesDocument_ReturnsExpected()
{
string xmlString =
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://inlinecategories_url.com/"" xml:lang=""inlinecategories_Language"" scheme=""inlinecategories_scheme"" fixed=""yes"" inlinecategories_name1="""" d1p1:inlinecategories_name2="""" d1p1:inlinecategories_name3=""inlinecategories_value"" d1p2:inlinecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""inlinecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<a10:category term="""" />
<a10:category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</a10:category>
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>";
VerifyRead<InlineCategoriesDocument>(xmlString, document =>
{
Assert.Equal(4, document.AttributeExtensions.Count);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("inlinecategories_name1")]);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("inlinecategories_name2", "inlinecategories_namespace")]);
Assert.Equal("inlinecategories_value", document.AttributeExtensions[new XmlQualifiedName("inlinecategories_name3", "inlinecategories_namespace")]);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("inlinecategories_name4", "xmlns")]);
Assert.Equal(new Uri("http://inlinecategories_url.com/"), document.BaseUri);
Assert.Equal(2, document.Categories.Count);
Assert.Equal(1, document.ElementExtensions.Count);
Assert.Equal(10, document.ElementExtensions[0].GetObject<ExtensionObject>().Value);
Assert.True(document.IsFixed);
Assert.Equal("inlinecategories_Language", document.Language);
Assert.Equal("inlinecategories_scheme", document.Scheme);
SyndicationCategory firstCategory = document.Categories[0];
Assert.Empty(firstCategory.AttributeExtensions);
Assert.Empty(firstCategory.ElementExtensions);
Assert.Empty(firstCategory.Name);
Assert.Equal("inlinecategories_scheme", firstCategory.Scheme);
Assert.Null(firstCategory.Label);
SyndicationCategory secondCategory = document.Categories[1];
Assert.Equal(4, secondCategory.AttributeExtensions.Count);
Assert.Equal("", secondCategory.AttributeExtensions[new XmlQualifiedName("category_name1")]);
Assert.Equal("", secondCategory.AttributeExtensions[new XmlQualifiedName("category_name2", "category_namespace")]);
Assert.Equal("category_value", secondCategory.AttributeExtensions[new XmlQualifiedName("category_name3", "category_namespace")]);
Assert.Equal("", secondCategory.AttributeExtensions[new XmlQualifiedName("category_name4", "xmlns")]);
Assert.Equal(1, secondCategory.ElementExtensions.Count);
Assert.Equal(10, secondCategory.ElementExtensions[0].GetObject<ExtensionObject>().Value);
Assert.Equal("category_name", secondCategory.Name);
Assert.Equal("category_scheme", secondCategory.Scheme);
Assert.Equal("category_label", secondCategory.Label);
});
}
[Fact]
public void Read_InlineCategoriesDocumentTryParseTrue_ReturnsExpected()
{
using (var stringReader = new StringReader(
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://inlinecategories_url.com/"" xml:lang=""inlinecategories_Language"" scheme=""inlinecategories_scheme"" fixed=""yes"" inlinecategories_name1="""" d1p1:inlinecategories_name2="""" d1p1:inlinecategories_name3=""inlinecategories_value"" d1p2:inlinecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""inlinecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<a10:category term="""" />
<a10:category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</a10:category>
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocumentTryParseTrueSubclass), typeof(ReferencedCategoriesDocumentTryParseTrueSubclass));
formatter.ReadFrom(reader);
InlineCategoriesDocumentTryParseTrueSubclass document = Assert.IsType<InlineCategoriesDocumentTryParseTrueSubclass>(formatter.Document);
Assert.Empty(document.AttributeExtensions);
Assert.Equal(new Uri("http://inlinecategories_url.com/"), document.BaseUri);
Assert.Equal(2, document.Categories.Count);
Assert.Empty(document.ElementExtensions);
Assert.True(document.IsFixed);
Assert.Equal("inlinecategories_Language", document.Language);
Assert.Equal("inlinecategories_scheme", document.Scheme);
SyndicationCategory firstCategory = document.Categories[0];
Assert.Empty(firstCategory.AttributeExtensions);
Assert.Empty(firstCategory.ElementExtensions);
Assert.Empty(firstCategory.Name);
Assert.Equal("inlinecategories_scheme", firstCategory.Scheme);
Assert.Null(firstCategory.Label);
SyndicationCategory secondCategory = document.Categories[1];
Assert.Empty(secondCategory.AttributeExtensions);
Assert.Empty(secondCategory.ElementExtensions);
Assert.Equal("category_name", secondCategory.Name);
Assert.Equal("category_scheme", secondCategory.Scheme);
Assert.Equal("category_label", secondCategory.Label);
}
}
[Fact]
public void ReadFrom_ReferencedCategoriesDocument_ReturnsExpected()
{
string xmlString =
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://referencecategories_url.com/"" xml:lang=""referencecategories_language"" href=""http://referencecategories_link.com/"" referencecategories_name1="""" d1p1:referencecategories_name2="""" d1p1:referencecategories_name3=""referencecategories_value"" d1p2:referencecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""referencecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>";
VerifyRead<ReferencedCategoriesDocument>(xmlString, document =>
{
Assert.Equal(4, document.AttributeExtensions.Count);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("referencecategories_name1")]);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("referencecategories_name2", "referencecategories_namespace")]);
Assert.Equal("referencecategories_value", document.AttributeExtensions[new XmlQualifiedName("referencecategories_name3", "referencecategories_namespace")]);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("referencecategories_name4", "xmlns")]);
Assert.Equal(new Uri("http://referencecategories_url.com/"), document.BaseUri);
Assert.Equal(1, document.ElementExtensions.Count);
Assert.Equal(10, document.ElementExtensions[0].GetObject<ExtensionObject>().Value);
Assert.Equal("referencecategories_language", document.Language);
Assert.Equal(new Uri("http://referencecategories_link.com"), document.Link);
});
}
[Fact]
public void ReadFrom_ReferencedCategoriesDocumentTryParseTrue_ReturnsExpected()
{
using (var stringReader = new StringReader(
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://referencecategories_url.com/"" xml:lang=""referencecategories_language"" href=""http://referencecategories_link.com/"" referencecategories_name1="""" d1p1:referencecategories_name2="""" d1p1:referencecategories_name3=""referencecategories_value"" d1p2:referencecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""referencecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocumentTryParseTrueSubclass), typeof(ReferencedCategoriesDocumentTryParseTrueSubclass));
formatter.ReadFrom(reader);
ReferencedCategoriesDocumentTryParseTrueSubclass document = Assert.IsType<ReferencedCategoriesDocumentTryParseTrueSubclass>(formatter.Document);
Assert.Empty(document.AttributeExtensions);
Assert.Equal(new Uri("http://referencecategories_url.com/"), document.BaseUri);
Assert.Empty(document.ElementExtensions);
Assert.Equal("referencecategories_language", document.Language);
Assert.Equal(new Uri("http://referencecategories_link.com"), document.Link);
}
}
[Fact]
public void ReadFrom_EmptyCategory_ReturnsExpected()
{
VerifyRead<InlineCategoriesDocument>(@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xmlns:app=""http://www.w3.org/2007/app"" />", document =>
{
Assert.Empty(document.AttributeExtensions);
Assert.Null(document.BaseUri);
Assert.Empty(document.Categories);
Assert.Empty(document.ElementExtensions);
Assert.False(document.IsFixed);
Assert.Null(document.Language);
Assert.Null(document.Scheme);
});
}
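// VerifyRead (below) pushes the same XML through four read paths - ReadFrom, ReadFrom with the
// document subclasses, IXmlSerializable.ReadXml, and CategoriesDocument.Load - so each test only
// supplies the payload and the assertions once.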
private static void VerifyRead<T>(string xmlString, Action<T> verifyAction) where T : CategoriesDocument
{
// ReadFrom.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
formatter.ReadFrom(reader);
T document = Assert.IsType<T>(formatter.Document);
verifyAction(document);
}
// ReadFrom with custom subclass.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocumentSubclass), typeof(ReferencedCategoriesDocumentSubclass));
formatter.ReadFrom(reader);
if (typeof(T) == typeof(InlineCategoriesDocument))
{
InlineCategoriesDocumentSubclass document = Assert.IsType<InlineCategoriesDocumentSubclass>(formatter.Document);
verifyAction(document as T);
}
else
{
ReferencedCategoriesDocumentSubclass document = Assert.IsType<ReferencedCategoriesDocumentSubclass>(formatter.Document);
verifyAction(document as T);
}
}
// ReadXml.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new AtomPub10CategoriesDocumentFormatter();
((IXmlSerializable)formatter).ReadXml(reader);
T document = Assert.IsType<T>(formatter.Document);
verifyAction(document);
}
// ReadXml with custom subclass.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocumentSubclass), typeof(ReferencedCategoriesDocumentSubclass));
((IXmlSerializable)formatter).ReadXml(reader);
if (typeof(T) == typeof(InlineCategoriesDocument))
{
InlineCategoriesDocumentSubclass document = Assert.IsType<InlineCategoriesDocumentSubclass>(formatter.Document);
verifyAction(document as T);
}
else
{
ReferencedCategoriesDocumentSubclass document = Assert.IsType<ReferencedCategoriesDocumentSubclass>(formatter.Document);
verifyAction(document as T);
}
}
// Load.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
T document = Assert.IsType<T>(CategoriesDocument.Load(reader));
verifyAction(document);
}
}
[Fact]
public void ReadFrom_NullReader_ThrowsArgumentNullException()
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadFrom(null));
}
[Theory]
[InlineData(@"<app:different xmlns:a10=""http://www.w3.org/2005/Atom"" xmlns:app=""http://www.w3.org/2007/app"" />")]
[InlineData(@"<categories xmlns:a10=""http://www.w3.org/2005/Atom"" xmlns:app=""http://www.w3.org/2007/app"" />")]
public void ReadFrom_CantRead_ThrowsXmlException(string xmlString)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Throws<XmlException>(() => formatter.ReadFrom(reader));
}
}
[Fact]
public void ReadXml_ThrowsArgumentException_RethrowsAsXmlException()
{
var reader = new ThrowingXmlReader(new ArgumentException());
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Throws<XmlException>(() => formatter.ReadXml(reader));
}
[Fact]
public void ReadXml_ThrowsFormatException_RethrowsAsXmlException()
{
var reader = new ThrowingXmlReader(new FormatException());
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Throws<XmlException>(() => formatter.ReadXml(reader));
}
[Fact]
public void ReadXml_NullReader_ThrowsArgumentNullException()
{
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadXml(null));
}
[Fact]
public void CreateInlineCategoriesDocument_NonGeneric_Success()
{
var formatter = new Formatter();
InlineCategoriesDocument document = Assert.IsType<InlineCategoriesDocument>(formatter.CreateInlineCategoriesDocumentEntryPoint());
Assert.Empty(document.AttributeExtensions);
Assert.Null(document.BaseUri);
Assert.Empty(document.Categories);
Assert.Empty(document.ElementExtensions);
Assert.False(document.IsFixed);
Assert.Null(document.Language);
Assert.Null(document.Scheme);
var typedFormatter = new Formatter(typeof(InlineCategoriesDocumentSubclass), typeof(ReferencedCategoriesDocumentSubclass));
document = Assert.IsType<InlineCategoriesDocumentSubclass>(typedFormatter.CreateInlineCategoriesDocumentEntryPoint());
Assert.Empty(document.AttributeExtensions);
Assert.Null(document.BaseUri);
Assert.Empty(document.Categories);
Assert.Empty(document.ElementExtensions);
Assert.False(document.IsFixed);
Assert.Null(document.Language);
Assert.Null(document.Scheme);
}
[Fact]
public void CreateReferencedCategoriesDocument_NonGeneric_Success()
{
var formatter = new Formatter();
ReferencedCategoriesDocument document = Assert.IsType<ReferencedCategoriesDocument>(formatter.CreateReferencedCategoriesDocumentEntryPoint());
Assert.Empty(document.AttributeExtensions);
Assert.Null(document.BaseUri);
Assert.Empty(document.ElementExtensions);
Assert.Null(document.Language);
Assert.Null(document.Link);
var typedFormatter = new Formatter(typeof(InlineCategoriesDocumentSubclass), typeof(ReferencedCategoriesDocumentSubclass));
document = Assert.IsType<ReferencedCategoriesDocumentSubclass>(typedFormatter.CreateReferencedCategoriesDocumentEntryPoint());
Assert.Empty(document.AttributeExtensions);
Assert.Null(document.BaseUri);
Assert.Empty(document.ElementExtensions);
Assert.Null(document.Language);
Assert.Null(document.Link);
}
public class InlineCategoriesDocumentSubclass : InlineCategoriesDocument { }
public class ReferencedCategoriesDocumentSubclass : ReferencedCategoriesDocument { }
public class InlineCategoriesDocumentTryParseTrueSubclass : InlineCategoriesDocument
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
protected override SyndicationCategory CreateCategory() => new SyndicationCategoryTryParseTrueSubclass();
}
public class SyndicationCategoryTryParseTrueSubclass : SyndicationCategory
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
}
public class ReferencedCategoriesDocumentTryParseTrueSubclass : ReferencedCategoriesDocument
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
}
public class Formatter : AtomPub10CategoriesDocumentFormatter
{
public Formatter() : base() { }
public Formatter(CategoriesDocument documentToWrite) : base(documentToWrite) { }
public Formatter(Type inlineDocumentType, Type referencedDocumentType) : base(inlineDocumentType, referencedDocumentType) { }
public InlineCategoriesDocument CreateInlineCategoriesDocumentEntryPoint() => CreateInlineCategoriesDocument();
public ReferencedCategoriesDocument CreateReferencedCategoriesDocumentEntryPoint() => CreateReferencedCategoriesDocument();
}
[DataContract]
public class ExtensionObject
{
[DataMember]
public int Value { get; set; }
}
}
}
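// The block below is an illustrative usage sketch, not part of the test file above: it shows one
// way the formatter exercised by these tests might be driven end to end. It assumes only the
// public System.ServiceModel.Syndication and System.Xml APIs already used above; the "example_*"
// names are invented for illustration.
using System;
using System.IO;
using System.ServiceModel.Syndication;
using System.Xml;

static class CategoriesDocumentRoundTripSketch
{
    static void Main()
    {
        var document = new InlineCategoriesDocument(new[]
        {
            new SyndicationCategory("example_term", "example_scheme", "example_label")
        });

        // Serialize the document as an app:categories element.
        var stringWriter = new StringWriter();
        using (XmlWriter writer = XmlWriter.Create(stringWriter))
        {
            new AtomPub10CategoriesDocumentFormatter(document).WriteTo(writer);
        }

        // Read it back through the same formatter type.
        using (var stringReader = new StringReader(stringWriter.ToString()))
        using (XmlReader reader = XmlReader.Create(stringReader))
        {
            var formatter = new AtomPub10CategoriesDocumentFormatter();
            formatter.ReadFrom(reader);
            var roundTripped = (InlineCategoriesDocument)formatter.Document;
            Console.WriteLine(roundTripped.Categories[0].Name); // "example_term"
        }
    }
}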
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.IO;
using System.Runtime.Serialization;
using System.Xml;
using System.Xml.Serialization;
using Xunit;
namespace System.ServiceModel.Syndication.Tests
{
public class AtomPub10CategoriesDocumentFormatterTests
{
[Fact]
public void Ctor_Default()
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Null(formatter.Document);
Assert.Equal("http://www.w3.org/2007/app", formatter.Version);
}
[Fact]
public void Ctor_CategoriesDocument_Inline()
{
var document = new InlineCategoriesDocument();
var formatter = new AtomPub10CategoriesDocumentFormatter(document);
Assert.Same(document, formatter.Document);
Assert.Equal("http://www.w3.org/2007/app", formatter.Version);
}
[Fact]
public void Ctor_CategoriesDocument_Referenced()
{
var document = new ReferencedCategoriesDocument();
var formatter = new AtomPub10CategoriesDocumentFormatter(document);
Assert.Same(document, formatter.Document);
Assert.Equal("http://www.w3.org/2007/app", formatter.Version);
}
[Fact]
public void Ctor_NullDocumentToWrite_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("documentToWrite", () => new AtomPub10CategoriesDocumentFormatter(null));
}
[Theory]
[InlineData(typeof(InlineCategoriesDocument), typeof(ReferencedCategoriesDocument))]
[InlineData(typeof(InlineCategoriesDocumentSubclass), typeof(ReferencedCategoriesDocumentSubclass))]
public void Ctor_Type_Type(Type inlineDocumentType, Type referencedDocumentType)
{
var formatter = new AtomPub10CategoriesDocumentFormatter(inlineDocumentType, referencedDocumentType);
Assert.Null(formatter.Document);
Assert.Equal("http://www.w3.org/2007/app", formatter.Version);
}
[Fact]
public void Ctor_NullInlineDocumentType_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("inlineDocumentType", () => new AtomPub10CategoriesDocumentFormatter(null, typeof(ReferencedCategoriesDocument)));
}
[Fact]
public void Ctor_InvalidInlineDocumentType_ThrowsArgumentException()
{
AssertExtensions.Throws<ArgumentException>("inlineDocumentType", () => new AtomPub10CategoriesDocumentFormatter(typeof(int), typeof(ReferencedCategoriesDocument)));
}
[Fact]
public void Ctor_NullReferencedDocumentType_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("referencedDocumentType", () => new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocument), null));
}
[Fact]
public void Ctor_InvalidReferencedDocumentType_ThrowsArgumentException()
{
AssertExtensions.Throws<ArgumentException>("referencedDocumentType", () => new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocument), typeof(int)));
}
[Fact]
public void GetSchema_Invoke_ReturnsNull()
{
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Null(formatter.GetSchema());
}
public static IEnumerable<object[]> WriteTo_TestData()
{
// Empty InlineCategoriesDocument.
yield return new object[]
{
new InlineCategoriesDocument(),
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xmlns:app=""http://www.w3.org/2007/app"" />"
};
// Full InlineCategoriesDocument
var fullSyndicationCategory = new SyndicationCategory("category_name", "category_scheme", "category_label");
fullSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("category_name1"), null);
fullSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("category_name2", "category_namespace"), "");
fullSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("category_name3", "category_namespace"), "category_value");
fullSyndicationCategory.AttributeExtensions.Add(new XmlQualifiedName("category_name4", "xmlns"), "");
fullSyndicationCategory.ElementExtensions.Add(new ExtensionObject { Value = 10 });
var fullInlineCategoriesDocument = new InlineCategoriesDocument(new SyndicationCategory[]
{
new SyndicationCategory(),
fullSyndicationCategory
})
{
BaseUri = new Uri("http://inlinecategories_url.com"),
Language = "inlinecategories_Language",
IsFixed = true,
Scheme = "inlinecategories_scheme"
};
fullInlineCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("inlinecategories_name1"), null);
fullInlineCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("inlinecategories_name2", "inlinecategories_namespace"), "");
fullInlineCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("inlinecategories_name3", "inlinecategories_namespace"), "inlinecategories_value");
fullInlineCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("inlinecategories_name4", "xmlns"), "");
fullInlineCategoriesDocument.ElementExtensions.Add(new ExtensionObject { Value = 10 });
yield return new object[]
{
fullInlineCategoriesDocument,
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://inlinecategories_url.com/"" xml:lang=""inlinecategories_Language"" scheme=""inlinecategories_scheme"" fixed=""yes"" inlinecategories_name1="""" d1p1:inlinecategories_name2="""" d1p1:inlinecategories_name3=""inlinecategories_value"" d1p2:inlinecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""inlinecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<a10:category term="""" />
<a10:category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</a10:category>
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>"
};
// Empty ReferencedCategoriesDocument.
yield return new object[]
{
new ReferencedCategoriesDocument(),
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xmlns:app=""http://www.w3.org/2007/app"" />"
};
// Full ReferencedCategoriesDocument.
var fullReferenceCategoriesDocument = new ReferencedCategoriesDocument(new Uri("http://referencecategories_link.com"))
{
BaseUri = new Uri("http://referencecategories_url.com"),
Language = "referencecategories_language"
};
fullReferenceCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("referencecategories_name1"), null);
fullReferenceCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("referencecategories_name2", "referencecategories_namespace"), "");
fullReferenceCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("referencecategories_name3", "referencecategories_namespace"), "referencecategories_value");
fullReferenceCategoriesDocument.AttributeExtensions.Add(new XmlQualifiedName("referencecategories_name4", "xmlns"), "");
fullReferenceCategoriesDocument.ElementExtensions.Add(new ExtensionObject { Value = 10 });
yield return new object[]
{
fullReferenceCategoriesDocument,
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://referencecategories_url.com/"" xml:lang=""referencecategories_language"" href=""http://referencecategories_link.com/"" referencecategories_name1="""" d1p1:referencecategories_name2="""" d1p1:referencecategories_name3=""referencecategories_value"" d1p2:referencecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""referencecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>"
};
}
[Theory]
[MemberData(nameof(WriteTo_TestData))]
public void WriteTo_HasDocument_SerializesExpected(CategoriesDocument document, string expected)
{
var formatter = new AtomPub10CategoriesDocumentFormatter(document);
CompareHelper.AssertEqualWriteOutput(expected, writer => formatter.WriteTo(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer => document.Save(writer));
CompareHelper.AssertEqualWriteOutput(expected, writer =>
{
writer.WriteStartElement("app", "categories", "http://www.w3.org/2007/app");
((IXmlSerializable)formatter).WriteXml(writer);
writer.WriteEndElement();
});
}
[Fact]
public void WriteTo_NullWriter_ThrowsArgumentNullException()
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
AssertExtensions.Throws<ArgumentNullException>("writer", () => formatter.WriteTo(null));
}
[Fact]
public void WriteTo_NoDocument_ThrowsInvalidOperationException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Throws<InvalidOperationException>(() => formatter.WriteTo(writer));
}
}
[Fact]
public void WriteXml_NullWriter_ThrowsArgumentNullException()
{
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
AssertExtensions.Throws<ArgumentNullException>("writer", () => formatter.WriteXml(null));
}
[Fact]
public void WriteXml_NoDocument_ThrowsInvalidOperationException()
{
using (var stringWriter = new StringWriter())
using (var writer = XmlWriter.Create(stringWriter))
{
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Throws<InvalidOperationException>(() => formatter.WriteXml(writer));
}
}
public static IEnumerable<object[]> CanRead_TestData()
{
yield return new object[] { @"<categories />", false };
yield return new object[] { @"<app:different xmlns:app=""http://www.w3.org/2007/app"">", false };
yield return new object[] { @"<app:categories xmlns:app=""http://www.w3.org/2007/app"" />", true };
}
[Theory]
[MemberData(nameof(CanRead_TestData))]
public void CanRead_ValidReader_ReturnsExpected(string xmlString, bool expected)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Equal(expected, formatter.CanRead(reader));
}
}
[Fact]
public void CanRead_NullReader_ThrowsArgumentNullException()
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.CanRead(null));
}
[Fact]
public void ReadFrom_InlineCategoriesDocument_ReturnsExpected()
{
string xmlString =
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://inlinecategories_url.com/"" xml:lang=""inlinecategories_Language"" scheme=""inlinecategories_scheme"" fixed=""yes"" inlinecategories_name1="""" d1p1:inlinecategories_name2="""" d1p1:inlinecategories_name3=""inlinecategories_value"" d1p2:inlinecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""inlinecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<a10:category term="""" />
<a10:category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</a10:category>
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>";
VerifyRead<InlineCategoriesDocument>(xmlString, document =>
{
Assert.Equal(4, document.AttributeExtensions.Count);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("inlinecategories_name1")]);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("inlinecategories_name2", "inlinecategories_namespace")]);
Assert.Equal("inlinecategories_value", document.AttributeExtensions[new XmlQualifiedName("inlinecategories_name3", "inlinecategories_namespace")]);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("inlinecategories_name4", "xmlns")]);
Assert.Equal(new Uri("http://inlinecategories_url.com/"), document.BaseUri);
Assert.Equal(2, document.Categories.Count);
Assert.Equal(1, document.ElementExtensions.Count);
Assert.Equal(10, document.ElementExtensions[0].GetObject<ExtensionObject>().Value);
Assert.True(document.IsFixed);
Assert.Equal("inlinecategories_Language", document.Language);
Assert.Equal("inlinecategories_scheme", document.Scheme);
SyndicationCategory firstCategory = document.Categories[0];
Assert.Empty(firstCategory.AttributeExtensions);
Assert.Empty(firstCategory.ElementExtensions);
Assert.Empty(firstCategory.Name);
Assert.Equal("inlinecategories_scheme", firstCategory.Scheme);
Assert.Null(firstCategory.Label);
SyndicationCategory secondCategory = document.Categories[1];
Assert.Equal(4, secondCategory.AttributeExtensions.Count);
Assert.Equal("", secondCategory.AttributeExtensions[new XmlQualifiedName("category_name1")]);
Assert.Equal("", secondCategory.AttributeExtensions[new XmlQualifiedName("category_name2", "category_namespace")]);
Assert.Equal("category_value", secondCategory.AttributeExtensions[new XmlQualifiedName("category_name3", "category_namespace")]);
Assert.Equal("", secondCategory.AttributeExtensions[new XmlQualifiedName("category_name4", "xmlns")]);
Assert.Equal(1, secondCategory.ElementExtensions.Count);
Assert.Equal(10, secondCategory.ElementExtensions[0].GetObject<ExtensionObject>().Value);
Assert.Equal("category_name", secondCategory.Name);
Assert.Equal("category_scheme", secondCategory.Scheme);
Assert.Equal("category_label", secondCategory.Label);
});
}
[Fact]
public void Read_InlineCategoriesDocumentTryParseTrue_ReturnsExpected()
{
using (var stringReader = new StringReader(
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://inlinecategories_url.com/"" xml:lang=""inlinecategories_Language"" scheme=""inlinecategories_scheme"" fixed=""yes"" inlinecategories_name1="""" d1p1:inlinecategories_name2="""" d1p1:inlinecategories_name3=""inlinecategories_value"" d1p2:inlinecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""inlinecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<a10:category term="""" />
<a10:category category_name1="""" d2p1:category_name2="""" d2p1:category_name3=""category_value"" d1p2:category_name4="""" term=""category_name"" label=""category_label"" scheme=""category_scheme"" xmlns:d2p1=""category_namespace"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</a10:category>
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocumentTryParseTrueSubclass), typeof(ReferencedCategoriesDocumentTryParseTrueSubclass));
formatter.ReadFrom(reader);
InlineCategoriesDocumentTryParseTrueSubclass document = Assert.IsType<InlineCategoriesDocumentTryParseTrueSubclass>(formatter.Document);
Assert.Empty(document.AttributeExtensions);
Assert.Equal(new Uri("http://inlinecategories_url.com/"), document.BaseUri);
Assert.Equal(2, document.Categories.Count);
Assert.Empty(document.ElementExtensions);
Assert.True(document.IsFixed);
Assert.Equal("inlinecategories_Language", document.Language);
Assert.Equal("inlinecategories_scheme", document.Scheme);
SyndicationCategory firstCategory = document.Categories[0];
Assert.Empty(firstCategory.AttributeExtensions);
Assert.Empty(firstCategory.ElementExtensions);
Assert.Empty(firstCategory.Name);
Assert.Equal("inlinecategories_scheme", firstCategory.Scheme);
Assert.Null(firstCategory.Label);
SyndicationCategory secondCategory = document.Categories[1];
Assert.Empty(secondCategory.AttributeExtensions);
Assert.Empty(secondCategory.ElementExtensions);
Assert.Equal("category_name", secondCategory.Name);
Assert.Equal("category_scheme", secondCategory.Scheme);
Assert.Equal("category_label", secondCategory.Label);
}
}
[Fact]
public void ReadFrom_ReferencedCategoriesDocument_ReturnsExpected()
{
string xmlString =
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://referencecategories_url.com/"" xml:lang=""referencecategories_language"" href=""http://referencecategories_link.com/"" referencecategories_name1="""" d1p1:referencecategories_name2="""" d1p1:referencecategories_name3=""referencecategories_value"" d1p2:referencecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""referencecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>";
VerifyRead<ReferencedCategoriesDocument>(xmlString, document =>
{
Assert.Equal(4, document.AttributeExtensions.Count);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("referencecategories_name1")]);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("referencecategories_name2", "referencecategories_namespace")]);
Assert.Equal("referencecategories_value", document.AttributeExtensions[new XmlQualifiedName("referencecategories_name3", "referencecategories_namespace")]);
Assert.Equal("", document.AttributeExtensions[new XmlQualifiedName("referencecategories_name4", "xmlns")]);
Assert.Equal(new Uri("http://referencecategories_url.com/"), document.BaseUri);
Assert.Equal(1, document.ElementExtensions.Count);
Assert.Equal(10, document.ElementExtensions[0].GetObject<ExtensionObject>().Value);
Assert.Equal("referencecategories_language", document.Language);
Assert.Equal(new Uri("http://referencecategories_link.com"), document.Link);
});
}
[Fact]
public void ReadFrom_ReferencedCategoriesDocumentTryParseTrue_ReturnsExpected()
{
using (var stringReader = new StringReader(
@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xml:base=""http://referencecategories_url.com/"" xml:lang=""referencecategories_language"" href=""http://referencecategories_link.com/"" referencecategories_name1="""" d1p1:referencecategories_name2="""" d1p1:referencecategories_name3=""referencecategories_value"" d1p2:referencecategories_name4="""" xmlns:d1p2=""xmlns"" xmlns:d1p1=""referencecategories_namespace"" xmlns:app=""http://www.w3.org/2007/app"">
<AtomPub10CategoriesDocumentFormatterTests.ExtensionObject xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns=""http://schemas.datacontract.org/2004/07/System.ServiceModel.Syndication.Tests"">
<Value>10</Value>
</AtomPub10CategoriesDocumentFormatterTests.ExtensionObject>
</app:categories>"))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocumentTryParseTrueSubclass), typeof(ReferencedCategoriesDocumentTryParseTrueSubclass));
formatter.ReadFrom(reader);
ReferencedCategoriesDocumentTryParseTrueSubclass document = Assert.IsType<ReferencedCategoriesDocumentTryParseTrueSubclass>(formatter.Document);
Assert.Empty(document.AttributeExtensions);
Assert.Equal(new Uri("http://referencecategories_url.com/"), document.BaseUri);
Assert.Empty(document.ElementExtensions);
Assert.Equal("referencecategories_language", document.Language);
Assert.Equal(new Uri("http://referencecategories_link.com"), document.Link);
}
}
[Fact]
public void ReadFrom_EmptyCategory_ReturnsExpected()
{
VerifyRead<InlineCategoriesDocument>(@"<app:categories xmlns:a10=""http://www.w3.org/2005/Atom"" xmlns:app=""http://www.w3.org/2007/app"" />", document =>
{
Assert.Empty(document.AttributeExtensions);
Assert.Null(document.BaseUri);
Assert.Empty(document.Categories);
Assert.Empty(document.ElementExtensions);
Assert.False(document.IsFixed);
Assert.Null(document.Language);
Assert.Null(document.Scheme);
});
}
private static void VerifyRead<T>(string xmlString, Action<T> verifyAction) where T : CategoriesDocument
{
// ReadFrom.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
formatter.ReadFrom(reader);
T document = Assert.IsType<T>(formatter.Document);
verifyAction(document);
}
// ReadFrom with custom subclass.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocumentSubclass), typeof(ReferencedCategoriesDocumentSubclass));
formatter.ReadFrom(reader);
if (typeof(T) == typeof(InlineCategoriesDocument))
{
InlineCategoriesDocumentSubclass document = Assert.IsType<InlineCategoriesDocumentSubclass>(formatter.Document);
verifyAction(document as T);
}
else
{
ReferencedCategoriesDocumentSubclass document = Assert.IsType<ReferencedCategoriesDocumentSubclass>(formatter.Document);
verifyAction(document as T);
}
}
// ReadXml.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new AtomPub10CategoriesDocumentFormatter();
((IXmlSerializable)formatter).ReadXml(reader);
T document = Assert.IsType<T>(formatter.Document);
verifyAction(document);
}
// ReadXml with custom subclass.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
reader.MoveToContent();
var formatter = new AtomPub10CategoriesDocumentFormatter(typeof(InlineCategoriesDocumentSubclass), typeof(ReferencedCategoriesDocumentSubclass));
((IXmlSerializable)formatter).ReadXml(reader);
if (typeof(T) == typeof(InlineCategoriesDocument))
{
InlineCategoriesDocumentSubclass document = Assert.IsType<InlineCategoriesDocumentSubclass>(formatter.Document);
verifyAction(document as T);
}
else
{
ReferencedCategoriesDocumentSubclass document = Assert.IsType<ReferencedCategoriesDocumentSubclass>(formatter.Document);
verifyAction(document as T);
}
}
// Load.
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
T document = Assert.IsType<T>(CategoriesDocument.Load(reader));
verifyAction(document);
}
}
[Fact]
public void ReadFrom_NullReader_ThrowsArgumentNullException()
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadFrom(null));
}
[Theory]
[InlineData(@"<app:different xmlns:a10=""http://www.w3.org/2005/Atom"" xmlns:app=""http://www.w3.org/2007/app"" />")]
[InlineData(@"<categories xmlns:a10=""http://www.w3.org/2005/Atom"" xmlns:app=""http://www.w3.org/2007/app"" />")]
public void ReadFrom_CantRead_ThrowsXmlException(string xmlString)
{
using (var stringReader = new StringReader(xmlString))
using (XmlReader reader = XmlReader.Create(stringReader))
{
var formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Throws<XmlException>(() => formatter.ReadFrom(reader));
}
}
[Fact]
public void ReadXml_ThrowsArgumentException_RethrowsAsXmlException()
{
var reader = new ThrowingXmlReader(new ArgumentException());
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Throws<XmlException>(() => formatter.ReadXml(reader));
}
[Fact]
public void ReadXml_ThrowsFormatException_RethrowsAsXmlException()
{
var reader = new ThrowingXmlReader(new FormatException());
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
Assert.Throws<XmlException>(() => formatter.ReadXml(reader));
}
[Fact]
public void ReadXml_NullReader_ThrowsArgumentNullException()
{
IXmlSerializable formatter = new AtomPub10CategoriesDocumentFormatter();
AssertExtensions.Throws<ArgumentNullException>("reader", () => formatter.ReadXml(null));
}
[Fact]
public void CreateInlineCategoriesDocument_NonGeneric_Success()
{
var formatter = new Formatter();
InlineCategoriesDocument document = Assert.IsType<InlineCategoriesDocument>(formatter.CreateInlineCategoriesDocumentEntryPoint());
Assert.Empty(document.AttributeExtensions);
Assert.Null(document.BaseUri);
Assert.Empty(document.Categories);
Assert.Empty(document.ElementExtensions);
Assert.False(document.IsFixed);
Assert.Null(document.Language);
Assert.Null(document.Scheme);
var typedFormatter = new Formatter(typeof(InlineCategoriesDocumentSubclass), typeof(ReferencedCategoriesDocumentSubclass));
document = Assert.IsType<InlineCategoriesDocumentSubclass>(typedFormatter.CreateInlineCategoriesDocumentEntryPoint());
Assert.Empty(document.AttributeExtensions);
Assert.Null(document.BaseUri);
Assert.Empty(document.Categories);
Assert.Empty(document.ElementExtensions);
Assert.False(document.IsFixed);
Assert.Null(document.Language);
Assert.Null(document.Scheme);
}
[Fact]
public void CreateReferencedCategoriesDocument_NonGeneric_Success()
{
var formatter = new Formatter();
ReferencedCategoriesDocument document = Assert.IsType<ReferencedCategoriesDocument>(formatter.CreateReferencedCategoriesDocumentEntryPoint());
Assert.Empty(document.AttributeExtensions);
Assert.Null(document.BaseUri);
Assert.Empty(document.ElementExtensions);
Assert.Null(document.Language);
Assert.Null(document.Link);
var typedFormatter = new Formatter(typeof(InlineCategoriesDocumentSubclass), typeof(ReferencedCategoriesDocumentSubclass));
document = Assert.IsType<ReferencedCategoriesDocumentSubclass>(typedFormatter.CreateReferencedCategoriesDocumentEntryPoint());
Assert.Empty(document.AttributeExtensions);
Assert.Null(document.BaseUri);
Assert.Empty(document.ElementExtensions);
Assert.Null(document.Language);
Assert.Null(document.Link);
}
public class InlineCategoriesDocumentSubclass : InlineCategoriesDocument { }
public class ReferencedCategoriesDocumentSubclass : ReferencedCategoriesDocument { }
public class InlineCategoriesDocumentTryParseTrueSubclass : InlineCategoriesDocument
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
protected override SyndicationCategory CreateCategory() => new SyndicationCategoryTryParseTrueSubclass();
}
public class SyndicationCategoryTryParseTrueSubclass : SyndicationCategory
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
}
public class ReferencedCategoriesDocumentTryParseTrueSubclass : ReferencedCategoriesDocument
{
protected override bool TryParseAttribute(string name, string ns, string value, string version) => true;
protected override bool TryParseElement(XmlReader reader, string version)
{
reader.Skip();
return true;
}
}
public class Formatter : AtomPub10CategoriesDocumentFormatter
{
public Formatter() : base() { }
public Formatter(CategoriesDocument documentToWrite) : base(documentToWrite) { }
public Formatter(Type inlineDocumentType, Type referencedDocumentType) : base(inlineDocumentType, referencedDocumentType) { }
public InlineCategoriesDocument CreateInlineCategoriesDocumentEntryPoint() => CreateInlineCategoriesDocument();
public ReferencedCategoriesDocument CreateReferencedCategoriesDocumentEntryPoint() => CreateReferencedCategoriesDocument();
}
[DataContract]
public class ExtensionObject
{
[DataMember]
public int Value { get; set; }
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
|
./src/libraries/System.Diagnostics.DiagnosticSource/src/System/Diagnostics/ActivityContext.netcoreapp.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.Diagnostics
{
/// <summary>
/// ActivityContext representation conforms to the w3c TraceContext specification. It contains two identifiers
/// a TraceId and a SpanId - along with a set of common TraceFlags and system-specific TraceState values.
/// </summary>
public readonly partial struct ActivityContext : IEquatable<ActivityContext>
{
public override int GetHashCode() => HashCode.Combine(TraceId, SpanId, TraceFlags, TraceState);
}
}
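// The block below is an illustrative sketch, not part of the source file above: it shows the
// practical effect of the HashCode.Combine override - contexts built from the same identifiers
// compare equal and hash alike, so ActivityContext can serve as a dictionary key. It assumes only
// public System.Diagnostics APIs (ActivityTraceId/ActivitySpanId.CreateRandom, ActivityTraceFlags).
using System;
using System.Diagnostics;

static class ActivityContextHashSketch
{
    static void Main()
    {
        ActivityTraceId traceId = ActivityTraceId.CreateRandom();
        ActivitySpanId spanId = ActivitySpanId.CreateRandom();

        var first = new ActivityContext(traceId, spanId, ActivityTraceFlags.Recorded);
        var second = new ActivityContext(traceId, spanId, ActivityTraceFlags.Recorded);

        Console.WriteLine(first.Equals(second));                         // True
        Console.WriteLine(first.GetHashCode() == second.GetHashCode());  // True
    }
}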
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.Diagnostics
{
/// <summary>
/// ActivityContext representation conforms to the w3c TraceContext specification. It contains two identifiers
/// a TraceId and a SpanId - along with a set of common TraceFlags and system-specific TraceState values.
/// </summary>
public readonly partial struct ActivityContext : IEquatable<ActivityContext>
{
public override int GetHashCode() => HashCode.Combine(TraceId, SpanId, TraceFlags, TraceState);
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
|
./src/libraries/System.ComponentModel.Composition/tests/System/ComponentModel/Composition/Factories/ExportProviderFactory.RecomposableExportProvider.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.ComponentModel.Composition.Hosting;
using System.ComponentModel.Composition.Primitives;
using System.Linq;
using Xunit;
namespace System.ComponentModel.Composition.Factories
{
partial class ExportProviderFactory
{
public class RecomposableExportProvider : ExportProvider
{
public static Dictionary<string, object> EmptyMetadataDictionary = new Dictionary<string, object>();
public List<Export> _exports = new List<Export>();
public void AddExport(string contractName, object value)
{
Export export = CreateExport(contractName, value);
var exports = this._exports.ToList();
exports.Add(export);
ChangeExports(exports);
}
public void RemoveExport(string contractName)
{
int index = FindExport(contractName);
Assert.True(index >= 0);
var exports = this._exports.ToList();
exports.RemoveAt(index);
ChangeExports(exports);
}
public void ReplaceExportValue(string contractName, object newValue)
{
int index = FindExport(contractName);
Assert.True(index >= 0);
var exports = this._exports.ToList();
exports.RemoveAt(index);
exports.Add(CreateExport(contractName, newValue));
ChangeExports(exports);
}
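// ChangeExports (below) swaps the export list inside an AtomicComposition: the new list is staged
// with SetValue so that GetExportsCore sees it while ExportsChanging is being raised, and it is
// only committed to _exports (with ExportsChanged raised) when the composition completes.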
private void ChangeExports(List<Export> newExports)
{
using (var atomicComposition = new AtomicComposition())
{
atomicComposition.AddCompleteAction(() => this._exports = newExports);
atomicComposition.SetValue(this, newExports);
var addedExports = newExports.Except(this._exports).Select(export => export.Definition);
var removedExports = this._exports.Except(newExports).Select(export => export.Definition);
this.OnExportsChanging(new ExportsChangeEventArgs(addedExports, removedExports, atomicComposition));
atomicComposition.AddCompleteAction(() => this.OnExportsChanged(
new ExportsChangeEventArgs(addedExports, removedExports, null)));
atomicComposition.Complete();
}
}
private int FindExport(string contractName)
{
for (int i = 0; i < _exports.Count; i++)
{
if (_exports[i].Definition.ContractName == contractName)
{
return i;
}
}
return -1;
}
private Export CreateExport(string contractName, object value)
{
return new Export(new ExportDefinition(contractName, EmptyMetadataDictionary), () => value);
}
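// Answers queries against the pending list stored in the AtomicComposition when a change is in flight,
// so constraints evaluated during recomposition see the proposed state; otherwise the committed list is used.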
protected override IEnumerable<Export> GetExportsCore(ImportDefinition importDefinition, AtomicComposition context)
{
IEnumerable<Export> contextExports;
if (context == null || !context.TryGetValue(this, out contextExports))
{
contextExports = this._exports;
}
List<Export> exports = new List<Export>();
var func = importDefinition.Constraint.Compile();
foreach (Export export in contextExports)
{
if (func(export.Definition))
{
exports.Add(export);
}
}
return exports;
}
}
}
}
|
| -1 |
./src/coreclr/tools/aot/ILCompiler.ReadyToRun/IBC/MIbcProfileParser.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO.Compression;
using System.Reflection;
using Internal.TypeSystem;
using Internal.TypeSystem.Ecma;
using Internal.IL;
using Internal.Pgo;
using System.Linq;
using System.IO;
using System.Diagnostics;
using System.Reflection.PortableExecutable;
namespace ILCompiler.IBC
{
static class MIbcProfileParser
{
private class MetadataLoaderForPgoData : IPgoSchemaDataLoader<TypeSystemEntityOrUnknown>
{
private readonly EcmaMethodIL _ilBody;
public MetadataLoaderForPgoData(EcmaMethodIL ilBody)
{
_ilBody = ilBody;
}
TypeSystemEntityOrUnknown IPgoSchemaDataLoader<TypeSystemEntityOrUnknown>.TypeFromLong(long token)
{
try
{
if (token == 0)
return new TypeSystemEntityOrUnknown(null);
if ((token & 0xFF000000) == 0)
{
// token type is 0, therefore it can't be a type
return new TypeSystemEntityOrUnknown((int)token);
}
TypeDesc foundType = _ilBody.GetObject((int)token, NotFoundBehavior.ReturnNull) as TypeDesc;
if (foundType == null)
{
return new TypeSystemEntityOrUnknown((int)token & 0x00FFFFFF);
}
return new TypeSystemEntityOrUnknown(foundType);
}
catch
{
return new TypeSystemEntityOrUnknown((int)token);
}
}
}
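// A .mibc file is either a raw PE image (starting with the usual 'MZ' signature) or a zip archive
// containing '<fileName>.dll'; peek at the first two bytes to decide how to open it.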
public static PEReader OpenMibcAsPEReader(string filename)
{
byte[] peData = null;
PEReader peReader = null;
{
FileStream fsMibcFile = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.Read, bufferSize: 0x1000, useAsync: false);
bool disposeOnException = true;
try
{
byte firstByte = (byte)fsMibcFile.ReadByte();
byte secondByte = (byte)fsMibcFile.ReadByte();
fsMibcFile.Seek(0, SeekOrigin.Begin);
if (firstByte == 0x4d && secondByte == 0x5a)
{
// Uncompressed Mibc format, starts with 'MZ' prefix like all other PE files
peReader = new PEReader(fsMibcFile, PEStreamOptions.Default);
disposeOnException = false;
}
else
{
using (var zipFile = new ZipArchive(fsMibcFile, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null))
{
disposeOnException = false;
var mibcDataEntry = zipFile.GetEntry(Path.GetFileName(filename) + ".dll");
using (var mibcDataStream = mibcDataEntry.Open())
{
peData = new byte[mibcDataEntry.Length];
using (BinaryReader br = new BinaryReader(mibcDataStream))
{
peData = br.ReadBytes(checked((int)mibcDataEntry.Length));
}
}
}
}
}
finally
{
if (disposeOnException)
fsMibcFile.Dispose();
}
}
if (peData != null)
{
peReader = new PEReader(System.Collections.Immutable.ImmutableArray.Create<byte>(peData));
}
return peReader;
}
/// <summary>
/// Parse an MIBC file for the methods that are interesting.
/// The version bubble must be specified and will restrict the set of methods parsed to those relevant to the compilation.
/// The onlyDefinedInAssembly parameter is used to restrict the set of types parsed to include only those which are defined in a specific module. Specify null to allow definitions from all modules.
/// This limited parsing is not necessarily exact, so detailed algorithms that work at the individual method level are still necessary, but it allows excessive parsing to be avoided.
///
/// The format of the Mibc file is that of a .NET dll, with a global method named "AssemblyDictionary". Inside of that file are a series of references that are broken up by which assemblies define the individual methods.
/// These references are encoded as IL code that represents the details.
/// The format of these IL instructions is as follows.
///
/// ldstr mibcGroupName
/// ldtoken mibcGroupMethod
/// pop
/// {Repeat the above pattern N times, once per Mibc group}
///
/// See comment above ReadMIbcGroup for details of the group format
///
/// The mibcGroupName is in the following format: "Assembly_{definingAssemblyName};{OtherAssemblyName};{OtherAssemblyName};...;" (the OtherAssemblyName entries are ';' delimited)
///
/// </summary>
/// <returns></returns>
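/// <example>
/// Illustrative usage (the file name and the type system context variable are hypothetical):
/// <code>
/// PEReader reader = MIbcProfileParser.OpenMibcAsPEReader("profile.mibc");
/// ProfileData profile = MIbcProfileParser.ParseMIbcFile(typeSystemContext, reader, assemblyNamesInVersionBubble: null, onlyDefinedInAssembly: null);
/// </code>
/// </example>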
public static ProfileData ParseMIbcFile(TypeSystemContext tsc, PEReader peReader, HashSet<string> assemblyNamesInVersionBubble, string onlyDefinedInAssembly)
{
var mibcModule = EcmaModule.Create(tsc, peReader, null, null, new CustomCanonResolver(tsc));
var assemblyDictionary = (EcmaMethod)mibcModule.GetGlobalModuleType().GetMethod("AssemblyDictionary", null);
IEnumerable<MethodProfileData> loadedMethodProfileData = Enumerable.Empty<MethodProfileData>();
EcmaMethodIL ilBody = EcmaMethodIL.Create(assemblyDictionary);
ILReader ilReader = new ILReader(ilBody.GetILBytes());
string mibcGroupName = "";
while (ilReader.HasNext)
{
ILOpcode opcode = ilReader.ReadILOpcode();
switch (opcode)
{
case ILOpcode.ldstr:
int userStringToken = ilReader.ReadILToken();
Debug.Assert(mibcGroupName == "");
if (mibcGroupName == "")
{
mibcGroupName = (string)ilBody.GetObject(userStringToken);
}
break;
case ILOpcode.ldtoken:
int token = ilReader.ReadILToken();
if (String.IsNullOrEmpty(mibcGroupName))
break;
string[] assembliesByName = mibcGroupName.Split(';');
bool hasMatchingDefinition = (onlyDefinedInAssembly == null) || assembliesByName[0].Equals(onlyDefinedInAssembly);
if (!hasMatchingDefinition)
break;
if (assemblyNamesInVersionBubble != null)
{
bool areAllEntriesInVersionBubble = true;
foreach (string s in assembliesByName)
{
if (string.IsNullOrEmpty(s))
continue;
if (!assemblyNamesInVersionBubble.Contains(s))
{
areAllEntriesInVersionBubble = false;
break;
}
}
if (!areAllEntriesInVersionBubble)
break;
}
loadedMethodProfileData = loadedMethodProfileData.Concat(ReadMIbcGroup(tsc, (EcmaMethod)ilBody.GetObject(token)));
break;
case ILOpcode.pop:
mibcGroupName = "";
break;
default:
ilReader.Skip(opcode);
break;
}
}
return new IBCProfileData(false, loadedMethodProfileData);
}
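// States of the small state machine ReadMIbcGroup uses while walking the IL stream of a group method.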
enum MibcGroupParseState
{
LookingForNextMethod,
LookingForOptionalData,
ProcessingExclusiveWeight,
ProcessingCallgraphCount,
ProcessingCallgraphToken,
ProcessingCallgraphWeight,
ProcessingInstrumentationData,
}
/// <summary>
/// Parse MIbcGroup method and return enumerable of MethodProfileData
///
/// Like the AssemblyDictionary method, data is encoded via IL instructions. The format is
///
/// ldtoken methodInProfileData
/// Any series of instructions that does not include pop. Expansion data is encoded via ldstr "id"
/// followed by an expansion-specific sequence of IL opcodes.
/// pop
/// {Repeat N times for N methods described}
///
/// Extensions supported with current parser:
///
/// ldstr "ExclusiveWeight"
/// Any ldc.i4 or ldc.r4 or ldc.r8 instruction to indicate the exclusive weight
///
/// ldstr "WeightedCallData"
/// ldc.i4 <Count of methods called>
/// Repeat <Count of methods called> times
/// ldtoken <Method called from this method>
/// ldc.i4 <Weight associated with calling the <Method called from this method>>
///
/// This format is designed to be extensible to hold more data as we add new per method profile data without breaking existing parsers.
/// </summary>
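// For example, a single entry that only records an exclusive weight would be encoded as
// (illustrative only, the method name is invented):
//   ldtoken MyNamespace.MyType::MyMethod
//   ldstr "ExclusiveWeight"
//   ldc.r8 12.5
//   pop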
static IEnumerable<MethodProfileData> ReadMIbcGroup(TypeSystemContext tsc, EcmaMethod method)
{
EcmaMethodIL ilBody = EcmaMethodIL.Create(method);
MetadataLoaderForPgoData metadataLoader = new MetadataLoaderForPgoData(ilBody);
ILReader ilReader = new ILReader(ilBody.GetILBytes());
object methodInProgress = null;
object metadataNotResolvable = new object();
object metadataObject = null;
MibcGroupParseState state = MibcGroupParseState.LookingForNextMethod;
int intValue = 0;
int weightedCallGraphSize = 0;
int profileEntryFound = 0;
double exclusiveWeight = 0;
Dictionary<MethodDesc, int> weights = null;
bool processIntValue = false;
List<long> instrumentationDataLongs = null;
PgoSchemaElem[] pgoSchemaData = null;
while (ilReader.HasNext)
{
ILOpcode opcode = ilReader.ReadILOpcode();
processIntValue = false;
switch (opcode)
{
case ILOpcode.ldtoken:
{
int token = ilReader.ReadILToken();
if (state == MibcGroupParseState.ProcessingInstrumentationData)
{
instrumentationDataLongs.Add(token);
}
else
{
metadataObject = null;
try
{
metadataObject = ilBody.GetObject(token, NotFoundBehavior.ReturnNull);
if (metadataObject == null)
metadataObject = metadataNotResolvable;
}
catch (TypeSystemException)
{
// The method being referred to may be missing. In that situation,
// use the metadataNotResolvable sentinel to indicate that this record should be ignored
metadataObject = metadataNotResolvable;
}
switch (state)
{
case MibcGroupParseState.ProcessingCallgraphToken:
state = MibcGroupParseState.ProcessingCallgraphWeight;
break;
case MibcGroupParseState.LookingForNextMethod:
methodInProgress = metadataObject;
state = MibcGroupParseState.LookingForOptionalData;
break;
default:
state = MibcGroupParseState.LookingForOptionalData;
break;
}
}
}
break;
case ILOpcode.ldc_r4:
{
float fltValue = ilReader.ReadILFloat();
switch (state)
{
case MibcGroupParseState.ProcessingExclusiveWeight:
exclusiveWeight = fltValue;
state = MibcGroupParseState.LookingForOptionalData;
break;
default:
state = MibcGroupParseState.LookingForOptionalData;
break;
}
break;
}
case ILOpcode.ldc_r8:
{
double dblValue = ilReader.ReadILDouble();
switch (state)
{
case MibcGroupParseState.ProcessingExclusiveWeight:
exclusiveWeight = dblValue;
state = MibcGroupParseState.LookingForOptionalData;
break;
default:
state = MibcGroupParseState.LookingForOptionalData;
break;
}
break;
}
case ILOpcode.ldc_i4_0:
intValue = 0;
processIntValue = true;
break;
case ILOpcode.ldc_i4_1:
intValue = 1;
processIntValue = true;
break;
case ILOpcode.ldc_i4_2:
intValue = 2;
processIntValue = true;
break;
case ILOpcode.ldc_i4_3:
intValue = 3;
processIntValue = true;
break;
case ILOpcode.ldc_i4_4:
intValue = 4;
processIntValue = true;
break;
case ILOpcode.ldc_i4_5:
intValue = 5;
processIntValue = true;
break;
case ILOpcode.ldc_i4_6:
intValue = 6;
processIntValue = true;
break;
case ILOpcode.ldc_i4_7:
intValue = 7;
processIntValue = true;
break;
case ILOpcode.ldc_i4_8:
intValue = 8;
processIntValue = true;
break;
case ILOpcode.ldc_i4_m1:
intValue = -1;
processIntValue = true;
break;
case ILOpcode.ldc_i4_s:
intValue = (sbyte)ilReader.ReadILByte();
processIntValue = true;
break;
case ILOpcode.ldc_i4:
intValue = (int)ilReader.ReadILUInt32();
processIntValue = true;
break;
case ILOpcode.ldc_i8:
if (state == MibcGroupParseState.ProcessingInstrumentationData)
{
instrumentationDataLongs.Add((long)ilReader.ReadILUInt64());
}
break;
case ILOpcode.ldstr:
{
int userStringToken = ilReader.ReadILToken();
string optionalDataName = (string)ilBody.GetObject(userStringToken);
switch (optionalDataName)
{
case "ExclusiveWeight":
state = MibcGroupParseState.ProcessingExclusiveWeight;
break;
case "WeightedCallData":
state = MibcGroupParseState.ProcessingCallgraphCount;
break;
case "InstrumentationDataStart":
state = MibcGroupParseState.ProcessingInstrumentationData;
instrumentationDataLongs = new List<long>();
break;
case "InstrumentationDataEnd":
if (instrumentationDataLongs != null)
{
instrumentationDataLongs.Add(2); // MarshalMask 2 (Type)
instrumentationDataLongs.Add(0); // PgoInstrumentationKind.Done (0)
pgoSchemaData = PgoProcessor.ParsePgoData<TypeSystemEntityOrUnknown>(metadataLoader, instrumentationDataLongs, false).ToArray();
}
state = MibcGroupParseState.LookingForOptionalData;
break;
default:
state = MibcGroupParseState.LookingForOptionalData;
break;
}
}
break;
case ILOpcode.pop:
if (methodInProgress != metadataNotResolvable)
{
profileEntryFound++;
if (exclusiveWeight == 0)
{
// If no exclusive weight is found assign a non zero value that assumes the order in the pgo file is significant.
exclusiveWeight = Math.Min(1000000.0 - profileEntryFound, 0.0) / 1000000.0;
}
if (methodInProgress != null)
{
// If the method being loaded didn't have meaningful input, skip
MethodProfileData mibcData = new MethodProfileData((MethodDesc)methodInProgress, MethodProfilingDataFlags.ReadMethodCode, exclusiveWeight, weights, 0xFFFFFFFF, pgoSchemaData);
yield return mibcData;
}
state = MibcGroupParseState.LookingForNextMethod;
exclusiveWeight = 0;
weights = null;
instrumentationDataLongs = null;
pgoSchemaData = null;
}
methodInProgress = null;
break;
default:
state = MibcGroupParseState.LookingForOptionalData;
ilReader.Skip(opcode);
break;
}
if (processIntValue)
{
switch (state)
{
case MibcGroupParseState.ProcessingExclusiveWeight:
exclusiveWeight = intValue;
state = MibcGroupParseState.LookingForOptionalData;
break;
case MibcGroupParseState.ProcessingCallgraphCount:
weightedCallGraphSize = intValue;
weights = new Dictionary<MethodDesc, int>();
if (weightedCallGraphSize > 0)
state = MibcGroupParseState.ProcessingCallgraphToken;
else
state = MibcGroupParseState.LookingForOptionalData;
break;
case MibcGroupParseState.ProcessingCallgraphWeight:
if (metadataObject != metadataNotResolvable)
{
weights.Add((MethodDesc)metadataObject, intValue);
}
weightedCallGraphSize--;
if (weightedCallGraphSize > 0)
state = MibcGroupParseState.ProcessingCallgraphToken;
else
state = MibcGroupParseState.LookingForOptionalData;
break;
case MibcGroupParseState.ProcessingInstrumentationData:
instrumentationDataLongs.Add(intValue);
break;
default:
state = MibcGroupParseState.LookingForOptionalData;
instrumentationDataLongs = null;
break;
}
}
}
}
/// <summary>
/// Use this implementation of IModuleResolver to provide a module resolver which overrides resolution of System.Private.Canon module to point to a module
/// that can resolve the CanonTypes out of the core library as CanonType.
/// </summary>
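// Mibc files can reference canonical instantiations via the "System.Private.Canon" assembly name;
// this resolver maps that name onto the context's canon types instead of loading a real assembly.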
class CustomCanonResolver : IModuleResolver
{
private class CanonModule : ModuleDesc, IAssemblyDesc
{
public CanonModule(TypeSystemContext wrappedContext) : base(wrappedContext, null)
{
}
public override IEnumerable<MetadataType> GetAllTypes()
{
throw new NotImplementedException();
}
public override MetadataType GetGlobalModuleType()
{
throw new NotImplementedException();
}
public override object GetType(string nameSpace, string name, NotFoundBehavior notFoundBehavior)
{
TypeSystemContext context = Context;
if (context.SupportsCanon && (nameSpace == context.CanonType.Namespace) && (name == context.CanonType.Name))
return Context.CanonType;
if (context.SupportsUniversalCanon && (nameSpace == context.UniversalCanonType.Namespace) && (name == context.UniversalCanonType.Name))
return Context.UniversalCanonType;
else
{
if (notFoundBehavior != NotFoundBehavior.ReturnNull)
{
var failure = ResolutionFailure.GetTypeLoadResolutionFailure(nameSpace, name, "System.Private.Canon");
if (notFoundBehavior == NotFoundBehavior.Throw)
failure.Throw();
return failure;
}
return null;
}
}
public AssemblyName GetName()
{
return new AssemblyName("System.Private.Canon");
}
}
CanonModule _canonModule;
AssemblyName _canonModuleName;
IModuleResolver _wrappedResolver;
public CustomCanonResolver(TypeSystemContext wrappedContext)
{
_canonModule = new CanonModule(wrappedContext);
_canonModuleName = _canonModule.GetName();
_wrappedResolver = wrappedContext;
}
ModuleDesc IModuleResolver.ResolveAssembly(AssemblyName name, bool throwIfNotFound)
{
if (name.Name == _canonModuleName.Name)
return _canonModule;
else
return _wrappedResolver.ResolveAssembly(name, throwIfNotFound);
}
ModuleDesc IModuleResolver.ResolveModule(IAssemblyDesc referencingModule, string fileName, bool throwIfNotFound)
{
return _wrappedResolver.ResolveModule(referencingModule, fileName, throwIfNotFound);
}
}
}
}
|
| -1 |
./src/libraries/System.Reflection.MetadataLoadContext/src/System/Reflection/TypeLoading/Assemblies/RoAssembly.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Runtime.Serialization;
using System.Text;
namespace System.Reflection.TypeLoading
{
/// <summary>
/// Base class for all Assembly objects created by a MetadataLoadContext.
/// </summary>
internal abstract partial class RoAssembly : LeveledAssembly
{
private readonly RoModule?[] _loadedModules; // Any loaded modules indexed by [rid - 1]. Does NOT include the manifest module.
protected RoAssembly(MetadataLoadContext loader, int assemblyFileCount)
: base()
{
Loader = loader;
IsSingleModule = (assemblyFileCount == 0);
_loadedModules = (assemblyFileCount == 0) ? Array.Empty<RoModule>() : new RoModule[assemblyFileCount];
}
public sealed override Module ManifestModule => GetRoManifestModule();
internal abstract RoModule GetRoManifestModule();
protected bool IsSingleModule { get; }
public sealed override string ToString() => Loader.GetDisposedString() ?? base.ToString();
// Naming
public sealed override AssemblyName GetName(bool copiedName) => GetAssemblyNameDataNoCopy().CreateAssemblyName();
internal AssemblyNameData GetAssemblyNameDataNoCopy() => _lazyAssemblyNameData ?? (_lazyAssemblyNameData = ComputeNameData());
protected abstract AssemblyNameData ComputeNameData();
private volatile AssemblyNameData? _lazyAssemblyNameData;
public sealed override string FullName => _lazyFullName ?? (_lazyFullName = GetName().FullName);
private volatile string? _lazyFullName;
internal const string ThrowingMessageInRAF = "This member throws an exception for assemblies embedded in a single-file app";
// Location and codebase
public abstract override string Location { get; }
#if NETCOREAPP
[Obsolete(Obsoletions.CodeBaseMessage, DiagnosticId = Obsoletions.CodeBaseDiagId, UrlFormat = Obsoletions.SharedUrlFormat)]
[RequiresAssemblyFiles(ThrowingMessageInRAF)]
#endif
public sealed override string CodeBase => throw new NotSupportedException(SR.NotSupported_AssemblyCodeBase);
#if NETCOREAPP
[Obsolete(Obsoletions.CodeBaseMessage, DiagnosticId = Obsoletions.CodeBaseDiagId, UrlFormat = Obsoletions.SharedUrlFormat)]
[RequiresAssemblyFiles(ThrowingMessageInRAF)]
#endif
public sealed override string EscapedCodeBase => throw new NotSupportedException(SR.NotSupported_AssemblyCodeBase);
// Custom Attributes
public sealed override IList<CustomAttributeData> GetCustomAttributesData() => CustomAttributes.ToReadOnlyCollection();
public abstract override IEnumerable<CustomAttributeData> CustomAttributes { get; }
// Apis to retrieve types physically defined in this module.
public sealed override Type[] GetTypes() => IsSingleModule ? ManifestModule.GetTypes() : base.GetTypes();
public sealed override IEnumerable<TypeInfo> DefinedTypes => GetDefinedRoTypes()!;
private IEnumerable<RoType>? GetDefinedRoTypes() => IsSingleModule ? GetRoManifestModule().GetDefinedRoTypes() : MultiModuleGetDefinedRoTypes();
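// For multi-module assemblies, gather the types defined in every non-resource module.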
private IEnumerable<RoType> MultiModuleGetDefinedRoTypes()
{
foreach (RoModule module in ComputeRoModules(getResourceModules: false))
{
foreach (RoType t in module.GetDefinedRoTypes()!)
{
yield return t;
}
}
}
// Apis to retrieve public types physically defined in this module.
public sealed override Type[] GetExportedTypes()
{
// todo: use IEnumerable<T> extension instead: ExportedTypes.ToArray();
List<Type> list = new List<Type>(ExportedTypes);
return list.ToArray();
}
public sealed override IEnumerable<Type> ExportedTypes
{
get
{
foreach (RoType type in GetDefinedRoTypes()!)
{
if (type.IsVisibleOutsideAssembly())
yield return type;
}
}
}
// Api to retrieve types by name. Retrieves both types physically defined in this module and types this assembly forwards from another assembly.
public sealed override Type? GetType(string name!!, bool throwOnError, bool ignoreCase)
{
// Known compat disagreement: This api is supposed to throw an ArgumentException if the name has an assembly qualification
// (though the intended meaning seems clear.) This is difficult for us to implement as we don't have our own type name parser.
// (We can't just throw in the assemblyResolve delegate because assembly qualifications are permitted inside generic arguments,
// just not in the top level type name.) In the bigger scheme of things, this does not seem worth worrying about.
return Helpers.LoadTypeFromAssemblyQualifiedName(name, defaultAssembly: this, ignoreCase: ignoreCase, throwOnError: throwOnError);
}
/// <summary>
/// Helper routine for the more general Assembly.GetType() family of apis. Also used in typeRef resolution.
///
/// Resolves top-level named types only. No nested types. No constructed types. The input name must not be escaped.
///
/// If a type is not contained or forwarded from the assembly, this method returns null (does not throw.)
/// This supports the "throwOnError: false" behavior of Assembly.GetType(string, bool).
/// </summary>
internal RoDefinitionType? GetTypeCore(string ns, string name, bool ignoreCase, out Exception? e) => GetTypeCore(ns.ToUtf8(), name.ToUtf8(), ignoreCase, out e);
internal RoDefinitionType? GetTypeCore(ReadOnlySpan<byte> ns, ReadOnlySpan<byte> name, bool ignoreCase, out Exception? e)
{
RoDefinitionType? result = GetRoManifestModule().GetTypeCore(ns, name, ignoreCase, out e);
if (IsSingleModule || result != null)
return result;
foreach (RoModule module in ComputeRoModules(getResourceModules: false))
{
if (module == ManifestModule)
continue;
result = module.GetTypeCore(ns, name, ignoreCase, out e);
if (result != null)
return result;
}
return null;
}
// Assembly dependencies
public sealed override AssemblyName[] GetReferencedAssemblies()
{
// For compat, this api only searches the manifest module. Tools normally ensure the manifest module's assemblyRef
// table represents the union of all modules' assemblyRef tables.
AssemblyNameData[] data = GetReferencedAssembliesNoCopy();
AssemblyName[] result = new AssemblyName[data.Length];
for (int i = 0; i < data.Length; i++)
{
result[i] = data[i].CreateAssemblyName();
}
return result;
}
private AssemblyNameData[] GetReferencedAssembliesNoCopy() => _lazyAssemblyReferences ?? (_lazyAssemblyReferences = ComputeAssemblyReferences());
protected abstract AssemblyNameData[] ComputeAssemblyReferences();
private volatile AssemblyNameData[]? _lazyAssemblyReferences;
// Miscellaneous properties
public sealed override bool ReflectionOnly => true;
#if NETCOREAPP
[Obsolete("The Global Assembly Cache is not supported.", DiagnosticId = "SYSLIB0005", UrlFormat = "https://aka.ms/dotnet-warnings/{0}")]
#endif
public sealed override bool GlobalAssemblyCache => false;
public sealed override long HostContext => 0;
public abstract override string ImageRuntimeVersion { get; }
public abstract override bool IsDynamic { get; }
public abstract override MethodInfo? EntryPoint { get; }
// Manifest resource support.
public abstract override ManifestResourceInfo? GetManifestResourceInfo(string resourceName);
public abstract override string[] GetManifestResourceNames();
public abstract override Stream? GetManifestResourceStream(string name);
public sealed override Stream? GetManifestResourceStream(Type type, string name)
{
StringBuilder sb = new StringBuilder();
if (type == null)
{
if (name == null)
throw new ArgumentNullException(nameof(type));
}
else
{
string? ns = type.Namespace;
if (ns != null)
{
sb.Append(ns);
if (name != null)
sb.Append(Type.Delimiter);
}
}
if (name != null)
sb.Append(name);
return GetManifestResourceStream(sb.ToString());
}
// Serialization
public sealed override void GetObjectData(SerializationInfo info, StreamingContext context) => throw new NotSupportedException();
// Satellite assemblies
public sealed override Assembly GetSatelliteAssembly(CultureInfo culture) => throw new NotSupportedException(SR.NotSupported_SatelliteAssembly);
public sealed override Assembly GetSatelliteAssembly(CultureInfo culture, Version? version) => throw new NotSupportedException(SR.NotSupported_SatelliteAssembly);
// Operations that are invalid for ReflectionOnly objects.
public sealed override object[] GetCustomAttributes(bool inherit) => throw new InvalidOperationException(SR.Arg_ReflectionOnlyCA);
public sealed override object[] GetCustomAttributes(Type attributeType, bool inherit) => throw new InvalidOperationException(SR.Arg_ReflectionOnlyCA);
public sealed override bool IsDefined(Type attributeType, bool inherit) => throw new InvalidOperationException(SR.Arg_ReflectionOnlyCA);
// Compat quirk: Why ArgumentException instead of InvalidOperationException?
public sealed override object CreateInstance(string typeName, bool ignoreCase, BindingFlags bindingAttr, Binder? binder, object?[]? args, CultureInfo? culture, object?[]? activationAttributes) => throw new ArgumentException(SR.Arg_ReflectionOnlyInvoke);
internal MetadataLoadContext Loader { get; }
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Runtime.Serialization;
using System.Text;
namespace System.Reflection.TypeLoading
{
/// <summary>
/// Base class for all Assembly objects created by a MetadataLoadContext.
/// </summary>
internal abstract partial class RoAssembly : LeveledAssembly
{
private readonly RoModule?[] _loadedModules; // Any loaded modules indexed by [rid - 1]. Does NOT include the manifest module.
protected RoAssembly(MetadataLoadContext loader, int assemblyFileCount)
: base()
{
Loader = loader;
IsSingleModule = (assemblyFileCount == 0);
_loadedModules = (assemblyFileCount == 0) ? Array.Empty<RoModule>() : new RoModule[assemblyFileCount];
}
public sealed override Module ManifestModule => GetRoManifestModule();
internal abstract RoModule GetRoManifestModule();
protected bool IsSingleModule { get; }
public sealed override string ToString() => Loader.GetDisposedString() ?? base.ToString();
// Naming
public sealed override AssemblyName GetName(bool copiedName) => GetAssemblyNameDataNoCopy().CreateAssemblyName();
internal AssemblyNameData GetAssemblyNameDataNoCopy() => _lazyAssemblyNameData ?? (_lazyAssemblyNameData = ComputeNameData());
protected abstract AssemblyNameData ComputeNameData();
private volatile AssemblyNameData? _lazyAssemblyNameData;
public sealed override string FullName => _lazyFullName ?? (_lazyFullName = GetName().FullName);
private volatile string? _lazyFullName;
internal const string ThrowingMessageInRAF = "This member throws an exception for assemblies embedded in a single-file app";
// Location and codebase
public abstract override string Location { get; }
#if NETCOREAPP
[Obsolete(Obsoletions.CodeBaseMessage, DiagnosticId = Obsoletions.CodeBaseDiagId, UrlFormat = Obsoletions.SharedUrlFormat)]
[RequiresAssemblyFiles(ThrowingMessageInRAF)]
#endif
public sealed override string CodeBase => throw new NotSupportedException(SR.NotSupported_AssemblyCodeBase);
#if NETCOREAPP
[Obsolete(Obsoletions.CodeBaseMessage, DiagnosticId = Obsoletions.CodeBaseDiagId, UrlFormat = Obsoletions.SharedUrlFormat)]
[RequiresAssemblyFiles(ThrowingMessageInRAF)]
#endif
public sealed override string EscapedCodeBase => throw new NotSupportedException(SR.NotSupported_AssemblyCodeBase);
// Custom Attributes
public sealed override IList<CustomAttributeData> GetCustomAttributesData() => CustomAttributes.ToReadOnlyCollection();
public abstract override IEnumerable<CustomAttributeData> CustomAttributes { get; }
// Apis to retrieved types physically defined in this module.
public sealed override Type[] GetTypes() => IsSingleModule ? ManifestModule.GetTypes() : base.GetTypes();
public sealed override IEnumerable<TypeInfo> DefinedTypes => GetDefinedRoTypes()!;
private IEnumerable<RoType>? GetDefinedRoTypes() => IsSingleModule ? GetRoManifestModule().GetDefinedRoTypes() : MultiModuleGetDefinedRoTypes();
private IEnumerable<RoType> MultiModuleGetDefinedRoTypes()
{
foreach (RoModule module in ComputeRoModules(getResourceModules: false))
{
foreach (RoType t in module.GetDefinedRoTypes()!)
{
yield return t;
}
}
}
// Apis to retrieve public types physically defined in this module.
public sealed override Type[] GetExportedTypes()
{
// todo: use IEnumerable<T> extension instead: ExportedTypes.ToArray();
List<Type> list = new List<Type>(ExportedTypes);
return list.ToArray();
}
public sealed override IEnumerable<Type> ExportedTypes
{
get
{
foreach (RoType type in GetDefinedRoTypes()!)
{
if (type.IsVisibleOutsideAssembly())
yield return type;
}
}
}
// Api to retrieve types by name. Retrieves both types physically defined in this module and types this assembly forwards from another assembly.
public sealed override Type? GetType(string name!!, bool throwOnError, bool ignoreCase)
{
// Known compat disagreement: This api is supposed to throw an ArgumentException if the name has an assembly qualification
// (though the intended meaning seems clear.) This is difficult for us to implement as we don't have our own type name parser.
// (We can't just throw in the assemblyResolve delegate because assembly qualifications are permitted inside generic arguments,
// just not in the top level type name.) In the bigger scheme of things, this does not seem worth worrying about.
return Helpers.LoadTypeFromAssemblyQualifiedName(name, defaultAssembly: this, ignoreCase: ignoreCase, throwOnError: throwOnError);
}
/// <summary>
/// Helper routine for the more general Assembly.GetType() family of apis. Also used in typeRef resolution.
///
/// Resolves top-level named types only. No nested types. No constructed types. The input name must not be escaped.
///
/// If a type is not contained or forwarded from the assembly, this method returns null (does not throw.)
/// This supports the "throwOnError: false" behavior of Assembly.GetType(string, bool).
/// </summary>
internal RoDefinitionType? GetTypeCore(string ns, string name, bool ignoreCase, out Exception? e) => GetTypeCore(ns.ToUtf8(), name.ToUtf8(), ignoreCase, out e);
internal RoDefinitionType? GetTypeCore(ReadOnlySpan<byte> ns, ReadOnlySpan<byte> name, bool ignoreCase, out Exception? e)
{
RoDefinitionType? result = GetRoManifestModule().GetTypeCore(ns, name, ignoreCase, out e);
if (IsSingleModule || result != null)
return result;
foreach (RoModule module in ComputeRoModules(getResourceModules: false))
{
if (module == ManifestModule)
continue;
result = module.GetTypeCore(ns, name, ignoreCase, out e);
if (result != null)
return result;
}
return null;
}
// Assembly dependencies
public sealed override AssemblyName[] GetReferencedAssemblies()
{
// For compat, this api only searches the manifest module. Tools normally ensure the manifest module's assemblyRef
// table represents the union of all module's assemblyRef table.
AssemblyNameData[] data = GetReferencedAssembliesNoCopy();
AssemblyName[] result = new AssemblyName[data.Length];
for (int i = 0; i < data.Length; i++)
{
result[i] = data[i].CreateAssemblyName();
}
return result;
}
private AssemblyNameData[] GetReferencedAssembliesNoCopy() => _lazyAssemblyReferences ?? (_lazyAssemblyReferences = ComputeAssemblyReferences());
protected abstract AssemblyNameData[] ComputeAssemblyReferences();
private volatile AssemblyNameData[]? _lazyAssemblyReferences;
// Miscellaneous properties
public sealed override bool ReflectionOnly => true;
#if NETCOREAPP
[Obsolete("The Global Assembly Cache is not supported.", DiagnosticId = "SYSLIB0005", UrlFormat = "https://aka.ms/dotnet-warnings/{0}")]
#endif
public sealed override bool GlobalAssemblyCache => false;
public sealed override long HostContext => 0;
public abstract override string ImageRuntimeVersion { get; }
public abstract override bool IsDynamic { get; }
public abstract override MethodInfo? EntryPoint { get; }
// Manifest resource support.
public abstract override ManifestResourceInfo? GetManifestResourceInfo(string resourceName);
public abstract override string[] GetManifestResourceNames();
public abstract override Stream? GetManifestResourceStream(string name);
public sealed override Stream? GetManifestResourceStream(Type type, string name)
{
StringBuilder sb = new StringBuilder();
if (type == null)
{
if (name == null)
throw new ArgumentNullException(nameof(type));
}
else
{
string? ns = type.Namespace;
if (ns != null)
{
sb.Append(ns);
if (name != null)
sb.Append(Type.Delimiter);
}
}
if (name != null)
sb.Append(name);
return GetManifestResourceStream(sb.ToString());
}
// Serialization
public sealed override void GetObjectData(SerializationInfo info, StreamingContext context) => throw new NotSupportedException();
// Satellite assemblies
public sealed override Assembly GetSatelliteAssembly(CultureInfo culture) => throw new NotSupportedException(SR.NotSupported_SatelliteAssembly);
public sealed override Assembly GetSatelliteAssembly(CultureInfo culture, Version? version) => throw new NotSupportedException(SR.NotSupported_SatelliteAssembly);
// Operations that are invalid for ReflectionOnly objects.
public sealed override object[] GetCustomAttributes(bool inherit) => throw new InvalidOperationException(SR.Arg_ReflectionOnlyCA);
public sealed override object[] GetCustomAttributes(Type attributeType, bool inherit) => throw new InvalidOperationException(SR.Arg_ReflectionOnlyCA);
public sealed override bool IsDefined(Type attributeType, bool inherit) => throw new InvalidOperationException(SR.Arg_ReflectionOnlyCA);
// Compat quirk: Why ArgumentException instead of InvalidOperationException?
public sealed override object CreateInstance(string typeName, bool ignoreCase, BindingFlags bindingAttr, Binder? binder, object?[]? args, CultureInfo? culture, object?[]? activationAttributes) => throw new ArgumentException(SR.Arg_ReflectionOnlyInvoke);
internal MetadataLoadContext Loader { get; }
}
}
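A minimal usage sketch for the inspection-only assembly code above (not part of the repository; the resolver paths and the type name "MyLibrary.Widget" are assumptions): Assembly.GetType on a MetadataLoadContext-loaded assembly is the public entry point that ultimately funnels into the GetTypeCore helpers shown here.
// Hedged illustration only: file paths and type name are made up.
using System;
using System.Reflection;
internal static class MetadataLoadContextSketch
{
    private static void Main()
    {
        // Resolve the core assembly plus the inspected file from explicit paths.
        var resolver = new PathAssemblyResolver(new[]
        {
            typeof(object).Assembly.Location,
            @"C:\tmp\MyLibrary.dll",
        });
        using var context = new MetadataLoadContext(resolver);
        Assembly assembly = context.LoadFromAssemblyPath(@"C:\tmp\MyLibrary.dll");
        // With throwOnError: false this returns null for a missing type instead of
        // throwing, matching the GetTypeCore contract described above.
        Type type = assembly.GetType("MyLibrary.Widget", throwOnError: false, ignoreCase: false);
        Console.WriteLine(type?.FullName ?? "not found");
    }
}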
| -1 |
./src/tests/JIT/Regression/JitBlue/Runtime_40444/Runtime_40444.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Threading;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
class Runtime_40444
{
public static int t2_result;
public static int t2_finished;
public static int s_divisor;
static void Thread2()
{
t2_result++;
t2_finished = 1;
}
static int TestVolatileRead(ref int address)
{
int ret = address;
Thread.MemoryBarrier(); // Call MemoryBarrier to ensure the proper semantic in a portable way.
return ret;
}
static bool Test(ref bool result)
{
int loc_finished;
t2_finished = 0;
// Run Thread2() in a new thread
new Thread(new ThreadStart(Thread2)).Start();
//
//Wait for Thread2 to signal that it has a result by setting
// t2_finished to 1.
//
// We wait by performing this loop 1000 million times
//
// It is important that we have no calls in the loop
// and that the JIT inlines the method TestVolatileRead
//
//
int i = 0;
int divisor = s_divisor;
for (; ; )
{
if (TestVolatileRead(ref t2_finished)==1)
{
// The value was changed by Thread2
// We print out how many iterations we looped for and
// return true
Console.WriteLine("{0}: t2_result = {1}", i, t2_result);
result = true;
// The other thread has run and we just saw the value of
// t2_finished change so we return true and pass the test
//
return true;
}
i++;
// Integer division is somewhat expensive and will add additional
// time for this loop to execute
//
// Chain the two divides so the processor can't hide the latency
//
if (((i / divisor) / divisor) == 1)
{
divisor++;
}
if (i == 1000000000) // 1000 million
{
loc_finished = t2_finished;
break;
}
}
        // If loc_finished is still zero then the other thread never got a chance to run,
        // so we need to retry this test.
//
if (loc_finished == 0)
{
// We will return false,
// this means that we couldn't tell if the test failed
//
return false;
}
else
{
// If we count up to 1000 million and we complete the loop
// then we fail this test.
//
// Without the fix to the JIT we hoisted the read out of
// the loop and we would always reach here.
//
Console.WriteLine("{0}: FAILED, t2_result = {1}, t2_finished is {2}", i, t2_result, t2_finished);
            // The other thread has run and we never saw the value of t2_finished change
// so we return true and fail the test
//
result = false;
return true;
}
}
static int Main()
{
bool passes_test = false;
bool test_result = false;
for (int i=0; i<100; i++)
{
t2_result = 0;
s_divisor = 1000000;
// Test returns true when it is able to determine pass or fail
// and it sets passes_test to true when it passes
//
test_result = Test(ref passes_test);
if (test_result)
{
break;
}
}
if (passes_test)
{
Console.WriteLine("Passed");
return 100;
}
else
{
if (test_result)
{
Console.WriteLine("FAILED");
return -1;
}
else
{
Console.WriteLine("Unable to determine");
return 101;
}
}
}
}
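For comparison, a hedged sketch (not part of the test above) of the same wait-for-a-flag pattern written with Volatile.Read, which is the more common way to stop the JIT from hoisting the load out of a spin loop:
using System;
using System.Threading;
internal static class VolatileFlagSketch
{
    private static int s_finished;
    private static void Main()
    {
        var worker = new Thread(() => Volatile.Write(ref s_finished, 1));
        worker.Start();
        // Volatile.Read forces a fresh load on each iteration, so the loop observes
        // the worker's write instead of spinning on a hoisted stale value.
        var spinner = new SpinWait();
        while (Volatile.Read(ref s_finished) == 0)
        {
            spinner.SpinOnce();
        }
        worker.Join();
        Console.WriteLine("Flag observed.");
    }
}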
|
| -1 |
./src/libraries/System.Reflection.Metadata/src/System/Reflection/Metadata/MetadataKind.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.Reflection.Metadata
{
public enum MetadataKind
{
/// <summary>
/// CLI metadata.
/// </summary>
Ecma335 = 0,
/// <summary>
/// Windows Metadata.
/// </summary>
WindowsMetadata = 1,
/// <summary>
/// Windows Metadata generated by managed compilers.
/// </summary>
ManagedWindowsMetadata = 2,
}
}
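A small hedged sketch of where this enum typically surfaces (the file path is an assumption): MetadataReader.MetadataKind reports which of these values a given PE image carries.
using System;
using System.IO;
using System.Reflection.Metadata;
using System.Reflection.PortableExecutable;
internal static class MetadataKindSketch
{
    private static void Main()
    {
        using FileStream stream = File.OpenRead(@"C:\tmp\MyLibrary.dll");
        using var peReader = new PEReader(stream);
        // Ecma335 for ordinary CLI assemblies; the WindowsMetadata values show up for .winmd files.
        MetadataReader reader = peReader.GetMetadataReader();
        Console.WriteLine(reader.MetadataKind);
    }
}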
|
| -1 |
./src/libraries/System.IO.IsolatedStorage/src/System/IO/IsolatedStorage/INormalizeForIsolatedStorage.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.IO.IsolatedStorage
{
public interface INormalizeForIsolatedStorage
{
object Normalize();
}
}
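A hypothetical implementation sketch (the class and its normalization rule are made up) showing the shape a caller-supplied identity object takes when isolated storage asks it to normalize itself:
using System.IO.IsolatedStorage;
internal sealed class CaseInsensitiveStoreIdentity : INormalizeForIsolatedStorage
{
    private readonly string _name;
    public CaseInsensitiveStoreIdentity(string name) => _name = name;
    // Return the canonical form isolated storage should use when comparing identities.
    public object Normalize() => _name.ToUpperInvariant();
}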
|
| -1 |
./src/libraries/System.Configuration.ConfigurationManager/src/System/Configuration/KeyValueInternalCollection.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Specialized;
namespace System.Configuration
{
internal sealed class KeyValueInternalCollection : NameValueCollection
{
private readonly AppSettingsSection _root;
public KeyValueInternalCollection(AppSettingsSection root)
{
_root = root;
foreach (KeyValueConfigurationElement element in _root.Settings) base.Add(element.Key, element.Value);
}
public override void Add(string key, string value)
{
_root.Settings.Add(new KeyValueConfigurationElement(key, value));
base.Add(key, value);
}
public override void Clear()
{
_root.Settings.Clear();
base.Clear();
}
public override void Remove(string key)
{
_root.Settings.Remove(key);
base.Remove(key);
}
}
}
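A hedged usage sketch: wrappers like the one above are what let the <appSettings> section be consumed as a NameValueCollection, so a typical read through ConfigurationManager looks like this.
using System;
using System.Collections.Specialized;
using System.Configuration;
internal static class AppSettingsSketch
{
    private static void Main()
    {
        NameValueCollection settings = ConfigurationManager.AppSettings;
        // Enumerate every key/value pair defined under <appSettings> in the app's config file.
        foreach (string key in settings.AllKeys)
        {
            Console.WriteLine($"{key} = {settings[key]}");
        }
    }
}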
|
| -1 |
./src/libraries/System.Memory/tests/Span/Fill.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Linq;
using System.Runtime.InteropServices;
using Xunit;
using static System.TestHelpers;
namespace System.SpanTests
{
public static partial class SpanTests
{
[Fact]
public static void FillEmpty()
{
var span = Span<byte>.Empty;
span.Fill(1);
}
[Fact]
public static void FillByteLonger()
{
const byte fill = 5;
var expected = new byte[2048];
for (int i = 0; i < expected.Length; i++)
{
expected[i] = fill;
}
var actual = new byte[2048];
var span = new Span<byte>(actual);
span.Fill(fill);
Assert.Equal<byte>(expected, actual);
}
[Fact]
public static void FillByteUnaligned()
{
const byte fill = 5;
const int length = 32;
var expectedFull = new byte[length];
for (int i = 0; i < length; i++)
{
expectedFull[i] = fill;
}
var actualFull = new byte[length];
var start = 1;
var expectedSpan = new Span<byte>(expectedFull, start, length - start - 1);
var actualSpan = new Span<byte>(actualFull, start, length - start - 1);
actualSpan.Fill(fill);
byte[] actual = actualSpan.ToArray();
byte[] expected = expectedSpan.ToArray();
Assert.Equal<byte>(expected, actual);
Assert.Equal(0, actualFull[0]);
Assert.Equal(0, actualFull[length - 1]);
}
[Fact]
public static void FillValueTypeWithoutReferences()
{
const byte fill = 5;
for (int length = 0; length < 32; length++)
{
var expectedFull = new int[length];
var actualFull = new int[length];
for (int i = 0; i < length; i++)
{
expectedFull[i] = fill;
actualFull[i] = i;
}
var span = new Span<int>(actualFull);
span.Fill(fill);
Assert.Equal<int>(expectedFull, actualFull);
}
}
[Fact]
public static void FillReferenceType()
{
string[] actual = { "a", "b", "c" };
string[] expected = { "d", "d", "d" };
var span = new Span<string>(actual);
span.Fill("d");
Assert.Equal<string>(expected, actual);
}
[Fact]
public static void FillValueTypeWithReferences()
{
TestValueTypeWithReference[] actual = {
new TestValueTypeWithReference() { I = 1, S = "a" },
new TestValueTypeWithReference() { I = 2, S = "b" },
new TestValueTypeWithReference() { I = 3, S = "c" } };
TestValueTypeWithReference[] expected = {
new TestValueTypeWithReference() { I = 5, S = "d" },
new TestValueTypeWithReference() { I = 5, S = "d" },
new TestValueTypeWithReference() { I = 5, S = "d" } };
var span = new Span<TestValueTypeWithReference>(actual);
span.Fill(new TestValueTypeWithReference() { I = 5, S = "d" });
Assert.Equal<TestValueTypeWithReference>(expected, actual);
}
[Fact]
public static unsafe void FillNativeBytes()
{
// Arrange
int length = 50;
byte* ptr = null;
try
{
ptr = (byte*)Marshal.AllocHGlobal((IntPtr)50);
}
            // Skip the test on OutOfMemoryException, since it can only run when enough memory is available
catch (OutOfMemoryException)
{
Console.WriteLine(
$"Span.Fill test {nameof(FillNativeBytes)} skipped due to {nameof(OutOfMemoryException)}.");
return;
}
try
{
byte initial = 1;
for (int i = 0; i < length; i++)
{
*(ptr + i) = initial;
}
const byte fill = 5;
var span = new Span<byte>(ptr, length);
// Act
span.Fill(fill);
// Assert using custom code for perf and to avoid allocating extra memory
for (int i = 0; i < length; i++)
{
var actual = *(ptr + i);
Assert.Equal(fill, actual);
}
}
finally
{
Marshal.FreeHGlobal(new IntPtr(ptr));
}
}
[Fact]
public static void FillWithRecognizedType()
{
RunTest<sbyte>(0x20);
RunTest<byte>(0x20);
RunTest<bool>(true);
RunTest<short>(0x1234);
RunTest<ushort>(0x1234);
RunTest<char>('x');
RunTest<int>(0x12345678);
RunTest<uint>(0x12345678);
RunTest<long>(0x0123456789abcdef);
RunTest<ulong>(0x0123456789abcdef);
RunTest<nint>(unchecked((nint)0x0123456789abcdef));
RunTest<nuint>(unchecked((nuint)0x0123456789abcdef));
RunTest<Half>((Half)1.0);
RunTest<float>(1.0f);
RunTest<double>(1.0);
RunTest<StringComparison>(StringComparison.CurrentCultureIgnoreCase); // should be treated as underlying primitive
RunTest<string>("Hello world!"); // ref type, no SIMD
RunTest<decimal>(1.0m); // 128-bit struct
RunTest<Guid>(new Guid("29e07627-2481-4f43-8fbf-09cf21180239")); // 128-bit struct
RunTest<My96BitStruct>(new(0x11111111, 0x22222222, 0x33333333)); // 96-bit struct, no SIMD
RunTest<My256BitStruct>(new(0x1111111111111111, 0x2222222222222222, 0x3333333333333333, 0x4444444444444444));
RunTest<My512BitStruct>(new(
0x1111111111111111, 0x2222222222222222, 0x3333333333333333, 0x4444444444444444,
0x5555555555555555, 0x6666666666666666, 0x7777777777777777, 0x8888888888888888)); // 512-bit struct, no SIMD
RunTest<MyRefContainingStruct>(new("Hello world!")); // struct contains refs, no SIMD
static void RunTest<T>(T value)
{
T[] arr = new T[128];
// Run tests for lengths := 0 to 64, ensuring we don't overrun our buffer
for (int i = 0; i <= 64; i++)
{
arr.AsSpan(0, i).Fill(value);
Assert.Equal(Enumerable.Repeat(value, i), arr.Take(i)); // first i entries should've been populated with 'value'
Assert.Equal(Enumerable.Repeat(default(T), arr.Length - i), arr.Skip(i)); // remaining entries should contain default(T)
Array.Clear(arr);
}
}
}
private readonly struct My96BitStruct
{
public My96BitStruct(int data0, int data1, int data2)
{
Data0 = data0;
Data1 = data1;
Data2 = data2;
}
public readonly int Data0;
public readonly int Data1;
public readonly int Data2;
}
private readonly struct My256BitStruct
{
public My256BitStruct(ulong data0, ulong data1, ulong data2, ulong data3)
{
Data0 = data0;
Data1 = data1;
Data2 = data2;
Data3 = data3;
}
public readonly ulong Data0;
public readonly ulong Data1;
public readonly ulong Data2;
public readonly ulong Data3;
}
private readonly struct My512BitStruct
{
public My512BitStruct(ulong data0, ulong data1, ulong data2, ulong data3, ulong data4, ulong data5, ulong data6, ulong data7)
{
Data0 = data0;
Data1 = data1;
Data2 = data2;
Data3 = data3;
Data4 = data4;
Data5 = data5;
Data6 = data6;
Data7 = data7;
}
public readonly ulong Data0;
public readonly ulong Data1;
public readonly ulong Data2;
public readonly ulong Data3;
public readonly ulong Data4;
public readonly ulong Data5;
public readonly ulong Data6;
public readonly ulong Data7;
}
private readonly struct MyRefContainingStruct
{
public MyRefContainingStruct(object data)
{
Data = data;
}
public readonly object Data;
}
}
}
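A short sketch of the API under test, for orientation (illustrative values only): Span<T>.Fill writes a single value into every element of the span, whether the elements are primitives, structs, or references.
using System;
internal static class SpanFillSketch
{
    private static void Main()
    {
        byte[] buffer = new byte[16];
        buffer.AsSpan().Fill(0xFF);        // every byte becomes 0xFF
        int[] numbers = new int[8];
        numbers.AsSpan(2, 4).Fill(7);      // only indices 2..5 are filled
        Console.WriteLine(string.Join(",", numbers)); // prints 0,0,7,7,7,7,0,0
    }
}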
|
| -1 |
./src/coreclr/pal/tests/palsuite/c_runtime/_snwprintf_s/test18/test18.cpp
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/*============================================================================
**
** Source: test18.c
**
** Purpose: Tests swprintf_s with compact format doubles (uppercase)
**
**
**==========================================================================*/
#include <palsuite.h>
#include "../_snwprintf_s.h"
/* memcmp is used to verify the results, so this test is dependent on it. */
/* ditto with wcslen */
PALTEST(c_runtime__snwprintf_s_test18_paltest_snwprintf_test18, "c_runtime/_snwprintf_s/test18/paltest_snwprintf_test18")
{
double val = 2560.001;
double neg = -2560.001;
if (PAL_Initialize(argc, argv) != 0)
{
return FAIL;
}
DoDoubleTest(convert("foo %G"), val, convert("foo 2560"),
convert("foo 2560"));
DoDoubleTest(convert("foo %lG"), val, convert("foo 2560"),
convert("foo 2560"));
DoDoubleTest(convert("foo %hG"), val, convert("foo 2560"),
convert("foo 2560"));
DoDoubleTest(convert("foo %LG"), val, convert("foo 2560"),
convert("foo 2560"));
DoDoubleTest(convert("foo %I64G"), val, convert("foo 2560"),
convert("foo 2560"));
DoDoubleTest(convert("foo %5G"), val, convert("foo 2560"),
convert("foo 2560"));
DoDoubleTest(convert("foo %-5G"), val, convert("foo 2560 "),
convert("foo 2560 "));
DoDoubleTest(convert("foo %.1G"), val, convert("foo 3E+003"),
convert("foo 3E+03"));
DoDoubleTest(convert("foo %.2G"), val, convert("foo 2.6E+003"),
convert("foo 2.6E+03"));
DoDoubleTest(convert("foo %.12G"), val, convert("foo 2560.001"),
convert("foo 2560.001"));
DoDoubleTest(convert("foo %06G"), val, convert("foo 002560"),
convert("foo 002560"));
DoDoubleTest(convert("foo %#G"), val, convert("foo 2560.00"),
convert("foo 2560.00"));
DoDoubleTest(convert("foo %+G"), val, convert("foo +2560"),
convert("foo +2560"));
DoDoubleTest(convert("foo % G"), val, convert("foo 2560"),
convert("foo 2560"));
DoDoubleTest(convert("foo %+G"), neg, convert("foo -2560"),
convert("foo -2560"));
DoDoubleTest(convert("foo % G"), neg, convert("foo -2560"),
convert("foo -2560"));
PAL_Terminate();
return PASS;
}
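For orientation, a hedged managed-code parallel (not part of the PAL test, and the expected outputs follow my reading of the .NET "G" format rules): the "G" numeric format specifier behaves much like %G above, switching to scientific notation when the precision is smaller than the value's exponent.
using System;
internal static class GeneralFormatSketch
{
    private static void Main()
    {
        double value = 2560.001;
        Console.WriteLine(value.ToString("G"));   // 2560.001
        Console.WriteLine(value.ToString("G1"));  // 3E+03
        Console.WriteLine(value.ToString("G2"));  // 2.6E+03
    }
}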
|
| -1 |
./src/libraries/System.Diagnostics.Process/src/System/Diagnostics/Process.ConfigureTerminalForChildProcesses.Unix.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Threading;
using System.Runtime.InteropServices;
namespace System.Diagnostics
{
public partial class Process
{
private static int s_childrenUsingTerminalCount;
internal static void ConfigureTerminalForChildProcesses(int increment, bool configureConsole = true)
{
Debug.Assert(increment != 0);
int childrenUsingTerminalRemaining = Interlocked.Add(ref s_childrenUsingTerminalCount, increment);
if (increment > 0)
{
Debug.Assert(s_processStartLock.IsReadLockHeld);
Debug.Assert(configureConsole);
// At least one child is using the terminal.
Interop.Sys.ConfigureTerminalForChildProcess(childUsesTerminal: true);
}
else
{
Debug.Assert(s_processStartLock.IsWriteLockHeld);
if (childrenUsingTerminalRemaining == 0 && configureConsole)
{
// No more children are using the terminal.
Interop.Sys.ConfigureTerminalForChildProcess(childUsesTerminal: false);
}
}
}
private static unsafe void SetDelayedSigChildConsoleConfigurationHandler()
{
Interop.Sys.SetDelayedSigChildConsoleConfigurationHandler(&DelayedSigChildConsoleConfiguration);
}
[UnmanagedCallersOnly]
private static void DelayedSigChildConsoleConfiguration()
{
// Lock to avoid races with Process.Start
s_processStartLock.EnterWriteLock();
try
{
if (s_childrenUsingTerminalCount == 0)
{
// No more children are using the terminal.
Interop.Sys.ConfigureTerminalForChildProcess(childUsesTerminal: false);
}
}
finally
{
s_processStartLock.ExitWriteLock();
}
}
private static bool AreChildrenUsingTerminal => s_childrenUsingTerminalCount > 0;
}
}
|
| -1 |
./src/libraries/System.ComponentModel.TypeConverter/src/System/ComponentModel/BindableAttribute.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics.CodeAnalysis;
namespace System.ComponentModel
{
/// <summary>
/// Specifies whether a property is appropriate to bind data to.
/// </summary>
[AttributeUsage(AttributeTargets.All)]
public sealed class BindableAttribute : Attribute
{
/// <summary>
/// Specifies that a property is appropriate to bind data to. This
/// <see langword='static '/>field is read-only.
/// </summary>
public static readonly BindableAttribute Yes = new BindableAttribute(true);
/// <summary>
/// Specifies that a property is not appropriate to bind data to.
/// This <see langword='static '/>field is read-only.
/// </summary>
public static readonly BindableAttribute No = new BindableAttribute(false);
/// <summary>
/// Specifies the default value for the <see cref='System.ComponentModel.BindableAttribute'/>, which
/// is <see cref='System.ComponentModel.BindableAttribute.No'/>. This <see langword='static '/>field is
/// read-only.
/// </summary>
public static readonly BindableAttribute Default = No;
private readonly bool _isDefault;
/// <summary>
/// Initializes a new instance of the <see cref='System.ComponentModel.BindableAttribute'/> class.
/// </summary>
public BindableAttribute(bool bindable) : this(bindable, BindingDirection.OneWay)
{
}
/// <summary>
/// Initializes a new instance of the <see cref='System.ComponentModel.BindableAttribute'/> class.
/// </summary>
public BindableAttribute(bool bindable, BindingDirection direction)
{
Bindable = bindable;
Direction = direction;
}
/// <summary>
/// Initializes a new instance of the <see cref='System.ComponentModel.BindableAttribute'/> class.
/// </summary>
public BindableAttribute(BindableSupport flags) : this(flags, BindingDirection.OneWay)
{
}
/// <summary>
/// Initializes a new instance of the <see cref='System.ComponentModel.BindableAttribute'/> class.
/// </summary>
public BindableAttribute(BindableSupport flags, BindingDirection direction)
{
Bindable = (flags != BindableSupport.No);
_isDefault = (flags == BindableSupport.Default);
Direction = direction;
}
/// <summary>
/// Gets a value indicating whether a property is appropriate to bind data to.
/// </summary>
public bool Bindable { get; }
/// <summary>
/// Gets a value indicating the direction(s) in which this property can be bound to data.
/// </summary>
public BindingDirection Direction { get; }
public override bool Equals([NotNullWhen(true)] object? obj)
{
if (obj == this)
{
return true;
}
return obj is BindableAttribute other && other.Bindable == Bindable;
}
public override int GetHashCode() => Bindable.GetHashCode();
public override bool IsDefaultAttribute() => Equals(Default) || _isDefault;
}
}
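A small usage sketch (the Temperature type and demo class below are hypothetical, not part of this file) showing how the attribute is typically applied and then read back through TypeDescriptor:

using System.ComponentModel;

public class Temperature
{
    // Marks the property as a data-binding target; designers and binding infrastructure can query this.
    [Bindable(true, BindingDirection.TwoWay)]
    public double Celsius { get; set; }
}

public static class BindableAttributeDemo
{
    public static bool IsCelsiusBindable()
    {
        PropertyDescriptor property = TypeDescriptor.GetProperties(typeof(Temperature))["Celsius"];
        var bindable = (BindableAttribute)property.Attributes[typeof(BindableAttribute)];
        return bindable.Bindable; // true for the property above
    }
}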
|
| -1 |
./src/libraries/Microsoft.Extensions.Caching.Abstractions/src/MemoryCacheEntryOptions.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Primitives;
namespace Microsoft.Extensions.Caching.Memory
{
/// <summary>
/// Represents the cache options applied to an entry of the <see cref="IMemoryCache"/> instance.
/// </summary>
public class MemoryCacheEntryOptions
{
private DateTimeOffset? _absoluteExpiration;
private TimeSpan? _absoluteExpirationRelativeToNow;
private TimeSpan? _slidingExpiration;
private long? _size;
/// <summary>
/// Gets or sets an absolute expiration date for the cache entry.
/// </summary>
public DateTimeOffset? AbsoluteExpiration
{
get
{
return _absoluteExpiration;
}
set
{
_absoluteExpiration = value;
}
}
/// <summary>
/// Gets or sets an absolute expiration time, relative to now.
/// </summary>
public TimeSpan? AbsoluteExpirationRelativeToNow
{
get
{
return _absoluteExpirationRelativeToNow;
}
set
{
if (value <= TimeSpan.Zero)
{
throw new ArgumentOutOfRangeException(
nameof(AbsoluteExpirationRelativeToNow),
value,
"The relative expiration value must be positive.");
}
_absoluteExpirationRelativeToNow = value;
}
}
/// <summary>
/// Gets or sets how long a cache entry can be inactive (e.g. not accessed) before it will be removed.
/// This will not extend the entry lifetime beyond the absolute expiration (if set).
/// </summary>
public TimeSpan? SlidingExpiration
{
get
{
return _slidingExpiration;
}
set
{
if (value <= TimeSpan.Zero)
{
throw new ArgumentOutOfRangeException(
nameof(SlidingExpiration),
value,
"The sliding expiration value must be positive.");
}
_slidingExpiration = value;
}
}
/// <summary>
/// Gets the <see cref="IChangeToken"/> instances which cause the cache entry to expire.
/// </summary>
public IList<IChangeToken> ExpirationTokens { get; } = new List<IChangeToken>();
/// <summary>
/// Gets or sets the callbacks that will be fired after the cache entry is evicted from the cache.
/// </summary>
public IList<PostEvictionCallbackRegistration> PostEvictionCallbacks { get; }
= new List<PostEvictionCallbackRegistration>();
/// <summary>
/// Gets or sets the priority for keeping the cache entry in the cache during a
/// memory pressure triggered cleanup. The default is <see cref="CacheItemPriority.Normal"/>.
/// </summary>
public CacheItemPriority Priority { get; set; } = CacheItemPriority.Normal;
/// <summary>
/// Gets or sets the size of the cache entry value.
/// </summary>
public long? Size
{
get => _size;
set
{
if (value < 0)
{
throw new ArgumentOutOfRangeException(nameof(value), value, $"{nameof(value)} must be non-negative.");
}
_size = value;
}
}
}
}
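A minimal usage sketch, assuming the MemoryCache implementation and the Set extension method from Microsoft.Extensions.Caching.Memory (neither is defined in this file), showing how these options are attached to a cache entry:

using System;
using Microsoft.Extensions.Caching.Memory;

public static class CacheEntryOptionsDemo
{
    public static void CacheTimestamp(IMemoryCache cache)
    {
        var options = new MemoryCacheEntryOptions
        {
            // Expires 5 minutes after being written, or after 1 minute without being accessed.
            AbsoluteExpirationRelativeToNow = TimeSpan.FromMinutes(5),
            SlidingExpiration = TimeSpan.FromMinutes(1),
            Priority = CacheItemPriority.High,
            Size = 1 // only enforced when the cache is configured with a SizeLimit
        };

        cache.Set("last-updated", DateTimeOffset.UtcNow, options);
    }
}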
|
| -1 |
./src/libraries/System.ComponentModel.Composition/tests/System/ComponentModel/Composition/MetadataViewProviderTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Reflection;
using Xunit;
namespace System.ComponentModel.Composition
{
internal static class TransparentTestCase
{
public static int GetMetadataView_IMetadataViewWithDefaultedIntInTranparentType(ITrans_MetadataViewWithDefaultedInt view)
{
return view.MyInt;
}
}
[MetadataViewImplementation(typeof(MetadataViewWithImplementation))]
public interface IMetadataViewWithImplementation
{
string String1 { get; }
string String2 { get; }
}
public class MetadataViewWithImplementation : IMetadataViewWithImplementation
{
public MetadataViewWithImplementation(IDictionary<string, object> metadata)
{
this.String1 = (string)metadata["String1"];
this.String2 = (string)metadata["String2"];
}
public string String1 { get; private set; }
public string String2 { get; private set; }
}
[MetadataViewImplementation(typeof(MetadataViewWithImplementationNoInterface))]
public interface IMetadataViewWithImplementationNoInterface
{
string String1 { get; }
string String2 { get; }
}
public class MetadataViewWithImplementationNoInterface
{
public MetadataViewWithImplementationNoInterface(IDictionary<string, object> metadata)
{
this.String1 = (string)metadata["String1"];
this.String2 = (string)metadata["String2"];
}
public string String1 { get; private set; }
public string String2 { get; private set; }
}
public class MetadataViewProviderTests
{
[Fact]
public void GetMetadataView_InterfaceWithPropertySetter_ShouldThrowNotSupported()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = "value";
Assert.Throws<NotSupportedException>(() =>
{
MetadataViewProvider.GetMetadataView<ITrans_MetadataTests_MetadataViewWithPropertySetter>(metadata);
});
}
[Fact]
public void GetMetadataView_InterfaceWithMethod_ShouldThrowNotSupportedException()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = "value";
Assert.Throws<NotSupportedException>(() =>
{
MetadataViewProvider.GetMetadataView<ITrans_MetadataTests_MetadataViewWithMethod>(metadata);
});
}
[Fact]
public void GetMetadataView_InterfaceWithEvent_ShouldThrowNotSupportedException()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = "value";
Assert.Throws<NotSupportedException>(() =>
{
MetadataViewProvider.GetMetadataView<ITrans_MetadataTests_MetadataViewWithEvent>(metadata);
});
}
[Fact]
[ActiveIssue("https://github.com/mono/mono/issues/15169", TestRuntimes.Mono)]
public void GetMetadataView_InterfaceWithIndexer_ShouldThrowNotSupportedException()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = "value";
Assert.Throws<NotSupportedException>(() =>
{
MetadataViewProvider.GetMetadataView<ITrans_MetadataTests_MetadataViewWithIndexer>(metadata);
});
}
[Fact]
public void GetMetadataView_AbstractClass_ShouldThrowMissingMethodException()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = "value";
Assert.Throws<CompositionContractMismatchException>(() =>
{
MetadataViewProvider.GetMetadataView<AbstractClassMetadataView>(metadata);
});
}
[Fact]
public void GetMetadataView_AbstractClassWithConstructor_ShouldThrowMemberAccessException()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = "value";
Assert.Throws<MemberAccessException>(() =>
{
MetadataViewProvider.GetMetadataView<AbstractClassWithConstructorMetadataView>(metadata);
});
}
[Fact]
public void GetMetadataView_IDictionaryAsTMetadataViewTypeArgument_ShouldReturnMetadata()
{
var metadata = new Dictionary<string, object>();
var result = MetadataViewProvider.GetMetadataView<IDictionary<string, object>>(metadata);
Assert.Same(metadata, result);
}
[Fact]
public void GetMetadataView_IEnumerableAsTMetadataViewTypeArgument_ShouldReturnMetadata()
{
var metadata = new Dictionary<string, object>();
var result = MetadataViewProvider.GetMetadataView<IEnumerable<KeyValuePair<string, object>>>(metadata);
Assert.Same(metadata, result);
}
[Fact]
public void GetMetadataView_DictionaryAsTMetadataViewTypeArgument_ShouldNotThrow()
{
var metadata = new Dictionary<string, object>();
MetadataViewProvider.GetMetadataView<Dictionary<string, object>>(metadata);
}
[Fact]
public void GetMetadataView_PrivateInterfaceAsTMetadataViewTypeArgument_ShouldhrowNotSupportedException()
{
var metadata = new Dictionary<string, object>();
metadata["CanActivate"] = true;
Assert.Throws<NotSupportedException>(() =>
{
MetadataViewProvider.GetMetadataView<IActivator>(metadata);
});
}
[Fact]
public void GetMetadataView_DictionaryWithUncastableValueAsMetadataArgument_ShouldThrowCompositionContractMismatchException()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = true;
Assert.Throws<CompositionContractMismatchException>(() =>
{
MetadataViewProvider.GetMetadataView<ITrans_MetadataTests_MetadataView>(metadata);
});
}
[Fact]
public void GetMetadataView_InterfaceWithTwoPropertiesWithSameNameDifferentTypeAsTMetadataViewArgument_ShouldThrowContractMismatch()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = 10;
Assert.Throws<CompositionContractMismatchException>(() =>
{
MetadataViewProvider.GetMetadataView<ITrans_MetadataTests_MetadataView2>(metadata);
});
}
[Fact]
public void GetMetadataView_RawMetadata()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = 10;
var view = MetadataViewProvider.GetMetadataView<RawMetadata>(new Dictionary<string, object>(metadata));
Assert.True(view.Count == metadata.Count);
Assert.True(view["Value"] == metadata["Value"]);
}
[Fact]
public void GetMetadataView_InterfaceInheritance()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = "value";
metadata["Value2"] = "value2";
var view = MetadataViewProvider.GetMetadataView<ITrans_MetadataTests_MetadataView3>(metadata);
Assert.Equal("value", view.Value);
Assert.Equal("value2", view.Value2);
}
[Fact]
public void GetMetadataView_CachesViewType()
{
var metadata1 = new Dictionary<string, object>();
metadata1["Value"] = "value1";
var view1 = MetadataViewProvider.GetMetadataView<ITrans_MetadataTests_MetadataView>(metadata1);
Assert.Equal("value1", view1.Value);
var metadata2 = new Dictionary<string, object>();
metadata2["Value"] = "value2";
var view2 = MetadataViewProvider.GetMetadataView<ITrans_MetadataTests_MetadataView>(metadata2);
Assert.Equal("value2", view2.Value);
Assert.Equal(view1.GetType(), view2.GetType());
}
private interface IActivator
{
bool CanActivate
{
get;
}
}
public class RawMetadata : Dictionary<string, object>
{
public RawMetadata(IDictionary<string, object> dictionary) : base(dictionary) { }
}
public abstract class AbstractClassMetadataView
{
public abstract object Value { get; }
}
public abstract class AbstractClassWithConstructorMetadataView
{
public AbstractClassWithConstructorMetadataView(IDictionary<string, object> metadata) { }
public abstract object Value { get; }
}
[Fact]
public void GetMetadataView_IMetadataViewWithDefaultedInt()
{
var view = MetadataViewProvider.GetMetadataView<ITrans_MetadataViewWithDefaultedInt>(new Dictionary<string, object>());
Assert.Equal(120, view.MyInt);
}
[Fact]
public void GetMetadataView_IMetadataViewWithDefaultedIntInTranparentType()
{
var view = MetadataViewProvider.GetMetadataView<ITrans_MetadataViewWithDefaultedInt>(new Dictionary<string, object>());
int result = TransparentTestCase.GetMetadataView_IMetadataViewWithDefaultedIntInTranparentType(view);
Assert.Equal(120, result);
}
[Fact]
public void GetMetadataView_IMetadataViewWithDefaultedIntAndInvalidMetadata()
{
Dictionary<string, object> metadata = new Dictionary<string, object>();
metadata = new Dictionary<string, object>();
metadata.Add("MyInt", 1.2);
var view1 = MetadataViewProvider.GetMetadataView<ITrans_MetadataViewWithDefaultedInt>(metadata);
Assert.Equal(120, view1.MyInt);
metadata = new Dictionary<string, object>();
metadata.Add("MyInt", "Hello, World");
var view2 = MetadataViewProvider.GetMetadataView<ITrans_MetadataViewWithDefaultedInt>(metadata);
Assert.Equal(120, view2.MyInt);
}
[Fact]
public void GetMetadataView_MetadataViewWithImplementation()
{
Dictionary<string, object> metadata = new Dictionary<string, object>();
metadata = new Dictionary<string, object>();
metadata.Add("String1", "One");
metadata.Add("String2", "Two");
var view1 = MetadataViewProvider.GetMetadataView<IMetadataViewWithImplementation>(metadata);
Assert.Equal("One", view1.String1);
Assert.Equal("Two", view1.String2);
Assert.Equal(typeof(MetadataViewWithImplementation), view1.GetType());
}
[Fact]
public void GetMetadataView_MetadataViewWithImplementationNoInterface()
{
var exception = Assert.Throws<CompositionContractMismatchException>(() =>
{
Dictionary<string, object> metadata = new Dictionary<string, object>();
metadata = new Dictionary<string, object>();
metadata.Add("String1", "One");
metadata.Add("String2", "Two");
var view1 = MetadataViewProvider.GetMetadataView<IMetadataViewWithImplementationNoInterface>(metadata);
});
}
[Fact]
public void GetMetadataView_IMetadataViewWithDefaultedBool()
{
var view = MetadataViewProvider.GetMetadataView<ITrans_MetadataViewWithDefaultedBool>(new Dictionary<string, object>());
Assert.False(view.MyBool);
}
[Fact]
public void GetMetadataView_IMetadataViewWithDefaultedInt64()
{
var view = MetadataViewProvider.GetMetadataView<ITrans_MetadataViewWithDefaultedInt64>(new Dictionary<string, object>());
Assert.Equal(long.MaxValue, view.MyInt64);
}
[Fact]
public void GetMetadataView_IMetadataViewWithDefaultedString()
{
var view = MetadataViewProvider.GetMetadataView<ITrans_MetadataViewWithDefaultedString>(new Dictionary<string, object>());
Assert.Equal("MyString", view.MyString);
}
[Fact]
public void GetMetadataView_IMetadataViewWithTypeMismatchDefaultValue()
{
var exception = Assert.Throws<CompositionContractMismatchException>(() =>
{
MetadataViewProvider.GetMetadataView<ITrans_MetadataViewWithTypeMismatchDefaultValue>(new Dictionary<string, object>());
});
Assert.IsType<TargetInvocationException>(exception.InnerException);
}
[Fact]
public void GetMetadataView_IMetadataViewWithTypeMismatchOnUnbox()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = (short)9999;
var exception = Assert.Throws<CompositionContractMismatchException>(() =>
{
MetadataViewProvider.GetMetadataView<ITrans_MetadataViewWithTypeMismatchDefaultValue>(new Dictionary<string, object>());
});
Assert.IsType<TargetInvocationException>(exception.InnerException);
}
[Fact]
public void TestMetadataIntConversion()
{
var metadata = new Dictionary<string, object>();
metadata["Value"] = (long)45;
var exception = Assert.Throws<CompositionContractMismatchException>(() =>
{
MetadataViewProvider.GetMetadataView<ITrans_HasInt64>(metadata);
});
}
}
}
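For context, a hypothetical sketch (the sender types and metadata key below are invented) of how metadata view interfaces like the ones exercised above are typically consumed through MEF imports; the composition engine generates the metadata view implementation behind Lazy<T, TMetadata>, which is the scenario MetadataViewProvider serves:

using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;

public interface IMessageSender
{
    void Send(string message);
}

public interface IMessageSenderMetadata
{
    // Populated from the export's [ExportMetadata] values.
    string Transport { get; }
}

[Export(typeof(IMessageSender))]
[ExportMetadata("Transport", "smtp")]
public class SmtpSender : IMessageSender
{
    public void Send(string message) { /* send over SMTP */ }
}

public class Notifier
{
    // A metadata view implementation is generated for each matching export at composition time.
    [ImportMany]
    public IEnumerable<Lazy<IMessageSender, IMessageSenderMetadata>> Senders { get; set; }

    public void SendViaSmtp(string message)
    {
        foreach (Lazy<IMessageSender, IMessageSenderMetadata> sender in Senders)
        {
            if (sender.Metadata.Transport == "smtp")
                sender.Value.Send(message);
        }
    }
}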
|
| -1 |
./src/libraries/System.Diagnostics.DiagnosticSource/src/System/Diagnostics/HttpHandlerDiagnosticListener.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections;
using System.Collections.Generic;
using System.Net;
using System.Reflection;
using System.Reflection.Emit;
using System.Runtime.Serialization;
using System.Text;
// This HttpHandlerDiagnosticListener class is applicable only for .NET 4.6, and not for .NET core.
namespace System.Diagnostics
{
/// <summary>
/// A HttpHandlerDiagnosticListener is a DiagnosticListener for .NET 4.6 and above where
/// HttpClient doesn't have a DiagnosticListener built in. This class is not used for .NET Core
/// because HttpClient in .NET Core already emits DiagnosticSource events. This class compensates for
/// that in .NET 4.6 and above. HttpHandlerDiagnosticListener has no public constructor. To use this,
/// the application just needs to call <see cref="DiagnosticListener.AllListeners" /> and
/// <see cref="DiagnosticListener.AllListenerObservable.Subscribe(IObserver{DiagnosticListener})"/>,
/// then in the <see cref="IObserver{DiagnosticListener}.OnNext(DiagnosticListener)"/> method,
/// when it sees the System.Net.Http.Desktop source, subscribe to it. This will trigger the
/// initialization of this DiagnosticListener.
/// </summary>
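// Illustrative consumer code (not part of this class). DiagnosticListener.AllListeners, Name and
// Subscribe are real APIs; "SomeObserver" stands in for whatever IObserver<T> implementation the
// application provides:
//
//   DiagnosticListener.AllListeners.Subscribe(new SomeObserver<DiagnosticListener>(listener =>
//   {
//       if (listener.Name == "System.Net.Http.Desktop")
//       {
//           // Subscribing here is what triggers the Initialize path below.
//           listener.Subscribe(new SomeObserver<KeyValuePair<string, object>>(
//               evt => Console.WriteLine(evt.Key)));
//       }
//   }));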
internal sealed class HttpHandlerDiagnosticListener : DiagnosticListener
{
/// <summary>
/// Overriding base class implementation just to give us a chance to initialize.
/// </summary>
public override IDisposable Subscribe(IObserver<KeyValuePair<string, object>> observer, Predicate<string> isEnabled)
{
IDisposable result = base.Subscribe(observer, isEnabled);
Initialize();
return result;
}
/// <summary>
/// Overriding base class implementation just to give us a chance to initialize.
/// </summary>
public override IDisposable Subscribe(IObserver<KeyValuePair<string, object>> observer, Func<string, object, object, bool> isEnabled)
{
IDisposable result = base.Subscribe(observer, isEnabled);
Initialize();
return result;
}
/// <summary>
/// Overriding base class implementation just to give us a chance to initialize.
/// </summary>
public override IDisposable Subscribe(IObserver<KeyValuePair<string, object>> observer)
{
IDisposable result = base.Subscribe(observer);
Initialize();
return result;
}
/// <summary>
/// Initializes all the reflection objects it will ever need. Reflection is costly, but it's better to take
/// this one-time performance hit than to get it multiple times later, or do it lazily and have to worry about
/// threading issues. If Initialize has been called before, it will not do anything.
/// </summary>
private void Initialize()
{
lock (this)
{
if (!this.initialized)
{
try
{
// This flag makes sure we only do this once. Even if we failed to initialize in an
// earlier time, we should not retry because this initialization is not cheap and
// the likelihood it will succeed the second time is very small.
this.initialized = true;
PrepareReflectionObjects();
PerformInjection();
}
catch (Exception ex)
{
// If anything went wrong, just no-op. Write an event so at least we can find out.
this.Write(InitializationFailed, new { Exception = ex });
}
}
}
}
#region private helper classes
private class HashtableWrapper : Hashtable, IEnumerable
{
protected Hashtable _table;
public override int Count
{
get
{
return this._table.Count;
}
}
public override bool IsReadOnly
{
get
{
return this._table.IsReadOnly;
}
}
public override bool IsFixedSize
{
get
{
return this._table.IsFixedSize;
}
}
public override bool IsSynchronized
{
get
{
return this._table.IsSynchronized;
}
}
public override object this[object key]
{
get
{
return this._table[key];
}
set
{
this._table[key] = value;
}
}
public override object SyncRoot
{
get
{
return this._table.SyncRoot;
}
}
public override ICollection Keys
{
get
{
return this._table.Keys;
}
}
public override ICollection Values
{
get
{
return this._table.Values;
}
}
internal HashtableWrapper(Hashtable table) : base()
{
this._table = table;
}
public override void Add(object key, object value)
{
this._table.Add(key, value);
}
public override void Clear()
{
this._table.Clear();
}
public override bool Contains(object key)
{
return this._table.Contains(key);
}
public override bool ContainsKey(object key)
{
return this._table.ContainsKey(key);
}
public override bool ContainsValue(object key)
{
return this._table.ContainsValue(key);
}
public override void CopyTo(Array array, int arrayIndex)
{
this._table.CopyTo(array, arrayIndex);
}
public override object Clone()
{
return new HashtableWrapper((Hashtable)this._table.Clone());
}
IEnumerator IEnumerable.GetEnumerator()
{
return this._table.GetEnumerator();
}
public override IDictionaryEnumerator GetEnumerator()
{
return this._table.GetEnumerator();
}
public override void Remove(object key)
{
this._table.Remove(key);
}
}
/// <summary>
/// Helper class used for ServicePointManager.s_ServicePointTable. The goal here is to
/// intercept each new ServicePoint object being added to ServicePointManager.s_ServicePointTable
/// and replace its ConnectionGroupList hashtable field.
/// </summary>
private sealed class ServicePointHashtable : HashtableWrapper
{
public ServicePointHashtable(Hashtable table) : base(table)
{
}
public override object this[object key]
{
get
{
return base[key];
}
set
{
WeakReference weakRef = value as WeakReference;
if (weakRef != null && weakRef.IsAlive)
{
ServicePoint servicePoint = weakRef.Target as ServicePoint;
if (servicePoint != null)
{
// Replace the ConnectionGroup hashtable inside this ServicePoint object,
// which allows us to intercept each new ConnectionGroup object added under
// this ServicePoint.
Hashtable originalTable = s_connectionGroupListField.GetValue(servicePoint) as Hashtable;
ConnectionGroupHashtable newTable = new ConnectionGroupHashtable(originalTable ?? new Hashtable());
s_connectionGroupListField.SetValue(servicePoint, newTable);
}
}
base[key] = value;
}
}
}
/// <summary>
/// Helper class used for ServicePoint.m_ConnectionGroupList. The goal here is to
/// intercept each new ConnectionGroup object being added to ServicePoint.m_ConnectionGroupList
/// and replace its m_ConnectionList arraylist field.
/// </summary>
private sealed class ConnectionGroupHashtable : HashtableWrapper
{
public ConnectionGroupHashtable(Hashtable table) : base(table)
{
}
public override object this[object key]
{
get
{
return base[key];
}
set
{
if (s_connectionGroupType.IsInstanceOfType(value))
{
// Replace the Connection arraylist inside this ConnectionGroup object,
// which allows us to intercept each new Connection object added under
// this ConnectionGroup.
ArrayList originalArrayList = s_connectionListField.GetValue(value) as ArrayList;
ConnectionArrayList newArrayList = new ConnectionArrayList(originalArrayList ?? new ArrayList());
s_connectionListField.SetValue(value, newArrayList);
}
base[key] = value;
}
}
}
/// <summary>
/// Helper class used to wrap the array list object. This class itself doesn't actually
/// have the array elements, but rather access another array list that's given at
/// construction time.
/// </summary>
private class ArrayListWrapper : ArrayList
{
private ArrayList _list;
public override int Capacity
{
get
{
return this._list.Capacity;
}
set
{
this._list.Capacity = value;
}
}
public override int Count
{
get
{
return this._list.Count;
}
}
public override bool IsReadOnly
{
get
{
return this._list.IsReadOnly;
}
}
public override bool IsFixedSize
{
get
{
return this._list.IsFixedSize;
}
}
public override bool IsSynchronized
{
get
{
return this._list.IsSynchronized;
}
}
public override object this[int index]
{
get
{
return this._list[index];
}
set
{
this._list[index] = value;
}
}
public override object SyncRoot
{
get
{
return this._list.SyncRoot;
}
}
internal ArrayListWrapper(ArrayList list) : base()
{
this._list = list;
}
public override int Add(object value)
{
return this._list.Add(value);
}
public override void AddRange(ICollection c)
{
this._list.AddRange(c);
}
public override int BinarySearch(object value)
{
return this._list.BinarySearch(value);
}
public override int BinarySearch(object value, IComparer comparer)
{
return this._list.BinarySearch(value, comparer);
}
public override int BinarySearch(int index, int count, object value, IComparer comparer)
{
return this._list.BinarySearch(index, count, value, comparer);
}
public override void Clear()
{
this._list.Clear();
}
public override object Clone()
{
return new ArrayListWrapper((ArrayList)this._list.Clone());
}
public override bool Contains(object item)
{
return this._list.Contains(item);
}
public override void CopyTo(Array array)
{
this._list.CopyTo(array);
}
public override void CopyTo(Array array, int index)
{
this._list.CopyTo(array, index);
}
public override void CopyTo(int index, Array array, int arrayIndex, int count)
{
this._list.CopyTo(index, array, arrayIndex, count);
}
public override IEnumerator GetEnumerator()
{
return this._list.GetEnumerator();
}
public override IEnumerator GetEnumerator(int index, int count)
{
return this._list.GetEnumerator(index, count);
}
public override int IndexOf(object value)
{
return this._list.IndexOf(value);
}
public override int IndexOf(object value, int startIndex)
{
return this._list.IndexOf(value, startIndex);
}
public override int IndexOf(object value, int startIndex, int count)
{
return this._list.IndexOf(value, startIndex, count);
}
public override void Insert(int index, object value)
{
this._list.Insert(index, value);
}
public override void InsertRange(int index, ICollection c)
{
this._list.InsertRange(index, c);
}
public override int LastIndexOf(object value)
{
return this._list.LastIndexOf(value);
}
public override int LastIndexOf(object value, int startIndex)
{
return this._list.LastIndexOf(value, startIndex);
}
public override int LastIndexOf(object value, int startIndex, int count)
{
return this._list.LastIndexOf(value, startIndex, count);
}
public override void Remove(object value)
{
this._list.Remove(value);
}
public override void RemoveAt(int index)
{
this._list.RemoveAt(index);
}
public override void RemoveRange(int index, int count)
{
this._list.RemoveRange(index, count);
}
public override void Reverse(int index, int count)
{
this._list.Reverse(index, count);
}
public override void SetRange(int index, ICollection c)
{
this._list.SetRange(index, c);
}
public override ArrayList GetRange(int index, int count)
{
return this._list.GetRange(index, count);
}
public override void Sort()
{
this._list.Sort();
}
public override void Sort(IComparer comparer)
{
this._list.Sort(comparer);
}
public override void Sort(int index, int count, IComparer comparer)
{
this._list.Sort(index, count, comparer);
}
public override object[] ToArray()
{
return this._list.ToArray();
}
public override Array ToArray(Type type)
{
return this._list.ToArray(type);
}
public override void TrimToSize()
{
this._list.TrimToSize();
}
}
/// <summary>
/// Helper class used for ConnectionGroup.m_ConnectionList. The goal here is to
/// intercept each new Connection object being added to ConnectionGroup.m_ConnectionList
/// and replace its m_WriteList arraylist field.
/// </summary>
private sealed class ConnectionArrayList : ArrayListWrapper
{
public ConnectionArrayList(ArrayList list) : base(list)
{
}
public override int Add(object value)
{
if (s_connectionType.IsInstanceOfType(value))
{
// Replace the HttpWebRequest arraylist inside this Connection object,
// which allows us to intercept each new HttpWebRequest object added under
// this Connection.
ArrayList originalArrayList = s_writeListField.GetValue(value) as ArrayList;
HttpWebRequestArrayList newArrayList = new HttpWebRequestArrayList(originalArrayList ?? new ArrayList());
s_writeListField.SetValue(value, newArrayList);
}
return base.Add(value);
}
}
/// <summary>
/// Helper class used for Connection.m_WriteList. The goal here is to
/// intercept all new HttpWebRequest objects being added to Connection.m_WriteList
/// and notify the listener about the HttpWebRequest that's about to send a request.
/// It also intercepts all HttpWebRequest objects that are about to get removed from
/// Connection.m_WriteList as they have completed the request.
/// </summary>
private sealed class HttpWebRequestArrayList : ArrayListWrapper
{
public HttpWebRequestArrayList(ArrayList list) : base(list)
{
}
public override int Add(object value)
{
HttpWebRequest request = value as HttpWebRequest;
if (request != null)
{
s_instance.RaiseRequestEvent(request);
}
return base.Add(value);
}
public override void RemoveAt(int index)
{
HttpWebRequest request = base[index] as HttpWebRequest;
if (request != null)
{
HttpWebResponse response = s_httpResponseAccessor(request);
if (response != null)
{
s_instance.RaiseResponseEvent(request, response);
}
else
{
// In case the response content length is 0 and the request is async,
// we won't have an HttpWebResponse set on the request object when this method is called
// http://referencesource.microsoft.com/#System/net/System/Net/HttpWebResponse.cs,525
// But there will be a CoreResponseData object that is either an exception
// or the internal HTTP response representation holding the status, content and headers
var coreResponse = s_coreResponseAccessor(request);
if (coreResponse != null && s_coreResponseDataType.IsInstanceOfType(coreResponse))
{
HttpStatusCode status = s_coreStatusCodeAccessor(coreResponse);
WebHeaderCollection headers = s_coreHeadersAccessor(coreResponse);
// Manual creation of HttpWebResponse here is not possible as this method is eventually called from the
// HttpWebResponse ctor. So we will send Stop event with the Status and Headers payload
// to notify listeners about response;
// We use two different names for Stop events since one event with payload type that varies creates
// complications for efficient payload parsing and is not supported by DiagnosticSource helper
// libraries (e.g. Microsoft.Extensions.DiagnosticAdapter)
s_instance.RaiseResponseEvent(request, status, headers);
}
}
}
base.RemoveAt(index);
}
}
#endregion
#region private methods
/// <summary>
/// Private constructor. This class implements a singleton pattern and only this class is allowed to create an instance.
/// </summary>
private HttpHandlerDiagnosticListener() : base(DiagnosticListenerName)
{
}
private void RaiseRequestEvent(HttpWebRequest request)
{
if (request.Headers.Get(RequestIdHeaderName) != null)
{
// this request was already instrumented by a previous RaiseRequestEvent call
return;
}
if (this.IsEnabled(ActivityName, request))
{
var activity = new Activity(ActivityName);
// Only send start event to users who subscribed for it, but start activity anyway
if (this.IsEnabled(RequestStartName))
{
this.StartActivity(activity, new { Request = request });
}
else
{
activity.Start();
}
if (activity.IdFormat == ActivityIdFormat.W3C)
{
// do not inject header if it was injected already
// perhaps a tracing system wants to override it
if (request.Headers.Get(TraceParentHeaderName) == null)
{
request.Headers.Add(TraceParentHeaderName, activity.Id);
var traceState = activity.TraceStateString;
if (traceState != null)
{
request.Headers.Add(TraceStateHeaderName, traceState);
}
}
}
else
{
// do not inject header if it was injected already
// perhaps a tracing system wants to override it
if (request.Headers.Get(RequestIdHeaderName) == null)
{
request.Headers.Add(RequestIdHeaderName, activity.Id);
}
}
if (request.Headers.Get(CorrelationContextHeaderName) == null)
{
// we expect baggage to be empty or contain a few items
using (IEnumerator<KeyValuePair<string, string>> e = activity.Baggage.GetEnumerator())
{
if (e.MoveNext())
{
StringBuilder baggage = new StringBuilder();
do
{
KeyValuePair<string, string> item = e.Current;
baggage.Append(WebUtility.UrlEncode(item.Key)).Append('=').Append(WebUtility.UrlEncode(item.Value)).Append(',');
}
while (e.MoveNext());
baggage.Remove(baggage.Length - 1, 1);
request.Headers.Add(CorrelationContextHeaderName, baggage.ToString());
}
}
}
// There is no guarantee that Activity.Current will flow to the Response, so let's stop it here
activity.Stop();
}
}
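/// <summary>
/// Illustrative sketch only (not part of the original listener): shows how the Correlation-Context
/// header value is shaped by the baggage loop in RaiseRequestEvent above. The method name and its
/// use as a standalone helper are assumptions for the example; the listener builds the value inline.
/// </summary>
private static string BuildCorrelationContextValueExample(IEnumerable<KeyValuePair<string, string>> baggage)
{
StringBuilder builder = new StringBuilder();
foreach (KeyValuePair<string, string> item in baggage)
{
// Each entry is URL-encoded and joined as key=value pairs separated by commas.
builder.Append(WebUtility.UrlEncode(item.Key)).Append('=').Append(WebUtility.UrlEncode(item.Value)).Append(',');
}
// Trim the trailing comma, e.g. "userId=42,region=eu" for two baggage items.
return builder.Length > 0 ? builder.ToString(0, builder.Length - 1) : string.Empty;
}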
private void RaiseResponseEvent(HttpWebRequest request, HttpWebResponse response)
{
// Response event could be received several times for the same request in case it was redirected
// IsLastResponse checks if response is the last one (no more redirects will happen)
// based on the response StatusCode and the number of redirects done so far
bool wasRequestInstrumented = request.Headers.Get(TraceParentHeaderName) != null || request.Headers.Get(RequestIdHeaderName) != null;
if (wasRequestInstrumented && IsLastResponse(request, response.StatusCode))
{
// only send Stop if request was instrumented
this.Write(RequestStopName, new { Request = request, Response = response });
}
}
private void RaiseResponseEvent(HttpWebRequest request, HttpStatusCode statusCode, WebHeaderCollection headers)
{
// Response event could be received several times for the same request in case it was redirected
// IsLastResponse checks if response is the last one (no more redirects will happen)
// based on the response StatusCode and the number of redirects done so far
if (request.Headers.Get(RequestIdHeaderName) != null && IsLastResponse(request, statusCode))
{
this.Write(RequestStopExName, new { Request = request, StatusCode = statusCode, Headers = headers });
}
}
private bool IsLastResponse(HttpWebRequest request, HttpStatusCode statusCode)
{
if (request.AllowAutoRedirect)
{
if (statusCode == HttpStatusCode.Ambiguous || // 300
statusCode == HttpStatusCode.Moved || // 301
statusCode == HttpStatusCode.Redirect || // 302
statusCode == HttpStatusCode.RedirectMethod || // 303
statusCode == HttpStatusCode.RedirectKeepVerb || // 307
(int)statusCode == 308) // 308 Permanent Redirect is not in .NET Framework yet, and so has to be specified this way.
{
return s_autoRedirectsAccessor(request) >= request.MaximumAutomaticRedirections;
}
}
return true;
}
private static void PrepareReflectionObjects()
{
// At any point, if the operation failed, it should just throw. The caller should catch all exceptions and swallow.
// First step: Get all the reflection objects we will ever need.
Assembly systemNetHttpAssembly = typeof(ServicePoint).Assembly;
s_connectionGroupListField = typeof(ServicePoint).GetField("m_ConnectionGroupList", BindingFlags.Instance | BindingFlags.NonPublic);
s_connectionGroupType = systemNetHttpAssembly?.GetType("System.Net.ConnectionGroup");
s_connectionListField = s_connectionGroupType?.GetField("m_ConnectionList", BindingFlags.Instance | BindingFlags.NonPublic);
s_connectionType = systemNetHttpAssembly?.GetType("System.Net.Connection");
s_writeListField = s_connectionType?.GetField("m_WriteList", BindingFlags.Instance | BindingFlags.NonPublic);
s_httpResponseAccessor = CreateFieldGetter<HttpWebRequest, HttpWebResponse>("_HttpResponse", BindingFlags.NonPublic | BindingFlags.Instance);
s_autoRedirectsAccessor = CreateFieldGetter<HttpWebRequest, int>("_AutoRedirects", BindingFlags.NonPublic | BindingFlags.Instance);
s_coreResponseAccessor = CreateFieldGetter<HttpWebRequest, object>("_CoreResponse", BindingFlags.NonPublic | BindingFlags.Instance);
s_coreResponseDataType = systemNetHttpAssembly?.GetType("System.Net.CoreResponseData");
if (s_coreResponseDataType != null)
{
s_coreStatusCodeAccessor = CreateFieldGetter<HttpStatusCode>(s_coreResponseDataType, "m_StatusCode", BindingFlags.Public | BindingFlags.Instance);
s_coreHeadersAccessor = CreateFieldGetter<WebHeaderCollection>(s_coreResponseDataType, "m_ResponseHeaders", BindingFlags.Public | BindingFlags.Instance);
}
// Double checking to make sure we have all the pieces initialized
if (s_connectionGroupListField == null ||
s_connectionGroupType == null ||
s_connectionListField == null ||
s_connectionType == null ||
s_writeListField == null ||
s_httpResponseAccessor == null ||
s_autoRedirectsAccessor == null ||
s_coreResponseDataType == null ||
s_coreStatusCodeAccessor == null ||
s_coreHeadersAccessor == null)
{
// If anything went wrong here, just throw. There is nothing else we can do.
throw new InvalidOperationException(SR.UnableToInitialize);
}
}
private static void PerformInjection()
{
FieldInfo servicePointTableField = typeof(ServicePointManager).GetField("s_ServicePointTable", BindingFlags.Static | BindingFlags.NonPublic);
if (servicePointTableField == null)
{
// If anything went wrong here, just throw. There is nothing else we can do.
throw new InvalidOperationException(SR.UnableAccessServicePointTable);
}
Hashtable originalTable = servicePointTableField.GetValue(null) as Hashtable;
ServicePointHashtable newTable = new ServicePointHashtable(originalTable ?? new Hashtable());
servicePointTableField.SetValue(null, newTable);
}
private static Func<TClass, TField> CreateFieldGetter<TClass, TField>(string fieldName, BindingFlags flags) where TClass : class
{
FieldInfo field = typeof(TClass).GetField(fieldName, flags);
if (field != null)
{
string methodName = field.ReflectedType.FullName + ".get_" + field.Name;
DynamicMethod getterMethod = new DynamicMethod(methodName, typeof(TField), new[] { typeof(TClass) }, true);
ILGenerator generator = getterMethod.GetILGenerator();
generator.Emit(OpCodes.Ldarg_0);
generator.Emit(OpCodes.Ldfld, field);
generator.Emit(OpCodes.Ret);
return (Func<TClass, TField>)getterMethod.CreateDelegate(typeof(Func<TClass, TField>));
}
return null;
}
/// <summary>
/// Creates getter for a field defined in private or internal type
/// represented by the classType parameter
/// </summary>
private static Func<object, TField> CreateFieldGetter<TField>(Type classType, string fieldName, BindingFlags flags)
{
FieldInfo field = classType.GetField(fieldName, flags);
if (field != null)
{
string methodName = classType.FullName + ".get_" + field.Name;
DynamicMethod getterMethod = new DynamicMethod(methodName, typeof(TField), new[] { typeof(object) }, true);
ILGenerator generator = getterMethod.GetILGenerator();
generator.Emit(OpCodes.Ldarg_0);
generator.Emit(OpCodes.Castclass, classType);
generator.Emit(OpCodes.Ldfld, field);
generator.Emit(OpCodes.Ret);
return (Func<object, TField>)getterMethod.CreateDelegate(typeof(Func<object, TField>));
}
return null;
}
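/// <summary>
/// Illustrative sketch only (not part of the original listener): how a getter produced by
/// CreateFieldGetter might be created and used directly. The method name is hypothetical and the
/// private field name is assumed to exist, exactly as in PrepareReflectionObjects.
/// </summary>
private static HttpWebResponse GetResponseViaFieldGetterExample(HttpWebRequest request)
{
// The real listener caches this delegate once in s_httpResponseAccessor instead of rebuilding it per call.
Func<HttpWebRequest, HttpWebResponse> getter = CreateFieldGetter<HttpWebRequest, HttpWebResponse>("_HttpResponse", BindingFlags.NonPublic | BindingFlags.Instance);
return getter != null ? getter(request) : null;
}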
#endregion
internal static HttpHandlerDiagnosticListener s_instance = new HttpHandlerDiagnosticListener();
#region private fields
private const string DiagnosticListenerName = "System.Net.Http.Desktop";
private const string ActivityName = "System.Net.Http.Desktop.HttpRequestOut";
private const string RequestStartName = "System.Net.Http.Desktop.HttpRequestOut.Start";
private const string RequestStopName = "System.Net.Http.Desktop.HttpRequestOut.Stop";
private const string RequestStopExName = "System.Net.Http.Desktop.HttpRequestOut.Ex.Stop";
private const string InitializationFailed = "System.Net.Http.InitializationFailed";
private const string RequestIdHeaderName = "Request-Id";
private const string CorrelationContextHeaderName = "Correlation-Context";
private const string TraceParentHeaderName = "traceparent";
private const string TraceStateHeaderName = "tracestate";
// Fields for controlling initialization of the HttpHandlerDiagnosticListener singleton
private bool initialized;
// Fields for reflection
private static FieldInfo s_connectionGroupListField;
private static Type s_connectionGroupType;
private static FieldInfo s_connectionListField;
private static Type s_connectionType;
private static FieldInfo s_writeListField;
private static Func<HttpWebRequest, HttpWebResponse> s_httpResponseAccessor;
private static Func<HttpWebRequest, int> s_autoRedirectsAccessor;
private static Func<HttpWebRequest, object> s_coreResponseAccessor;
private static Func<object, HttpStatusCode> s_coreStatusCodeAccessor;
private static Func<object, WebHeaderCollection> s_coreHeadersAccessor;
private static Type s_coreResponseDataType;
#endregion
}
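/// <summary>
/// Illustrative sketch only (not part of the original source): a minimal subscriber showing how an
/// application could hook the "System.Net.Http.Desktop" listener, assuming nothing beyond the event
/// names defined above. Subscribing to the listener is what triggers its one-time initialization.
/// </summary>
internal sealed class ExampleHttpDesktopSubscriber : IObserver<DiagnosticListener>, IObserver<KeyValuePair<string, object>>
{
public static IDisposable Attach()
{
// Watch all listeners; OnNext(DiagnosticListener) filters for the desktop HTTP source.
return DiagnosticListener.AllListeners.Subscribe(new ExampleHttpDesktopSubscriber());
}
public void OnNext(DiagnosticListener listener)
{
if (listener.Name == "System.Net.Http.Desktop")
{
// This call reaches the Subscribe overrides of HttpHandlerDiagnosticListener and kicks off Initialize().
listener.Subscribe(this);
}
}
public void OnNext(KeyValuePair<string, object> value)
{
// value.Key is e.g. "System.Net.Http.Desktop.HttpRequestOut.Start", ".Stop" or ".Ex.Stop".
}
public void OnCompleted() { }
public void OnError(Exception error) { }
}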
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections;
using System.Collections.Generic;
using System.Net;
using System.Reflection;
using System.Reflection.Emit;
using System.Runtime.Serialization;
using System.Text;
// This HttpHandlerDiagnosticListener class is applicable only for .NET 4.6, and not for .NET core.
namespace System.Diagnostics
{
/// <summary>
/// A HttpHandlerDiagnosticListener is a DiagnosticListener for .NET 4.6 and above where
/// HttpClient doesn't have a DiagnosticListener built in. This class is not used for .NET Core
/// because HttpClient in .NET Core already emits DiagnosticSource events. This class compensates for
/// that in .NET 4.6 and above. HttpHandlerDiagnosticListener has no public constructor. To use this,
/// the application just needs to call <see cref="DiagnosticListener.AllListeners" /> and
/// <see cref="DiagnosticListener.AllListenerObservable.Subscribe(IObserver{DiagnosticListener})"/>,
/// then in the <see cref="IObserver{DiagnosticListener}.OnNext(DiagnosticListener)"/> method,
/// when it sees the System.Net.Http.Desktop source, subscribe to it. This will trigger the
/// initialization of this DiagnosticListener.
/// </summary>
internal sealed class HttpHandlerDiagnosticListener : DiagnosticListener
{
/// <summary>
/// Overriding base class implementation just to give us a chance to initialize.
/// </summary>
public override IDisposable Subscribe(IObserver<KeyValuePair<string, object>> observer, Predicate<string> isEnabled)
{
IDisposable result = base.Subscribe(observer, isEnabled);
Initialize();
return result;
}
/// <summary>
/// Overriding base class implementation just to give us a chance to initialize.
/// </summary>
public override IDisposable Subscribe(IObserver<KeyValuePair<string, object>> observer, Func<string, object, object, bool> isEnabled)
{
IDisposable result = base.Subscribe(observer, isEnabled);
Initialize();
return result;
}
/// <summary>
/// Overriding base class implementation just to give us a chance to initialize.
/// </summary>
public override IDisposable Subscribe(IObserver<KeyValuePair<string, object>> observer)
{
IDisposable result = base.Subscribe(observer);
Initialize();
return result;
}
/// <summary>
/// Initializes all the reflection objects it will ever need. Reflection is costly, but it's better to take
/// this one time performance hit than to get it multiple times later, or do it lazily and have to worry about
/// threading issues. If Initialize has been called before, it will not do anything.
/// </summary>
private void Initialize()
{
lock (this)
{
if (!this.initialized)
{
try
{
// This flag makes sure we only do this once. Even if we failed to initialize at an
// earlier time, we should not retry because this initialization is not cheap and
// the likelihood it will succeed the second time is very small.
this.initialized = true;
PrepareReflectionObjects();
PerformInjection();
}
catch (Exception ex)
{
// If anything went wrong, just no-op. Write an event so at least we can find out.
this.Write(InitializationFailed, new { Exception = ex });
}
}
}
}
#region private helper classes
private class HashtableWrapper : Hashtable, IEnumerable
{
protected Hashtable _table;
public override int Count
{
get
{
return this._table.Count;
}
}
public override bool IsReadOnly
{
get
{
return this._table.IsReadOnly;
}
}
public override bool IsFixedSize
{
get
{
return this._table.IsFixedSize;
}
}
public override bool IsSynchronized
{
get
{
return this._table.IsSynchronized;
}
}
public override object this[object key]
{
get
{
return this._table[key];
}
set
{
this._table[key] = value;
}
}
public override object SyncRoot
{
get
{
return this._table.SyncRoot;
}
}
public override ICollection Keys
{
get
{
return this._table.Keys;
}
}
public override ICollection Values
{
get
{
return this._table.Values;
}
}
internal HashtableWrapper(Hashtable table) : base()
{
this._table = table;
}
public override void Add(object key, object value)
{
this._table.Add(key, value);
}
public override void Clear()
{
this._table.Clear();
}
public override bool Contains(object key)
{
return this._table.Contains(key);
}
public override bool ContainsKey(object key)
{
return this._table.ContainsKey(key);
}
public override bool ContainsValue(object key)
{
return this._table.ContainsValue(key);
}
public override void CopyTo(Array array, int arrayIndex)
{
this._table.CopyTo(array, arrayIndex);
}
public override object Clone()
{
return new HashtableWrapper((Hashtable)this._table.Clone());
}
IEnumerator IEnumerable.GetEnumerator()
{
return this._table.GetEnumerator();
}
public override IDictionaryEnumerator GetEnumerator()
{
return this._table.GetEnumerator();
}
public override void Remove(object key)
{
this._table.Remove(key);
}
}
/// <summary>
/// Helper class used for ServicePointManager.s_ServicePointTable. The goal here is to
/// intercept each new ServicePoint object being added to ServicePointManager.s_ServicePointTable
/// and replace its ConnectionGroupList hashtable field.
/// </summary>
private sealed class ServicePointHashtable : HashtableWrapper
{
public ServicePointHashtable(Hashtable table) : base(table)
{
}
public override object this[object key]
{
get
{
return base[key];
}
set
{
WeakReference weakRef = value as WeakReference;
if (weakRef != null && weakRef.IsAlive)
{
ServicePoint servicePoint = weakRef.Target as ServicePoint;
if (servicePoint != null)
{
// Replace the ConnectionGroup hashtable inside this ServicePoint object,
// which allows us to intercept each new ConnectionGroup object added under
// this ServicePoint.
Hashtable originalTable = s_connectionGroupListField.GetValue(servicePoint) as Hashtable;
ConnectionGroupHashtable newTable = new ConnectionGroupHashtable(originalTable ?? new Hashtable());
s_connectionGroupListField.SetValue(servicePoint, newTable);
}
}
base[key] = value;
}
}
}
/// <summary>
/// Helper class used for ServicePoint.m_ConnectionGroupList. The goal here is to
/// intercept each new ConnectionGroup object being added to ServicePoint.m_ConnectionGroupList
/// and replace its m_ConnectionList arraylist field.
/// </summary>
private sealed class ConnectionGroupHashtable : HashtableWrapper
{
public ConnectionGroupHashtable(Hashtable table) : base(table)
{
}
public override object this[object key]
{
get
{
return base[key];
}
set
{
if (s_connectionGroupType.IsInstanceOfType(value))
{
// Replace the Connection arraylist inside this ConnectionGroup object,
// which allows us to intercept each new Connection object added under
// this ConnectionGroup.
ArrayList originalArrayList = s_connectionListField.GetValue(value) as ArrayList;
ConnectionArrayList newArrayList = new ConnectionArrayList(originalArrayList ?? new ArrayList());
s_connectionListField.SetValue(value, newArrayList);
}
base[key] = value;
}
}
}
/// <summary>
/// Helper class used to wrap the array list object. This class itself doesn't actually
/// have the array elements, but rather access another array list that's given at
/// construction time.
/// </summary>
private class ArrayListWrapper : ArrayList
{
private ArrayList _list;
public override int Capacity
{
get
{
return this._list.Capacity;
}
set
{
this._list.Capacity = value;
}
}
public override int Count
{
get
{
return this._list.Count;
}
}
public override bool IsReadOnly
{
get
{
return this._list.IsReadOnly;
}
}
public override bool IsFixedSize
{
get
{
return this._list.IsFixedSize;
}
}
public override bool IsSynchronized
{
get
{
return this._list.IsSynchronized;
}
}
public override object this[int index]
{
get
{
return this._list[index];
}
set
{
this._list[index] = value;
}
}
public override object SyncRoot
{
get
{
return this._list.SyncRoot;
}
}
internal ArrayListWrapper(ArrayList list) : base()
{
this._list = list;
}
public override int Add(object value)
{
return this._list.Add(value);
}
public override void AddRange(ICollection c)
{
this._list.AddRange(c);
}
public override int BinarySearch(object value)
{
return this._list.BinarySearch(value);
}
public override int BinarySearch(object value, IComparer comparer)
{
return this._list.BinarySearch(value, comparer);
}
public override int BinarySearch(int index, int count, object value, IComparer comparer)
{
return this._list.BinarySearch(index, count, value, comparer);
}
public override void Clear()
{
this._list.Clear();
}
public override object Clone()
{
return new ArrayListWrapper((ArrayList)this._list.Clone());
}
public override bool Contains(object item)
{
return this._list.Contains(item);
}
public override void CopyTo(Array array)
{
this._list.CopyTo(array);
}
public override void CopyTo(Array array, int index)
{
this._list.CopyTo(array, index);
}
public override void CopyTo(int index, Array array, int arrayIndex, int count)
{
this._list.CopyTo(index, array, arrayIndex, count);
}
public override IEnumerator GetEnumerator()
{
return this._list.GetEnumerator();
}
public override IEnumerator GetEnumerator(int index, int count)
{
return this._list.GetEnumerator(index, count);
}
public override int IndexOf(object value)
{
return this._list.IndexOf(value);
}
public override int IndexOf(object value, int startIndex)
{
return this._list.IndexOf(value, startIndex);
}
public override int IndexOf(object value, int startIndex, int count)
{
return this._list.IndexOf(value, startIndex, count);
}
public override void Insert(int index, object value)
{
this._list.Insert(index, value);
}
public override void InsertRange(int index, ICollection c)
{
this._list.InsertRange(index, c);
}
public override int LastIndexOf(object value)
{
return this._list.LastIndexOf(value);
}
public override int LastIndexOf(object value, int startIndex)
{
return this._list.LastIndexOf(value, startIndex);
}
public override int LastIndexOf(object value, int startIndex, int count)
{
return this._list.LastIndexOf(value, startIndex, count);
}
public override void Remove(object value)
{
this._list.Remove(value);
}
public override void RemoveAt(int index)
{
this._list.RemoveAt(index);
}
public override void RemoveRange(int index, int count)
{
this._list.RemoveRange(index, count);
}
public override void Reverse(int index, int count)
{
this._list.Reverse(index, count);
}
public override void SetRange(int index, ICollection c)
{
this._list.SetRange(index, c);
}
public override ArrayList GetRange(int index, int count)
{
return this._list.GetRange(index, count);
}
public override void Sort()
{
this._list.Sort();
}
public override void Sort(IComparer comparer)
{
this._list.Sort(comparer);
}
public override void Sort(int index, int count, IComparer comparer)
{
this._list.Sort(index, count, comparer);
}
public override object[] ToArray()
{
return this._list.ToArray();
}
public override Array ToArray(Type type)
{
return this._list.ToArray(type);
}
public override void TrimToSize()
{
this._list.TrimToSize();
}
}
/// <summary>
/// Helper class used for ConnectionGroup.m_ConnectionList. The goal here is to
/// intercept each new Connection object being added to ConnectionGroup.m_ConnectionList
/// and replace its m_WriteList arraylist field.
/// </summary>
private sealed class ConnectionArrayList : ArrayListWrapper
{
public ConnectionArrayList(ArrayList list) : base(list)
{
}
public override int Add(object value)
{
if (s_connectionType.IsInstanceOfType(value))
{
// Replace the HttpWebRequest arraylist inside this Connection object,
// which allows us to intercept each new HttpWebRequest object added under
// this Connection.
ArrayList originalArrayList = s_writeListField.GetValue(value) as ArrayList;
HttpWebRequestArrayList newArrayList = new HttpWebRequestArrayList(originalArrayList ?? new ArrayList());
s_writeListField.SetValue(value, newArrayList);
}
return base.Add(value);
}
}
/// <summary>
/// Helper class used for Connection.m_WriteList. The goal here is to
/// intercept all new HttpWebRequest objects being added to Connection.m_WriteList
/// and notify the listener about the HttpWebRequest that's about to send a request.
/// It also intercepts all HttpWebRequest objects that are about to get removed from
/// Connection.m_WriteList as they have completed the request.
/// </summary>
private sealed class HttpWebRequestArrayList : ArrayListWrapper
{
public HttpWebRequestArrayList(ArrayList list) : base(list)
{
}
public override int Add(object value)
{
HttpWebRequest request = value as HttpWebRequest;
if (request != null)
{
s_instance.RaiseRequestEvent(request);
}
return base.Add(value);
}
public override void RemoveAt(int index)
{
HttpWebRequest request = base[index] as HttpWebRequest;
if (request != null)
{
HttpWebResponse response = s_httpResponseAccessor(request);
if (response != null)
{
s_instance.RaiseResponseEvent(request, response);
}
else
{
// In case the response content length is 0 and the request is async,
// we won't have an HttpWebResponse set on the request object when this method is called
// http://referencesource.microsoft.com/#System/net/System/Net/HttpWebResponse.cs,525
// But there will be a CoreResponseData object that is either an exception
// or the internal HTTP response representation holding the status, content and headers
var coreResponse = s_coreResponseAccessor(request);
if (coreResponse != null && s_coreResponseDataType.IsInstanceOfType(coreResponse))
{
HttpStatusCode status = s_coreStatusCodeAccessor(coreResponse);
WebHeaderCollection headers = s_coreHeadersAccessor(coreResponse);
// Manual creation of HttpWebResponse here is not possible as this method is eventually called from the
// HttpWebResponse ctor. So we will send Stop event with the Status and Headers payload
// to notify listeners about response;
// We use two different names for Stop events since one event with payload type that varies creates
// complications for efficient payload parsing and is not supported by DiagnosticSource helper
// libraries (e.g. Microsoft.Extensions.DiagnosticAdapter)
s_instance.RaiseResponseEvent(request, status, headers);
}
}
}
base.RemoveAt(index);
}
}
#endregion
#region private methods
/// <summary>
/// Private constructor. This class implements a singleton pattern and only this class is allowed to create an instance.
/// </summary>
private HttpHandlerDiagnosticListener() : base(DiagnosticListenerName)
{
}
private void RaiseRequestEvent(HttpWebRequest request)
{
if (request.Headers.Get(RequestIdHeaderName) != null)
{
// this request was already instrumented by a previous RaiseRequestEvent call
return;
}
if (this.IsEnabled(ActivityName, request))
{
var activity = new Activity(ActivityName);
// Only send start event to users who subscribed for it, but start activity anyway
if (this.IsEnabled(RequestStartName))
{
this.StartActivity(activity, new { Request = request });
}
else
{
activity.Start();
}
if (activity.IdFormat == ActivityIdFormat.W3C)
{
// do not inject header if it was injected already
// perhaps a tracing system wants to override it
if (request.Headers.Get(TraceParentHeaderName) == null)
{
request.Headers.Add(TraceParentHeaderName, activity.Id);
var traceState = activity.TraceStateString;
if (traceState != null)
{
request.Headers.Add(TraceStateHeaderName, traceState);
}
}
}
else
{
// do not inject header if it was injected already
// perhaps a tracing system wants to override it
if (request.Headers.Get(RequestIdHeaderName) == null)
{
request.Headers.Add(RequestIdHeaderName, activity.Id);
}
}
if (request.Headers.Get(CorrelationContextHeaderName) == null)
{
// we expect baggage to be empty or contain a few items
using (IEnumerator<KeyValuePair<string, string>> e = activity.Baggage.GetEnumerator())
{
if (e.MoveNext())
{
StringBuilder baggage = new StringBuilder();
do
{
KeyValuePair<string, string> item = e.Current;
baggage.Append(WebUtility.UrlEncode(item.Key)).Append('=').Append(WebUtility.UrlEncode(item.Value)).Append(',');
}
while (e.MoveNext());
baggage.Remove(baggage.Length - 1, 1);
request.Headers.Add(CorrelationContextHeaderName, baggage.ToString());
}
}
}
// There is no guarantee that Activity.Current will flow to the Response, so let's stop it here
activity.Stop();
}
}
private void RaiseResponseEvent(HttpWebRequest request, HttpWebResponse response)
{
// Response event could be received several times for the same request in case it was redirected
// IsLastResponse checks if response is the last one (no more redirects will happen)
// based on the response StatusCode and the number of redirects done so far
bool wasRequestInstrumented = request.Headers.Get(TraceParentHeaderName) != null || request.Headers.Get(RequestIdHeaderName) != null;
if (wasRequestInstrumented && IsLastResponse(request, response.StatusCode))
{
// only send Stop if request was instrumented
this.Write(RequestStopName, new { Request = request, Response = response });
}
}
private void RaiseResponseEvent(HttpWebRequest request, HttpStatusCode statusCode, WebHeaderCollection headers)
{
// Response event could be received several times for the same request in case it was redirected
// IsLastResponse checks if response is the last one (no more redirects will happen)
// based on the response StatusCode and the number of redirects done so far
if (request.Headers.Get(RequestIdHeaderName) != null && IsLastResponse(request, statusCode))
{
this.Write(RequestStopExName, new { Request = request, StatusCode = statusCode, Headers = headers });
}
}
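/// <summary>
/// Illustrative sketch only (not part of the original listener): the ".Ex.Stop" payload written above
/// is an anonymous object with Request, StatusCode and Headers properties, so a subscriber without a
/// typed adapter could read the status reflectively as below. The method name is hypothetical.
/// </summary>
private static HttpStatusCode? ReadStatusCodeFromStopExPayloadExample(object payload)
{
// Reflect over the anonymous payload type; returns null when the property is absent.
PropertyInfo statusProperty = payload?.GetType().GetProperty("StatusCode");
return statusProperty != null ? (HttpStatusCode?)statusProperty.GetValue(payload) : null;
}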
private bool IsLastResponse(HttpWebRequest request, HttpStatusCode statusCode)
{
if (request.AllowAutoRedirect)
{
if (statusCode == HttpStatusCode.Ambiguous || // 300
statusCode == HttpStatusCode.Moved || // 301
statusCode == HttpStatusCode.Redirect || // 302
statusCode == HttpStatusCode.RedirectMethod || // 303
statusCode == HttpStatusCode.RedirectKeepVerb || // 307
(int)statusCode == 308) // 308 Permanent Redirect is not in .NET Framework yet, and so has to be specified this way.
{
return s_autoRedirectsAccessor(request) >= request.MaximumAutomaticRedirections;
}
}
return true;
}
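/// <summary>
/// Illustrative sketch only (not part of the original listener): restates the redirect rule from
/// IsLastResponse for a single explicit case. With AllowAutoRedirect enabled, a redirect status is
/// only "last" once the redirects performed so far reach MaximumAutomaticRedirections.
/// The method name is hypothetical.
/// </summary>
private static bool WouldRedirectAgainExample(HttpWebRequest request)
{
// s_autoRedirectsAccessor reads the private _AutoRedirects counter prepared in PrepareReflectionObjects.
return request.AllowAutoRedirect
&& s_autoRedirectsAccessor != null
&& s_autoRedirectsAccessor(request) < request.MaximumAutomaticRedirections;
}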
private static void PrepareReflectionObjects()
{
// At any point, if the operation failed, it should just throw. The caller should catch all exceptions and swallow.
// First step: Get all the reflection objects we will ever need.
Assembly systemNetHttpAssembly = typeof(ServicePoint).Assembly;
s_connectionGroupListField = typeof(ServicePoint).GetField("m_ConnectionGroupList", BindingFlags.Instance | BindingFlags.NonPublic);
s_connectionGroupType = systemNetHttpAssembly?.GetType("System.Net.ConnectionGroup");
s_connectionListField = s_connectionGroupType?.GetField("m_ConnectionList", BindingFlags.Instance | BindingFlags.NonPublic);
s_connectionType = systemNetHttpAssembly?.GetType("System.Net.Connection");
s_writeListField = s_connectionType?.GetField("m_WriteList", BindingFlags.Instance | BindingFlags.NonPublic);
s_httpResponseAccessor = CreateFieldGetter<HttpWebRequest, HttpWebResponse>("_HttpResponse", BindingFlags.NonPublic | BindingFlags.Instance);
s_autoRedirectsAccessor = CreateFieldGetter<HttpWebRequest, int>("_AutoRedirects", BindingFlags.NonPublic | BindingFlags.Instance);
s_coreResponseAccessor = CreateFieldGetter<HttpWebRequest, object>("_CoreResponse", BindingFlags.NonPublic | BindingFlags.Instance);
s_coreResponseDataType = systemNetHttpAssembly?.GetType("System.Net.CoreResponseData");
if (s_coreResponseDataType != null)
{
s_coreStatusCodeAccessor = CreateFieldGetter<HttpStatusCode>(s_coreResponseDataType, "m_StatusCode", BindingFlags.Public | BindingFlags.Instance);
s_coreHeadersAccessor = CreateFieldGetter<WebHeaderCollection>(s_coreResponseDataType, "m_ResponseHeaders", BindingFlags.Public | BindingFlags.Instance);
}
// Double checking to make sure we have all the pieces initialized
if (s_connectionGroupListField == null ||
s_connectionGroupType == null ||
s_connectionListField == null ||
s_connectionType == null ||
s_writeListField == null ||
s_httpResponseAccessor == null ||
s_autoRedirectsAccessor == null ||
s_coreResponseDataType == null ||
s_coreStatusCodeAccessor == null ||
s_coreHeadersAccessor == null)
{
// If anything went wrong here, just throw. There is nothing else we can do.
throw new InvalidOperationException(SR.UnableToInitialize);
}
}
private static void PerformInjection()
{
FieldInfo servicePointTableField = typeof(ServicePointManager).GetField("s_ServicePointTable", BindingFlags.Static | BindingFlags.NonPublic);
if (servicePointTableField == null)
{
// If anything went wrong here, just throw. There is nothing else we can do.
throw new InvalidOperationException(SR.UnableAccessServicePointTable);
}
Hashtable originalTable = servicePointTableField.GetValue(null) as Hashtable;
ServicePointHashtable newTable = new ServicePointHashtable(originalTable ?? new Hashtable());
servicePointTableField.SetValue(null, newTable);
}
private static Func<TClass, TField> CreateFieldGetter<TClass, TField>(string fieldName, BindingFlags flags) where TClass : class
{
FieldInfo field = typeof(TClass).GetField(fieldName, flags);
if (field != null)
{
string methodName = field.ReflectedType.FullName + ".get_" + field.Name;
DynamicMethod getterMethod = new DynamicMethod(methodName, typeof(TField), new[] { typeof(TClass) }, true);
ILGenerator generator = getterMethod.GetILGenerator();
generator.Emit(OpCodes.Ldarg_0);
generator.Emit(OpCodes.Ldfld, field);
generator.Emit(OpCodes.Ret);
return (Func<TClass, TField>)getterMethod.CreateDelegate(typeof(Func<TClass, TField>));
}
return null;
}
/// <summary>
/// Creates getter for a field defined in private or internal type
/// represented by the classType parameter
/// </summary>
private static Func<object, TField> CreateFieldGetter<TField>(Type classType, string fieldName, BindingFlags flags)
{
FieldInfo field = classType.GetField(fieldName, flags);
if (field != null)
{
string methodName = classType.FullName + ".get_" + field.Name;
DynamicMethod getterMethod = new DynamicMethod(methodName, typeof(TField), new[] { typeof(object) }, true);
ILGenerator generator = getterMethod.GetILGenerator();
generator.Emit(OpCodes.Ldarg_0);
generator.Emit(OpCodes.Castclass, classType);
generator.Emit(OpCodes.Ldfld, field);
generator.Emit(OpCodes.Ret);
return (Func<object, TField>)getterMethod.CreateDelegate(typeof(Func<object, TField>));
}
return null;
}
#endregion
internal static HttpHandlerDiagnosticListener s_instance = new HttpHandlerDiagnosticListener();
#region private fields
private const string DiagnosticListenerName = "System.Net.Http.Desktop";
private const string ActivityName = "System.Net.Http.Desktop.HttpRequestOut";
private const string RequestStartName = "System.Net.Http.Desktop.HttpRequestOut.Start";
private const string RequestStopName = "System.Net.Http.Desktop.HttpRequestOut.Stop";
private const string RequestStopExName = "System.Net.Http.Desktop.HttpRequestOut.Ex.Stop";
private const string InitializationFailed = "System.Net.Http.InitializationFailed";
private const string RequestIdHeaderName = "Request-Id";
private const string CorrelationContextHeaderName = "Correlation-Context";
private const string TraceParentHeaderName = "traceparent";
private const string TraceStateHeaderName = "tracestate";
// Fields for controlling initialization of the HttpHandlerDiagnosticListener singleton
private bool initialized;
// Fields for reflection
private static FieldInfo s_connectionGroupListField;
private static Type s_connectionGroupType;
private static FieldInfo s_connectionListField;
private static Type s_connectionType;
private static FieldInfo s_writeListField;
private static Func<HttpWebRequest, HttpWebResponse> s_httpResponseAccessor;
private static Func<HttpWebRequest, int> s_autoRedirectsAccessor;
private static Func<HttpWebRequest, object> s_coreResponseAccessor;
private static Func<object, HttpStatusCode> s_coreStatusCodeAccessor;
private static Func<object, WebHeaderCollection> s_coreHeadersAccessor;
private static Type s_coreResponseDataType;
#endregion
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/coreclr/tools/Common/TypeSystem/Common/MetadataTypeSystemContext.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using Debug = System.Diagnostics.Debug;
namespace Internal.TypeSystem
{
public abstract partial class MetadataTypeSystemContext : TypeSystemContext
{
private static readonly string[] s_wellKnownTypeNames = new string[] {
"Void",
"Boolean",
"Char",
"SByte",
"Byte",
"Int16",
"UInt16",
"Int32",
"UInt32",
"Int64",
"UInt64",
"IntPtr",
"UIntPtr",
"Single",
"Double",
"ValueType",
"Enum",
"Nullable`1",
"Object",
"String",
"Array",
"MulticastDelegate",
"RuntimeTypeHandle",
"RuntimeMethodHandle",
"RuntimeFieldHandle",
"Exception",
"TypedReference",
"ByReference`1",
};
public static IEnumerable<string> WellKnownTypeNames => s_wellKnownTypeNames;
private MetadataType[] _wellKnownTypes;
public MetadataTypeSystemContext()
{
}
public MetadataTypeSystemContext(TargetDetails details)
: base(details)
{
}
public virtual void SetSystemModule(ModuleDesc systemModule)
{
InitializeSystemModule(systemModule);
// Sanity check the name table
Debug.Assert(s_wellKnownTypeNames[(int)WellKnownType.MulticastDelegate - 1] == "MulticastDelegate");
_wellKnownTypes = new MetadataType[s_wellKnownTypeNames.Length];
// Initialize all well known types - it will save us from checking the name for each loaded type
for (int typeIndex = 0; typeIndex < _wellKnownTypes.Length; typeIndex++)
{
// Require System.Object to be present as a minimal sanity check.
// The set of required well-known types is not strictly defined since different .NET profiles implement different subsets.
MetadataType type = systemModule.GetType("System", s_wellKnownTypeNames[typeIndex], throwIfNotFound: typeIndex == (int)WellKnownType.Object);
if (type != null)
{
type.SetWellKnownType((WellKnownType)(typeIndex + 1));
_wellKnownTypes[typeIndex] = type;
}
}
}
public override DefType GetWellKnownType(WellKnownType wellKnownType, bool throwIfNotFound = true)
{
Debug.Assert(_wellKnownTypes != null, "Forgot to call SetSystemModule?");
int typeIndex = (int)wellKnownType - 1;
DefType type = _wellKnownTypes[typeIndex];
if (type == null && throwIfNotFound)
ThrowHelper.ThrowTypeLoadException("System", s_wellKnownTypeNames[typeIndex], SystemModule);
return type;
}
protected sealed internal override bool ComputeHasStaticConstructor(TypeDesc type)
{
if (type is MetadataType)
{
return ((MetadataType)type).GetStaticConstructor() != null;
}
return false;
}
protected sealed internal override bool IsIDynamicInterfaceCastableInterface(DefType type)
{
MetadataType t = (MetadataType)type;
return t.Module == SystemModule
&& t.Name == "IDynamicInterfaceCastable"
&& t.Namespace == "System.Runtime.InteropServices";
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using Debug = System.Diagnostics.Debug;
namespace Internal.TypeSystem
{
public abstract partial class MetadataTypeSystemContext : TypeSystemContext
{
private static readonly string[] s_wellKnownTypeNames = new string[] {
"Void",
"Boolean",
"Char",
"SByte",
"Byte",
"Int16",
"UInt16",
"Int32",
"UInt32",
"Int64",
"UInt64",
"IntPtr",
"UIntPtr",
"Single",
"Double",
"ValueType",
"Enum",
"Nullable`1",
"Object",
"String",
"Array",
"MulticastDelegate",
"RuntimeTypeHandle",
"RuntimeMethodHandle",
"RuntimeFieldHandle",
"Exception",
"TypedReference",
"ByReference`1",
};
public static IEnumerable<string> WellKnownTypeNames => s_wellKnownTypeNames;
private MetadataType[] _wellKnownTypes;
public MetadataTypeSystemContext()
{
}
public MetadataTypeSystemContext(TargetDetails details)
: base(details)
{
}
public virtual void SetSystemModule(ModuleDesc systemModule)
{
InitializeSystemModule(systemModule);
// Sanity check the name table
Debug.Assert(s_wellKnownTypeNames[(int)WellKnownType.MulticastDelegate - 1] == "MulticastDelegate");
_wellKnownTypes = new MetadataType[s_wellKnownTypeNames.Length];
// Initialize all well known types - it will save us from checking the name for each loaded type
for (int typeIndex = 0; typeIndex < _wellKnownTypes.Length; typeIndex++)
{
// Require System.Object to be present as a minimal sanity check.
// The set of required well-known types is not strictly defined since different .NET profiles implement different subsets.
MetadataType type = systemModule.GetType("System", s_wellKnownTypeNames[typeIndex], throwIfNotFound: typeIndex == (int)WellKnownType.Object);
if (type != null)
{
type.SetWellKnownType((WellKnownType)(typeIndex + 1));
_wellKnownTypes[typeIndex] = type;
}
}
}
public override DefType GetWellKnownType(WellKnownType wellKnownType, bool throwIfNotFound = true)
{
Debug.Assert(_wellKnownTypes != null, "Forgot to call SetSystemModule?");
int typeIndex = (int)wellKnownType - 1;
DefType type = _wellKnownTypes[typeIndex];
if (type == null && throwIfNotFound)
ThrowHelper.ThrowTypeLoadException("System", s_wellKnownTypeNames[typeIndex], SystemModule);
return type;
}
protected sealed internal override bool ComputeHasStaticConstructor(TypeDesc type)
{
if (type is MetadataType)
{
return ((MetadataType)type).GetStaticConstructor() != null;
}
return false;
}
protected sealed internal override bool IsIDynamicInterfaceCastableInterface(DefType type)
{
MetadataType t = (MetadataType)type;
return t.Module == SystemModule
&& t.Name == "IDynamicInterfaceCastable"
&& t.Namespace == "System.Runtime.InteropServices";
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/jit64/regress/ddb/87766/ddb87766.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Runtime.CompilerServices;
public class VInline
{
private int _fi1;
private int _fi2;
public VInline(int ival)
{
_fi1 = ival;
_fi2 = 0;
}
[MethodImpl(MethodImplOptions.NoInlining)]
private void GetI1(ref int i)
{
i = _fi1;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public int Accumulate(int a)
{
int i = 0;
GetI1(ref i); //here's the ldloca, passing the address of i as the arg
i = i / _fi2; //fi2 == 0 so this should always cause an exception
return i;
}
}
public class VIMain
{
public static int Main()
{
int ret = 100;
VInline vi = new VInline(1);
int ival = 2;
try
{
ival = vi.Accumulate(ival); //this call should throw a divide by zero exception
}
catch (DivideByZeroException e)
{
Console.WriteLine("exeption stack trace: " + e.StackTrace.ToString()); //display the stack trace
if (e.StackTrace.ToString().Contains("Accumulate"))
{
Console.WriteLine("Fail, method Accumulate NOT inlined.");
ret = 666;
}
else
{
Console.WriteLine("Pass, method Accumulate inlined.");
}
}
return ret;
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Runtime.CompilerServices;
public class VInline
{
private int _fi1;
private int _fi2;
public VInline(int ival)
{
_fi1 = ival;
_fi2 = 0;
}
[MethodImpl(MethodImplOptions.NoInlining)]
private void GetI1(ref int i)
{
i = _fi1;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public int Accumulate(int a)
{
int i = 0;
GetI1(ref i); //here's the ldloca, passing the address of i as the arg
i = i / _fi2; //fi2 == 0 so this should always cause an exception
return i;
}
}
public class VIMain
{
public static int Main()
{
int ret = 100;
VInline vi = new VInline(1);
int ival = 2;
try
{
ival = vi.Accumulate(ival); //this call should throw a divide by zero exception
}
catch (DivideByZeroException e)
{
Console.WriteLine("exeption stack trace: " + e.StackTrace.ToString()); //display the stack trace
if (e.StackTrace.ToString().Contains("Accumulate"))
{
Console.WriteLine("Fail, method Accumulate NOT inlined.");
ret = 666;
}
else
{
Console.WriteLine("Pass, method Accumulate inlined.");
}
}
return ret;
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/coreclr/debug/shared/s390x/primitives.cpp
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
//*****************************************************************************
// File: primitives.cpp
//
//
// Platform-specific debugger primitives
//
//*****************************************************************************
#include "primitives.h"
#error Unsupported platform
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
//*****************************************************************************
// File: primitives.cpp
//
//
// Platform-specific debugger primitives
//
//*****************************************************************************
#include "primitives.h"
#error Unsupported platform
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/baseservices/compilerservices/FixedAddressValueType/FixedAddressValueType.csproj
|
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<GCStressIncompatible>true</GCStressIncompatible>
<CLRTestPriority>1</CLRTestPriority>
<!-- FixedAddressValueTypeAttribute is not supported on collectible types -->
<UnloadabilityIncompatible>true</UnloadabilityIncompatible>
</PropertyGroup>
<ItemGroup>
<Compile Include="$(MSBuildProjectName).cs" />
</ItemGroup>
<ItemGroup>
<NoWarn Include="42016,42020,42025,42024" />
</ItemGroup>
</Project>
|
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<GCStressIncompatible>true</GCStressIncompatible>
<CLRTestPriority>1</CLRTestPriority>
<!-- FixedAddressValueTypeAttribute is not supported on collectible types -->
<UnloadabilityIncompatible>true</UnloadabilityIncompatible>
</PropertyGroup>
<ItemGroup>
<Compile Include="$(MSBuildProjectName).cs" />
</ItemGroup>
<ItemGroup>
<NoWarn Include="42016,42020,42025,42024" />
</ItemGroup>
</Project>
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Security.Cryptography/src/System/Security/Cryptography/CngKey.Open.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Runtime.Versioning;
using Microsoft.Win32.SafeHandles;
using Internal.Cryptography;
using ErrorCode = Interop.NCrypt.ErrorCode;
namespace System.Security.Cryptography
{
/// <summary>
/// Managed representation of an NCrypt key
/// </summary>
public sealed partial class CngKey : IDisposable
{
//
// Open factory methods
//
[SupportedOSPlatform("windows")]
public static CngKey Open(string keyName)
{
return Open(keyName, provider: CngProvider.MicrosoftSoftwareKeyStorageProvider);
}
[SupportedOSPlatform("windows")]
public static CngKey Open(string keyName, CngProvider provider)
{
return Open(keyName, provider, openOptions: CngKeyOpenOptions.None);
}
[SupportedOSPlatform("windows")]
public static CngKey Open(string keyName!!, CngProvider provider!!, CngKeyOpenOptions openOptions)
{
SafeNCryptProviderHandle providerHandle = provider.OpenStorageProvider();
SafeNCryptKeyHandle keyHandle;
ErrorCode errorCode = Interop.NCrypt.NCryptOpenKey(providerHandle, out keyHandle, keyName, 0, openOptions);
if (errorCode != ErrorCode.ERROR_SUCCESS)
throw errorCode.ToCryptographicException();
return new CngKey(providerHandle, keyHandle);
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Runtime.Versioning;
using Microsoft.Win32.SafeHandles;
using Internal.Cryptography;
using ErrorCode = Interop.NCrypt.ErrorCode;
namespace System.Security.Cryptography
{
/// <summary>
/// Managed representation of an NCrypt key
/// </summary>
public sealed partial class CngKey : IDisposable
{
//
// Open factory methods
//
[SupportedOSPlatform("windows")]
public static CngKey Open(string keyName)
{
return Open(keyName, provider: CngProvider.MicrosoftSoftwareKeyStorageProvider);
}
[SupportedOSPlatform("windows")]
public static CngKey Open(string keyName, CngProvider provider)
{
return Open(keyName, provider, openOptions: CngKeyOpenOptions.None);
}
[SupportedOSPlatform("windows")]
public static CngKey Open(string keyName!!, CngProvider provider!!, CngKeyOpenOptions openOptions)
{
SafeNCryptProviderHandle providerHandle = provider.OpenStorageProvider();
SafeNCryptKeyHandle keyHandle;
ErrorCode errorCode = Interop.NCrypt.NCryptOpenKey(providerHandle, out keyHandle, keyName, 0, openOptions);
if (errorCode != ErrorCode.ERROR_SUCCESS)
throw errorCode.ToCryptographicException();
return new CngKey(providerHandle, keyHandle);
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/installer/tests/Assets/TestProjects/Directory.Build.targets
|
<Project>
<Import Project="$(MSBuildThisFileDirectory)..\TestUtils\TestProjects.targets" />
<!--
Some known framework references are downstream of dotnet/runtime and don't exist with the build's
versions. Remove them before the SDK tries to download them.
-->
<Target Name="RemoveUpstackKnownFrameworkReferences"
BeforeTargets="ProcessFrameworkReferences">
<ItemGroup>
<KnownFrameworkReference Remove="Microsoft.AspNetCore.App" />
<KnownFrameworkReference Remove="Microsoft.WindowsDesktop.App" />
<KnownFrameworkReference Remove="Microsoft.WindowsDesktop.App.WPF" />
<KnownFrameworkReference Remove="Microsoft.WindowsDesktop.App.WindowsForms" />
</ItemGroup>
</Target>
</Project>
|
<Project>
<Import Project="$(MSBuildThisFileDirectory)..\TestUtils\TestProjects.targets" />
<!--
Some known framework references are downstream of dotnet/runtime and don't exist with the build's
versions. Remove them before the SDK tries to download them.
-->
<Target Name="RemoveUpstackKnownFrameworkReferences"
BeforeTargets="ProcessFrameworkReferences">
<ItemGroup>
<KnownFrameworkReference Remove="Microsoft.AspNetCore.App" />
<KnownFrameworkReference Remove="Microsoft.WindowsDesktop.App" />
<KnownFrameworkReference Remove="Microsoft.WindowsDesktop.App.WPF" />
<KnownFrameworkReference Remove="Microsoft.WindowsDesktop.App.WindowsForms" />
</ItemGroup>
</Target>
</Project>
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/HardwareIntrinsics/General/Vector128/GreaterThan.UInt32.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void GreaterThanUInt32()
{
var test = new VectorBinaryOpTest__GreaterThanUInt32();
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class VectorBinaryOpTest__GreaterThanUInt32
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(UInt32[] inArray1, UInt32[] inArray2, UInt32[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<UInt32>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<UInt32>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt32>();
if ((alignment != 32 && alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<UInt32, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<UInt32, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<UInt32> _fld1;
public Vector128<UInt32> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
return testStruct;
}
public void RunStructFldScenario(VectorBinaryOpTest__GreaterThanUInt32 testClass)
{
var result = Vector128.GreaterThan(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static UInt32[] _data1 = new UInt32[Op1ElementCount];
private static UInt32[] _data2 = new UInt32[Op2ElementCount];
private static Vector128<UInt32> _clsVar1;
private static Vector128<UInt32> _clsVar2;
private Vector128<UInt32> _fld1;
private Vector128<UInt32> _fld2;
private DataTable _dataTable;
static VectorBinaryOpTest__GreaterThanUInt32()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _clsVar1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _clsVar2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
}
public VectorBinaryOpTest__GreaterThanUInt32()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _fld1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _fld2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
_dataTable = new DataTable(_data1, _data2, new UInt32[RetElementCount], LargestVectorSize);
}
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Vector128.GreaterThan(
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var method = typeof(Vector128).GetMethod(nameof(Vector128.GreaterThan), new Type[] {
typeof(Vector128<UInt32>),
typeof(Vector128<UInt32>)
});
if (method is null)
{
method = typeof(Vector128).GetMethod(nameof(Vector128.GreaterThan), 1, new Type[] {
typeof(Vector128<>).MakeGenericType(Type.MakeGenericMethodParameter(0)),
typeof(Vector128<>).MakeGenericType(Type.MakeGenericMethodParameter(0))
});
}
if (method.IsGenericMethodDefinition)
{
method = method.MakeGenericMethod(typeof(UInt32));
}
var result = method.Invoke(null, new object[] {
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Vector128.GreaterThan(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr);
var result = Vector128.GreaterThan(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new VectorBinaryOpTest__GreaterThanUInt32();
var result = Vector128.GreaterThan(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Vector128.GreaterThan(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Vector128.GreaterThan(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
private void ValidateResult(Vector128<UInt32> op1, Vector128<UInt32> op2, void* result, [CallerMemberName] string method = "")
{
UInt32[] inArray1 = new UInt32[Op1ElementCount];
UInt32[] inArray2 = new UInt32[Op2ElementCount];
UInt32[] outArray = new UInt32[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
UInt32[] inArray1 = new UInt32[Op1ElementCount];
UInt32[] inArray2 = new UInt32[Op2ElementCount];
UInt32[] outArray = new UInt32[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(UInt32[] left, UInt32[] right, UInt32[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (result[0] != ((left[0] > right[0]) ? uint.MaxValue : 0))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (result[i] != ((left[i] > right[i]) ? uint.MaxValue : 0))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Vector128)}.{nameof(Vector128.GreaterThan)}<UInt32>(Vector128<UInt32>, Vector128<UInt32>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void GreaterThanUInt32()
{
var test = new VectorBinaryOpTest__GreaterThanUInt32();
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class VectorBinaryOpTest__GreaterThanUInt32
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(UInt32[] inArray1, UInt32[] inArray2, UInt32[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<UInt32>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<UInt32>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt32>();
if ((alignment != 32 && alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<UInt32, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<UInt32, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<UInt32> _fld1;
public Vector128<UInt32> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
return testStruct;
}
public void RunStructFldScenario(VectorBinaryOpTest__GreaterThanUInt32 testClass)
{
var result = Vector128.GreaterThan(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32);
private static UInt32[] _data1 = new UInt32[Op1ElementCount];
private static UInt32[] _data2 = new UInt32[Op2ElementCount];
private static Vector128<UInt32> _clsVar1;
private static Vector128<UInt32> _clsVar2;
private Vector128<UInt32> _fld1;
private Vector128<UInt32> _fld2;
private DataTable _dataTable;
static VectorBinaryOpTest__GreaterThanUInt32()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _clsVar1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _clsVar2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
}
public VectorBinaryOpTest__GreaterThanUInt32()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _fld1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _fld2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
_dataTable = new DataTable(_data1, _data2, new UInt32[RetElementCount], LargestVectorSize);
}
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Vector128.GreaterThan(
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var method = typeof(Vector128).GetMethod(nameof(Vector128.GreaterThan), new Type[] {
typeof(Vector128<UInt32>),
typeof(Vector128<UInt32>)
});
if (method is null)
{
method = typeof(Vector128).GetMethod(nameof(Vector128.GreaterThan), 1, new Type[] {
typeof(Vector128<>).MakeGenericType(Type.MakeGenericMethodParameter(0)),
typeof(Vector128<>).MakeGenericType(Type.MakeGenericMethodParameter(0))
});
}
if (method.IsGenericMethodDefinition)
{
method = method.MakeGenericMethod(typeof(UInt32));
}
var result = method.Invoke(null, new object[] {
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Vector128.GreaterThan(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr);
var result = Vector128.GreaterThan(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new VectorBinaryOpTest__GreaterThanUInt32();
var result = Vector128.GreaterThan(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Vector128.GreaterThan(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Vector128.GreaterThan(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
private void ValidateResult(Vector128<UInt32> op1, Vector128<UInt32> op2, void* result, [CallerMemberName] string method = "")
{
UInt32[] inArray1 = new UInt32[Op1ElementCount];
UInt32[] inArray2 = new UInt32[Op2ElementCount];
UInt32[] outArray = new UInt32[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
UInt32[] inArray1 = new UInt32[Op1ElementCount];
UInt32[] inArray2 = new UInt32[Op2ElementCount];
UInt32[] outArray = new UInt32[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt32>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(UInt32[] left, UInt32[] right, UInt32[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (result[0] != ((left[0] > right[0]) ? uint.MaxValue : 0))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (result[i] != ((left[i] > right[i]) ? uint.MaxValue : 0))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Vector128)}.{nameof(Vector128.GreaterThan)}<UInt32>(Vector128<UInt32>, Vector128<UInt32>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Private.Xml/src/System/Xml/Core/XmlTextEncoder.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.IO;
using System.Text;
using System.Diagnostics;
using System.Globalization;
namespace System.Xml
{
// XmlTextEncoder
//
// This class does special handling of text content for XML. For example
// it will replace special characters with entities whenever necessary.
internal sealed class XmlTextEncoder
{
//
// Fields
//
// output text writer
private readonly TextWriter _textWriter;
// true when writing out the content of attribute value
private bool _inAttribute;
// quote char of the attribute (when inAttribute)
private char _quoteChar;
// caching of attribute value
private StringBuilder? _attrValue;
private bool _cacheAttrValue;
//
// Constructor
//
internal XmlTextEncoder(TextWriter textWriter)
{
_textWriter = textWriter;
_quoteChar = '"';
}
//
// Internal methods and properties
//
internal char QuoteChar
{
set
{
_quoteChar = value;
}
}
internal void StartAttribute(bool cacheAttrValue)
{
_inAttribute = true;
_cacheAttrValue = cacheAttrValue;
if (cacheAttrValue)
{
if (_attrValue == null)
{
_attrValue = new StringBuilder();
}
else
{
_attrValue.Length = 0;
}
}
}
internal void EndAttribute()
{
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Length = 0;
}
_inAttribute = false;
_cacheAttrValue = false;
}
internal string AttributeValue
{
get
{
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
return _attrValue.ToString();
}
else
{
return string.Empty;
}
}
}
internal void WriteSurrogateChar(char lowChar, char highChar)
{
if (!XmlCharType.IsLowSurrogate(lowChar) ||
!XmlCharType.IsHighSurrogate(highChar))
{
throw XmlConvert.CreateInvalidSurrogatePairException(lowChar, highChar);
}
_textWriter.Write(highChar);
_textWriter.Write(lowChar);
}
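// Illustrative, hypothetical example: the parameters arrive low-surrogate first, but the pair is
// emitted in UTF-16 order (high, then low). Assuming U+1F600:
//   WriteSurrogateChar('\uDE00', '\uD83D');   // the TextWriter receives the chars D83D DE00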
internal void Write(char[] array!!, int offset, int count)
{
if (0 > offset)
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (0 > count)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (count > array.Length - offset)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append(array, offset, count);
}
int endPos = offset + count;
int i = offset;
char ch = (char)0;
while (true)
{
int startPos = i;
while (i < endPos && XmlCharType.IsAttributeValueChar(ch = array[i]))
{
i++;
}
if (startPos < i)
{
_textWriter.Write(array, startPos, i - startPos);
}
if (i == endPos)
{
break;
}
switch (ch)
{
case (char)0x9:
_textWriter.Write(ch);
break;
case (char)0xA:
case (char)0xD:
if (_inAttribute)
{
WriteCharEntityImpl(ch);
}
else
{
_textWriter.Write(ch);
}
break;
case '<':
WriteEntityRefImpl("lt");
break;
case '>':
WriteEntityRefImpl("gt");
break;
case '&':
WriteEntityRefImpl("amp");
break;
case '\'':
if (_inAttribute && _quoteChar == ch)
{
WriteEntityRefImpl("apos");
}
else
{
_textWriter.Write('\'');
}
break;
case '"':
if (_inAttribute && _quoteChar == ch)
{
WriteEntityRefImpl("quot");
}
else
{
_textWriter.Write('"');
}
break;
default:
if (XmlCharType.IsHighSurrogate(ch))
{
if (i + 1 < endPos)
{
WriteSurrogateChar(array[++i], ch);
}
else
{
throw new ArgumentException(SR.Xml_SurrogatePairSplit);
}
}
else if (XmlCharType.IsLowSurrogate(ch))
{
throw XmlConvert.CreateInvalidHighSurrogateCharException(ch);
}
else
{
Debug.Assert((ch < 0x20 && !XmlCharType.IsWhiteSpace(ch)) || (ch > 0xFFFD));
WriteCharEntityImpl(ch);
}
break;
}
i++;
}
}
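// Illustrative, hypothetical examples of the escaping rules implemented above:
//   element content:    a < b & c   ->   a &lt; b &amp; c
//   element content:    CR / LF     ->   written verbatim (only entitized inside attributes)
//   attribute (" quote): say "hi"   ->   say &quot;hi&quot;   (the active quote char is entitized)
//   attribute (" quote): it's       ->   it's                 (the other quote char is left alone)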
internal void WriteSurrogateCharEntity(char lowChar, char highChar)
{
if (!XmlCharType.IsLowSurrogate(lowChar) ||
!XmlCharType.IsHighSurrogate(highChar))
{
throw XmlConvert.CreateInvalidSurrogatePairException(lowChar, highChar);
}
int surrogateChar = XmlCharType.CombineSurrogateChar(lowChar, highChar);
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append(highChar);
_attrValue.Append(lowChar);
}
_textWriter.Write("&#x");
_textWriter.Write(surrogateChar.ToString("X", NumberFormatInfo.InvariantInfo));
_textWriter.Write(';');
}
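// Illustrative, hypothetical example: for (lowChar: '\uDE00', highChar: '\uD83D') the combined scalar
// is 0x1F600, so the TextWriter receives "&#x1F600;" while any cached attribute value keeps the raw pair.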
internal void Write(ReadOnlySpan<char> text)
{
if (text.IsEmpty)
{
return;
}
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append(text);
}
// scan through the string to see if there are any characters to be escaped
int len = text.Length;
int i = 0;
int startPos = 0;
char ch = (char)0;
while (true)
{
while (i < len && XmlCharType.IsAttributeValueChar(ch = text[i]))
{
i++;
}
if (i == len)
{
// reached the end of the string -> write it whole out
_textWriter.Write(text);
return;
}
if (_inAttribute)
{
if (ch == 0x9)
{
i++;
continue;
}
}
else
{
if (ch == 0x9 || ch == 0xA || ch == 0xD || ch == '"' || ch == '\'')
{
i++;
continue;
}
}
// some character that needs to be escaped is found:
break;
}
while (true)
{
if (startPos < i)
{
_textWriter.Write(text.Slice(startPos, i - startPos));
}
if (i == len)
{
break;
}
switch (ch)
{
case (char)0x9:
_textWriter.Write(ch);
break;
case (char)0xA:
case (char)0xD:
if (_inAttribute)
{
WriteCharEntityImpl(ch);
}
else
{
_textWriter.Write(ch);
}
break;
case '<':
WriteEntityRefImpl("lt");
break;
case '>':
WriteEntityRefImpl("gt");
break;
case '&':
WriteEntityRefImpl("amp");
break;
case '\'':
if (_inAttribute && _quoteChar == ch)
{
WriteEntityRefImpl("apos");
}
else
{
_textWriter.Write('\'');
}
break;
case '"':
if (_inAttribute && _quoteChar == ch)
{
WriteEntityRefImpl("quot");
}
else
{
_textWriter.Write('"');
}
break;
default:
if (XmlCharType.IsHighSurrogate(ch))
{
if (i + 1 < len)
{
WriteSurrogateChar(text[++i], ch);
}
else
{
throw XmlConvert.CreateInvalidSurrogatePairException(text[i], ch);
}
}
else if (XmlCharType.IsLowSurrogate(ch))
{
throw XmlConvert.CreateInvalidHighSurrogateCharException(ch);
}
else
{
Debug.Assert((ch < 0x20 && !XmlCharType.IsWhiteSpace(ch)) || (ch > 0xFFFD));
WriteCharEntityImpl(ch);
}
break;
}
i++;
startPos = i;
while (i < len && XmlCharType.IsAttributeValueChar(ch = text[i]))
{
i++;
}
}
}
internal void WriteRawWithSurrogateChecking(string text)
{
if (text == null)
{
return;
}
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append(text);
}
int len = text.Length;
int i = 0;
char ch = (char)0;
while (true)
{
while (i < len && (XmlCharType.IsCharData((ch = text[i])) || ch < 0x20))
{
i++;
}
if (i == len)
{
break;
}
if (XmlCharType.IsHighSurrogate(ch))
{
if (i + 1 < len)
{
char lowChar = text[i + 1];
if (XmlCharType.IsLowSurrogate(lowChar))
{
i += 2;
continue;
}
else
{
throw XmlConvert.CreateInvalidSurrogatePairException(lowChar, ch);
}
}
throw new ArgumentException(SR.Xml_InvalidSurrogateMissingLowChar);
}
else if (XmlCharType.IsLowSurrogate(ch))
{
throw XmlConvert.CreateInvalidHighSurrogateCharException(ch);
}
else
{
i++;
}
}
_textWriter.Write(text);
return;
}
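// Illustrative, hypothetical example: WriteRawWithSurrogateChecking("<b>") emits "<b>" verbatim;
// no entitization happens here, only surrogate pairing is validated before the single Write call.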
internal void WriteRaw(char[] array!!, int offset, int count)
{
if (0 > count)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (0 > offset)
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (count > array.Length - offset)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append(array, offset, count);
}
_textWriter.Write(array, offset, count);
}
internal void WriteCharEntity(char ch)
{
if (XmlCharType.IsSurrogate(ch))
{
throw new ArgumentException(SR.Xml_InvalidSurrogateMissingLowChar);
}
string strVal = ((int)ch).ToString("X", NumberFormatInfo.InvariantInfo);
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append("&#x");
_attrValue.Append(strVal);
_attrValue.Append(';');
}
WriteCharEntityImpl(strVal);
}
internal void WriteEntityRef(string name)
{
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append('&');
_attrValue.Append(name);
_attrValue.Append(';');
}
WriteEntityRefImpl(name);
}
//
// Private implementation methods
//
private void WriteCharEntityImpl(char ch)
{
WriteCharEntityImpl(((int)ch).ToString("X", NumberFormatInfo.InvariantInfo));
}
private void WriteCharEntityImpl(string strVal)
{
_textWriter.Write("&#x");
_textWriter.Write(strVal);
_textWriter.Write(';');
}
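// Illustrative, hypothetical example: WriteCharEntityImpl('\u00A0') emits "&#xA0;"; this class always
// uses the hexadecimal character-reference form.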
private void WriteEntityRefImpl(string name)
{
_textWriter.Write('&');
_textWriter.Write(name);
_textWriter.Write(';');
}
}
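// A minimal, hypothetical usage sketch (variable names invented for illustration; only the members
// defined above are assumed). A writer such as XmlTextWriter drives the encoder roughly like this:
//
//   var sw = new StringWriter();
//   var encoder = new XmlTextEncoder(sw);
//
//   encoder.Write("a < b & c");                  // element content -> "a &lt; b &amp; c"
//
//   encoder.QuoteChar = '"';
//   encoder.StartAttribute(cacheAttrValue: true);
//   encoder.Write("say \"hi\"");                 // sw receives: say &quot;hi&quot;
//   string raw = encoder.AttributeValue;         // cached, unescaped value: say "hi"
//   encoder.EndAttribute();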
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.IO;
using System.Text;
using System.Diagnostics;
using System.Globalization;
namespace System.Xml
{
// XmlTextEncoder
//
// This class does special handling of text content for XML. For example,
// it will replace special characters with entities whenever necessary.
internal sealed class XmlTextEncoder
{
//
// Fields
//
// output text writer
private readonly TextWriter _textWriter;
// true when writing out the content of attribute value
private bool _inAttribute;
// quote char of the attribute (when inAttribute)
private char _quoteChar;
// caching of attribute value
private StringBuilder? _attrValue;
private bool _cacheAttrValue;
//
// Constructor
//
internal XmlTextEncoder(TextWriter textWriter)
{
_textWriter = textWriter;
_quoteChar = '"';
}
//
// Internal methods and properties
//
internal char QuoteChar
{
set
{
_quoteChar = value;
}
}
internal void StartAttribute(bool cacheAttrValue)
{
_inAttribute = true;
_cacheAttrValue = cacheAttrValue;
if (cacheAttrValue)
{
if (_attrValue == null)
{
_attrValue = new StringBuilder();
}
else
{
_attrValue.Length = 0;
}
}
}
internal void EndAttribute()
{
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Length = 0;
}
_inAttribute = false;
_cacheAttrValue = false;
}
internal string AttributeValue
{
get
{
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
return _attrValue.ToString();
}
else
{
return string.Empty;
}
}
}
internal void WriteSurrogateChar(char lowChar, char highChar)
{
if (!XmlCharType.IsLowSurrogate(lowChar) ||
!XmlCharType.IsHighSurrogate(highChar))
{
throw XmlConvert.CreateInvalidSurrogatePairException(lowChar, highChar);
}
_textWriter.Write(highChar);
_textWriter.Write(lowChar);
}
internal void Write(char[] array!!, int offset, int count)
{
if (0 > offset)
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (0 > count)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (count > array.Length - offset)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append(array, offset, count);
}
int endPos = offset + count;
int i = offset;
char ch = (char)0;
while (true)
{
int startPos = i;
while (i < endPos && XmlCharType.IsAttributeValueChar(ch = array[i]))
{
i++;
}
if (startPos < i)
{
_textWriter.Write(array, startPos, i - startPos);
}
if (i == endPos)
{
break;
}
switch (ch)
{
case (char)0x9:
_textWriter.Write(ch);
break;
case (char)0xA:
case (char)0xD:
if (_inAttribute)
{
WriteCharEntityImpl(ch);
}
else
{
_textWriter.Write(ch);
}
break;
case '<':
WriteEntityRefImpl("lt");
break;
case '>':
WriteEntityRefImpl("gt");
break;
case '&':
WriteEntityRefImpl("amp");
break;
case '\'':
if (_inAttribute && _quoteChar == ch)
{
WriteEntityRefImpl("apos");
}
else
{
_textWriter.Write('\'');
}
break;
case '"':
if (_inAttribute && _quoteChar == ch)
{
WriteEntityRefImpl("quot");
}
else
{
_textWriter.Write('"');
}
break;
default:
if (XmlCharType.IsHighSurrogate(ch))
{
if (i + 1 < endPos)
{
WriteSurrogateChar(array[++i], ch);
}
else
{
throw new ArgumentException(SR.Xml_SurrogatePairSplit);
}
}
else if (XmlCharType.IsLowSurrogate(ch))
{
throw XmlConvert.CreateInvalidHighSurrogateCharException(ch);
}
else
{
Debug.Assert((ch < 0x20 && !XmlCharType.IsWhiteSpace(ch)) || (ch > 0xFFFD));
WriteCharEntityImpl(ch);
}
break;
}
i++;
}
}
internal void WriteSurrogateCharEntity(char lowChar, char highChar)
{
if (!XmlCharType.IsLowSurrogate(lowChar) ||
!XmlCharType.IsHighSurrogate(highChar))
{
throw XmlConvert.CreateInvalidSurrogatePairException(lowChar, highChar);
}
int surrogateChar = XmlCharType.CombineSurrogateChar(lowChar, highChar);
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append(highChar);
_attrValue.Append(lowChar);
}
_textWriter.Write("&#x");
_textWriter.Write(surrogateChar.ToString("X", NumberFormatInfo.InvariantInfo));
_textWriter.Write(';');
}
internal void Write(ReadOnlySpan<char> text)
{
if (text.IsEmpty)
{
return;
}
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append(text);
}
// scan through the string to see if there are any characters to be escaped
int len = text.Length;
int i = 0;
int startPos = 0;
char ch = (char)0;
while (true)
{
while (i < len && XmlCharType.IsAttributeValueChar(ch = text[i]))
{
i++;
}
if (i == len)
{
// reached the end of the string -> write it whole out
_textWriter.Write(text);
return;
}
if (_inAttribute)
{
if (ch == 0x9)
{
i++;
continue;
}
}
else
{
if (ch == 0x9 || ch == 0xA || ch == 0xD || ch == '"' || ch == '\'')
{
i++;
continue;
}
}
// some character that needs to be escaped is found:
break;
}
while (true)
{
if (startPos < i)
{
_textWriter.Write(text.Slice(startPos, i - startPos));
}
if (i == len)
{
break;
}
switch (ch)
{
case (char)0x9:
_textWriter.Write(ch);
break;
case (char)0xA:
case (char)0xD:
if (_inAttribute)
{
WriteCharEntityImpl(ch);
}
else
{
_textWriter.Write(ch);
}
break;
case '<':
WriteEntityRefImpl("lt");
break;
case '>':
WriteEntityRefImpl("gt");
break;
case '&':
WriteEntityRefImpl("amp");
break;
case '\'':
if (_inAttribute && _quoteChar == ch)
{
WriteEntityRefImpl("apos");
}
else
{
_textWriter.Write('\'');
}
break;
case '"':
if (_inAttribute && _quoteChar == ch)
{
WriteEntityRefImpl("quot");
}
else
{
_textWriter.Write('"');
}
break;
default:
if (XmlCharType.IsHighSurrogate(ch))
{
if (i + 1 < len)
{
WriteSurrogateChar(text[++i], ch);
}
else
{
throw XmlConvert.CreateInvalidSurrogatePairException(text[i], ch);
}
}
else if (XmlCharType.IsLowSurrogate(ch))
{
throw XmlConvert.CreateInvalidHighSurrogateCharException(ch);
}
else
{
Debug.Assert((ch < 0x20 && !XmlCharType.IsWhiteSpace(ch)) || (ch > 0xFFFD));
WriteCharEntityImpl(ch);
}
break;
}
i++;
startPos = i;
while (i < len && XmlCharType.IsAttributeValueChar(ch = text[i]))
{
i++;
}
}
}
internal void WriteRawWithSurrogateChecking(string text)
{
if (text == null)
{
return;
}
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append(text);
}
int len = text.Length;
int i = 0;
char ch = (char)0;
while (true)
{
while (i < len && (XmlCharType.IsCharData((ch = text[i])) || ch < 0x20))
{
i++;
}
if (i == len)
{
break;
}
if (XmlCharType.IsHighSurrogate(ch))
{
if (i + 1 < len)
{
char lowChar = text[i + 1];
if (XmlCharType.IsLowSurrogate(lowChar))
{
i += 2;
continue;
}
else
{
throw XmlConvert.CreateInvalidSurrogatePairException(lowChar, ch);
}
}
throw new ArgumentException(SR.Xml_InvalidSurrogateMissingLowChar);
}
else if (XmlCharType.IsLowSurrogate(ch))
{
throw XmlConvert.CreateInvalidHighSurrogateCharException(ch);
}
else
{
i++;
}
}
_textWriter.Write(text);
return;
}
internal void WriteRaw(char[] array!!, int offset, int count)
{
if (0 > count)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (0 > offset)
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (count > array.Length - offset)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append(array, offset, count);
}
_textWriter.Write(array, offset, count);
}
internal void WriteCharEntity(char ch)
{
if (XmlCharType.IsSurrogate(ch))
{
throw new ArgumentException(SR.Xml_InvalidSurrogateMissingLowChar);
}
string strVal = ((int)ch).ToString("X", NumberFormatInfo.InvariantInfo);
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append("&#x");
_attrValue.Append(strVal);
_attrValue.Append(';');
}
WriteCharEntityImpl(strVal);
}
internal void WriteEntityRef(string name)
{
if (_cacheAttrValue)
{
Debug.Assert(_attrValue != null);
_attrValue.Append('&');
_attrValue.Append(name);
_attrValue.Append(';');
}
WriteEntityRefImpl(name);
}
//
// Private implementation methods
//
private void WriteCharEntityImpl(char ch)
{
WriteCharEntityImpl(((int)ch).ToString("X", NumberFormatInfo.InvariantInfo));
}
private void WriteCharEntityImpl(string strVal)
{
_textWriter.Write("&#x");
_textWriter.Write(strVal);
_textWriter.Write(';');
}
private void WriteEntityRefImpl(string name)
{
_textWriter.Write('&');
_textWriter.Write(name);
_textWriter.Write(';');
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Security.Cryptography.ProtectedData/ref/System.Security.Cryptography.ProtectedData.netframework.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// ------------------------------------------------------------------------------
// Changes to this file must follow the https://aka.ms/api-review process.
// ------------------------------------------------------------------------------
using System.Runtime.CompilerServices;
[assembly: TypeForwardedTo(typeof(System.Security.Cryptography.DataProtectionScope))]
[assembly: TypeForwardedTo(typeof(System.Security.Cryptography.ProtectedData))]
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// ------------------------------------------------------------------------------
// Changes to this file must follow the https://aka.ms/api-review process.
// ------------------------------------------------------------------------------
using System.Runtime.CompilerServices;
[assembly: TypeForwardedTo(typeof(System.Security.Cryptography.DataProtectionScope))]
[assembly: TypeForwardedTo(typeof(System.Security.Cryptography.ProtectedData))]
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Private.CoreLib/src/System/Text/Unicode/Utf8Utility.Transcoding.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Buffers;
using System.Diagnostics;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
using System.Runtime.Intrinsics.X86;
namespace System.Text.Unicode
{
internal static unsafe partial class Utf8Utility
{
// On method return, pInputBufferRemaining and pOutputBufferRemaining will both point to where
// the next byte would have been consumed from / the next char would have been written to.
// inputLength in bytes, outputCharsRemaining in chars.
public static OperationStatus TranscodeToUtf16(byte* pInputBuffer, int inputLength, char* pOutputBuffer, int outputCharsRemaining, out byte* pInputBufferRemaining, out char* pOutputBufferRemaining)
{
Debug.Assert(inputLength >= 0, "Input length must not be negative.");
Debug.Assert(pInputBuffer != null || inputLength == 0, "Input length must be zero if input buffer pointer is null.");
Debug.Assert(outputCharsRemaining >= 0, "Destination length must not be negative.");
Debug.Assert(pOutputBuffer != null || outputCharsRemaining == 0, "Destination length must be zero if destination buffer pointer is null.");
// First, try vectorized conversion.
{
nuint numElementsConverted = ASCIIUtility.WidenAsciiToUtf16(pInputBuffer, pOutputBuffer, (uint)Math.Min(inputLength, outputCharsRemaining));
pInputBuffer += numElementsConverted;
pOutputBuffer += numElementsConverted;
// Quick check - did we just end up consuming the entire input buffer?
// If so, short-circuit the remainder of the method.
if ((int)numElementsConverted == inputLength)
{
pInputBufferRemaining = pInputBuffer;
pOutputBufferRemaining = pOutputBuffer;
return OperationStatus.Done;
}
inputLength -= (int)numElementsConverted;
outputCharsRemaining -= (int)numElementsConverted;
}
if (inputLength < sizeof(uint))
{
goto ProcessInputOfLessThanDWordSize;
}
byte* pFinalPosWhereCanReadDWordFromInputBuffer = pInputBuffer + (uint)inputLength - 4;
// Begin the main loop.
#if DEBUG
byte* pLastBufferPosProcessed = null; // used for invariant checking in debug builds
#endif
Debug.Assert(pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer);
do
{
// Read 32 bits at a time. This is enough to hold any possible UTF8-encoded scalar.
uint thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
AfterReadDWord:
#if DEBUG
Debug.Assert(pLastBufferPosProcessed < pInputBuffer, "Algorithm should've made forward progress since last read.");
pLastBufferPosProcessed = pInputBuffer;
#endif
// First, check for the common case of all-ASCII bytes.
if (ASCIIUtility.AllBytesInUInt32AreAscii(thisDWord))
{
// We read an all-ASCII sequence.
if (outputCharsRemaining < sizeof(uint))
{
goto ProcessRemainingBytesSlow; // running out of space, but may be able to write some data
}
ASCIIUtility.WidenFourAsciiBytesToUtf16AndWriteToBuffer(ref *pOutputBuffer, thisDWord);
pInputBuffer += 4;
pOutputBuffer += 4;
outputCharsRemaining -= 4;
// If we saw a sequence of all ASCII, there's a good chance a significant amount of following data is also ASCII.
// Below is basically unrolled loops with poor man's vectorization.
uint remainingInputBytes = (uint)(void*)Unsafe.ByteOffset(ref *pInputBuffer, ref *pFinalPosWhereCanReadDWordFromInputBuffer) + 4;
uint maxIters = Math.Min(remainingInputBytes, (uint)outputCharsRemaining) / (2 * sizeof(uint));
uint secondDWord;
int i;
for (i = 0; (uint)i < maxIters; i++)
{
// Reading two DWORDs in parallel benchmarked faster than reading a single QWORD.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
secondDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer + sizeof(uint));
if (!ASCIIUtility.AllBytesInUInt32AreAscii(thisDWord | secondDWord))
{
goto LoopTerminatedEarlyDueToNonAsciiData;
}
pInputBuffer += 8;
ASCIIUtility.WidenFourAsciiBytesToUtf16AndWriteToBuffer(ref pOutputBuffer[0], thisDWord);
ASCIIUtility.WidenFourAsciiBytesToUtf16AndWriteToBuffer(ref pOutputBuffer[4], secondDWord);
pOutputBuffer += 8;
}
outputCharsRemaining -= 8 * i;
continue; // need to perform a bounds check because we might be running out of data
LoopTerminatedEarlyDueToNonAsciiData:
if (ASCIIUtility.AllBytesInUInt32AreAscii(thisDWord))
{
// The first DWORD contained all-ASCII bytes, so expand it.
ASCIIUtility.WidenFourAsciiBytesToUtf16AndWriteToBuffer(ref *pOutputBuffer, thisDWord);
// continue the outer loop from the second DWORD
Debug.Assert(!ASCIIUtility.AllBytesInUInt32AreAscii(secondDWord));
thisDWord = secondDWord;
pInputBuffer += 4;
pOutputBuffer += 4;
outputCharsRemaining -= 4;
}
outputCharsRemaining -= 8 * i;
// We know that there's *at least* one DWORD of data remaining in the buffer.
// We also know that it's not all-ASCII. We can skip the logic at the beginning of the main loop.
goto AfterReadDWordSkipAllBytesAsciiCheck;
}
AfterReadDWordSkipAllBytesAsciiCheck:
Debug.Assert(!ASCIIUtility.AllBytesInUInt32AreAscii(thisDWord)); // this should have been handled earlier
// Next, try stripping off ASCII bytes one at a time.
// We only handle up to three ASCII bytes here since we handled the four ASCII byte case above.
if (UInt32FirstByteIsAscii(thisDWord))
{
if (outputCharsRemaining >= 3)
{
// Fast-track: we don't need to check the destination length for subsequent
// ASCII bytes since we know we can write them all now.
uint thisDWordLittleEndian = ToLittleEndian(thisDWord);
nuint adjustment = 1;
pOutputBuffer[0] = (char)(byte)thisDWordLittleEndian;
if (UInt32SecondByteIsAscii(thisDWord))
{
adjustment++;
thisDWordLittleEndian >>= 8;
pOutputBuffer[1] = (char)(byte)thisDWordLittleEndian;
if (UInt32ThirdByteIsAscii(thisDWord))
{
adjustment++;
thisDWordLittleEndian >>= 8;
pOutputBuffer[2] = (char)(byte)thisDWordLittleEndian;
}
}
pInputBuffer += adjustment;
pOutputBuffer += adjustment;
outputCharsRemaining -= (int)adjustment;
}
else
{
// Slow-track: we need to make sure each individual write has enough
// of a buffer so that we don't overrun the destination.
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall;
}
uint thisDWordLittleEndian = ToLittleEndian(thisDWord);
pInputBuffer++;
*pOutputBuffer++ = (char)(byte)thisDWordLittleEndian;
outputCharsRemaining--;
if (UInt32SecondByteIsAscii(thisDWord))
{
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall;
}
pInputBuffer++;
thisDWordLittleEndian >>= 8;
*pOutputBuffer++ = (char)(byte)thisDWordLittleEndian;
// We can perform a small optimization here. We know at this point that
// the output buffer is fully consumed (we read two ASCII bytes and wrote
// two ASCII chars, and we checked earlier that the destination buffer
// can't store a third byte). If the next byte is ASCII, we can jump straight
// to the return statement since the end-of-method logic only relies on the
// destination buffer pointer -- NOT the output chars remaining count -- being
// correct. If the next byte is not ASCII, we'll need to continue with the
// rest of the main loop, but we can set the buffer length directly to zero
// rather than decrementing it from 1 to 0.
Debug.Assert(outputCharsRemaining == 1);
if (UInt32ThirdByteIsAscii(thisDWord))
{
goto OutputBufferTooSmall;
}
else
{
outputCharsRemaining = 0;
}
}
}
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessRemainingBytesSlow; // input buffer doesn't contain enough data to read a DWORD
}
else
{
// The input buffer at the current offset contains a non-ASCII byte.
// Read an entire DWORD and fall through to multi-byte consumption logic.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
}
}
BeforeProcessTwoByteSequence:
// At this point, we know we're working with a multi-byte code unit,
// but we haven't yet validated it.
// The masks and comparands are derived from the Unicode Standard, Table 3-6.
// Additionally, we need to check for valid byte sequences per Table 3-7.
// Check the 2-byte case.
if (UInt32BeginsWithUtf8TwoByteMask(thisDWord))
{
// Per Table 3-7, valid sequences are:
// [ C2..DF ] [ 80..BF ]
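// Illustrative, hypothetical example: C3 A9 ('é', U+00E9) satisfies the mask, and the scalar is
// recovered as ((0xC3 & 0x1F) << 6) | (0xA9 & 0x3F) = 0xE9. An overlong form such as C0 A9 is
// rejected by the check below.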
if (UInt32BeginsWithOverlongUtf8TwoByteSequence(thisDWord))
{
goto Error;
}
ProcessTwoByteSequenceSkipOverlongFormCheck:
// Optimization: If this is a two-byte-per-character language like Cyrillic or Hebrew,
// there's a good chance that if we see one two-byte run then there's another two-byte
// run immediately after. Let's check that now.
// On little-endian platforms, we can check for the two-byte UTF8 mask *and* validate that
// the value isn't overlong using a single comparison. On big-endian platforms, we'll need
// to validate the mask and validate that the sequence isn't overlong as two separate comparisons.
if ((BitConverter.IsLittleEndian && UInt32EndsWithValidUtf8TwoByteSequenceLittleEndian(thisDWord))
|| (!BitConverter.IsLittleEndian && (UInt32EndsWithUtf8TwoByteMask(thisDWord) && !UInt32EndsWithOverlongUtf8TwoByteSequence(thisDWord))))
{
// We have two runs of two bytes each.
if (outputCharsRemaining < 2)
{
goto ProcessRemainingBytesSlow; // running out of output buffer
}
Unsafe.WriteUnaligned<uint>(pOutputBuffer, ExtractTwoCharsPackedFromTwoAdjacentTwoByteSequences(thisDWord));
pInputBuffer += 4;
pOutputBuffer += 2;
outputCharsRemaining -= 2;
if (pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer)
{
// Optimization: If we read a long run of two-byte sequences, the next sequence is probably
// also two bytes. Check for that first before going back to the beginning of the loop.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
if (BitConverter.IsLittleEndian)
{
if (UInt32BeginsWithValidUtf8TwoByteSequenceLittleEndian(thisDWord))
{
// The next sequence is a valid two-byte sequence.
goto ProcessTwoByteSequenceSkipOverlongFormCheck;
}
}
else
{
if (UInt32BeginsWithUtf8TwoByteMask(thisDWord))
{
if (UInt32BeginsWithOverlongUtf8TwoByteSequence(thisDWord))
{
goto Error; // The next sequence purports to be a 2-byte sequence but is overlong.
}
goto ProcessTwoByteSequenceSkipOverlongFormCheck;
}
}
// If we reached this point, the next sequence is something other than a valid
// two-byte sequence, so go back to the beginning of the loop.
goto AfterReadDWord;
}
else
{
goto ProcessRemainingBytesSlow; // Running out of data - go down slow path
}
}
// The buffer contains a 2-byte sequence followed by 2 bytes that aren't a 2-byte sequence.
// Unlikely that a 3-byte sequence would follow a 2-byte sequence, so perhaps remaining
// bytes are ASCII?
uint charToWrite = ExtractCharFromFirstTwoByteSequence(thisDWord); // optimistically compute this now, but don't store until we know dest is large enough
if (UInt32ThirdByteIsAscii(thisDWord))
{
if (UInt32FourthByteIsAscii(thisDWord))
{
if (outputCharsRemaining < 3)
{
goto ProcessRemainingBytesSlow; // running out of output buffer
}
pOutputBuffer[0] = (char)charToWrite;
if (BitConverter.IsLittleEndian)
{
thisDWord >>= 16;
pOutputBuffer[1] = (char)(byte)thisDWord;
thisDWord >>= 8;
pOutputBuffer[2] = (char)thisDWord;
}
else
{
pOutputBuffer[2] = (char)(byte)thisDWord;
pOutputBuffer[1] = (char)(byte)(thisDWord >> 8);
}
pInputBuffer += 4;
pOutputBuffer += 3;
outputCharsRemaining -= 3;
continue; // go back to original bounds check and check for ASCII
}
else
{
if (outputCharsRemaining < 2)
{
goto ProcessRemainingBytesSlow; // running out of output buffer
}
pOutputBuffer[0] = (char)charToWrite;
pOutputBuffer[1] = (char)(byte)(thisDWord >> (BitConverter.IsLittleEndian ? 16 : 8));
pInputBuffer += 3;
pOutputBuffer += 2;
outputCharsRemaining -= 2;
// A two-byte sequence followed by an ASCII byte followed by a non-ASCII byte.
// Read in the next DWORD and jump directly to the start of the multi-byte processing block.
if (pFinalPosWhereCanReadDWordFromInputBuffer < pInputBuffer)
{
goto ProcessRemainingBytesSlow; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
goto BeforeProcessTwoByteSequence;
}
}
}
else
{
if (outputCharsRemaining == 0)
{
goto ProcessRemainingBytesSlow; // running out of output buffer
}
pOutputBuffer[0] = (char)charToWrite;
pInputBuffer += 2;
pOutputBuffer++;
outputCharsRemaining--;
if (pFinalPosWhereCanReadDWordFromInputBuffer < pInputBuffer)
{
goto ProcessRemainingBytesSlow; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
goto BeforeProcessThreeByteSequence; // we know the next byte isn't ASCII, and it's not the start of a 2-byte sequence (this was checked above)
}
}
}
// Check the 3-byte case.
BeforeProcessThreeByteSequence:
if (UInt32BeginsWithUtf8ThreeByteMask(thisDWord))
{
ProcessThreeByteSequenceWithCheck:
// We need to check for overlong or surrogate three-byte sequences.
//
// Per Table 3-7, valid sequences are:
// [ E0 ] [ A0..BF ] [ 80..BF ]
// [ E1..EC ] [ 80..BF ] [ 80..BF ]
// [ ED ] [ 80..9F ] [ 80..BF ]
// [ EE..EF ] [ 80..BF ] [ 80..BF ]
//
// Big-endian examples of using the above validation table:
// E0A0 = 1110 0000 1010 0000 => invalid (overlong ) patterns are 1110 0000 100# ####
// ED9F = 1110 1101 1001 1111 => invalid (surrogate) patterns are 1110 1101 101# ####
// If using the bitmask ......................................... 0000 1111 0010 0000 (=0F20),
// Then invalid (overlong) patterns match the comparand ......... 0000 0000 0000 0000 (=0000),
// And invalid (surrogate) patterns match the comparand ......... 0000 1101 0010 0000 (=0D20).
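// Illustrative, hypothetical example: E2 82 AC ('€', U+20AC) passes both checks, giving
// ((0xE2 & 0x0F) << 12) | ((0x82 & 0x3F) << 6) | (0xAC & 0x3F) = 0x20AC, while E0 80 AF (overlong)
// and ED A0 80 (the UTF-16 surrogate U+D800) are rejected here.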
if (BitConverter.IsLittleEndian)
{
// The "overlong or surrogate" check can be implemented using a single jump, but there's
// some overhead to moving the bits into the correct locations in order to perform the
// correct comparison, and in practice the processor's branch prediction capability is
// good enough that we shouldn't bother. So we'll use two jumps instead.
// Can't extract this check into its own helper method because JITter produces suboptimal
// assembly, even with aggressive inlining.
// Code below becomes 5 instructions: test, jz, lea, test, jz
if (((thisDWord & 0x0000_200Fu) == 0) || (((thisDWord - 0x0000_200Du) & 0x0000_200Fu) == 0))
{
goto Error; // overlong or surrogate
}
}
else
{
if (((thisDWord & 0x0F20_0000u) == 0) || (((thisDWord - 0x0D20_0000u) & 0x0F20_0000u) == 0))
{
goto Error; // overlong or surrogate
}
}
// At this point, we know the incoming scalar is well-formed.
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall; // not enough space in the destination buffer to write
}
// As an optimization, on compatible platforms check if a second three-byte sequence immediately
// follows the one we just read, and if so extract them together.
if (BitConverter.IsLittleEndian)
{
// First, check that the leftover byte from the original DWORD is in the range [ E0..EF ], which
// would indicate the potential start of a second three-byte sequence.
if (((thisDWord - 0xE000_0000u) & 0xF000_0000u) == 0)
{
// The const '3' below is correct because pFinalPosWhereCanReadDWordFromInputBuffer represents
// the final place where we can safely perform a DWORD read, and we want to probe whether it's
// safe to read a DWORD beginning at address &pInputBuffer[3].
if (outputCharsRemaining > 1 && (nint)(void*)Unsafe.ByteOffset(ref *pInputBuffer, ref *pFinalPosWhereCanReadDWordFromInputBuffer) >= 3)
{
// We're going to attempt to read a second 3-byte sequence and write them both out one after the other.
// We need to check the continuation bit mask on the remaining two bytes (and we may as well check the leading
// byte mask again since it's free), then perform overlong + surrogate checks. If the overlong or surrogate
// checks fail, we'll fall through to the remainder of the logic which will transcode the original valid
// 3-byte UTF-8 sequence we read; and on the next iteration of the loop the validation routine will run again,
// fail, and redirect control flow to the error handling logic at the very end of this method.
uint secondDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer + 3);
if (UInt32BeginsWithUtf8ThreeByteMask(secondDWord)
&& ((secondDWord & 0x0000_200Fu) != 0)
&& (((secondDWord - 0x0000_200Du) & 0x0000_200Fu) != 0))
{
pOutputBuffer[0] = (char)ExtractCharFromFirstThreeByteSequence(thisDWord);
pOutputBuffer[1] = (char)ExtractCharFromFirstThreeByteSequence(secondDWord);
pInputBuffer += 6;
pOutputBuffer += 2;
outputCharsRemaining -= 2;
// Drain any ASCII data following the second three-byte sequence.
goto CheckForAsciiByteAfterThreeByteSequence;
}
}
}
}
// Couldn't extract 2x three-byte sequences together, just do this one by itself.
*pOutputBuffer = (char)ExtractCharFromFirstThreeByteSequence(thisDWord);
pInputBuffer += 3;
pOutputBuffer++;
outputCharsRemaining--;
CheckForAsciiByteAfterThreeByteSequence:
// Occasionally one-off ASCII characters like spaces, periods, or newlines will make their way
// into the text. If this happens, strip it off now before seeing if the next character
// consists of three code units.
if (UInt32FourthByteIsAscii(thisDWord))
{
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall;
}
if (BitConverter.IsLittleEndian)
{
*pOutputBuffer = (char)(thisDWord >> 24);
}
else
{
*pOutputBuffer = (char)(byte)thisDWord;
}
pInputBuffer++;
pOutputBuffer++;
outputCharsRemaining--;
}
if (pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer)
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
// Optimization: A three-byte character could indicate CJK text, which makes it likely
// that the character following this one is also CJK. We'll check for a three-byte sequence
// marker now and jump directly to three-byte sequence processing if we see one, skipping
// all of the logic at the beginning of the loop.
if (UInt32BeginsWithUtf8ThreeByteMask(thisDWord))
{
goto ProcessThreeByteSequenceWithCheck; // found a three-byte sequence marker; validate and consume
}
else
{
goto AfterReadDWord; // probably ASCII punctuation or whitespace
}
}
else
{
goto ProcessRemainingBytesSlow; // Running out of data - go down slow path
}
}
// Assume the 4-byte case, but we need to validate.
{
// We need to check for overlong or invalid (over U+10FFFF) four-byte sequences.
//
// Per Table 3-7, valid sequences are:
// [ F0 ] [ 90..BF ] [ 80..BF ] [ 80..BF ]
// [ F1..F3 ] [ 80..BF ] [ 80..BF ] [ 80..BF ]
// [ F4 ] [ 80..8F ] [ 80..BF ] [ 80..BF ]
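// Illustrative, hypothetical example: F0 9F 98 80 (U+1F600) is valid:
//   ((0xF0 & 0x07) << 18) | ((0x9F & 0x3F) << 12) | ((0x98 & 0x3F) << 6) | (0x80 & 0x3F) = 0x1F600,
// and is emitted below as the UTF-16 surrogate pair D83D DE00. F0 80 80 80 (overlong) and
// F4 90 80 80 (above U+10FFFF) both fail the range checks.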
if (!UInt32BeginsWithUtf8FourByteMask(thisDWord))
{
goto Error;
}
// Now check for overlong / out-of-range sequences.
if (BitConverter.IsLittleEndian)
{
// The DWORD we read is [ 10xxxxxx 10yyyyyy 10zzzzzz 11110www ].
// We want to get the 'w' byte in front of the 'z' byte so that we can perform
// a single range comparison. We'll take advantage of the fact that the JITter
// can detect a ROR / ROL operation, then we'll just zero out the bytes that
// aren't involved in the range check.
uint toCheck = thisDWord & 0x0000_FFFFu;
// At this point, toCheck = [ 00000000 00000000 10zzzzzz 11110www ].
toCheck = BitOperations.RotateRight(toCheck, 8);
// At this point, toCheck = [ 11110www 00000000 00000000 10zzzzzz ].
if (!UnicodeUtility.IsInRangeInclusive(toCheck, 0xF000_0090u, 0xF400_008Fu))
{
goto Error;
}
}
else
{
if (!UnicodeUtility.IsInRangeInclusive(thisDWord, 0xF090_0000u, 0xF48F_FFFFu))
{
goto Error;
}
}
// Validation complete.
if (outputCharsRemaining < 2)
{
// There's no point to falling back to the "drain the input buffer" logic, since we know
// we can't write anything to the destination. So we'll just exit immediately.
goto OutputBufferTooSmall;
}
Unsafe.WriteUnaligned<uint>(pOutputBuffer, ExtractCharsFromFourByteSequence(thisDWord));
pInputBuffer += 4;
pOutputBuffer += 2;
outputCharsRemaining -= 2;
continue; // go back to beginning of loop for processing
}
} while (pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer);
ProcessRemainingBytesSlow:
inputLength = (int)(void*)Unsafe.ByteOffset(ref *pInputBuffer, ref *pFinalPosWhereCanReadDWordFromInputBuffer) + 4;
ProcessInputOfLessThanDWordSize:
while (inputLength > 0)
{
uint firstByte = pInputBuffer[0];
if (firstByte <= 0x7Fu)
{
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
// 1-byte (ASCII) case
*pOutputBuffer = (char)firstByte;
pInputBuffer++;
pOutputBuffer++;
inputLength--;
outputCharsRemaining--;
continue;
}
// Potentially the start of a multi-byte sequence?
firstByte -= 0xC2u;
if ((byte)firstByte <= (0xDFu - 0xC2u))
{
// Potentially a 2-byte sequence?
if (inputLength < 2)
{
goto InputBufferTooSmall; // out of data
}
uint secondByte = pInputBuffer[1];
if (!IsLowByteUtf8ContinuationByte(secondByte))
{
goto Error; // 2-byte marker not followed by continuation byte
}
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
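// The next line folds the marker removal into the arithmetic. Illustrative, hypothetical check
// with C3 A9 ('é'): firstByte already had 0xC2 subtracted (0xC3 - 0xC2 = 1), so
// (1 << 6) + 0xA9 + ((0xC2 - 0xC0) << 6) - 0x80 = 0x40 + 0xA9 + 0x80 - 0x80 = 0xE9.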
uint asChar = (firstByte << 6) + secondByte + ((0xC2u - 0xC0u) << 6) - 0x80u; // remove UTF-8 markers from scalar
*pOutputBuffer = (char)asChar;
pInputBuffer += 2;
pOutputBuffer++;
inputLength -= 2;
outputCharsRemaining--;
continue;
}
else if ((byte)firstByte <= (0xEFu - 0xC2u))
{
// Potentially a 3-byte sequence?
if (inputLength >= 3)
{
uint secondByte = pInputBuffer[1];
uint thirdByte = pInputBuffer[2];
if (!IsLowByteUtf8ContinuationByte(secondByte) || !IsLowByteUtf8ContinuationByte(thirdByte))
{
goto Error; // 3-byte marker not followed by 2 continuation bytes
}
// To speed up the validation logic below, we're not going to remove the UTF-8 markers from the partial char just yet.
// We account for this in the comparisons below.
uint partialChar = (firstByte << 12) + (secondByte << 6);
if (partialChar < ((0xE0u - 0xC2u) << 12) + (0xA0u << 6))
{
goto Error; // this is an overlong encoding; fail
}
partialChar -= ((0xEDu - 0xC2u) << 12) + (0xA0u << 6); // if partialChar = 0, we're at beginning of UTF-16 surrogate code point range
if (partialChar < 0x0800u /* number of code points in UTF-16 surrogate code point range */)
{
goto Error; // attempted to encode a UTF-16 surrogate code point; fail
}
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
// Now restore the full scalar value.
partialChar += thirdByte;
partialChar += 0xD800; // undo "move to beginning of UTF-16 surrogate code point range" from earlier, fold it with later adds
partialChar -= 0x80u; // remove third byte continuation marker
*pOutputBuffer = (char)partialChar;
pInputBuffer += 3;
pOutputBuffer++;
inputLength -= 3;
outputCharsRemaining--;
continue;
}
else if (inputLength >= 2)
{
uint secondByte = pInputBuffer[1];
if (!IsLowByteUtf8ContinuationByte(secondByte))
{
goto Error; // 3-byte marker not followed by continuation byte
}
// We can't build up the entire scalar value now, but we can check for overlong / surrogate representations
// from just the first two bytes.
uint partialChar = (firstByte << 6) + secondByte; // don't worry about fixing up the UTF-8 markers; we'll account for it in the below comparison
if (partialChar < ((0xE0u - 0xC2u) << 6) + 0xA0u)
{
goto Error; // failed overlong check
}
if (UnicodeUtility.IsInRangeInclusive(partialChar, ((0xEDu - 0xC2u) << 6) + 0xA0u, ((0xEEu - 0xC2u) << 6) + 0x7Fu))
{
goto Error; // failed surrogate check
}
}
goto InputBufferTooSmall; // out of data
}
else if ((byte)firstByte <= (0xF4u - 0xC2u))
{
// Potentially a 4-byte sequence?
if (inputLength < 2)
{
goto InputBufferTooSmall; // ran out of data
}
uint nextByte = pInputBuffer[1];
if (!IsLowByteUtf8ContinuationByte(nextByte))
{
goto Error; // 4-byte marker not followed by a continuation byte
}
uint asPartialChar = (firstByte << 6) + nextByte; // don't worry about fixing up the UTF-8 markers; we'll account for it in the below comparison
if (!UnicodeUtility.IsInRangeInclusive(asPartialChar, ((0xF0u - 0xC2u) << 6) + 0x90u, ((0xF4u - 0xC2u) << 6) + 0x8Fu))
{
goto Error; // failed overlong / out-of-range check
}
if (inputLength < 3)
{
goto InputBufferTooSmall; // ran out of data
}
if (!IsLowByteUtf8ContinuationByte(pInputBuffer[2]))
{
goto Error; // third byte in 4-byte sequence not a continuation byte
}
if (inputLength < 4)
{
goto InputBufferTooSmall; // ran out of data
}
if (!IsLowByteUtf8ContinuationByte(pInputBuffer[3]))
{
goto Error; // fourth byte in 4-byte sequence not a continuation byte
}
// If we read a valid astral scalar value, the only way we could've fallen down this code path
// is that we didn't have enough output buffer to write the result.
goto OutputBufferTooSmall;
}
else
{
goto Error; // didn't begin with [ C2 .. F4 ], so invalid multi-byte sequence header byte
}
}
OperationStatus retVal = OperationStatus.Done;
goto ReturnCommon;
InputBufferTooSmall:
retVal = OperationStatus.NeedMoreData;
goto ReturnCommon;
OutputBufferTooSmall:
retVal = OperationStatus.DestinationTooSmall;
goto ReturnCommon;
Error:
retVal = OperationStatus.InvalidData;
goto ReturnCommon;
ReturnCommon:
pInputBufferRemaining = pInputBuffer;
pOutputBufferRemaining = pOutputBuffer;
return retVal;
}
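// A minimal, hypothetical caller sketch (buffer names invented for illustration; real callers live
// in the Encoding/Utf8 plumbing elsewhere in the library):
//
//   fixed (byte* pUtf8 = utf8Bytes)
//   fixed (char* pUtf16 = utf16Chars)
//   {
//       OperationStatus status = TranscodeToUtf16(
//           pUtf8, utf8Bytes.Length, pUtf16, utf16Chars.Length,
//           out byte* pUtf8Remaining, out char* pUtf16Remaining);
//       int bytesRead = (int)(pUtf8Remaining - pUtf8);
//       int charsWritten = (int)(pUtf16Remaining - pUtf16);
//   }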
// On method return, pInputBufferRemaining and pOutputBufferRemaining will both point to where
// the next char would have been consumed from / the next byte would have been written to.
// inputLength in chars, outputBytesRemaining in bytes.
public static OperationStatus TranscodeToUtf8(char* pInputBuffer, int inputLength, byte* pOutputBuffer, int outputBytesRemaining, out char* pInputBufferRemaining, out byte* pOutputBufferRemaining)
{
const int CharsPerDWord = sizeof(uint) / sizeof(char);
Debug.Assert(inputLength >= 0, "Input length must not be negative.");
Debug.Assert(pInputBuffer != null || inputLength == 0, "Input length must be zero if input buffer pointer is null.");
Debug.Assert(outputBytesRemaining >= 0, "Destination length must not be negative.");
Debug.Assert(pOutputBuffer != null || outputBytesRemaining == 0, "Destination length must be zero if destination buffer pointer is null.");
// First, try vectorized conversion.
{
nuint numElementsConverted = ASCIIUtility.NarrowUtf16ToAscii(pInputBuffer, pOutputBuffer, (uint)Math.Min(inputLength, outputBytesRemaining));
pInputBuffer += numElementsConverted;
pOutputBuffer += numElementsConverted;
// Quick check - did we just end up consuming the entire input buffer?
// If so, short-circuit the remainder of the method.
if ((int)numElementsConverted == inputLength)
{
pInputBufferRemaining = pInputBuffer;
pOutputBufferRemaining = pOutputBuffer;
return OperationStatus.Done;
}
inputLength -= (int)numElementsConverted;
outputBytesRemaining -= (int)numElementsConverted;
}
if (inputLength < CharsPerDWord)
{
goto ProcessInputOfLessThanDWordSize;
}
char* pFinalPosWhereCanReadDWordFromInputBuffer = pInputBuffer + (uint)inputLength - CharsPerDWord;
// We have paths for SSE4.1 vectorization inside the inner loop. Since the below
// vector is only used in those code paths, we leave it uninitialized if SSE4.1
// is not enabled.
Vector128<short> nonAsciiUtf16DataMask;
if (Sse41.X64.IsSupported || (AdvSimd.Arm64.IsSupported && BitConverter.IsLittleEndian))
{
nonAsciiUtf16DataMask = Vector128.Create(unchecked((short)0xFF80)); // mask of non-ASCII bits in a UTF-16 char
}
// Begin the main loop.
#if DEBUG
char* pLastBufferPosProcessed = null; // used for invariant checking in debug builds
#endif
uint thisDWord;
Debug.Assert(pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer);
do
{
// Read 32 bits at a time. This is enough to hold any possible UTF16-encoded scalar.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
AfterReadDWord:
#if DEBUG
Debug.Assert(pLastBufferPosProcessed < pInputBuffer, "Algorithm should've made forward progress since last read.");
pLastBufferPosProcessed = pInputBuffer;
#endif
// First, check for the common case of all-ASCII chars.
if (Utf16Utility.AllCharsInUInt32AreAscii(thisDWord))
{
// We read an all-ASCII sequence (2 chars).
if (outputBytesRemaining < 2)
{
goto ProcessOneCharFromCurrentDWordAndFinish; // running out of space, but may be able to write some data
}
// The high WORD of the local declared below might be populated with garbage
// as a result of our shifts below, but that's ok since we're only going to
// write the low WORD.
//
// [ 00000000 0bbbbbbb | 00000000 0aaaaaaa ] -> [ 00000000 0bbbbbbb | 0bbbbbbb 0aaaaaaa ]
// (Same logic works regardless of endianness.)
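// Illustrative, hypothetical example (little-endian): for the chars "AB",
// thisDWord = 0x00420041 and thisDWord | (thisDWord >> 8) = 0x00424241; the low WORD (0x4241)
// is then written as the bytes 41 42, i.e. "AB".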
uint valueToWrite = thisDWord | (thisDWord >> 8);
Unsafe.WriteUnaligned<ushort>(pOutputBuffer, (ushort)valueToWrite);
pInputBuffer += 2;
pOutputBuffer += 2;
outputBytesRemaining -= 2;
// If we saw a sequence of all ASCII, there's a good chance a significant amount of following data is also ASCII.
// Below is basically unrolled loops with poor man's vectorization.
uint inputCharsRemaining = (uint)(pFinalPosWhereCanReadDWordFromInputBuffer - pInputBuffer) + 2;
uint minElementsRemaining = (uint)Math.Min(inputCharsRemaining, outputBytesRemaining);
if (Sse41.X64.IsSupported || (AdvSimd.Arm64.IsSupported && BitConverter.IsLittleEndian))
{
// Try reading and writing 8 elements per iteration.
uint maxIters = minElementsRemaining / 8;
ulong possibleNonAsciiQWord;
int i;
Vector128<short> utf16Data;
for (i = 0; (uint)i < maxIters; i++)
{
// The linker won't trim out nonAsciiUtf16DataMask unless this is in the loop.
// Luckily, this is a nop and will be elided by the JIT
Unsafe.SkipInit(out nonAsciiUtf16DataMask);
utf16Data = Unsafe.ReadUnaligned<Vector128<short>>(pInputBuffer);
if (AdvSimd.IsSupported)
{
Vector128<short> isUtf16DataNonAscii = AdvSimd.CompareTest(utf16Data, nonAsciiUtf16DataMask);
bool hasNonAsciiDataInVector = AdvSimd.Arm64.MinPairwise(isUtf16DataNonAscii, isUtf16DataNonAscii).AsUInt64().ToScalar() != 0;
if (hasNonAsciiDataInVector)
{
goto LoopTerminatedDueToNonAsciiDataInVectorLocal;
}
Vector64<byte> lower = AdvSimd.ExtractNarrowingSaturateUnsignedLower(utf16Data);
AdvSimd.Store(pOutputBuffer, lower);
}
else
{
if (!Sse41.TestZ(utf16Data, nonAsciiUtf16DataMask))
{
goto LoopTerminatedDueToNonAsciiDataInVectorLocal;
}
// narrow and write
Sse2.StoreScalar((ulong*)pOutputBuffer /* unaligned */, Sse2.PackUnsignedSaturate(utf16Data, utf16Data).AsUInt64());
}
pInputBuffer += 8;
pOutputBuffer += 8;
}
outputBytesRemaining -= 8 * i;
// Can we perform one more iteration, but reading & writing 4 elements instead of 8?
if ((minElementsRemaining & 4) != 0)
{
possibleNonAsciiQWord = Unsafe.ReadUnaligned<ulong>(pInputBuffer);
if (!Utf16Utility.AllCharsInUInt64AreAscii(possibleNonAsciiQWord))
{
goto LoopTerminatedDueToNonAsciiDataInPossibleNonAsciiQWordLocal;
}
utf16Data = Vector128.CreateScalarUnsafe(possibleNonAsciiQWord).AsInt16();
if (AdvSimd.IsSupported)
{
Vector64<byte> lower = AdvSimd.ExtractNarrowingSaturateUnsignedLower(utf16Data);
AdvSimd.StoreSelectedScalar((uint*)pOutputBuffer, lower.AsUInt32(), 0);
}
else
{
Unsafe.WriteUnaligned<uint>(pOutputBuffer, Sse2.ConvertToUInt32(Sse2.PackUnsignedSaturate(utf16Data, utf16Data).AsUInt32()));
}
pInputBuffer += 4;
pOutputBuffer += 4;
outputBytesRemaining -= 4;
}
continue; // Go back to beginning of main loop, read data, check for ASCII
LoopTerminatedDueToNonAsciiDataInVectorLocal:
outputBytesRemaining -= 8 * i;
if (Sse2.X64.IsSupported)
{
possibleNonAsciiQWord = Sse2.X64.ConvertToUInt64(utf16Data.AsUInt64());
}
else
{
possibleNonAsciiQWord = utf16Data.AsUInt64().ToScalar();
}
// Temporarily set 'possibleNonAsciiQWord' to be the low 64 bits of the vector,
// then check whether it's all-ASCII. If so, narrow and write to the destination
// buffer. Since we know that either the high 64 bits or the low 64 bits of the
// vector contains non-ASCII data, by the end of the following block the
// 'possibleNonAsciiQWord' local is guaranteed to contain the non-ASCII segment.
if (Utf16Utility.AllCharsInUInt64AreAscii(possibleNonAsciiQWord)) // all chars in first QWORD are ASCII
{
if (AdvSimd.IsSupported)
{
Vector64<byte> lower = AdvSimd.ExtractNarrowingSaturateUnsignedLower(utf16Data);
AdvSimd.StoreSelectedScalar((uint*)pOutputBuffer, lower.AsUInt32(), 0);
}
else
{
Unsafe.WriteUnaligned<uint>(pOutputBuffer, Sse2.ConvertToUInt32(Sse2.PackUnsignedSaturate(utf16Data, utf16Data).AsUInt32()));
}
pInputBuffer += 4;
pOutputBuffer += 4;
outputBytesRemaining -= 4;
possibleNonAsciiQWord = utf16Data.AsUInt64().GetElement(1);
}
LoopTerminatedDueToNonAsciiDataInPossibleNonAsciiQWordLocal:
Debug.Assert(!Utf16Utility.AllCharsInUInt64AreAscii(possibleNonAsciiQWord)); // this condition should've been checked earlier
thisDWord = (uint)possibleNonAsciiQWord;
if (Utf16Utility.AllCharsInUInt32AreAscii(thisDWord))
{
// [ 00000000 0bbbbbbb | 00000000 0aaaaaaa ] -> [ 00000000 0bbbbbbb | 0bbbbbbb 0aaaaaaa ]
Unsafe.WriteUnaligned<ushort>(pOutputBuffer, (ushort)(thisDWord | (thisDWord >> 8)));
pInputBuffer += 2;
pOutputBuffer += 2;
outputBytesRemaining -= 2;
thisDWord = (uint)(possibleNonAsciiQWord >> 32);
}
goto AfterReadDWordSkipAllCharsAsciiCheck;
}
else
{
// Can't use SSE41 x64, so we'll only read and write 4 elements per iteration.
uint maxIters = minElementsRemaining / 4;
uint secondDWord;
int i;
for (i = 0; (uint)i < maxIters; i++)
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
secondDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer + 2);
if (!Utf16Utility.AllCharsInUInt32AreAscii(thisDWord | secondDWord))
{
goto LoopTerminatedDueToNonAsciiData;
}
// [ 00000000 0bbbbbbb | 00000000 0aaaaaaa ] -> [ 00000000 0bbbbbbb | 0bbbbbbb 0aaaaaaa ]
// (Same logic works regardless of endianness.)
Unsafe.WriteUnaligned<ushort>(pOutputBuffer, (ushort)(thisDWord | (thisDWord >> 8)));
Unsafe.WriteUnaligned<ushort>(pOutputBuffer + 2, (ushort)(secondDWord | (secondDWord >> 8)));
pInputBuffer += 4;
pOutputBuffer += 4;
}
outputBytesRemaining -= 4 * i;
continue; // Go back to beginning of main loop, read data, check for ASCII
LoopTerminatedDueToNonAsciiData:
outputBytesRemaining -= 4 * i;
// First, see if we can drain any ASCII data from the first DWORD.
if (Utf16Utility.AllCharsInUInt32AreAscii(thisDWord))
{
// [ 00000000 0bbbbbbb | 00000000 0aaaaaaa ] -> [ 00000000 0bbbbbbb | 0bbbbbbb 0aaaaaaa ]
// (Same logic works regardless of endianness.)
Unsafe.WriteUnaligned<ushort>(pOutputBuffer, (ushort)(thisDWord | (thisDWord >> 8)));
pInputBuffer += 2;
pOutputBuffer += 2;
outputBytesRemaining -= 2;
thisDWord = secondDWord;
}
goto AfterReadDWordSkipAllCharsAsciiCheck;
}
}
AfterReadDWordSkipAllCharsAsciiCheck:
Debug.Assert(!Utf16Utility.AllCharsInUInt32AreAscii(thisDWord)); // this should have been handled earlier
// Next, try stripping off the first ASCII char if it exists.
// We don't check for a second ASCII char since that should have been handled above.
if (IsFirstCharAscii(thisDWord))
{
if (outputBytesRemaining == 0)
{
goto OutputBufferTooSmall;
}
if (BitConverter.IsLittleEndian)
{
pOutputBuffer[0] = (byte)thisDWord; // extract [ ## ## 00 AA ]
}
else
{
pOutputBuffer[0] = (byte)(thisDWord >> 16); // extract [ 00 AA ## ## ]
}
pInputBuffer++;
pOutputBuffer++;
outputBytesRemaining--;
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // input buffer doesn't contain enough data to read a DWORD
}
else
{
// The input buffer at the current offset contains a non-ASCII char.
// Read an entire DWORD and fall through to non-ASCII consumption logic.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
}
}
// At this point, we know the first char in the buffer is non-ASCII, but we haven't yet validated it.
if (!IsFirstCharAtLeastThreeUtf8Bytes(thisDWord))
{
TryConsumeMultipleTwoByteSequences:
// For certain text (Greek, Cyrillic, ...), 2-byte sequences tend to be clustered. We'll try transcoding them in
// a tight loop without falling back to the main loop.
if (IsSecondCharTwoUtf8Bytes(thisDWord))
{
// We have two runs of two bytes each.
if (outputBytesRemaining < 4)
{
goto ProcessOneCharFromCurrentDWordAndFinish; // running out of output buffer
}
Unsafe.WriteUnaligned<uint>(pOutputBuffer, ExtractTwoUtf8TwoByteSequencesFromTwoPackedUtf16Chars(thisDWord));
pInputBuffer += 2;
pOutputBuffer += 4;
outputBytesRemaining -= 4;
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // Running out of data - go down slow path
}
else
{
// Optimization: If we read a long run of two-byte sequences, the next sequence is probably
// also two bytes. Check for that first before going back to the beginning of the loop.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
if (IsFirstCharTwoUtf8Bytes(thisDWord))
{
// Validated we have a two-byte sequence coming up
goto TryConsumeMultipleTwoByteSequences;
}
// If we reached this point, the next sequence is something other than a valid
// two-byte sequence, so go back to the beginning of the loop.
goto AfterReadDWord;
}
}
if (outputBytesRemaining < 2)
{
goto OutputBufferTooSmall;
}
Unsafe.WriteUnaligned<ushort>(pOutputBuffer, (ushort)ExtractUtf8TwoByteSequenceFromFirstUtf16Char(thisDWord));
// The buffer contains a 2-byte sequence followed by 2 bytes that aren't a 2-byte sequence.
// Unlikely that a 3-byte sequence would follow a 2-byte sequence, so perhaps remaining
// char is ASCII?
if (IsSecondCharAscii(thisDWord))
{
if (outputBytesRemaining >= 3)
{
if (BitConverter.IsLittleEndian)
{
thisDWord >>= 16;
}
pOutputBuffer[2] = (byte)thisDWord;
pInputBuffer += 2;
pOutputBuffer += 3;
outputBytesRemaining -= 3;
continue; // go back to original bounds check and check for ASCII
}
else
{
pInputBuffer++;
pOutputBuffer += 2;
goto OutputBufferTooSmall;
}
}
else
{
pInputBuffer++;
pOutputBuffer += 2;
outputBytesRemaining -= 2;
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
goto BeforeProcessThreeByteSequence; // we know the next byte isn't ASCII, and it's not the start of a 2-byte sequence (this was checked above)
}
}
}
// Check the 3-byte case.
BeforeProcessThreeByteSequence:
if (!IsFirstCharSurrogate(thisDWord))
{
// Optimization: A three-byte character could indicate CJK text, which makes it likely
// that the character following this one is also CJK. We'll perform the check now
// rather than jumping to the beginning of the main loop.
if (IsSecondCharAtLeastThreeUtf8Bytes(thisDWord))
{
if (!IsSecondCharSurrogate(thisDWord))
{
if (outputBytesRemaining < 6)
{
goto ConsumeSingleThreeByteRun; // not enough space - try consuming as much as we can
}
WriteTwoUtf16CharsAsTwoUtf8ThreeByteSequences(ref *pOutputBuffer, thisDWord);
pInputBuffer += 2;
pOutputBuffer += 6;
outputBytesRemaining -= 6;
// Try to remain in the 3-byte processing loop if at all possible.
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
if (IsFirstCharAtLeastThreeUtf8Bytes(thisDWord))
{
goto BeforeProcessThreeByteSequence;
}
else
{
// Fall back to standard processing loop since we don't know how to optimize this.
goto AfterReadDWord;
}
}
}
}
ConsumeSingleThreeByteRun:
if (outputBytesRemaining < 3)
{
goto OutputBufferTooSmall;
}
WriteFirstUtf16CharAsUtf8ThreeByteSequence(ref *pOutputBuffer, thisDWord);
pInputBuffer++;
pOutputBuffer += 3;
outputBytesRemaining -= 3;
// Occasionally one-off ASCII characters like spaces, periods, or newlines will make their way
// into the text. If this happens, strip the ASCII char off now before seeing if the next
// character consists of three code units.
if (IsSecondCharAscii(thisDWord))
{
if (outputBytesRemaining == 0)
{
goto OutputBufferTooSmall;
}
if (BitConverter.IsLittleEndian)
{
*pOutputBuffer = (byte)(thisDWord >> 16);
}
else
{
*pOutputBuffer = (byte)(thisDWord);
}
pInputBuffer++;
pOutputBuffer++;
outputBytesRemaining--;
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
if (IsFirstCharAtLeastThreeUtf8Bytes(thisDWord))
{
goto BeforeProcessThreeByteSequence;
}
else
{
// Fall back to standard processing loop since we don't know how to optimize this.
goto AfterReadDWord;
}
}
}
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
goto AfterReadDWordSkipAllCharsAsciiCheck; // we just checked above that this value isn't ASCII
}
}
// Four byte sequence processing
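// Illustrative example: U+1F600 is the surrogate pair D83D DE00 in UTF-16 and becomes the four
// UTF-8 bytes F0 9F 98 80, so one DWORD of input yields one DWORD of output here.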
if (IsWellFormedUtf16SurrogatePair(thisDWord))
{
if (outputBytesRemaining < 4)
{
goto OutputBufferTooSmall;
}
Unsafe.WriteUnaligned<uint>(pOutputBuffer, ExtractFourUtf8BytesFromSurrogatePair(thisDWord));
pInputBuffer += 2;
pOutputBuffer += 4;
outputBytesRemaining -= 4;
continue; // go back to beginning of loop for processing
}
goto Error; // an ill-formed surrogate sequence: high not followed by low, or low not preceded by high
} while (pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer);
ProcessNextCharAndFinish:
inputLength = (int)(pFinalPosWhereCanReadDWordFromInputBuffer - pInputBuffer) + CharsPerDWord;
ProcessInputOfLessThanDWordSize:
Debug.Assert(inputLength < CharsPerDWord);
if (inputLength == 0)
{
goto InputBufferFullyConsumed;
}
uint thisChar = *pInputBuffer;
goto ProcessFinalChar;
ProcessOneCharFromCurrentDWordAndFinish:
if (BitConverter.IsLittleEndian)
{
thisChar = thisDWord & 0xFFFFu; // preserve only the first char
}
else
{
thisChar = thisDWord >> 16; // preserve only the first char
}
ProcessFinalChar:
{
if (thisChar <= 0x7Fu)
{
if (outputBytesRemaining == 0)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
// 1-byte (ASCII) case
*pOutputBuffer = (byte)thisChar;
pInputBuffer++;
pOutputBuffer++;
}
else if (thisChar < 0x0800u)
{
if (outputBytesRemaining < 2)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
// 2-byte case
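// For example, U+00E9 ('é') has the bits 00011 101001; prefixing 110 and 10 gives the two bytes C3 A9.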
pOutputBuffer[1] = (byte)((thisChar & 0x3Fu) | unchecked((uint)(sbyte)0x80)); // [ 10xxxxxx ]
pOutputBuffer[0] = (byte)((thisChar >> 6) | unchecked((uint)(sbyte)0xC0)); // [ 110yyyyy ]
pInputBuffer++;
pOutputBuffer += 2;
}
else if (!UnicodeUtility.IsSurrogateCodePoint(thisChar))
{
if (outputBytesRemaining < 3)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
// 3-byte case
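// For example, U+20AC ('€') has the bits 0010 000010 101100; prefixing 1110 / 10 / 10 gives the
// three bytes E2 82 AC.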
pOutputBuffer[2] = (byte)((thisChar & 0x3Fu) | unchecked((uint)(sbyte)0x80)); // [ 10xxxxxx ]
pOutputBuffer[1] = (byte)(((thisChar >> 6) & 0x3Fu) | unchecked((uint)(sbyte)0x80)); // [ 10yyyyyy ]
pOutputBuffer[0] = (byte)((thisChar >> 12) | unchecked((uint)(sbyte)0xE0)); // [ 1110zzzz ]
pInputBuffer++;
pOutputBuffer += 3;
}
else if (thisChar <= 0xDBFFu)
{
// UTF-16 high surrogate code point with no trailing data, report incomplete input buffer
goto InputBufferTooSmall;
}
else
{
// UTF-16 low surrogate code point with no leading data, report error
goto Error;
}
}
// There are two ways we can end up here. Either we were running low on input data,
// or we were running low on space in the destination buffer. If we're running low on
// input data (label targets ProcessInputOfLessThanDWordSize and ProcessNextCharAndFinish),
// then the inputLength value is guaranteed to be between 0 and 1, and we should return Done.
// If we're running low on destination buffer space (label target ProcessOneCharFromCurrentDWordAndFinish),
// then we didn't modify inputLength since entering the main loop, which means it should
// still have a value of >= 2. So checking the value of inputLength is all we need to do to determine
// which of the two scenarios we're in.
if (inputLength > 1)
{
goto OutputBufferTooSmall;
}
InputBufferFullyConsumed:
OperationStatus retVal = OperationStatus.Done;
goto ReturnCommon;
InputBufferTooSmall:
retVal = OperationStatus.NeedMoreData;
goto ReturnCommon;
OutputBufferTooSmall:
retVal = OperationStatus.DestinationTooSmall;
goto ReturnCommon;
Error:
retVal = OperationStatus.InvalidData;
goto ReturnCommon;
ReturnCommon:
pInputBufferRemaining = pInputBuffer;
pOutputBufferRemaining = pOutputBuffer;
return retVal;
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Buffers;
using System.Diagnostics;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
using System.Runtime.Intrinsics.X86;
namespace System.Text.Unicode
{
internal static unsafe partial class Utf8Utility
{
// On method return, pInputBufferRemaining and pOutputBufferRemaining will both point to where
// the next byte would have been consumed from / the next char would have been written to.
// inputLength in bytes, outputCharsRemaining in chars.
public static OperationStatus TranscodeToUtf16(byte* pInputBuffer, int inputLength, char* pOutputBuffer, int outputCharsRemaining, out byte* pInputBufferRemaining, out char* pOutputBufferRemaining)
{
Debug.Assert(inputLength >= 0, "Input length must not be negative.");
Debug.Assert(pInputBuffer != null || inputLength == 0, "Input length must be zero if input buffer pointer is null.");
Debug.Assert(outputCharsRemaining >= 0, "Destination length must not be negative.");
Debug.Assert(pOutputBuffer != null || outputCharsRemaining == 0, "Destination length must be zero if destination buffer pointer is null.");
// First, try vectorized conversion.
{
nuint numElementsConverted = ASCIIUtility.WidenAsciiToUtf16(pInputBuffer, pOutputBuffer, (uint)Math.Min(inputLength, outputCharsRemaining));
pInputBuffer += numElementsConverted;
pOutputBuffer += numElementsConverted;
// Quick check - did we just end up consuming the entire input buffer?
// If so, short-circuit the remainder of the method.
if ((int)numElementsConverted == inputLength)
{
pInputBufferRemaining = pInputBuffer;
pOutputBufferRemaining = pOutputBuffer;
return OperationStatus.Done;
}
inputLength -= (int)numElementsConverted;
outputCharsRemaining -= (int)numElementsConverted;
}
if (inputLength < sizeof(uint))
{
goto ProcessInputOfLessThanDWordSize;
}
byte* pFinalPosWhereCanReadDWordFromInputBuffer = pInputBuffer + (uint)inputLength - 4;
// Begin the main loop.
#if DEBUG
byte* pLastBufferPosProcessed = null; // used for invariant checking in debug builds
#endif
Debug.Assert(pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer);
do
{
// Read 32 bits at a time. This is enough to hold any possible UTF8-encoded scalar.
uint thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
AfterReadDWord:
#if DEBUG
Debug.Assert(pLastBufferPosProcessed < pInputBuffer, "Algorithm should've made forward progress since last read.");
pLastBufferPosProcessed = pInputBuffer;
#endif
// First, check for the common case of all-ASCII bytes.
if (ASCIIUtility.AllBytesInUInt32AreAscii(thisDWord))
{
// We read an all-ASCII sequence.
if (outputCharsRemaining < sizeof(uint))
{
goto ProcessRemainingBytesSlow; // running out of space, but may be able to write some data
}
ASCIIUtility.WidenFourAsciiBytesToUtf16AndWriteToBuffer(ref *pOutputBuffer, thisDWord);
pInputBuffer += 4;
pOutputBuffer += 4;
outputCharsRemaining -= 4;
// If we saw a sequence of all ASCII, there's a good chance a significant amount of following data is also ASCII.
// What follows is basically an unrolled loop with poor man's vectorization.
uint remainingInputBytes = (uint)(void*)Unsafe.ByteOffset(ref *pInputBuffer, ref *pFinalPosWhereCanReadDWordFromInputBuffer) + 4;
uint maxIters = Math.Min(remainingInputBytes, (uint)outputCharsRemaining) / (2 * sizeof(uint));
uint secondDWord;
int i;
for (i = 0; (uint)i < maxIters; i++)
{
// Reading two DWORDs in parallel benchmarked faster than reading a single QWORD.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
secondDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer + sizeof(uint));
if (!ASCIIUtility.AllBytesInUInt32AreAscii(thisDWord | secondDWord))
{
goto LoopTerminatedEarlyDueToNonAsciiData;
}
pInputBuffer += 8;
ASCIIUtility.WidenFourAsciiBytesToUtf16AndWriteToBuffer(ref pOutputBuffer[0], thisDWord);
ASCIIUtility.WidenFourAsciiBytesToUtf16AndWriteToBuffer(ref pOutputBuffer[4], secondDWord);
pOutputBuffer += 8;
}
outputCharsRemaining -= 8 * i;
continue; // need to perform a bounds check because we might be running out of data
LoopTerminatedEarlyDueToNonAsciiData:
if (ASCIIUtility.AllBytesInUInt32AreAscii(thisDWord))
{
// The first DWORD contained all-ASCII bytes, so expand it.
ASCIIUtility.WidenFourAsciiBytesToUtf16AndWriteToBuffer(ref *pOutputBuffer, thisDWord);
// continue the outer loop from the second DWORD
Debug.Assert(!ASCIIUtility.AllBytesInUInt32AreAscii(secondDWord));
thisDWord = secondDWord;
pInputBuffer += 4;
pOutputBuffer += 4;
outputCharsRemaining -= 4;
}
outputCharsRemaining -= 8 * i;
// We know that there's *at least* one DWORD of data remaining in the buffer.
// We also know that it's not all-ASCII. We can skip the logic at the beginning of the main loop.
goto AfterReadDWordSkipAllBytesAsciiCheck;
}
AfterReadDWordSkipAllBytesAsciiCheck:
Debug.Assert(!ASCIIUtility.AllBytesInUInt32AreAscii(thisDWord)); // this should have been handled earlier
// Next, try stripping off ASCII bytes one at a time.
// We only handle up to three ASCII bytes here since we handled the four ASCII byte case above.
if (UInt32FirstByteIsAscii(thisDWord))
{
if (outputCharsRemaining >= 3)
{
// Fast-track: we don't need to check the destination length for subsequent
// ASCII bytes since we know we can write them all now.
uint thisDWordLittleEndian = ToLittleEndian(thisDWord);
nuint adjustment = 1;
pOutputBuffer[0] = (char)(byte)thisDWordLittleEndian;
if (UInt32SecondByteIsAscii(thisDWord))
{
adjustment++;
thisDWordLittleEndian >>= 8;
pOutputBuffer[1] = (char)(byte)thisDWordLittleEndian;
if (UInt32ThirdByteIsAscii(thisDWord))
{
adjustment++;
thisDWordLittleEndian >>= 8;
pOutputBuffer[2] = (char)(byte)thisDWordLittleEndian;
}
}
pInputBuffer += adjustment;
pOutputBuffer += adjustment;
outputCharsRemaining -= (int)adjustment;
}
else
{
// Slow-track: we need to make sure each individual write has enough
// of a buffer so that we don't overrun the destination.
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall;
}
uint thisDWordLittleEndian = ToLittleEndian(thisDWord);
pInputBuffer++;
*pOutputBuffer++ = (char)(byte)thisDWordLittleEndian;
outputCharsRemaining--;
if (UInt32SecondByteIsAscii(thisDWord))
{
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall;
}
pInputBuffer++;
thisDWordLittleEndian >>= 8;
*pOutputBuffer++ = (char)(byte)thisDWordLittleEndian;
// We can perform a small optimization here. We know at this point that
// the output buffer is fully consumed (we read two ASCII bytes and wrote
// two ASCII chars, and we checked earlier that the destination buffer
// can't store a third byte). If the next byte is ASCII, we can jump straight
// to the return statement since the end-of-method logic only relies on the
// destination buffer pointer -- NOT the output chars remaining count -- being
// correct. If the next byte is not ASCII, we'll need to continue with the
// rest of the main loop, but we can set the buffer length directly to zero
// rather than decrementing it from 1 to 0.
Debug.Assert(outputCharsRemaining == 1);
if (UInt32ThirdByteIsAscii(thisDWord))
{
goto OutputBufferTooSmall;
}
else
{
outputCharsRemaining = 0;
}
}
}
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessRemainingBytesSlow; // input buffer doesn't contain enough data to read a DWORD
}
else
{
// The input buffer at the current offset contains a non-ASCII byte.
// Read an entire DWORD and fall through to multi-byte consumption logic.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
}
}
BeforeProcessTwoByteSequence:
// At this point, we know we're working with a multi-byte code unit,
// but we haven't yet validated it.
// The masks and comparands are derived from the Unicode Standard, Table 3-6.
// Additionally, we need to check for valid byte sequences per Table 3-7.
// Check the 2-byte case.
if (UInt32BeginsWithUtf8TwoByteMask(thisDWord))
{
// Per Table 3-7, valid sequences are:
// [ C2..DF ] [ 80..BF ]
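// Lead bytes C0 and C1 are excluded because they could only encode values below U+0080;
// e.g. C1 81 would decode to U+0041 ('A'), so such overlong forms must be rejected.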
if (UInt32BeginsWithOverlongUtf8TwoByteSequence(thisDWord))
{
goto Error;
}
ProcessTwoByteSequenceSkipOverlongFormCheck:
// Optimization: If this is a two-byte-per-character language like Cyrillic or Hebrew,
// there's a good chance that if we see one two-byte run then there's another two-byte
// run immediately after. Let's check that now.
// On little-endian platforms, we can check for the two-byte UTF8 mask *and* validate that
// the value isn't overlong using a single comparison. On big-endian platforms, we'll need
// to validate the mask and validate that the sequence isn't overlong as two separate comparisons.
if ((BitConverter.IsLittleEndian && UInt32EndsWithValidUtf8TwoByteSequenceLittleEndian(thisDWord))
|| (!BitConverter.IsLittleEndian && (UInt32EndsWithUtf8TwoByteMask(thisDWord) && !UInt32EndsWithOverlongUtf8TwoByteSequence(thisDWord))))
{
// We have two runs of two bytes each.
if (outputCharsRemaining < 2)
{
goto ProcessRemainingBytesSlow; // running out of output buffer
}
Unsafe.WriteUnaligned<uint>(pOutputBuffer, ExtractTwoCharsPackedFromTwoAdjacentTwoByteSequences(thisDWord));
pInputBuffer += 4;
pOutputBuffer += 2;
outputCharsRemaining -= 2;
if (pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer)
{
// Optimization: If we read a long run of two-byte sequences, the next sequence is probably
// also two bytes. Check for that first before going back to the beginning of the loop.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
if (BitConverter.IsLittleEndian)
{
if (UInt32BeginsWithValidUtf8TwoByteSequenceLittleEndian(thisDWord))
{
// The next sequence is a valid two-byte sequence.
goto ProcessTwoByteSequenceSkipOverlongFormCheck;
}
}
else
{
if (UInt32BeginsWithUtf8TwoByteMask(thisDWord))
{
if (UInt32BeginsWithOverlongUtf8TwoByteSequence(thisDWord))
{
goto Error; // The next sequence purports to be a 2-byte sequence but is overlong.
}
goto ProcessTwoByteSequenceSkipOverlongFormCheck;
}
}
// If we reached this point, the next sequence is something other than a valid
// two-byte sequence, so go back to the beginning of the loop.
goto AfterReadDWord;
}
else
{
goto ProcessRemainingBytesSlow; // Running out of data - go down slow path
}
}
// The buffer contains a 2-byte sequence followed by 2 bytes that aren't a 2-byte sequence.
// Unlikely that a 3-byte sequence would follow a 2-byte sequence, so perhaps remaining
// bytes are ASCII?
uint charToWrite = ExtractCharFromFirstTwoByteSequence(thisDWord); // optimistically compute this now, but don't store until we know dest is large enough
if (UInt32ThirdByteIsAscii(thisDWord))
{
if (UInt32FourthByteIsAscii(thisDWord))
{
if (outputCharsRemaining < 3)
{
goto ProcessRemainingBytesSlow; // running out of output buffer
}
pOutputBuffer[0] = (char)charToWrite;
if (BitConverter.IsLittleEndian)
{
thisDWord >>= 16;
pOutputBuffer[1] = (char)(byte)thisDWord;
thisDWord >>= 8;
pOutputBuffer[2] = (char)thisDWord;
}
else
{
pOutputBuffer[2] = (char)(byte)thisDWord;
pOutputBuffer[1] = (char)(byte)(thisDWord >> 8);
}
pInputBuffer += 4;
pOutputBuffer += 3;
outputCharsRemaining -= 3;
continue; // go back to original bounds check and check for ASCII
}
else
{
if (outputCharsRemaining < 2)
{
goto ProcessRemainingBytesSlow; // running out of output buffer
}
pOutputBuffer[0] = (char)charToWrite;
pOutputBuffer[1] = (char)(byte)(thisDWord >> (BitConverter.IsLittleEndian ? 16 : 8));
pInputBuffer += 3;
pOutputBuffer += 2;
outputCharsRemaining -= 2;
// A two-byte sequence followed by an ASCII byte followed by a non-ASCII byte.
// Read in the next DWORD and jump directly to the start of the multi-byte processing block.
if (pFinalPosWhereCanReadDWordFromInputBuffer < pInputBuffer)
{
goto ProcessRemainingBytesSlow; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
goto BeforeProcessTwoByteSequence;
}
}
}
else
{
if (outputCharsRemaining == 0)
{
goto ProcessRemainingBytesSlow; // running out of output buffer
}
pOutputBuffer[0] = (char)charToWrite;
pInputBuffer += 2;
pOutputBuffer++;
outputCharsRemaining--;
if (pFinalPosWhereCanReadDWordFromInputBuffer < pInputBuffer)
{
goto ProcessRemainingBytesSlow; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
goto BeforeProcessThreeByteSequence; // we know the next byte isn't ASCII, and it's not the start of a 2-byte sequence (this was checked above)
}
}
}
// Check the 3-byte case.
BeforeProcessThreeByteSequence:
if (UInt32BeginsWithUtf8ThreeByteMask(thisDWord))
{
ProcessThreeByteSequenceWithCheck:
// We need to check for overlong or surrogate three-byte sequences.
//
// Per Table 3-7, valid sequences are:
// [ E0 ] [ A0..BF ] [ 80..BF ]
// [ E1..EC ] [ 80..BF ] [ 80..BF ]
// [ ED ] [ 80..9F ] [ 80..BF ]
// [ EE..EF ] [ 80..BF ] [ 80..BF ]
//
// Big-endian examples of using the above validation table:
// E0A0 = 1110 0000 1010 0000 => invalid (overlong ) patterns are 1110 0000 100# ####
// ED9F = 1110 1101 1001 1111 => invalid (surrogate) patterns are 1110 1101 101# ####
// If using the bitmask ......................................... 0000 1111 0010 0000 (=0F20),
// Then invalid (overlong) patterns match the comparand ......... 0000 0000 0000 0000 (=0000),
// And invalid (surrogate) patterns match the comparand ......... 0000 1101 0010 0000 (=0D20).
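// Worked example: the surrogate U+D800 encodes (ill-formed) as ED A0 80, whose first two bytes
// match the surrogate comparand above, while E0 followed by 80..9F matches the overlong
// comparand; both are rejected by the checks below.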
if (BitConverter.IsLittleEndian)
{
// The "overlong or surrogate" check can be implemented using a single jump, but there's
// some overhead to moving the bits into the correct locations in order to perform the
// correct comparison, and in practice the processor's branch prediction capability is
// good enough that we shouldn't bother. So we'll use two jumps instead.
// Can't extract this check into its own helper method because JITter produces suboptimal
// assembly, even with aggressive inlining.
// Code below becomes 5 instructions: test, jz, lea, test, jz
if (((thisDWord & 0x0000_200Fu) == 0) || (((thisDWord - 0x0000_200Du) & 0x0000_200Fu) == 0))
{
goto Error; // overlong or surrogate
}
}
else
{
if (((thisDWord & 0x0F20_0000u) == 0) || (((thisDWord - 0x0D20_0000u) & 0x0F20_0000u) == 0))
{
goto Error; // overlong or surrogate
}
}
// At this point, we know the incoming scalar is well-formed.
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall; // not enough space in the destination buffer to write
}
// As an optimization, on compatible platforms check if a second three-byte sequence immediately
// follows the one we just read, and if so extract them together.
if (BitConverter.IsLittleEndian)
{
// First, check that the leftover byte from the original DWORD is in the range [ E0..EF ], which
// would indicate the potential start of a second three-byte sequence.
if (((thisDWord - 0xE000_0000u) & 0xF000_0000u) == 0)
{
// The const '3' below is correct because pFinalPosWhereCanReadDWordFromInputBuffer represents
// the final place where we can safely perform a DWORD read, and we want to probe whether it's
// safe to read a DWORD beginning at address &pInputBuffer[3].
if (outputCharsRemaining > 1 && (nint)(void*)Unsafe.ByteOffset(ref *pInputBuffer, ref *pFinalPosWhereCanReadDWordFromInputBuffer) >= 3)
{
// We're going to attempt to read a second 3-byte sequence and write them both out one after the other.
// We need to check the continuation bit mask on the remaining two bytes (and we may as well check the leading
// byte mask again since it's free), then perform overlong + surrogate checks. If the overlong or surrogate
// checks fail, we'll fall through to the remainder of the logic which will transcode the original valid
// 3-byte UTF-8 sequence we read; and on the next iteration of the loop the validation routine will run again,
// fail, and redirect control flow to the error handling logic at the very end of this method.
uint secondDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer + 3);
if (UInt32BeginsWithUtf8ThreeByteMask(secondDWord)
&& ((secondDWord & 0x0000_200Fu) != 0)
&& (((secondDWord - 0x0000_200Du) & 0x0000_200Fu) != 0))
{
pOutputBuffer[0] = (char)ExtractCharFromFirstThreeByteSequence(thisDWord);
pOutputBuffer[1] = (char)ExtractCharFromFirstThreeByteSequence(secondDWord);
pInputBuffer += 6;
pOutputBuffer += 2;
outputCharsRemaining -= 2;
// Drain any ASCII data following the second three-byte sequence.
goto CheckForAsciiByteAfterThreeByteSequence;
}
}
}
}
// Couldn't extract 2x three-byte sequences together, just do this one by itself.
*pOutputBuffer = (char)ExtractCharFromFirstThreeByteSequence(thisDWord);
pInputBuffer += 3;
pOutputBuffer++;
outputCharsRemaining--;
CheckForAsciiByteAfterThreeByteSequence:
// Occasionally one-off ASCII characters like spaces, periods, or newlines will make their way
// into the text. If this happens, strip the ASCII char off now before seeing if the next
// character consists of three code units.
if (UInt32FourthByteIsAscii(thisDWord))
{
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall;
}
if (BitConverter.IsLittleEndian)
{
*pOutputBuffer = (char)(thisDWord >> 24);
}
else
{
*pOutputBuffer = (char)(byte)thisDWord;
}
pInputBuffer++;
pOutputBuffer++;
outputCharsRemaining--;
}
if (pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer)
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
// Optimization: A three-byte character could indicate CJK text, which makes it likely
// that the character following this one is also CJK. We'll check for a three-byte sequence
// marker now and jump directly to three-byte sequence processing if we see one, skipping
// all of the logic at the beginning of the loop.
if (UInt32BeginsWithUtf8ThreeByteMask(thisDWord))
{
goto ProcessThreeByteSequenceWithCheck; // found a three-byte sequence marker; validate and consume
}
else
{
goto AfterReadDWord; // probably ASCII punctuation or whitespace
}
}
else
{
goto ProcessRemainingBytesSlow; // Running out of data - go down slow path
}
}
// Assume the 4-byte case, but we need to validate.
{
// We need to check for overlong or invalid (over U+10FFFF) four-byte sequences.
//
// Per Table 3-7, valid sequences are:
// [ F0 ] [ 90..BF ] [ 80..BF ] [ 80..BF ]
// [ F1..F3 ] [ 80..BF ] [ 80..BF ] [ 80..BF ]
// [ F4 ] [ 80..8F ] [ 80..BF ] [ 80..BF ]
if (!UInt32BeginsWithUtf8FourByteMask(thisDWord))
{
goto Error;
}
// Now check for overlong / out-of-range sequences.
if (BitConverter.IsLittleEndian)
{
// The DWORD we read is [ 10xxxxxx 10yyyyyy 10zzzzzz 11110www ].
// We want to get the 'w' byte in front of the 'z' byte so that we can perform
// a single range comparison. We'll take advantage of the fact that the JITter
// can detect a ROR / ROL operation, then we'll just zero out the bytes that
// aren't involved in the range check.
uint toCheck = thisDWord & 0x0000_FFFFu;
// At this point, toCheck = [ 00000000 00000000 10zzzzzz 11110www ].
toCheck = BitOperations.RotateRight(toCheck, 8);
// At this point, toCheck = [ 11110www 00000000 00000000 10zzzzzz ].
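// Worked example: F0 90 80 80 (U+10000, the smallest valid 4-byte sequence) rotates to
// 0xF000_0090, the low end of the range below, while F4 8F .. .. (at most U+10FFFF) rotates to
// 0xF400_008F, the high end; anything outside that window is overlong or above U+10FFFF.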
if (!UnicodeUtility.IsInRangeInclusive(toCheck, 0xF000_0090u, 0xF400_008Fu))
{
goto Error;
}
}
else
{
if (!UnicodeUtility.IsInRangeInclusive(thisDWord, 0xF090_0000u, 0xF48F_FFFFu))
{
goto Error;
}
}
// Validation complete.
if (outputCharsRemaining < 2)
{
// There's no point in falling back to the "drain the input buffer" logic, since we know
// we can't write anything to the destination. So we'll just exit immediately.
goto OutputBufferTooSmall;
}
Unsafe.WriteUnaligned<uint>(pOutputBuffer, ExtractCharsFromFourByteSequence(thisDWord));
pInputBuffer += 4;
pOutputBuffer += 2;
outputCharsRemaining -= 2;
continue; // go back to beginning of loop for processing
}
} while (pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer);
ProcessRemainingBytesSlow:
inputLength = (int)(void*)Unsafe.ByteOffset(ref *pInputBuffer, ref *pFinalPosWhereCanReadDWordFromInputBuffer) + 4;
ProcessInputOfLessThanDWordSize:
while (inputLength > 0)
{
uint firstByte = pInputBuffer[0];
if (firstByte <= 0x7Fu)
{
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
// 1-byte (ASCII) case
*pOutputBuffer = (char)firstByte;
pInputBuffer++;
pOutputBuffer++;
inputLength--;
outputCharsRemaining--;
continue;
}
// Potentially the start of a multi-byte sequence?
firstByte -= 0xC2u;
if ((byte)firstByte <= (0xDFu - 0xC2u))
{
// Potentially a 2-byte sequence?
if (inputLength < 2)
{
goto InputBufferTooSmall; // out of data
}
uint secondByte = pInputBuffer[1];
if (!IsLowByteUtf8ContinuationByte(secondByte))
{
goto Error; // 2-byte marker not followed by continuation byte
}
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
uint asChar = (firstByte << 6) + secondByte + ((0xC2u - 0xC0u) << 6) - 0x80u; // remove UTF-8 markers from scalar
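// (firstByte was already biased by -0xC2 above, so the constant folds that bias back in;
// e.g. for the input bytes C3 A9 this computes (1 << 6) + 0xA9 + 0x80 - 0x80 = 0xE9 = U+00E9.)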
*pOutputBuffer = (char)asChar;
pInputBuffer += 2;
pOutputBuffer++;
inputLength -= 2;
outputCharsRemaining--;
continue;
}
else if ((byte)firstByte <= (0xEFu - 0xC2u))
{
// Potentially a 3-byte sequence?
if (inputLength >= 3)
{
uint secondByte = pInputBuffer[1];
uint thirdByte = pInputBuffer[2];
if (!IsLowByteUtf8ContinuationByte(secondByte) || !IsLowByteUtf8ContinuationByte(thirdByte))
{
goto Error; // 3-byte marker not followed by 2 continuation bytes
}
// To speed up the validation logic below, we're not going to remove the UTF-8 markers from the partial char just yet.
// We account for this in the comparisons below.
uint partialChar = (firstByte << 12) + (secondByte << 6);
if (partialChar < ((0xE0u - 0xC2u) << 12) + (0xA0u << 6))
{
goto Error; // this is an overlong encoding; fail
}
partialChar -= ((0xEDu - 0xC2u) << 12) + (0xA0u << 6); // if partialChar = 0, we're at beginning of UTF-16 surrogate code point range
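// E.g. the ill-formed sequence ED A0 80 (U+D800) yields partialChar == 0 here and is rejected
// below, while EE 80 80 (U+E000, the first code point after the surrogate range) yields exactly
// 0x0800 and passes.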
if (partialChar < 0x0800u /* number of code points in UTF-16 surrogate code point range */)
{
goto Error; // attempted to encode a UTF-16 surrogate code point; fail
}
if (outputCharsRemaining == 0)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
// Now restore the full scalar value.
partialChar += thirdByte;
partialChar += 0xD800; // undo "move to beginning of UTF-16 surrogate code point range" from earlier, fold it with later adds
partialChar -= 0x80u; // remove third byte continuation marker
*pOutputBuffer = (char)partialChar;
pInputBuffer += 3;
pOutputBuffer++;
inputLength -= 3;
outputCharsRemaining--;
continue;
}
else if (inputLength >= 2)
{
uint secondByte = pInputBuffer[1];
if (!IsLowByteUtf8ContinuationByte(secondByte))
{
goto Error; // 3-byte marker not followed by continuation byte
}
// We can't build up the entire scalar value now, but we can check for overlong / surrogate representations
// from just the first two bytes.
uint partialChar = (firstByte << 6) + secondByte; // don't worry about fixing up the UTF-8 markers; we'll account for it in the below comparison
if (partialChar < ((0xE0u - 0xC2u) << 6) + 0xA0u)
{
goto Error; // failed overlong check
}
if (UnicodeUtility.IsInRangeInclusive(partialChar, ((0xEDu - 0xC2u) << 6) + 0xA0u, ((0xEEu - 0xC2u) << 6) + 0x7Fu))
{
goto Error; // failed surrogate check
}
}
goto InputBufferTooSmall; // out of data
}
else if ((byte)firstByte <= (0xF4u - 0xC2u))
{
// Potentially a 4-byte sequence?
if (inputLength < 2)
{
goto InputBufferTooSmall; // ran out of data
}
uint nextByte = pInputBuffer[1];
if (!IsLowByteUtf8ContinuationByte(nextByte))
{
goto Error; // 4-byte marker not followed by a continuation byte
}
uint asPartialChar = (firstByte << 6) + nextByte; // don't worry about fixing up the UTF-8 markers; we'll account for it in the below comparison
if (!UnicodeUtility.IsInRangeInclusive(asPartialChar, ((0xF0u - 0xC2u) << 6) + 0x90u, ((0xF4u - 0xC2u) << 6) + 0x8Fu))
{
goto Error; // failed overlong / out-of-range check
}
if (inputLength < 3)
{
goto InputBufferTooSmall; // ran out of data
}
if (!IsLowByteUtf8ContinuationByte(pInputBuffer[2]))
{
goto Error; // third byte in 4-byte sequence not a continuation byte
}
if (inputLength < 4)
{
goto InputBufferTooSmall; // ran out of data
}
if (!IsLowByteUtf8ContinuationByte(pInputBuffer[3]))
{
goto Error; // fourth byte in 4-byte sequence not a continuation byte
}
// If we read a valid astral scalar value, the only way we could've fallen down this code path
// is that we didn't have enough output buffer to write the result.
goto OutputBufferTooSmall;
}
else
{
goto Error; // didn't begin with [ C2 .. F4 ], so invalid multi-byte sequence header byte
}
}
OperationStatus retVal = OperationStatus.Done;
goto ReturnCommon;
InputBufferTooSmall:
retVal = OperationStatus.NeedMoreData;
goto ReturnCommon;
OutputBufferTooSmall:
retVal = OperationStatus.DestinationTooSmall;
goto ReturnCommon;
Error:
retVal = OperationStatus.InvalidData;
goto ReturnCommon;
ReturnCommon:
pInputBufferRemaining = pInputBuffer;
pOutputBufferRemaining = pOutputBuffer;
return retVal;
}
// On method return, pInputBufferRemaining and pOutputBufferRemaining will both point to where
// the next char would have been consumed from / the next byte would have been written to.
// inputLength in chars, outputBytesRemaining in bytes.
public static OperationStatus TranscodeToUtf8(char* pInputBuffer, int inputLength, byte* pOutputBuffer, int outputBytesRemaining, out char* pInputBufferRemaining, out byte* pOutputBufferRemaining)
{
const int CharsPerDWord = sizeof(uint) / sizeof(char);
Debug.Assert(inputLength >= 0, "Input length must not be negative.");
Debug.Assert(pInputBuffer != null || inputLength == 0, "Input length must be zero if input buffer pointer is null.");
Debug.Assert(outputBytesRemaining >= 0, "Destination length must not be negative.");
Debug.Assert(pOutputBuffer != null || outputBytesRemaining == 0, "Destination length must be zero if destination buffer pointer is null.");
// First, try vectorized conversion.
{
nuint numElementsConverted = ASCIIUtility.NarrowUtf16ToAscii(pInputBuffer, pOutputBuffer, (uint)Math.Min(inputLength, outputBytesRemaining));
pInputBuffer += numElementsConverted;
pOutputBuffer += numElementsConverted;
// Quick check - did we just end up consuming the entire input buffer?
// If so, short-circuit the remainder of the method.
if ((int)numElementsConverted == inputLength)
{
pInputBufferRemaining = pInputBuffer;
pOutputBufferRemaining = pOutputBuffer;
return OperationStatus.Done;
}
inputLength -= (int)numElementsConverted;
outputBytesRemaining -= (int)numElementsConverted;
}
if (inputLength < CharsPerDWord)
{
goto ProcessInputOfLessThanDWordSize;
}
char* pFinalPosWhereCanReadDWordFromInputBuffer = pInputBuffer + (uint)inputLength - CharsPerDWord;
// We have paths for SSE4.1 vectorization inside the inner loop. Since the below
// vector is only used in those code paths, we leave it uninitialized if SSE4.1
// is not enabled.
Vector128<short> nonAsciiUtf16DataMask;
if (Sse41.X64.IsSupported || (AdvSimd.Arm64.IsSupported && BitConverter.IsLittleEndian))
{
nonAsciiUtf16DataMask = Vector128.Create(unchecked((short)0xFF80)); // mask of non-ASCII bits in a UTF-16 char
}
// Begin the main loop.
#if DEBUG
char* pLastBufferPosProcessed = null; // used for invariant checking in debug builds
#endif
uint thisDWord;
Debug.Assert(pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer);
do
{
// Read 32 bits at a time. This is enough to hold any possible UTF16-encoded scalar.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
AfterReadDWord:
#if DEBUG
Debug.Assert(pLastBufferPosProcessed < pInputBuffer, "Algorithm should've made forward progress since last read.");
pLastBufferPosProcessed = pInputBuffer;
#endif
// First, check for the common case of all-ASCII chars.
if (Utf16Utility.AllCharsInUInt32AreAscii(thisDWord))
{
// We read an all-ASCII sequence (2 chars).
if (outputBytesRemaining < 2)
{
goto ProcessOneCharFromCurrentDWordAndFinish; // running out of space, but may be able to write some data
}
// The high WORD of the local declared below might be populated with garbage
// as a result of our shifts below, but that's ok since we're only going to
// write the low WORD.
//
// [ 00000000 0bbbbbbb | 00000000 0aaaaaaa ] -> [ 00000000 0bbbbbbb | 0bbbbbbb 0aaaaaaa ]
// (Same logic works regardless of endianness.)
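// For example, on a little-endian machine the chars 'a' 'b' read as the DWORD 0x0062_0061;
// OR-ing in the value shifted right by 8 gives 0x0062_6261, and the low WORD 0x6261 written
// below is exactly the bytes 61 62 ("ab").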
uint valueToWrite = thisDWord | (thisDWord >> 8);
Unsafe.WriteUnaligned<ushort>(pOutputBuffer, (ushort)valueToWrite);
pInputBuffer += 2;
pOutputBuffer += 2;
outputBytesRemaining -= 2;
// If we saw a sequence of all ASCII, there's a good chance a significant amount of following data is also ASCII.
// What follows is basically an unrolled loop with poor man's vectorization.
uint inputCharsRemaining = (uint)(pFinalPosWhereCanReadDWordFromInputBuffer - pInputBuffer) + 2;
uint minElementsRemaining = (uint)Math.Min(inputCharsRemaining, outputBytesRemaining);
if (Sse41.X64.IsSupported || (AdvSimd.Arm64.IsSupported && BitConverter.IsLittleEndian))
{
// Try reading and writing 8 elements per iteration.
uint maxIters = minElementsRemaining / 8;
ulong possibleNonAsciiQWord;
int i;
Vector128<short> utf16Data;
for (i = 0; (uint)i < maxIters; i++)
{
// The linker won't trim out nonAsciiUtf16DataMask unless this is in the loop.
// Luckily, this is a nop and will be elided by the JIT
Unsafe.SkipInit(out nonAsciiUtf16DataMask);
utf16Data = Unsafe.ReadUnaligned<Vector128<short>>(pInputBuffer);
if (AdvSimd.IsSupported)
{
Vector128<short> isUtf16DataNonAscii = AdvSimd.CompareTest(utf16Data, nonAsciiUtf16DataMask);
bool hasNonAsciiDataInVector = AdvSimd.Arm64.MinPairwise(isUtf16DataNonAscii, isUtf16DataNonAscii).AsUInt64().ToScalar() != 0;
if (hasNonAsciiDataInVector)
{
goto LoopTerminatedDueToNonAsciiDataInVectorLocal;
}
Vector64<byte> lower = AdvSimd.ExtractNarrowingSaturateUnsignedLower(utf16Data);
AdvSimd.Store(pOutputBuffer, lower);
}
else
{
if (!Sse41.TestZ(utf16Data, nonAsciiUtf16DataMask))
{
goto LoopTerminatedDueToNonAsciiDataInVectorLocal;
}
// narrow and write
Sse2.StoreScalar((ulong*)pOutputBuffer /* unaligned */, Sse2.PackUnsignedSaturate(utf16Data, utf16Data).AsUInt64());
}
pInputBuffer += 8;
pOutputBuffer += 8;
}
outputBytesRemaining -= 8 * i;
// Can we perform one more iteration, but reading & writing 4 elements instead of 8?
if ((minElementsRemaining & 4) != 0)
{
possibleNonAsciiQWord = Unsafe.ReadUnaligned<ulong>(pInputBuffer);
if (!Utf16Utility.AllCharsInUInt64AreAscii(possibleNonAsciiQWord))
{
goto LoopTerminatedDueToNonAsciiDataInPossibleNonAsciiQWordLocal;
}
utf16Data = Vector128.CreateScalarUnsafe(possibleNonAsciiQWord).AsInt16();
if (AdvSimd.IsSupported)
{
Vector64<byte> lower = AdvSimd.ExtractNarrowingSaturateUnsignedLower(utf16Data);
AdvSimd.StoreSelectedScalar((uint*)pOutputBuffer, lower.AsUInt32(), 0);
}
else
{
Unsafe.WriteUnaligned<uint>(pOutputBuffer, Sse2.ConvertToUInt32(Sse2.PackUnsignedSaturate(utf16Data, utf16Data).AsUInt32()));
}
pInputBuffer += 4;
pOutputBuffer += 4;
outputBytesRemaining -= 4;
}
continue; // Go back to beginning of main loop, read data, check for ASCII
LoopTerminatedDueToNonAsciiDataInVectorLocal:
outputBytesRemaining -= 8 * i;
if (Sse2.X64.IsSupported)
{
possibleNonAsciiQWord = Sse2.X64.ConvertToUInt64(utf16Data.AsUInt64());
}
else
{
possibleNonAsciiQWord = utf16Data.AsUInt64().ToScalar();
}
// Temporarily set 'possibleNonAsciiQWord' to be the low 64 bits of the vector,
// then check whether it's all-ASCII. If so, narrow and write to the destination
// buffer. Since we know that either the high 64 bits or the low 64 bits of the
// vector contains non-ASCII data, by the end of the following block the
// 'possibleNonAsciiQWord' local is guaranteed to contain the non-ASCII segment.
if (Utf16Utility.AllCharsInUInt64AreAscii(possibleNonAsciiQWord)) // all chars in first QWORD are ASCII
{
if (AdvSimd.IsSupported)
{
Vector64<byte> lower = AdvSimd.ExtractNarrowingSaturateUnsignedLower(utf16Data);
AdvSimd.StoreSelectedScalar((uint*)pOutputBuffer, lower.AsUInt32(), 0);
}
else
{
Unsafe.WriteUnaligned<uint>(pOutputBuffer, Sse2.ConvertToUInt32(Sse2.PackUnsignedSaturate(utf16Data, utf16Data).AsUInt32()));
}
pInputBuffer += 4;
pOutputBuffer += 4;
outputBytesRemaining -= 4;
possibleNonAsciiQWord = utf16Data.AsUInt64().GetElement(1);
}
LoopTerminatedDueToNonAsciiDataInPossibleNonAsciiQWordLocal:
Debug.Assert(!Utf16Utility.AllCharsInUInt64AreAscii(possibleNonAsciiQWord)); // this condition should've been checked earlier
thisDWord = (uint)possibleNonAsciiQWord;
if (Utf16Utility.AllCharsInUInt32AreAscii(thisDWord))
{
// [ 00000000 0bbbbbbb | 00000000 0aaaaaaa ] -> [ 00000000 0bbbbbbb | 0bbbbbbb 0aaaaaaa ]
Unsafe.WriteUnaligned<ushort>(pOutputBuffer, (ushort)(thisDWord | (thisDWord >> 8)));
pInputBuffer += 2;
pOutputBuffer += 2;
outputBytesRemaining -= 2;
thisDWord = (uint)(possibleNonAsciiQWord >> 32);
}
goto AfterReadDWordSkipAllCharsAsciiCheck;
}
else
{
// Can't use SSE41 x64, so we'll only read and write 4 elements per iteration.
uint maxIters = minElementsRemaining / 4;
uint secondDWord;
int i;
for (i = 0; (uint)i < maxIters; i++)
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
secondDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer + 2);
if (!Utf16Utility.AllCharsInUInt32AreAscii(thisDWord | secondDWord))
{
goto LoopTerminatedDueToNonAsciiData;
}
// [ 00000000 0bbbbbbb | 00000000 0aaaaaaa ] -> [ 00000000 0bbbbbbb | 0bbbbbbb 0aaaaaaa ]
// (Same logic works regardless of endianness.)
Unsafe.WriteUnaligned<ushort>(pOutputBuffer, (ushort)(thisDWord | (thisDWord >> 8)));
Unsafe.WriteUnaligned<ushort>(pOutputBuffer + 2, (ushort)(secondDWord | (secondDWord >> 8)));
pInputBuffer += 4;
pOutputBuffer += 4;
}
outputBytesRemaining -= 4 * i;
continue; // Go back to beginning of main loop, read data, check for ASCII
LoopTerminatedDueToNonAsciiData:
outputBytesRemaining -= 4 * i;
// First, see if we can drain any ASCII data from the first DWORD.
if (Utf16Utility.AllCharsInUInt32AreAscii(thisDWord))
{
// [ 00000000 0bbbbbbb | 00000000 0aaaaaaa ] -> [ 00000000 0bbbbbbb | 0bbbbbbb 0aaaaaaa ]
// (Same logic works regardless of endianness.)
Unsafe.WriteUnaligned<ushort>(pOutputBuffer, (ushort)(thisDWord | (thisDWord >> 8)));
pInputBuffer += 2;
pOutputBuffer += 2;
outputBytesRemaining -= 2;
thisDWord = secondDWord;
}
goto AfterReadDWordSkipAllCharsAsciiCheck;
}
}
AfterReadDWordSkipAllCharsAsciiCheck:
Debug.Assert(!Utf16Utility.AllCharsInUInt32AreAscii(thisDWord)); // this should have been handled earlier
// Next, try stripping off the first ASCII char if it exists.
// We don't check for a second ASCII char since that should have been handled above.
if (IsFirstCharAscii(thisDWord))
{
if (outputBytesRemaining == 0)
{
goto OutputBufferTooSmall;
}
if (BitConverter.IsLittleEndian)
{
pOutputBuffer[0] = (byte)thisDWord; // extract [ ## ## 00 AA ]
}
else
{
pOutputBuffer[0] = (byte)(thisDWord >> 16); // extract [ 00 AA ## ## ]
}
pInputBuffer++;
pOutputBuffer++;
outputBytesRemaining--;
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // input buffer doesn't contain enough data to read a DWORD
}
else
{
// The input buffer at the current offset contains a non-ASCII char.
// Read an entire DWORD and fall through to non-ASCII consumption logic.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
}
}
// At this point, we know the first char in the buffer is non-ASCII, but we haven't yet validated it.
if (!IsFirstCharAtLeastThreeUtf8Bytes(thisDWord))
{
TryConsumeMultipleTwoByteSequences:
// For certain text (Greek, Cyrillic, ...), 2-byte sequences tend to be clustered. We'll try transcoding them in
// a tight loop without falling back to the main loop.
if (IsSecondCharTwoUtf8Bytes(thisDWord))
{
// We have two runs of two bytes each.
if (outputBytesRemaining < 4)
{
goto ProcessOneCharFromCurrentDWordAndFinish; // running out of output buffer
}
Unsafe.WriteUnaligned<uint>(pOutputBuffer, ExtractTwoUtf8TwoByteSequencesFromTwoPackedUtf16Chars(thisDWord));
pInputBuffer += 2;
pOutputBuffer += 4;
outputBytesRemaining -= 4;
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // Running out of data - go down slow path
}
else
{
// Optimization: If we read a long run of two-byte sequences, the next sequence is probably
// also two bytes. Check for that first before going back to the beginning of the loop.
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
if (IsFirstCharTwoUtf8Bytes(thisDWord))
{
// Validated we have a two-byte sequence coming up
goto TryConsumeMultipleTwoByteSequences;
}
// If we reached this point, the next sequence is something other than a valid
// two-byte sequence, so go back to the beginning of the loop.
goto AfterReadDWord;
}
}
if (outputBytesRemaining < 2)
{
goto OutputBufferTooSmall;
}
Unsafe.WriteUnaligned<ushort>(pOutputBuffer, (ushort)ExtractUtf8TwoByteSequenceFromFirstUtf16Char(thisDWord));
// The buffer contains a 2-byte sequence followed by 2 bytes that aren't a 2-byte sequence.
// Unlikely that a 3-byte sequence would follow a 2-byte sequence, so perhaps remaining
// char is ASCII?
if (IsSecondCharAscii(thisDWord))
{
if (outputBytesRemaining >= 3)
{
if (BitConverter.IsLittleEndian)
{
thisDWord >>= 16;
}
pOutputBuffer[2] = (byte)thisDWord;
pInputBuffer += 2;
pOutputBuffer += 3;
outputBytesRemaining -= 3;
continue; // go back to original bounds check and check for ASCII
}
else
{
pInputBuffer++;
pOutputBuffer += 2;
goto OutputBufferTooSmall;
}
}
else
{
pInputBuffer++;
pOutputBuffer += 2;
outputBytesRemaining -= 2;
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
goto BeforeProcessThreeByteSequence; // we know the next byte isn't ASCII, and it's not the start of a 2-byte sequence (this was checked above)
}
}
}
// Check the 3-byte case.
BeforeProcessThreeByteSequence:
if (!IsFirstCharSurrogate(thisDWord))
{
// Optimization: A three-byte character could indicate CJK text, which makes it likely
// that the character following this one is also CJK. We'll perform the check now
// rather than jumping to the beginning of the main loop.
if (IsSecondCharAtLeastThreeUtf8Bytes(thisDWord))
{
if (!IsSecondCharSurrogate(thisDWord))
{
if (outputBytesRemaining < 6)
{
goto ConsumeSingleThreeByteRun; // not enough space - try consuming as much as we can
}
WriteTwoUtf16CharsAsTwoUtf8ThreeByteSequences(ref *pOutputBuffer, thisDWord);
pInputBuffer += 2;
pOutputBuffer += 6;
outputBytesRemaining -= 6;
// Try to remain in the 3-byte processing loop if at all possible.
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
if (IsFirstCharAtLeastThreeUtf8Bytes(thisDWord))
{
goto BeforeProcessThreeByteSequence;
}
else
{
// Fall back to standard processing loop since we don't know how to optimize this.
goto AfterReadDWord;
}
}
}
}
ConsumeSingleThreeByteRun:
if (outputBytesRemaining < 3)
{
goto OutputBufferTooSmall;
}
WriteFirstUtf16CharAsUtf8ThreeByteSequence(ref *pOutputBuffer, thisDWord);
pInputBuffer++;
pOutputBuffer += 3;
outputBytesRemaining -= 3;
// Occasionally one-off ASCII characters like spaces, periods, or newlines will make their way
// into the text. If this happens, strip the ASCII char off now before seeing if the next
// character consists of three code units.
if (IsSecondCharAscii(thisDWord))
{
if (outputBytesRemaining == 0)
{
goto OutputBufferTooSmall;
}
if (BitConverter.IsLittleEndian)
{
*pOutputBuffer = (byte)(thisDWord >> 16);
}
else
{
*pOutputBuffer = (byte)(thisDWord);
}
pInputBuffer++;
pOutputBuffer++;
outputBytesRemaining--;
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
if (IsFirstCharAtLeastThreeUtf8Bytes(thisDWord))
{
goto BeforeProcessThreeByteSequence;
}
else
{
// Fall back to standard processing loop since we don't know how to optimize this.
goto AfterReadDWord;
}
}
}
if (pInputBuffer > pFinalPosWhereCanReadDWordFromInputBuffer)
{
goto ProcessNextCharAndFinish; // Running out of data - go down slow path
}
else
{
thisDWord = Unsafe.ReadUnaligned<uint>(pInputBuffer);
goto AfterReadDWordSkipAllCharsAsciiCheck; // we just checked above that this value isn't ASCII
}
}
// Four byte sequence processing
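// Illustrative example: U+10400 is the surrogate pair D801 DC00 in UTF-16 and becomes the four
// UTF-8 bytes F0 90 90 80.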
if (IsWellFormedUtf16SurrogatePair(thisDWord))
{
if (outputBytesRemaining < 4)
{
goto OutputBufferTooSmall;
}
Unsafe.WriteUnaligned<uint>(pOutputBuffer, ExtractFourUtf8BytesFromSurrogatePair(thisDWord));
pInputBuffer += 2;
pOutputBuffer += 4;
outputBytesRemaining -= 4;
continue; // go back to beginning of loop for processing
}
goto Error; // an ill-formed surrogate sequence: high not followed by low, or low not preceded by high
} while (pInputBuffer <= pFinalPosWhereCanReadDWordFromInputBuffer);
ProcessNextCharAndFinish:
inputLength = (int)(pFinalPosWhereCanReadDWordFromInputBuffer - pInputBuffer) + CharsPerDWord;
ProcessInputOfLessThanDWordSize:
Debug.Assert(inputLength < CharsPerDWord);
if (inputLength == 0)
{
goto InputBufferFullyConsumed;
}
uint thisChar = *pInputBuffer;
goto ProcessFinalChar;
ProcessOneCharFromCurrentDWordAndFinish:
if (BitConverter.IsLittleEndian)
{
thisChar = thisDWord & 0xFFFFu; // preserve only the first char
}
else
{
thisChar = thisDWord >> 16; // preserve only the first char
}
ProcessFinalChar:
{
if (thisChar <= 0x7Fu)
{
if (outputBytesRemaining == 0)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
// 1-byte (ASCII) case
*pOutputBuffer = (byte)thisChar;
pInputBuffer++;
pOutputBuffer++;
}
else if (thisChar < 0x0800u)
{
if (outputBytesRemaining < 2)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
// 2-byte case
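// For example, U+0416 has the bits 10000 010110; prefixing 110 and 10 gives the two bytes D0 96.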
pOutputBuffer[1] = (byte)((thisChar & 0x3Fu) | unchecked((uint)(sbyte)0x80)); // [ 10xxxxxx ]
pOutputBuffer[0] = (byte)((thisChar >> 6) | unchecked((uint)(sbyte)0xC0)); // [ 110yyyyy ]
pInputBuffer++;
pOutputBuffer += 2;
}
else if (!UnicodeUtility.IsSurrogateCodePoint(thisChar))
{
if (outputBytesRemaining < 3)
{
goto OutputBufferTooSmall; // we have no hope of writing anything to the output
}
// 3-byte case
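// For example, U+3042 has the bits 0011 000001 000010; prefixing 1110 / 10 / 10 gives the three
// bytes E3 81 82.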
pOutputBuffer[2] = (byte)((thisChar & 0x3Fu) | unchecked((uint)(sbyte)0x80)); // [ 10xxxxxx ]
pOutputBuffer[1] = (byte)(((thisChar >> 6) & 0x3Fu) | unchecked((uint)(sbyte)0x80)); // [ 10yyyyyy ]
pOutputBuffer[0] = (byte)((thisChar >> 12) | unchecked((uint)(sbyte)0xE0)); // [ 1110zzzz ]
pInputBuffer++;
pOutputBuffer += 3;
}
else if (thisChar <= 0xDBFFu)
{
// UTF-16 high surrogate code point with no trailing data, report incomplete input buffer
goto InputBufferTooSmall;
}
else
{
// UTF-16 low surrogate code point with no leading data, report error
goto Error;
}
}
// There are two ways we can end up here. Either we were running low on input data,
// or we were running low on space in the destination buffer. If we're running low on
// input data (label targets ProcessInputOfLessThanDWordSize and ProcessNextCharAndFinish),
// then the inputLength value is guaranteed to be between 0 and 1, and we should return Done.
// If we're running low on destination buffer space (label target ProcessOneCharFromCurrentDWordAndFinish),
// then we didn't modify inputLength since entering the main loop, which means it should
// still have a value of >= 2. So checking the value of inputLength is all we need to do to determine
// which of the two scenarios we're in.
if (inputLength > 1)
{
goto OutputBufferTooSmall;
}
InputBufferFullyConsumed:
OperationStatus retVal = OperationStatus.Done;
goto ReturnCommon;
InputBufferTooSmall:
retVal = OperationStatus.NeedMoreData;
goto ReturnCommon;
OutputBufferTooSmall:
retVal = OperationStatus.DestinationTooSmall;
goto ReturnCommon;
Error:
retVal = OperationStatus.InvalidData;
goto ReturnCommon;
ReturnCommon:
pInputBufferRemaining = pInputBuffer;
pOutputBufferRemaining = pOutputBuffer;
return retVal;
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/HardwareIntrinsics/General/Vector128/OnesComplement.Int32.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void OnesComplementInt32()
{
var test = new VectorUnaryOpTest__OnesComplementInt32();
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class VectorUnaryOpTest__OnesComplementInt32
{
private struct DataTable
{
private byte[] inArray1;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Int32[] inArray1, Int32[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Int32>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Int32>();
if ((alignment != 32 && alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Int32, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<Int32> _fld1;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref testStruct._fld1), ref Unsafe.As<Int32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
return testStruct;
}
public void RunStructFldScenario(VectorUnaryOpTest__OnesComplementInt32 testClass)
{
var result = Vector128.OnesComplement(_fld1);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, testClass._dataTable.outArrayPtr);
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Int32>>() / sizeof(Int32);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Int32>>() / sizeof(Int32);
private static Int32[] _data1 = new Int32[Op1ElementCount];
private static Vector128<Int32> _clsVar1;
private Vector128<Int32> _fld1;
private DataTable _dataTable;
static VectorUnaryOpTest__OnesComplementInt32()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _clsVar1), ref Unsafe.As<Int32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
}
public VectorUnaryOpTest__OnesComplementInt32()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _fld1), ref Unsafe.As<Int32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
_dataTable = new DataTable(_data1, new Int32[RetElementCount], LargestVectorSize);
}
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Vector128.OnesComplement(
Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var method = typeof(Vector128).GetMethod(nameof(Vector128.OnesComplement), new Type[] {
typeof(Vector128<Int32>)
});
if (method is null)
{
method = typeof(Vector128).GetMethod(nameof(Vector128.OnesComplement), 1, new Type[] {
typeof(Vector128<>).MakeGenericType(Type.MakeGenericMethodParameter(0))
});
}
if (method.IsGenericMethodDefinition)
{
method = method.MakeGenericMethod(typeof(Int32));
}
var result = method.Invoke(null, new object[] {
Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Vector128.OnesComplement(
_clsVar1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr);
var result = Vector128.OnesComplement(op1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new VectorUnaryOpTest__OnesComplementInt32();
var result = Vector128.OnesComplement(test._fld1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, _dataTable.outArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Vector128.OnesComplement(_fld1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Vector128.OnesComplement(test._fld1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
private void ValidateResult(Vector128<Int32> op1, void* result, [CallerMemberName] string method = "")
{
Int32[] inArray1 = new Int32[Op1ElementCount];
Int32[] outArray = new Int32[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Int32, byte>(ref inArray1[0]), op1);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int32>>());
ValidateResult(inArray1, outArray, method);
}
private void ValidateResult(void* op1, void* result, [CallerMemberName] string method = "")
{
Int32[] inArray1 = new Int32[Op1ElementCount];
Int32[] outArray = new Int32[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Int32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int32>>());
ValidateResult(inArray1, outArray, method);
}
private void ValidateResult(Int32[] firstOp, Int32[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (result[0] != (int)(~firstOp[0]))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (result[i] != (int)(~firstOp[i]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Vector128)}.{nameof(Vector128.OnesComplement)}<Int32>(Vector128<Int32>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void OnesComplementInt32()
{
var test = new VectorUnaryOpTest__OnesComplementInt32();
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class VectorUnaryOpTest__OnesComplementInt32
{
private struct DataTable
{
private byte[] inArray1;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Int32[] inArray1, Int32[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Int32>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Int32>();
if ((alignment != 32 && alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Int32, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<Int32> _fld1;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref testStruct._fld1), ref Unsafe.As<Int32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
return testStruct;
}
public void RunStructFldScenario(VectorUnaryOpTest__OnesComplementInt32 testClass)
{
var result = Vector128.OnesComplement(_fld1);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, testClass._dataTable.outArrayPtr);
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Int32>>() / sizeof(Int32);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Int32>>() / sizeof(Int32);
private static Int32[] _data1 = new Int32[Op1ElementCount];
private static Vector128<Int32> _clsVar1;
private Vector128<Int32> _fld1;
private DataTable _dataTable;
static VectorUnaryOpTest__OnesComplementInt32()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _clsVar1), ref Unsafe.As<Int32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
}
public VectorUnaryOpTest__OnesComplementInt32()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _fld1), ref Unsafe.As<Int32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
_dataTable = new DataTable(_data1, new Int32[RetElementCount], LargestVectorSize);
}
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Vector128.OnesComplement(
Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var method = typeof(Vector128).GetMethod(nameof(Vector128.OnesComplement), new Type[] {
typeof(Vector128<Int32>)
});
if (method is null)
{
method = typeof(Vector128).GetMethod(nameof(Vector128.OnesComplement), 1, new Type[] {
typeof(Vector128<>).MakeGenericType(Type.MakeGenericMethodParameter(0))
});
}
if (method.IsGenericMethodDefinition)
{
method = method.MakeGenericMethod(typeof(Int32));
}
var result = method.Invoke(null, new object[] {
Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Vector128.OnesComplement(
_clsVar1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr);
var result = Vector128.OnesComplement(op1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new VectorUnaryOpTest__OnesComplementInt32();
var result = Vector128.OnesComplement(test._fld1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, _dataTable.outArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Vector128.OnesComplement(_fld1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Vector128.OnesComplement(test._fld1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
private void ValidateResult(Vector128<Int32> op1, void* result, [CallerMemberName] string method = "")
{
Int32[] inArray1 = new Int32[Op1ElementCount];
Int32[] outArray = new Int32[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Int32, byte>(ref inArray1[0]), op1);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int32>>());
ValidateResult(inArray1, outArray, method);
}
private void ValidateResult(void* op1, void* result, [CallerMemberName] string method = "")
{
Int32[] inArray1 = new Int32[Op1ElementCount];
Int32[] outArray = new Int32[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Int32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int32>>());
ValidateResult(inArray1, outArray, method);
}
private void ValidateResult(Int32[] firstOp, Int32[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (result[0] != (int)(~firstOp[0]))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (result[i] != (int)(~firstOp[i]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Vector128)}.{nameof(Vector128.OnesComplement)}<Int32>(Vector128<Int32>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| -1 |
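As a quick orientation to what the scenarios above exercise, here is a hedged, standalone use of the API under test (assuming .NET 7 or later, where Vector128.OnesComplement is exposed as a cross-platform helper); the input value 5 is arbitrary.

using System;
using System.Runtime.Intrinsics;

class OnesComplementDemo
{
    static void Main()
    {
        // Broadcast 5 (0x00000005) into all four Int32 lanes, then flip every bit.
        Vector128<int> input = Vector128.Create(5);
        Vector128<int> inverted = Vector128.OnesComplement(input);

        for (int i = 0; i < Vector128<int>.Count; i++)
        {
            // Each lane should equal ~5 == -6, matching the scalar check in ValidateResult.
            Console.WriteLine($"lane {i}: {inverted.GetElement(i)} (expected {~5})");
        }
    }
}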
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/native/libs/System.Globalization.Native/pal_calendarData.c
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
#include <assert.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include "pal_locale_internal.h"
#include "pal_errors_internal.h"
#include "pal_calendarData.h"
#if defined(TARGET_UNIX)
#include <strings.h>
#define STRING_COPY(destination, numberOfElements, source) \
strncpy(destination, source, numberOfElements); \
destination[numberOfElements - 1] = 0;
#elif defined(TARGET_WINDOWS)
#define strcasecmp _stricmp
#define STRING_COPY(destination, numberOfElements, source) strncpy_s(destination, numberOfElements, source, _TRUNCATE);
#endif
#define GREGORIAN_NAME "gregorian"
#define JAPANESE_NAME "japanese"
#define BUDDHIST_NAME "buddhist"
#define HEBREW_NAME "hebrew"
#define DANGI_NAME "dangi"
#define PERSIAN_NAME "persian"
#define ISLAMIC_NAME "islamic"
#define ISLAMIC_UMALQURA_NAME "islamic-umalqura"
#define ROC_NAME "roc"
#define JAPANESE_LOCALE_AND_CALENDAR "ja_JP@calendar=japanese"
static const UChar UDAT_MONTH_DAY_UCHAR[] = {'M', 'M', 'M', 'M', 'd', '\0'};
static const UChar UDAT_YEAR_NUM_MONTH_DAY_UCHAR[] = {'y', 'M', 'd', '\0'};
static const UChar UDAT_YEAR_MONTH_UCHAR[] = {'y', 'M', 'M', 'M', 'M', '\0'};
/*
Function:
GetCalendarName
Gets the associated ICU calendar name for the CalendarId.
*/
static const char* GetCalendarName(CalendarId calendarId)
{
switch (calendarId)
{
case JAPAN:
return JAPANESE_NAME;
case THAI:
return BUDDHIST_NAME;
case HEBREW:
return HEBREW_NAME;
case KOREA:
return DANGI_NAME;
case PERSIAN:
return PERSIAN_NAME;
case HIJRI:
return ISLAMIC_NAME;
case UMALQURA:
return ISLAMIC_UMALQURA_NAME;
case TAIWAN:
return ROC_NAME;
case GREGORIAN:
case GREGORIAN_US:
case GREGORIAN_ARABIC:
case GREGORIAN_ME_FRENCH:
case GREGORIAN_XLIT_ENGLISH:
case GREGORIAN_XLIT_FRENCH:
case JULIAN:
case LUNAR_ETO_CHN:
case LUNAR_ETO_KOR:
case LUNAR_ETO_ROKUYOU:
case SAKA:
// don't support the lunisolar calendars until we have a solid understanding
// of how they map to the ICU/CLDR calendars
case CHINESELUNISOLAR:
case KOREANLUNISOLAR:
case JAPANESELUNISOLAR:
case TAIWANLUNISOLAR:
default:
return GREGORIAN_NAME;
}
}
/*
Function:
GetCalendarId
Gets the associated CalendarId for the ICU calendar name.
*/
static CalendarId GetCalendarId(const char* calendarName)
{
if (strcasecmp(calendarName, GREGORIAN_NAME) == 0)
// TODO: what about the other gregorian types?
return GREGORIAN;
else if (strcasecmp(calendarName, JAPANESE_NAME) == 0)
return JAPAN;
else if (strcasecmp(calendarName, BUDDHIST_NAME) == 0)
return THAI;
else if (strcasecmp(calendarName, HEBREW_NAME) == 0)
return HEBREW;
else if (strcasecmp(calendarName, DANGI_NAME) == 0)
return KOREA;
else if (strcasecmp(calendarName, PERSIAN_NAME) == 0)
return PERSIAN;
else if (strcasecmp(calendarName, ISLAMIC_NAME) == 0)
return HIJRI;
else if (strcasecmp(calendarName, ISLAMIC_UMALQURA_NAME) == 0)
return UMALQURA;
else if (strcasecmp(calendarName, ROC_NAME) == 0)
return TAIWAN;
else
return UNINITIALIZED_VALUE;
}
/*
Function:
GetCalendars
Returns the list of CalendarIds that are available for the specified locale.
*/
int32_t GlobalizationNative_GetCalendars(
const UChar* localeName, CalendarId* calendars, int32_t calendarsCapacity)
{
UErrorCode err = U_ZERO_ERROR;
char locale[ULOC_FULLNAME_CAPACITY];
GetLocale(localeName, locale, ULOC_FULLNAME_CAPACITY, false, &err);
UEnumeration* pEnum = ucal_getKeywordValuesForLocale("calendar", locale, true, &err);
int stringEnumeratorCount = uenum_count(pEnum, &err);
int calendarsReturned = 0;
for (int i = 0; i < stringEnumeratorCount && calendarsReturned < calendarsCapacity; i++)
{
int32_t calendarNameLength = 0;
const char* calendarName = uenum_next(pEnum, &calendarNameLength, &err);
if (U_SUCCESS(err))
{
CalendarId calendarId = GetCalendarId(calendarName);
if (calendarId != UNINITIALIZED_VALUE)
{
calendars[calendarsReturned] = calendarId;
calendarsReturned++;
}
}
}
uenum_close(pEnum);
return calendarsReturned;
}
/*
Function:
GetMonthDayPattern
Gets the Month-Day DateTime pattern for the specified locale.
*/
static ResultCode GetMonthDayPattern(const char* locale,
UChar* sMonthDay,
int32_t stringCapacity)
{
UErrorCode err = U_ZERO_ERROR;
UDateTimePatternGenerator* pGenerator = udatpg_open(locale, &err);
udatpg_getBestPattern(pGenerator, UDAT_MONTH_DAY_UCHAR, -1, sMonthDay, stringCapacity, &err);
udatpg_close(pGenerator);
return GetResultCode(err);
}
/*
Function:
GetNativeCalendarName
Gets the native calendar name.
*/
static ResultCode GetNativeCalendarName(const char* locale,
CalendarId calendarId,
UChar* nativeName,
int32_t stringCapacity)
{
UErrorCode err = U_ZERO_ERROR;
ULocaleDisplayNames* pDisplayNames = uldn_open(locale, ULDN_STANDARD_NAMES, &err);
uldn_keyValueDisplayName(pDisplayNames, "calendar", GetCalendarName(calendarId), nativeName, stringCapacity, &err);
uldn_close(pDisplayNames);
return GetResultCode(err);
}
/*
Function:
GetCalendarInfo
Gets a single string of calendar information by filling the result parameter
with the requested value.
*/
ResultCode GlobalizationNative_GetCalendarInfo(
const UChar* localeName, CalendarId calendarId, CalendarDataType dataType, UChar* result, int32_t resultCapacity)
{
UErrorCode err = U_ZERO_ERROR;
char locale[ULOC_FULLNAME_CAPACITY];
GetLocale(localeName, locale, ULOC_FULLNAME_CAPACITY, false, &err);
if (U_FAILURE(err))
return UnknownError;
switch (dataType)
{
case CalendarData_NativeName:
return GetNativeCalendarName(locale, calendarId, result, resultCapacity);
case CalendarData_MonthDay:
return GetMonthDayPattern(locale, result, resultCapacity);
default:
assert(false);
return UnknownError;
}
}
/*
Function:
InvokeCallbackForDatePattern
Gets the ICU date pattern for the specified locale and EStyle and invokes the
callback with the result.
*/
static int InvokeCallbackForDatePattern(const char* locale,
UDateFormatStyle style,
EnumCalendarInfoCallback callback,
const void* context)
{
UErrorCode err = U_ZERO_ERROR;
UDateFormat* pFormat = udat_open(UDAT_NONE, style, locale, NULL, 0, NULL, 0, &err);
if (U_FAILURE(err))
return false;
UErrorCode ignore = U_ZERO_ERROR;
int32_t patternLen = udat_toPattern(pFormat, false, NULL, 0, &ignore) + 1;
UChar* pattern = (UChar*)calloc((size_t)patternLen, sizeof(UChar));
if (pattern == NULL)
{
udat_close(pFormat);
return false;
}
udat_toPattern(pFormat, false, pattern, patternLen, &err);
udat_close(pFormat);
if (U_SUCCESS(err))
{
callback(pattern, context);
}
free(pattern);
return UErrorCodeToBool(err);
}
/*
Function:
InvokeCallbackForDateTimePattern
Gets the DateTime pattern for the specified skeleton and invokes the callback
with the retrieved value.
*/
static int InvokeCallbackForDateTimePattern(const char* locale,
const UChar* patternSkeleton,
EnumCalendarInfoCallback callback,
const void* context)
{
UErrorCode err = U_ZERO_ERROR;
UDateTimePatternGenerator* pGenerator = udatpg_open(locale, &err);
if (U_FAILURE(err))
return false;
UErrorCode ignore = U_ZERO_ERROR;
int32_t patternLen = udatpg_getBestPattern(pGenerator, patternSkeleton, -1, NULL, 0, &ignore) + 1;
UChar* bestPattern = (UChar*)calloc((size_t)patternLen, sizeof(UChar));
if (bestPattern == NULL)
{
udatpg_close(pGenerator);
return false;
}
udatpg_getBestPattern(pGenerator, patternSkeleton, -1, bestPattern, patternLen, &err);
udatpg_close(pGenerator);
if (U_SUCCESS(err))
{
callback(bestPattern, context);
}
free(bestPattern);
return UErrorCodeToBool(err);
}
/*
Function:
EnumSymbols
Enumerates all of the symbols of a type for a locale and calendar and invokes a callback
for each value.
*/
static int32_t EnumSymbols(const char* locale,
CalendarId calendarId,
UDateFormatSymbolType type,
int32_t startIndex,
EnumCalendarInfoCallback callback,
const void* context)
{
UErrorCode err = U_ZERO_ERROR;
UDateFormat* pFormat = udat_open(UDAT_DEFAULT, UDAT_DEFAULT, locale, NULL, 0, NULL, 0, &err);
if (U_FAILURE(err))
return false;
char localeWithCalendarName[ULOC_FULLNAME_CAPACITY];
STRING_COPY(localeWithCalendarName, sizeof(localeWithCalendarName), locale);
uloc_setKeywordValue("calendar", GetCalendarName(calendarId), localeWithCalendarName, ULOC_FULLNAME_CAPACITY, &err);
UCalendar* pCalendar = ucal_open(NULL, 0, localeWithCalendarName, UCAL_DEFAULT, &err);
if (U_FAILURE(err))
{
udat_close(pFormat);
return false;
}
udat_setCalendar(pFormat, pCalendar);
int32_t symbolCount = udat_countSymbols(pFormat, type);
UChar stackSymbolBuf[100];
UChar* symbolBuf;
for (int32_t i = startIndex; U_SUCCESS(err) && i < symbolCount; i++)
{
UErrorCode ignore = U_ZERO_ERROR;
int symbolLen = udat_getSymbols(pFormat, type, i, NULL, 0, &ignore) + 1;
if ((size_t)symbolLen <= sizeof(stackSymbolBuf) / sizeof(stackSymbolBuf[0]))
{
symbolBuf = stackSymbolBuf;
}
else
{
symbolBuf = (UChar*)calloc((size_t)symbolLen, sizeof(UChar));
if (symbolBuf == NULL)
{
err = U_MEMORY_ALLOCATION_ERROR;
break;
}
}
udat_getSymbols(pFormat, type, i, symbolBuf, symbolLen, &err);
if (U_SUCCESS(err))
{
callback(symbolBuf, context);
}
if (symbolBuf != stackSymbolBuf)
{
free(symbolBuf);
}
}
udat_close(pFormat);
ucal_close(pCalendar);
return UErrorCodeToBool(err);
}
static void EnumUResourceBundle(const UResourceBundle* bundle,
EnumCalendarInfoCallback callback,
const void* context)
{
int32_t eraNameCount = ures_getSize(bundle);
for (int i = 0; i < eraNameCount; i++)
{
UErrorCode status = U_ZERO_ERROR;
int32_t ignore; // We don't care about the length of the string as it is null terminated.
const UChar* eraName = ures_getStringByIndex(bundle, i, &ignore, &status);
if (U_SUCCESS(status))
{
callback(eraName, context);
}
}
}
static void CloseResBundle(UResourceBundle* rootResBundle,
UResourceBundle* calResBundle,
UResourceBundle* targetCalResBundle,
UResourceBundle* erasColResBundle,
UResourceBundle* erasResBundle)
{
ures_close(rootResBundle);
ures_close(calResBundle);
ures_close(targetCalResBundle);
ures_close(erasColResBundle);
ures_close(erasResBundle);
}
/*
Function:
EnumAbbrevEraNames
Enumerates all the abbreviated era names of the specified locale and calendar, invoking the
callback function for each era name.
*/
static int32_t EnumAbbrevEraNames(const char* locale,
CalendarId calendarId,
EnumCalendarInfoCallback callback,
const void* context)
{
// The C-API for ICU provides no way to get at the abbreviated era names for a calendar (so we can't use EnumSymbols
    // here). Instead we will try to walk the ICU resource tables directly and fall back to regular era names if we can't
// find good data.
char localeNameBuf[ULOC_FULLNAME_CAPACITY];
char parentNameBuf[ULOC_FULLNAME_CAPACITY];
STRING_COPY(localeNameBuf, sizeof(localeNameBuf), locale);
char* localeNamePtr = localeNameBuf;
char* parentNamePtr = parentNameBuf;
while (true)
{
UErrorCode status = U_ZERO_ERROR;
const char* name = GetCalendarName(calendarId);
UResourceBundle* rootResBundle = ures_open(NULL, localeNamePtr, &status);
UResourceBundle* calResBundle = ures_getByKey(rootResBundle, "calendar", NULL, &status);
UResourceBundle* targetCalResBundle = ures_getByKey(calResBundle, name, NULL, &status);
UResourceBundle* erasColResBundle = ures_getByKey(targetCalResBundle, "eras", NULL, &status);
UResourceBundle* erasResBundle = ures_getByKey(erasColResBundle, "narrow", NULL, &status);
if (U_SUCCESS(status))
{
EnumUResourceBundle(erasResBundle, callback, context);
CloseResBundle(rootResBundle, calResBundle, targetCalResBundle, erasColResBundle, erasResBundle);
return true;
}
// Couldn't find the data we need for this locale, we should fallback.
if (localeNameBuf[0] == 0x0)
{
CloseResBundle(rootResBundle, calResBundle, targetCalResBundle, erasColResBundle, erasResBundle);
// We are already at the root locale so there is nothing to fall back to, just use the regular eras.
break;
}
uloc_getParent(localeNamePtr, parentNamePtr, ULOC_FULLNAME_CAPACITY, &status);
if (U_FAILURE(status))
{
CloseResBundle(rootResBundle, calResBundle, targetCalResBundle, erasColResBundle, erasResBundle);
// Something bad happened getting the parent name, bail out.
break;
}
// Swap localeNamePtr and parentNamePtr, parentNamePtr is what we want to use on the next iteration
// and we can use the current localeName as scratch space if we have to fall back on that
// iteration.
char* temp = localeNamePtr;
localeNamePtr = parentNamePtr;
parentNamePtr = temp;
CloseResBundle(rootResBundle, calResBundle, targetCalResBundle, erasColResBundle, erasResBundle);
}
// Walking the resource bundles didn't work, just use the regular eras.
return EnumSymbols(locale, calendarId, UDAT_ERAS, 0, callback, context);
}
/*
Function:
EnumCalendarInfo
Retrieves a collection of calendar string data specified by the locale,
calendar, and data type.
Allows for a collection of calendar string data to be retrieved by invoking
the callback for each value in the collection.
The context parameter is passed through to the callback along with each string.
*/
int32_t GlobalizationNative_EnumCalendarInfo(EnumCalendarInfoCallback callback,
const UChar* localeName,
CalendarId calendarId,
CalendarDataType dataType,
const void* context)
{
UErrorCode err = U_ZERO_ERROR;
char locale[ULOC_FULLNAME_CAPACITY];
GetLocale(localeName, locale, ULOC_FULLNAME_CAPACITY, false, &err);
if (U_FAILURE(err))
return false;
switch (dataType)
{
case CalendarData_ShortDates:
            // ShortDates map to kShort and kMedium in ICU, but we also add the "yMd"
            // skeleton, as this closely matches what is used on Windows
return InvokeCallbackForDatePattern(locale, UDAT_SHORT, callback, context) &&
InvokeCallbackForDatePattern(locale, UDAT_MEDIUM, callback, context) &&
InvokeCallbackForDateTimePattern(locale, UDAT_YEAR_NUM_MONTH_DAY_UCHAR, callback, context);
case CalendarData_LongDates:
// LongDates map to kFull and kLong in ICU.
return InvokeCallbackForDatePattern(locale, UDAT_FULL, callback, context) &&
InvokeCallbackForDatePattern(locale, UDAT_LONG, callback, context);
case CalendarData_YearMonths:
return InvokeCallbackForDateTimePattern(locale, UDAT_YEAR_MONTH_UCHAR, callback, context);
case CalendarData_DayNames:
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_WEEKDAYS, 1, callback, context);
case CalendarData_AbbrevDayNames:
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_SHORT_WEEKDAYS, 1, callback, context);
case CalendarData_MonthNames:
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_MONTHS, 0, callback, context);
case CalendarData_AbbrevMonthNames:
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_SHORT_MONTHS, 0, callback, context);
case CalendarData_SuperShortDayNames:
// UDAT_STANDALONE_SHORTER_WEEKDAYS was added in ICU 51, and CentOS 7 currently uses ICU 50.
            // fall back to UDAT_STANDALONE_NARROW_WEEKDAYS in that case.
#if HAVE_UDAT_STANDALONE_SHORTER_WEEKDAYS
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_SHORTER_WEEKDAYS, 1, callback, context);
#else
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_NARROW_WEEKDAYS, 1, callback, context);
#endif
case CalendarData_MonthGenitiveNames:
return EnumSymbols(locale, calendarId, UDAT_MONTHS, 0, callback, context);
case CalendarData_AbbrevMonthGenitiveNames:
return EnumSymbols(locale, calendarId, UDAT_SHORT_MONTHS, 0, callback, context);
case CalendarData_EraNames:
return EnumSymbols(locale, calendarId, UDAT_ERAS, 0, callback, context);
case CalendarData_AbbrevEraNames:
return EnumAbbrevEraNames(locale, calendarId, callback, context);
default:
assert(false);
return false;
}
}
/*
Function:
GetLatestJapaneseEra
Gets the latest era in the Japanese calendar.
*/
int32_t GlobalizationNative_GetLatestJapaneseEra()
{
UErrorCode err = U_ZERO_ERROR;
UCalendar* pCal = ucal_open(NULL, 0, JAPANESE_LOCALE_AND_CALENDAR, UCAL_TRADITIONAL, &err);
if (U_FAILURE(err))
return 0;
ucal_set(pCal, UCAL_EXTENDED_YEAR, 9999);
int32_t ret = ucal_get(pCal, UCAL_ERA, &err);
ucal_close(pCal);
return U_SUCCESS(err) ? ret : 0;
}
/*
Function:
GetJapaneseEraStartDate
Gets the starting Gregorian date of the specified Japanese Era.
*/
int32_t GlobalizationNative_GetJapaneseEraStartDate(int32_t era,
int32_t* startYear,
int32_t* startMonth,
int32_t* startDay)
{
*startYear = -1;
*startMonth = -1;
*startDay = -1;
UErrorCode err = U_ZERO_ERROR;
UCalendar* pCal = ucal_open(NULL, 0, JAPANESE_LOCALE_AND_CALENDAR, UCAL_TRADITIONAL, &err);
if (U_FAILURE(err))
return false;
ucal_set(pCal, UCAL_ERA, era);
ucal_set(pCal, UCAL_YEAR, 1);
// UCAL_EXTENDED_YEAR is the gregorian year for the JapaneseCalendar
*startYear = ucal_get(pCal, UCAL_EXTENDED_YEAR, &err);
if (U_FAILURE(err))
{
ucal_close(pCal);
return false;
}
// set the date to Jan 1
ucal_set(pCal, UCAL_MONTH, 0);
ucal_set(pCal, UCAL_DATE, 1);
int32_t currentEra;
for (int month = 0; U_SUCCESS(err) && month <= 12; month++)
{
currentEra = ucal_get(pCal, UCAL_ERA, &err);
if (currentEra == era)
{
for (int day = 0; U_SUCCESS(err) && day < 31; day++)
{
// subtract 1 day at a time until we get out of the specified Era
ucal_add(pCal, UCAL_DATE, -1, &err);
currentEra = ucal_get(pCal, UCAL_ERA, &err);
if (U_SUCCESS(err) && currentEra != era)
{
// add back 1 day to get back into the specified Era
ucal_add(pCal, UCAL_DATE, 1, &err);
*startMonth =
ucal_get(pCal, UCAL_MONTH, &err) + 1; // ICU Calendar months are 0-based, but .NET is 1-based
*startDay = ucal_get(pCal, UCAL_DATE, &err);
ucal_close(pCal);
return UErrorCodeToBool(err);
}
}
}
// add 1 month at a time until we get into the specified Era
ucal_add(pCal, UCAL_MONTH, 1, &err);
}
ucal_close(pCal);
return false;
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
#include <assert.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include "pal_locale_internal.h"
#include "pal_errors_internal.h"
#include "pal_calendarData.h"
#if defined(TARGET_UNIX)
#include <strings.h>
#define STRING_COPY(destination, numberOfElements, source) \
strncpy(destination, source, numberOfElements); \
destination[numberOfElements - 1] = 0;
#elif defined(TARGET_WINDOWS)
#define strcasecmp _stricmp
#define STRING_COPY(destination, numberOfElements, source) strncpy_s(destination, numberOfElements, source, _TRUNCATE);
#endif
#define GREGORIAN_NAME "gregorian"
#define JAPANESE_NAME "japanese"
#define BUDDHIST_NAME "buddhist"
#define HEBREW_NAME "hebrew"
#define DANGI_NAME "dangi"
#define PERSIAN_NAME "persian"
#define ISLAMIC_NAME "islamic"
#define ISLAMIC_UMALQURA_NAME "islamic-umalqura"
#define ROC_NAME "roc"
#define JAPANESE_LOCALE_AND_CALENDAR "ja_JP@calendar=japanese"
static const UChar UDAT_MONTH_DAY_UCHAR[] = {'M', 'M', 'M', 'M', 'd', '\0'};
static const UChar UDAT_YEAR_NUM_MONTH_DAY_UCHAR[] = {'y', 'M', 'd', '\0'};
static const UChar UDAT_YEAR_MONTH_UCHAR[] = {'y', 'M', 'M', 'M', 'M', '\0'};
/*
Function:
GetCalendarName
Gets the associated ICU calendar name for the CalendarId.
*/
static const char* GetCalendarName(CalendarId calendarId)
{
switch (calendarId)
{
case JAPAN:
return JAPANESE_NAME;
case THAI:
return BUDDHIST_NAME;
case HEBREW:
return HEBREW_NAME;
case KOREA:
return DANGI_NAME;
case PERSIAN:
return PERSIAN_NAME;
case HIJRI:
return ISLAMIC_NAME;
case UMALQURA:
return ISLAMIC_UMALQURA_NAME;
case TAIWAN:
return ROC_NAME;
case GREGORIAN:
case GREGORIAN_US:
case GREGORIAN_ARABIC:
case GREGORIAN_ME_FRENCH:
case GREGORIAN_XLIT_ENGLISH:
case GREGORIAN_XLIT_FRENCH:
case JULIAN:
case LUNAR_ETO_CHN:
case LUNAR_ETO_KOR:
case LUNAR_ETO_ROKUYOU:
case SAKA:
// don't support the lunisolar calendars until we have a solid understanding
// of how they map to the ICU/CLDR calendars
case CHINESELUNISOLAR:
case KOREANLUNISOLAR:
case JAPANESELUNISOLAR:
case TAIWANLUNISOLAR:
default:
return GREGORIAN_NAME;
}
}
/*
Function:
GetCalendarId
Gets the associated CalendarId for the ICU calendar name.
*/
static CalendarId GetCalendarId(const char* calendarName)
{
if (strcasecmp(calendarName, GREGORIAN_NAME) == 0)
// TODO: what about the other gregorian types?
return GREGORIAN;
else if (strcasecmp(calendarName, JAPANESE_NAME) == 0)
return JAPAN;
else if (strcasecmp(calendarName, BUDDHIST_NAME) == 0)
return THAI;
else if (strcasecmp(calendarName, HEBREW_NAME) == 0)
return HEBREW;
else if (strcasecmp(calendarName, DANGI_NAME) == 0)
return KOREA;
else if (strcasecmp(calendarName, PERSIAN_NAME) == 0)
return PERSIAN;
else if (strcasecmp(calendarName, ISLAMIC_NAME) == 0)
return HIJRI;
else if (strcasecmp(calendarName, ISLAMIC_UMALQURA_NAME) == 0)
return UMALQURA;
else if (strcasecmp(calendarName, ROC_NAME) == 0)
return TAIWAN;
else
return UNINITIALIZED_VALUE;
}
/*
Function:
GetCalendars
Returns the list of CalendarIds that are available for the specified locale.
*/
int32_t GlobalizationNative_GetCalendars(
const UChar* localeName, CalendarId* calendars, int32_t calendarsCapacity)
{
UErrorCode err = U_ZERO_ERROR;
char locale[ULOC_FULLNAME_CAPACITY];
GetLocale(localeName, locale, ULOC_FULLNAME_CAPACITY, false, &err);
UEnumeration* pEnum = ucal_getKeywordValuesForLocale("calendar", locale, true, &err);
int stringEnumeratorCount = uenum_count(pEnum, &err);
int calendarsReturned = 0;
for (int i = 0; i < stringEnumeratorCount && calendarsReturned < calendarsCapacity; i++)
{
int32_t calendarNameLength = 0;
const char* calendarName = uenum_next(pEnum, &calendarNameLength, &err);
if (U_SUCCESS(err))
{
CalendarId calendarId = GetCalendarId(calendarName);
if (calendarId != UNINITIALIZED_VALUE)
{
calendars[calendarsReturned] = calendarId;
calendarsReturned++;
}
}
}
uenum_close(pEnum);
return calendarsReturned;
}
/*
Function:
GetMonthDayPattern
Gets the Month-Day DateTime pattern for the specified locale.
*/
static ResultCode GetMonthDayPattern(const char* locale,
UChar* sMonthDay,
int32_t stringCapacity)
{
UErrorCode err = U_ZERO_ERROR;
UDateTimePatternGenerator* pGenerator = udatpg_open(locale, &err);
udatpg_getBestPattern(pGenerator, UDAT_MONTH_DAY_UCHAR, -1, sMonthDay, stringCapacity, &err);
udatpg_close(pGenerator);
return GetResultCode(err);
}
/*
Function:
GetNativeCalendarName
Gets the native calendar name.
*/
static ResultCode GetNativeCalendarName(const char* locale,
CalendarId calendarId,
UChar* nativeName,
int32_t stringCapacity)
{
UErrorCode err = U_ZERO_ERROR;
ULocaleDisplayNames* pDisplayNames = uldn_open(locale, ULDN_STANDARD_NAMES, &err);
uldn_keyValueDisplayName(pDisplayNames, "calendar", GetCalendarName(calendarId), nativeName, stringCapacity, &err);
uldn_close(pDisplayNames);
return GetResultCode(err);
}
/*
Function:
GetCalendarInfo
Gets a single string of calendar information by filling the result parameter
with the requested value.
*/
ResultCode GlobalizationNative_GetCalendarInfo(
const UChar* localeName, CalendarId calendarId, CalendarDataType dataType, UChar* result, int32_t resultCapacity)
{
UErrorCode err = U_ZERO_ERROR;
char locale[ULOC_FULLNAME_CAPACITY];
GetLocale(localeName, locale, ULOC_FULLNAME_CAPACITY, false, &err);
if (U_FAILURE(err))
return UnknownError;
switch (dataType)
{
case CalendarData_NativeName:
return GetNativeCalendarName(locale, calendarId, result, resultCapacity);
case CalendarData_MonthDay:
return GetMonthDayPattern(locale, result, resultCapacity);
default:
assert(false);
return UnknownError;
}
}
/*
Function:
InvokeCallbackForDatePattern
Gets the ICU date pattern for the specified locale and EStyle and invokes the
callback with the result.
*/
static int InvokeCallbackForDatePattern(const char* locale,
UDateFormatStyle style,
EnumCalendarInfoCallback callback,
const void* context)
{
UErrorCode err = U_ZERO_ERROR;
UDateFormat* pFormat = udat_open(UDAT_NONE, style, locale, NULL, 0, NULL, 0, &err);
if (U_FAILURE(err))
return false;
UErrorCode ignore = U_ZERO_ERROR;
int32_t patternLen = udat_toPattern(pFormat, false, NULL, 0, &ignore) + 1;
UChar* pattern = (UChar*)calloc((size_t)patternLen, sizeof(UChar));
if (pattern == NULL)
{
udat_close(pFormat);
return false;
}
udat_toPattern(pFormat, false, pattern, patternLen, &err);
udat_close(pFormat);
if (U_SUCCESS(err))
{
callback(pattern, context);
}
free(pattern);
return UErrorCodeToBool(err);
}
/*
Function:
InvokeCallbackForDateTimePattern
Gets the DateTime pattern for the specified skeleton and invokes the callback
with the retrieved value.
*/
static int InvokeCallbackForDateTimePattern(const char* locale,
const UChar* patternSkeleton,
EnumCalendarInfoCallback callback,
const void* context)
{
UErrorCode err = U_ZERO_ERROR;
UDateTimePatternGenerator* pGenerator = udatpg_open(locale, &err);
if (U_FAILURE(err))
return false;
UErrorCode ignore = U_ZERO_ERROR;
int32_t patternLen = udatpg_getBestPattern(pGenerator, patternSkeleton, -1, NULL, 0, &ignore) + 1;
UChar* bestPattern = (UChar*)calloc((size_t)patternLen, sizeof(UChar));
if (bestPattern == NULL)
{
udatpg_close(pGenerator);
return false;
}
udatpg_getBestPattern(pGenerator, patternSkeleton, -1, bestPattern, patternLen, &err);
udatpg_close(pGenerator);
if (U_SUCCESS(err))
{
callback(bestPattern, context);
}
free(bestPattern);
return UErrorCodeToBool(err);
}
/*
Function:
EnumSymbols
Enumerates all of the symbols of a type for a locale and calendar and invokes a callback
for each value.
*/
static int32_t EnumSymbols(const char* locale,
CalendarId calendarId,
UDateFormatSymbolType type,
int32_t startIndex,
EnumCalendarInfoCallback callback,
const void* context)
{
UErrorCode err = U_ZERO_ERROR;
UDateFormat* pFormat = udat_open(UDAT_DEFAULT, UDAT_DEFAULT, locale, NULL, 0, NULL, 0, &err);
if (U_FAILURE(err))
return false;
char localeWithCalendarName[ULOC_FULLNAME_CAPACITY];
STRING_COPY(localeWithCalendarName, sizeof(localeWithCalendarName), locale);
uloc_setKeywordValue("calendar", GetCalendarName(calendarId), localeWithCalendarName, ULOC_FULLNAME_CAPACITY, &err);
UCalendar* pCalendar = ucal_open(NULL, 0, localeWithCalendarName, UCAL_DEFAULT, &err);
if (U_FAILURE(err))
{
udat_close(pFormat);
return false;
}
udat_setCalendar(pFormat, pCalendar);
int32_t symbolCount = udat_countSymbols(pFormat, type);
UChar stackSymbolBuf[100];
UChar* symbolBuf;
for (int32_t i = startIndex; U_SUCCESS(err) && i < symbolCount; i++)
{
UErrorCode ignore = U_ZERO_ERROR;
int symbolLen = udat_getSymbols(pFormat, type, i, NULL, 0, &ignore) + 1;
if ((size_t)symbolLen <= sizeof(stackSymbolBuf) / sizeof(stackSymbolBuf[0]))
{
symbolBuf = stackSymbolBuf;
}
else
{
symbolBuf = (UChar*)calloc((size_t)symbolLen, sizeof(UChar));
if (symbolBuf == NULL)
{
err = U_MEMORY_ALLOCATION_ERROR;
break;
}
}
udat_getSymbols(pFormat, type, i, symbolBuf, symbolLen, &err);
if (U_SUCCESS(err))
{
callback(symbolBuf, context);
}
if (symbolBuf != stackSymbolBuf)
{
free(symbolBuf);
}
}
udat_close(pFormat);
ucal_close(pCalendar);
return UErrorCodeToBool(err);
}
static void EnumUResourceBundle(const UResourceBundle* bundle,
EnumCalendarInfoCallback callback,
const void* context)
{
int32_t eraNameCount = ures_getSize(bundle);
for (int i = 0; i < eraNameCount; i++)
{
UErrorCode status = U_ZERO_ERROR;
int32_t ignore; // We don't care about the length of the string as it is null terminated.
const UChar* eraName = ures_getStringByIndex(bundle, i, &ignore, &status);
if (U_SUCCESS(status))
{
callback(eraName, context);
}
}
}
static void CloseResBundle(UResourceBundle* rootResBundle,
UResourceBundle* calResBundle,
UResourceBundle* targetCalResBundle,
UResourceBundle* erasColResBundle,
UResourceBundle* erasResBundle)
{
ures_close(rootResBundle);
ures_close(calResBundle);
ures_close(targetCalResBundle);
ures_close(erasColResBundle);
ures_close(erasResBundle);
}
/*
Function:
EnumAbbrevEraNames
Enumerates all the abbreviated era names of the specified locale and calendar, invoking the
callback function for each era name.
*/
static int32_t EnumAbbrevEraNames(const char* locale,
CalendarId calendarId,
EnumCalendarInfoCallback callback,
const void* context)
{
// The C-API for ICU provides no way to get at the abbreviated era names for a calendar (so we can't use EnumSymbols
    // here). Instead we will try to walk the ICU resource tables directly and fall back to regular era names if we can't
// find good data.
char localeNameBuf[ULOC_FULLNAME_CAPACITY];
char parentNameBuf[ULOC_FULLNAME_CAPACITY];
STRING_COPY(localeNameBuf, sizeof(localeNameBuf), locale);
char* localeNamePtr = localeNameBuf;
char* parentNamePtr = parentNameBuf;
while (true)
{
UErrorCode status = U_ZERO_ERROR;
const char* name = GetCalendarName(calendarId);
UResourceBundle* rootResBundle = ures_open(NULL, localeNamePtr, &status);
UResourceBundle* calResBundle = ures_getByKey(rootResBundle, "calendar", NULL, &status);
UResourceBundle* targetCalResBundle = ures_getByKey(calResBundle, name, NULL, &status);
UResourceBundle* erasColResBundle = ures_getByKey(targetCalResBundle, "eras", NULL, &status);
UResourceBundle* erasResBundle = ures_getByKey(erasColResBundle, "narrow", NULL, &status);
if (U_SUCCESS(status))
{
EnumUResourceBundle(erasResBundle, callback, context);
CloseResBundle(rootResBundle, calResBundle, targetCalResBundle, erasColResBundle, erasResBundle);
return true;
}
// Couldn't find the data we need for this locale, we should fallback.
if (localeNameBuf[0] == 0x0)
{
CloseResBundle(rootResBundle, calResBundle, targetCalResBundle, erasColResBundle, erasResBundle);
// We are already at the root locale so there is nothing to fall back to, just use the regular eras.
break;
}
uloc_getParent(localeNamePtr, parentNamePtr, ULOC_FULLNAME_CAPACITY, &status);
if (U_FAILURE(status))
{
CloseResBundle(rootResBundle, calResBundle, targetCalResBundle, erasColResBundle, erasResBundle);
// Something bad happened getting the parent name, bail out.
break;
}
// Swap localeNamePtr and parentNamePtr, parentNamePtr is what we want to use on the next iteration
// and we can use the current localeName as scratch space if we have to fall back on that
// iteration.
char* temp = localeNamePtr;
localeNamePtr = parentNamePtr;
parentNamePtr = temp;
CloseResBundle(rootResBundle, calResBundle, targetCalResBundle, erasColResBundle, erasResBundle);
}
// Walking the resource bundles didn't work, just use the regular eras.
return EnumSymbols(locale, calendarId, UDAT_ERAS, 0, callback, context);
}
/*
Function:
EnumCalendarInfo
Retrieves a collection of calendar string data specified by the locale,
calendar, and data type.
Allows for a collection of calendar string data to be retrieved by invoking
the callback for each value in the collection.
The context parameter is passed through to the callback along with each string.
*/
int32_t GlobalizationNative_EnumCalendarInfo(EnumCalendarInfoCallback callback,
const UChar* localeName,
CalendarId calendarId,
CalendarDataType dataType,
const void* context)
{
UErrorCode err = U_ZERO_ERROR;
char locale[ULOC_FULLNAME_CAPACITY];
GetLocale(localeName, locale, ULOC_FULLNAME_CAPACITY, false, &err);
if (U_FAILURE(err))
return false;
switch (dataType)
{
case CalendarData_ShortDates:
            // ShortDates map to kShort and kMedium in ICU, but we also add the "yMd"
            // skeleton, as this closely matches what is used on Windows
return InvokeCallbackForDatePattern(locale, UDAT_SHORT, callback, context) &&
InvokeCallbackForDatePattern(locale, UDAT_MEDIUM, callback, context) &&
InvokeCallbackForDateTimePattern(locale, UDAT_YEAR_NUM_MONTH_DAY_UCHAR, callback, context);
case CalendarData_LongDates:
// LongDates map to kFull and kLong in ICU.
return InvokeCallbackForDatePattern(locale, UDAT_FULL, callback, context) &&
InvokeCallbackForDatePattern(locale, UDAT_LONG, callback, context);
case CalendarData_YearMonths:
return InvokeCallbackForDateTimePattern(locale, UDAT_YEAR_MONTH_UCHAR, callback, context);
case CalendarData_DayNames:
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_WEEKDAYS, 1, callback, context);
case CalendarData_AbbrevDayNames:
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_SHORT_WEEKDAYS, 1, callback, context);
case CalendarData_MonthNames:
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_MONTHS, 0, callback, context);
case CalendarData_AbbrevMonthNames:
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_SHORT_MONTHS, 0, callback, context);
case CalendarData_SuperShortDayNames:
// UDAT_STANDALONE_SHORTER_WEEKDAYS was added in ICU 51, and CentOS 7 currently uses ICU 50.
            // fall back to UDAT_STANDALONE_NARROW_WEEKDAYS in that case.
#if HAVE_UDAT_STANDALONE_SHORTER_WEEKDAYS
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_SHORTER_WEEKDAYS, 1, callback, context);
#else
return EnumSymbols(locale, calendarId, UDAT_STANDALONE_NARROW_WEEKDAYS, 1, callback, context);
#endif
case CalendarData_MonthGenitiveNames:
return EnumSymbols(locale, calendarId, UDAT_MONTHS, 0, callback, context);
case CalendarData_AbbrevMonthGenitiveNames:
return EnumSymbols(locale, calendarId, UDAT_SHORT_MONTHS, 0, callback, context);
case CalendarData_EraNames:
return EnumSymbols(locale, calendarId, UDAT_ERAS, 0, callback, context);
case CalendarData_AbbrevEraNames:
return EnumAbbrevEraNames(locale, calendarId, callback, context);
default:
assert(false);
return false;
}
}
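/*
  Illustrative sketch (not part of the product source): a hypothetical native-side
  caller of GlobalizationNative_EnumCalendarInfo. The callback signature used here
  is an assumption made for the example; the real EnumCalendarInfoCallback typedef
  is declared in the corresponding header. The block is guarded by a define so it
  only serves as documentation.
*/
#ifdef ENUM_CALENDAR_INFO_USAGE_SKETCH
static void ExampleCollectCalendarString(const UChar* calendarString, const void* context)
{
    // A real callback would marshal 'calendarString' back to managed code; this
    // sketch just counts the values delivered through the opaque context pointer.
    int32_t* count = (int32_t*)context;
    (*count)++;
    (void)calendarString;
}

static int32_t ExampleCountMonthNames(const UChar* localeName, CalendarId calendarId)
{
    int32_t count = 0;
    // Each stand-alone month name for the given locale/calendar is reported
    // through the callback above; 'count' ends up holding how many were seen.
    // The return value of the enumeration call is ignored in this sketch.
    GlobalizationNative_EnumCalendarInfo(&ExampleCollectCalendarString,
                                         localeName,
                                         calendarId,
                                         CalendarData_MonthNames,
                                         &count);
    return count;
}
#endif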
/*
Function:
GetLatestJapaneseEra
Gets the latest era in the Japanese calendar.
*/
int32_t GlobalizationNative_GetLatestJapaneseEra()
{
UErrorCode err = U_ZERO_ERROR;
UCalendar* pCal = ucal_open(NULL, 0, JAPANESE_LOCALE_AND_CALENDAR, UCAL_TRADITIONAL, &err);
if (U_FAILURE(err))
return 0;
ucal_set(pCal, UCAL_EXTENDED_YEAR, 9999);
int32_t ret = ucal_get(pCal, UCAL_ERA, &err);
ucal_close(pCal);
return U_SUCCESS(err) ? ret : 0;
}
/*
Function:
GetJapaneseEraStartDate
Gets the starting Gregorian date of the specified Japanese Era.
*/
int32_t GlobalizationNative_GetJapaneseEraStartDate(int32_t era,
int32_t* startYear,
int32_t* startMonth,
int32_t* startDay)
{
*startYear = -1;
*startMonth = -1;
*startDay = -1;
UErrorCode err = U_ZERO_ERROR;
UCalendar* pCal = ucal_open(NULL, 0, JAPANESE_LOCALE_AND_CALENDAR, UCAL_TRADITIONAL, &err);
if (U_FAILURE(err))
return false;
ucal_set(pCal, UCAL_ERA, era);
ucal_set(pCal, UCAL_YEAR, 1);
    // UCAL_EXTENDED_YEAR is the Gregorian year for the JapaneseCalendar
*startYear = ucal_get(pCal, UCAL_EXTENDED_YEAR, &err);
if (U_FAILURE(err))
{
ucal_close(pCal);
return false;
}
// set the date to Jan 1
ucal_set(pCal, UCAL_MONTH, 0);
ucal_set(pCal, UCAL_DATE, 1);
int32_t currentEra;
for (int month = 0; U_SUCCESS(err) && month <= 12; month++)
{
currentEra = ucal_get(pCal, UCAL_ERA, &err);
if (currentEra == era)
{
for (int day = 0; U_SUCCESS(err) && day < 31; day++)
{
// subtract 1 day at a time until we get out of the specified Era
ucal_add(pCal, UCAL_DATE, -1, &err);
currentEra = ucal_get(pCal, UCAL_ERA, &err);
if (U_SUCCESS(err) && currentEra != era)
{
// add back 1 day to get back into the specified Era
ucal_add(pCal, UCAL_DATE, 1, &err);
*startMonth =
ucal_get(pCal, UCAL_MONTH, &err) + 1; // ICU Calendar months are 0-based, but .NET is 1-based
*startDay = ucal_get(pCal, UCAL_DATE, &err);
ucal_close(pCal);
return UErrorCodeToBool(err);
}
}
}
// add 1 month at a time until we get into the specified Era
ucal_add(pCal, UCAL_MONTH, 1, &err);
}
ucal_close(pCal);
return false;
}
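/*
  Illustrative sketch (not part of the product source): combining the two functions
  above to walk the Japanese eras and fetch each era's Gregorian start date. The
  loop bounds are an assumption made for illustration; ICU defines which era
  indices are actually valid.
*/
#ifdef JAPANESE_ERA_USAGE_SKETCH
static void ExampleDumpJapaneseEraStartDates(void)
{
    int32_t latestEra = GlobalizationNative_GetLatestJapaneseEra();
    for (int32_t era = 1; era <= latestEra; era++)
    {
        int32_t startYear, startMonth, startDay;
        if (GlobalizationNative_GetJapaneseEraStartDate(era, &startYear, &startMonth, &startDay))
        {
            // startYear/startMonth/startDay now hold the Gregorian date on which
            // this era begins (the month is 1-based, matching .NET conventions).
        }
    }
}
#endif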
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/readytorun/r2rdump/BasicTests/files/Windows_NT.x64.Debug/MultipleRuntimeFunctions.xml
|
<?xml version="1.0" encoding="utf-8"?>
<R2RDump>
<R2RReader xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Filename>MultipleRuntimeFunctions.ni.dll</Filename>
<IsR2R>true</IsR2R>
<Machine>Amd64</Machine>
<ImageBase>6891812028416</ImageBase>
</R2RReader>
<Header>
<R2RHeader xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<SignatureString>RTR</SignatureString>
<Signature>5395538</Signature>
<MajorVersion>2</MajorVersion>
<MinorVersion>2</MinorVersion>
<Flags>3</Flags>
</R2RHeader>
</Header>
<Sections>
<Count>10</Count>
<Section Index="READYTORUN_SECTION_COMPILER_IDENTIFIER">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_COMPILER_IDENTIFIER" />
<Contents>
<CompilerIdentifier>CoreCLR 4.5.30319.0 __BUILDMACHINE__</CompilerIdentifier>
</Contents>
</Section>
<Section Index="READYTORUN_SECTION_IMPORT_SECTIONS">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_IMPORT_SECTIONS" />
<Contents>
<ImportSection Index="0">
<R2RImportSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="0">
<Flags>CORCOMPILE_IMPORT_FLAGS_EAGER</Flags>
<Type>CORCOMPILE_IMPORT_TYPE_UNKNOWN</Type>
<Entries>
<ImportSectionEntry Index="0">
<StartOffset>0</StartOffset>
<Section>0</Section>
</ImportSectionEntry>
</Entries>
</R2RImportSection>
<ImportSectionEntry xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="0">
<StartOffset>0</StartOffset>
<Section>0</Section>
</ImportSectionEntry>
</ImportSection>
<ImportSection Index="1">
<R2RImportSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="1">
<Flags>CORCOMPILE_IMPORT_FLAGS_PCODE</Flags>
<Type>CORCOMPILE_IMPORT_TYPE_STUB_DISPATCH</Type>
<Entries>
<ImportSectionEntry Index="0">
<StartOffset>0</StartOffset>
<Section>6891812037761</Section>
</ImportSectionEntry>
<ImportSectionEntry Index="1">
<StartOffset>8</StartOffset>
<Section>6891812037761</Section>
</ImportSectionEntry>
</Entries>
</R2RImportSection>
<ImportSectionEntry xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="0">
<StartOffset>0</StartOffset>
<Section>6891812037761</Section>
</ImportSectionEntry>
<ImportSectionEntry xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="1">
<StartOffset>8</StartOffset>
<Section>6891812037761</Section>
</ImportSectionEntry>
</ImportSection>
</Contents>
</Section>
<Section Index="READYTORUN_SECTION_RUNTIME_FUNCTIONS">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_RUNTIME_FUNCTIONS" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_METHODDEF_ENTRYPOINTS">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_METHODDEF_ENTRYPOINTS" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_EXCEPTION_INFO">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_EXCEPTION_INFO" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_DEBUG_INFO">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_DEBUG_INFO" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_DELAYLOAD_METHODCALL_THUNKS">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_DELAYLOAD_METHODCALL_THUNKS" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_AVAILABLE_TYPES">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_AVAILABLE_TYPES" />
<Contents>
<AvailableType Index="0">MultipleRuntimeFunctions.MultipleRuntimeFunctions</AvailableType>
<AvailableType Index="1">.<Module></AvailableType>
</Contents>
</Section>
<Section Index="READYTORUN_SECTION_INSTANCE_METHOD_ENTRYPOINTS">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_INSTANCE_METHOD_ENTRYPOINTS" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_INLINING_INFO">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_INLINING_INFO" />
<Contents />
</Section>
</Sections>
<Methods Count="3">
<Method Index="0">
<R2RMethod xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="0">
<Name>MethodWithMultipleRuntimeFunctions</Name>
<SignatureString>MultipleRuntimeFunctions.MultipleRuntimeFunctions.MethodWithMultipleRuntimeFunctions()</SignatureString>
<IsGeneric>false</IsGeneric>
<DeclaringType>MultipleRuntimeFunctions.MultipleRuntimeFunctions</DeclaringType>
<Token>100663297</Token>
<Rid>1</Rid>
<EntryPointRuntimeFunctionId>0</EntryPointRuntimeFunctionId>
</R2RMethod>
<GcInfo>
<GcInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>94</Size>
<Offset>3240</Offset>
<CodeLength>61</CodeLength>
<Version>2</Version>
<ReturnKind>RT_Scalar</ReturnKind>
<ValidRangeStart>0</ValidRangeStart>
<ValidRangeEnd>0</ValidRangeEnd>
<SecurityObjectStackSlot>-1</SecurityObjectStackSlot>
<GSCookieStackSlot>-1</GSCookieStackSlot>
<PSPSymStackSlot>0</PSPSymStackSlot>
<GenericsInstContextStackSlot>-1</GenericsInstContextStackSlot>
<StackBaseRegister>5</StackBaseRegister>
<SizeOfEditAndContinuePreservedArea>4294967295</SizeOfEditAndContinuePreservedArea>
<ReversePInvokeFrameStackSlot>-1</ReversePInvokeFrameStackSlot>
<SizeOfStackOutgoingAndScratchArea>0</SizeOfStackOutgoingAndScratchArea>
<NumSafePoints>0</NumSafePoints>
<NumInterruptibleRanges>3</NumInterruptibleRanges>
<SafePointOffsets />
<InterruptibleRanges>
<InterruptibleRange Index="0">
<StartOffset>14</StartOffset>
<StopOffset>28</StopOffset>
</InterruptibleRange>
<InterruptibleRange Index="1">
<StartOffset>29</StartOffset>
<StopOffset>30</StopOffset>
</InterruptibleRange>
<InterruptibleRange Index="2">
<StartOffset>52</StartOffset>
<StopOffset>55</StopOffset>
</InterruptibleRange>
</InterruptibleRanges>
<SlotTable>
<NumRegisters>0</NumRegisters>
<NumStackSlots>0</NumStackSlots>
<NumUntracked>0</NumUntracked>
<NumSlots>0</NumSlots>
<GcSlots />
</SlotTable>
</GcInfo>
</GcInfo>
<RuntimeFunctions>
<RuntimeFunction Index="0">
<MethodRid>1</MethodRid>
<RuntimeFunction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="0">
<Size>36</Size>
<CodeOffset>0</CodeOffset>
</RuntimeFunction>
<UnwindInfo>
<UnwindInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>12</Size>
<Version>1</Version>
<Flags>3</Flags>
<SizeOfProlog>5</SizeOfProlog>
<CountOfUnwindCodes>2</CountOfUnwindCodes>
<FrameRegister>EAX</FrameRegister>
<FrameOffset>0</FrameOffset>
<UnwindCode>
<UnwindCode Index="0">
<CodeOffset>5</CodeOffset>
<UnwindOp>UWOP_ALLOC_SMALL</UnwindOp>
<OpInfo>1</OpInfo>
<OffsetLow>5</OffsetLow>
<OffsetHigh>1</OffsetHigh>
<FrameOffset>4613</FrameOffset>
</UnwindCode>
<UnwindCode Index="1">
<CodeOffset>1</CodeOffset>
<UnwindOp>UWOP_PUSH_NONVOL</UnwindOp>
<OpInfo>5</OpInfo>
<OffsetLow>1</OffsetLow>
<OffsetHigh>5</OffsetHigh>
<FrameOffset>20481</FrameOffset>
</UnwindCode>
</UnwindCode>
<PersonalityRoutineRVA>9339</PersonalityRoutineRVA>
</UnwindInfo>
</UnwindInfo>
</RuntimeFunction>
<RuntimeFunction Index="1">
<MethodRid>1</MethodRid>
<RuntimeFunction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="1">
<Size>25</Size>
<CodeOffset>36</CodeOffset>
</RuntimeFunction>
<UnwindInfo>
<UnwindInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>12</Size>
<Version>1</Version>
<Flags>3</Flags>
<SizeOfProlog>5</SizeOfProlog>
<CountOfUnwindCodes>2</CountOfUnwindCodes>
<FrameRegister>EAX</FrameRegister>
<FrameOffset>0</FrameOffset>
<UnwindCode>
<UnwindCode Index="0">
<CodeOffset>5</CodeOffset>
<UnwindOp>UWOP_ALLOC_SMALL</UnwindOp>
<OpInfo>1</OpInfo>
<OffsetLow>5</OffsetLow>
<OffsetHigh>1</OffsetHigh>
<FrameOffset>4613</FrameOffset>
</UnwindCode>
<UnwindCode Index="1">
<CodeOffset>1</CodeOffset>
<UnwindOp>UWOP_PUSH_NONVOL</UnwindOp>
<OpInfo>5</OpInfo>
<OffsetLow>1</OffsetLow>
<OffsetHigh>5</OffsetHigh>
<FrameOffset>20481</FrameOffset>
</UnwindCode>
</UnwindCode>
<PersonalityRoutineRVA>9339</PersonalityRoutineRVA>
</UnwindInfo>
</UnwindInfo>
</RuntimeFunction>
</RuntimeFunctions>
</Method>
<Method Index="1">
<R2RMethod xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="1">
<Name>Main</Name>
<SignatureString>MultipleRuntimeFunctions.MultipleRuntimeFunctions.Main(String[])</SignatureString>
<IsGeneric>false</IsGeneric>
<DeclaringType>MultipleRuntimeFunctions.MultipleRuntimeFunctions</DeclaringType>
<Token>100663298</Token>
<Rid>2</Rid>
<EntryPointRuntimeFunctionId>2</EntryPointRuntimeFunctionId>
</R2RMethod>
<GcInfo>
<GcInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>73</Size>
<Offset>3276</Offset>
<CodeLength>29</CodeLength>
<Version>2</Version>
<ReturnKind>RT_Scalar</ReturnKind>
<ValidRangeStart>0</ValidRangeStart>
<ValidRangeEnd>0</ValidRangeEnd>
<SecurityObjectStackSlot>-1</SecurityObjectStackSlot>
<GSCookieStackSlot>-1</GSCookieStackSlot>
<PSPSymStackSlot>-1</PSPSymStackSlot>
<GenericsInstContextStackSlot>-1</GenericsInstContextStackSlot>
<StackBaseRegister>5</StackBaseRegister>
<SizeOfEditAndContinuePreservedArea>4294967295</SizeOfEditAndContinuePreservedArea>
<ReversePInvokeFrameStackSlot>-1</ReversePInvokeFrameStackSlot>
<SizeOfStackOutgoingAndScratchArea>32</SizeOfStackOutgoingAndScratchArea>
<NumSafePoints>0</NumSafePoints>
<NumInterruptibleRanges>1</NumInterruptibleRanges>
<SafePointOffsets />
<InterruptibleRanges>
<InterruptibleRange Index="0">
<StartOffset>14</StartOffset>
<StopOffset>23</StopOffset>
</InterruptibleRange>
</InterruptibleRanges>
<SlotTable>
<NumRegisters>0</NumRegisters>
<NumStackSlots>0</NumStackSlots>
<NumUntracked>1</NumUntracked>
<NumSlots>1</NumSlots>
<GcSlots>
<GcSlot Index="0">
<RegisterNumber>-1</RegisterNumber>
<StackSlot>
<SpOffset>16</SpOffset>
<Base>GC_FRAMEREG_REL</Base>
</StackSlot>
<Flags>GC_SLOT_UNTRACKED</Flags>
</GcSlot>
</GcSlots>
</SlotTable>
</GcInfo>
</GcInfo>
<RuntimeFunctions>
<RuntimeFunction Index="2">
<MethodRid>2</MethodRid>
<RuntimeFunction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="2">
<Size>29</Size>
<CodeOffset>0</CodeOffset>
</RuntimeFunction>
<UnwindInfo>
<UnwindInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>12</Size>
<Version>1</Version>
<Flags>3</Flags>
<SizeOfProlog>5</SizeOfProlog>
<CountOfUnwindCodes>2</CountOfUnwindCodes>
<FrameRegister>EAX</FrameRegister>
<FrameOffset>0</FrameOffset>
<UnwindCode>
<UnwindCode Index="0">
<CodeOffset>5</CodeOffset>
<UnwindOp>UWOP_ALLOC_SMALL</UnwindOp>
<OpInfo>3</OpInfo>
<OffsetLow>5</OffsetLow>
<OffsetHigh>3</OffsetHigh>
<FrameOffset>12805</FrameOffset>
</UnwindCode>
<UnwindCode Index="1">
<CodeOffset>1</CodeOffset>
<UnwindOp>UWOP_PUSH_NONVOL</UnwindOp>
<OpInfo>5</OpInfo>
<OffsetLow>1</OffsetLow>
<OffsetHigh>5</OffsetHigh>
<FrameOffset>20481</FrameOffset>
</UnwindCode>
</UnwindCode>
<PersonalityRoutineRVA>9339</PersonalityRoutineRVA>
</UnwindInfo>
</UnwindInfo>
</RuntimeFunction>
</RuntimeFunctions>
</Method>
<Method Index="2">
<R2RMethod xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="2">
<Name>.ctor</Name>
<SignatureString>MultipleRuntimeFunctions.MultipleRuntimeFunctions..ctor()</SignatureString>
<IsGeneric>false</IsGeneric>
<DeclaringType>MultipleRuntimeFunctions.MultipleRuntimeFunctions</DeclaringType>
<Token>100663299</Token>
<Rid>3</Rid>
<EntryPointRuntimeFunctionId>3</EntryPointRuntimeFunctionId>
</R2RMethod>
<GcInfo>
<GcInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>106</Size>
<Offset>3300</Offset>
<CodeLength>32</CodeLength>
<Version>2</Version>
<ReturnKind>RT_Scalar</ReturnKind>
<ValidRangeStart>0</ValidRangeStart>
<ValidRangeEnd>0</ValidRangeEnd>
<SecurityObjectStackSlot>-1</SecurityObjectStackSlot>
<GSCookieStackSlot>-1</GSCookieStackSlot>
<PSPSymStackSlot>-1</PSPSymStackSlot>
<GenericsInstContextStackSlot>-1</GenericsInstContextStackSlot>
<StackBaseRegister>5</StackBaseRegister>
<SizeOfEditAndContinuePreservedArea>4294967295</SizeOfEditAndContinuePreservedArea>
<ReversePInvokeFrameStackSlot>-1</ReversePInvokeFrameStackSlot>
<SizeOfStackOutgoingAndScratchArea>32</SizeOfStackOutgoingAndScratchArea>
<NumSafePoints>0</NumSafePoints>
<NumInterruptibleRanges>1</NumInterruptibleRanges>
<SafePointOffsets />
<InterruptibleRanges>
<InterruptibleRange Index="0">
<StartOffset>14</StartOffset>
<StopOffset>26</StopOffset>
</InterruptibleRange>
</InterruptibleRanges>
<SlotTable>
<NumRegisters>1</NumRegisters>
<NumStackSlots>0</NumStackSlots>
<NumUntracked>1</NumUntracked>
<NumSlots>2</NumSlots>
<GcSlots>
<GcSlot Index="0">
<RegisterNumber>1</RegisterNumber>
<Flags>GC_SLOT_BASE</Flags>
</GcSlot>
<GcSlot Index="1">
<RegisterNumber>-1</RegisterNumber>
<StackSlot>
<SpOffset>16</SpOffset>
<Base>GC_FRAMEREG_REL</Base>
</StackSlot>
<Flags>GC_SLOT_UNTRACKED</Flags>
</GcSlot>
</GcSlots>
</SlotTable>
</GcInfo>
<GcTransition xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="18">
<SlotId>0</SlotId>
<IsLive>true</IsLive>
<ChunkId>0</ChunkId>
<SlotState>ECX is live</SlotState>
</GcTransition>
<GcTransition xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="24">
<SlotId>0</SlotId>
<IsLive>false</IsLive>
<ChunkId>0</ChunkId>
<SlotState>ECX is dead</SlotState>
</GcTransition>
</GcInfo>
<RuntimeFunctions>
<RuntimeFunction Index="3">
<MethodRid>3</MethodRid>
<RuntimeFunction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="3">
<Size>32</Size>
<CodeOffset>0</CodeOffset>
</RuntimeFunction>
<UnwindInfo>
<UnwindInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>12</Size>
<Version>1</Version>
<Flags>3</Flags>
<SizeOfProlog>5</SizeOfProlog>
<CountOfUnwindCodes>2</CountOfUnwindCodes>
<FrameRegister>EAX</FrameRegister>
<FrameOffset>0</FrameOffset>
<UnwindCode>
<UnwindCode Index="0">
<CodeOffset>5</CodeOffset>
<UnwindOp>UWOP_ALLOC_SMALL</UnwindOp>
<OpInfo>3</OpInfo>
<OffsetLow>5</OffsetLow>
<OffsetHigh>3</OffsetHigh>
<FrameOffset>12805</FrameOffset>
</UnwindCode>
<UnwindCode Index="1">
<CodeOffset>1</CodeOffset>
<UnwindOp>UWOP_PUSH_NONVOL</UnwindOp>
<OpInfo>5</OpInfo>
<OffsetLow>1</OffsetLow>
<OffsetHigh>5</OffsetHigh>
<FrameOffset>20481</FrameOffset>
</UnwindCode>
</UnwindCode>
<PersonalityRoutineRVA>9339</PersonalityRoutineRVA>
</UnwindInfo>
</UnwindInfo>
</RuntimeFunction>
</RuntimeFunctions>
</Method>
</Methods>
</R2RDump>
|
<?xml version="1.0" encoding="utf-8"?>
<R2RDump>
<R2RReader xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Filename>MultipleRuntimeFunctions.ni.dll</Filename>
<IsR2R>true</IsR2R>
<Machine>Amd64</Machine>
<ImageBase>6891812028416</ImageBase>
</R2RReader>
<Header>
<R2RHeader xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<SignatureString>RTR</SignatureString>
<Signature>5395538</Signature>
<MajorVersion>2</MajorVersion>
<MinorVersion>2</MinorVersion>
<Flags>3</Flags>
</R2RHeader>
</Header>
<Sections>
<Count>10</Count>
<Section Index="READYTORUN_SECTION_COMPILER_IDENTIFIER">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_COMPILER_IDENTIFIER" />
<Contents>
<CompilerIdentifier>CoreCLR 4.5.30319.0 __BUILDMACHINE__</CompilerIdentifier>
</Contents>
</Section>
<Section Index="READYTORUN_SECTION_IMPORT_SECTIONS">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_IMPORT_SECTIONS" />
<Contents>
<ImportSection Index="0">
<R2RImportSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="0">
<Flags>CORCOMPILE_IMPORT_FLAGS_EAGER</Flags>
<Type>CORCOMPILE_IMPORT_TYPE_UNKNOWN</Type>
<Entries>
<ImportSectionEntry Index="0">
<StartOffset>0</StartOffset>
<Section>0</Section>
</ImportSectionEntry>
</Entries>
</R2RImportSection>
<ImportSectionEntry xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="0">
<StartOffset>0</StartOffset>
<Section>0</Section>
</ImportSectionEntry>
</ImportSection>
<ImportSection Index="1">
<R2RImportSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="1">
<Flags>CORCOMPILE_IMPORT_FLAGS_PCODE</Flags>
<Type>CORCOMPILE_IMPORT_TYPE_STUB_DISPATCH</Type>
<Entries>
<ImportSectionEntry Index="0">
<StartOffset>0</StartOffset>
<Section>6891812037761</Section>
</ImportSectionEntry>
<ImportSectionEntry Index="1">
<StartOffset>8</StartOffset>
<Section>6891812037761</Section>
</ImportSectionEntry>
</Entries>
</R2RImportSection>
<ImportSectionEntry xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="0">
<StartOffset>0</StartOffset>
<Section>6891812037761</Section>
</ImportSectionEntry>
<ImportSectionEntry xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="1">
<StartOffset>8</StartOffset>
<Section>6891812037761</Section>
</ImportSectionEntry>
</ImportSection>
</Contents>
</Section>
<Section Index="READYTORUN_SECTION_RUNTIME_FUNCTIONS">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_RUNTIME_FUNCTIONS" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_METHODDEF_ENTRYPOINTS">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_METHODDEF_ENTRYPOINTS" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_EXCEPTION_INFO">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_EXCEPTION_INFO" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_DEBUG_INFO">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_DEBUG_INFO" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_DELAYLOAD_METHODCALL_THUNKS">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_DELAYLOAD_METHODCALL_THUNKS" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_AVAILABLE_TYPES">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_AVAILABLE_TYPES" />
<Contents>
<AvailableType Index="0">MultipleRuntimeFunctions.MultipleRuntimeFunctions</AvailableType>
<AvailableType Index="1">.<Module></AvailableType>
</Contents>
</Section>
<Section Index="READYTORUN_SECTION_INSTANCE_METHOD_ENTRYPOINTS">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_INSTANCE_METHOD_ENTRYPOINTS" />
<Contents />
</Section>
<Section Index="READYTORUN_SECTION_INLINING_INFO">
<R2RSection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="READYTORUN_SECTION_INLINING_INFO" />
<Contents />
</Section>
</Sections>
<Methods Count="3">
<Method Index="0">
<R2RMethod xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="0">
<Name>MethodWithMultipleRuntimeFunctions</Name>
<SignatureString>MultipleRuntimeFunctions.MultipleRuntimeFunctions.MethodWithMultipleRuntimeFunctions()</SignatureString>
<IsGeneric>false</IsGeneric>
<DeclaringType>MultipleRuntimeFunctions.MultipleRuntimeFunctions</DeclaringType>
<Token>100663297</Token>
<Rid>1</Rid>
<EntryPointRuntimeFunctionId>0</EntryPointRuntimeFunctionId>
</R2RMethod>
<GcInfo>
<GcInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>94</Size>
<Offset>3240</Offset>
<CodeLength>61</CodeLength>
<Version>2</Version>
<ReturnKind>RT_Scalar</ReturnKind>
<ValidRangeStart>0</ValidRangeStart>
<ValidRangeEnd>0</ValidRangeEnd>
<SecurityObjectStackSlot>-1</SecurityObjectStackSlot>
<GSCookieStackSlot>-1</GSCookieStackSlot>
<PSPSymStackSlot>0</PSPSymStackSlot>
<GenericsInstContextStackSlot>-1</GenericsInstContextStackSlot>
<StackBaseRegister>5</StackBaseRegister>
<SizeOfEditAndContinuePreservedArea>4294967295</SizeOfEditAndContinuePreservedArea>
<ReversePInvokeFrameStackSlot>-1</ReversePInvokeFrameStackSlot>
<SizeOfStackOutgoingAndScratchArea>0</SizeOfStackOutgoingAndScratchArea>
<NumSafePoints>0</NumSafePoints>
<NumInterruptibleRanges>3</NumInterruptibleRanges>
<SafePointOffsets />
<InterruptibleRanges>
<InterruptibleRange Index="0">
<StartOffset>14</StartOffset>
<StopOffset>28</StopOffset>
</InterruptibleRange>
<InterruptibleRange Index="1">
<StartOffset>29</StartOffset>
<StopOffset>30</StopOffset>
</InterruptibleRange>
<InterruptibleRange Index="2">
<StartOffset>52</StartOffset>
<StopOffset>55</StopOffset>
</InterruptibleRange>
</InterruptibleRanges>
<SlotTable>
<NumRegisters>0</NumRegisters>
<NumStackSlots>0</NumStackSlots>
<NumUntracked>0</NumUntracked>
<NumSlots>0</NumSlots>
<GcSlots />
</SlotTable>
</GcInfo>
</GcInfo>
<RuntimeFunctions>
<RuntimeFunction Index="0">
<MethodRid>1</MethodRid>
<RuntimeFunction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="0">
<Size>36</Size>
<CodeOffset>0</CodeOffset>
</RuntimeFunction>
<UnwindInfo>
<UnwindInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>12</Size>
<Version>1</Version>
<Flags>3</Flags>
<SizeOfProlog>5</SizeOfProlog>
<CountOfUnwindCodes>2</CountOfUnwindCodes>
<FrameRegister>EAX</FrameRegister>
<FrameOffset>0</FrameOffset>
<UnwindCode>
<UnwindCode Index="0">
<CodeOffset>5</CodeOffset>
<UnwindOp>UWOP_ALLOC_SMALL</UnwindOp>
<OpInfo>1</OpInfo>
<OffsetLow>5</OffsetLow>
<OffsetHigh>1</OffsetHigh>
<FrameOffset>4613</FrameOffset>
</UnwindCode>
<UnwindCode Index="1">
<CodeOffset>1</CodeOffset>
<UnwindOp>UWOP_PUSH_NONVOL</UnwindOp>
<OpInfo>5</OpInfo>
<OffsetLow>1</OffsetLow>
<OffsetHigh>5</OffsetHigh>
<FrameOffset>20481</FrameOffset>
</UnwindCode>
</UnwindCode>
<PersonalityRoutineRVA>9339</PersonalityRoutineRVA>
</UnwindInfo>
</UnwindInfo>
</RuntimeFunction>
<RuntimeFunction Index="1">
<MethodRid>1</MethodRid>
<RuntimeFunction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="1">
<Size>25</Size>
<CodeOffset>36</CodeOffset>
</RuntimeFunction>
<UnwindInfo>
<UnwindInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>12</Size>
<Version>1</Version>
<Flags>3</Flags>
<SizeOfProlog>5</SizeOfProlog>
<CountOfUnwindCodes>2</CountOfUnwindCodes>
<FrameRegister>EAX</FrameRegister>
<FrameOffset>0</FrameOffset>
<UnwindCode>
<UnwindCode Index="0">
<CodeOffset>5</CodeOffset>
<UnwindOp>UWOP_ALLOC_SMALL</UnwindOp>
<OpInfo>1</OpInfo>
<OffsetLow>5</OffsetLow>
<OffsetHigh>1</OffsetHigh>
<FrameOffset>4613</FrameOffset>
</UnwindCode>
<UnwindCode Index="1">
<CodeOffset>1</CodeOffset>
<UnwindOp>UWOP_PUSH_NONVOL</UnwindOp>
<OpInfo>5</OpInfo>
<OffsetLow>1</OffsetLow>
<OffsetHigh>5</OffsetHigh>
<FrameOffset>20481</FrameOffset>
</UnwindCode>
</UnwindCode>
<PersonalityRoutineRVA>9339</PersonalityRoutineRVA>
</UnwindInfo>
</UnwindInfo>
</RuntimeFunction>
</RuntimeFunctions>
</Method>
<Method Index="1">
<R2RMethod xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="1">
<Name>Main</Name>
<SignatureString>MultipleRuntimeFunctions.MultipleRuntimeFunctions.Main(String[])</SignatureString>
<IsGeneric>false</IsGeneric>
<DeclaringType>MultipleRuntimeFunctions.MultipleRuntimeFunctions</DeclaringType>
<Token>100663298</Token>
<Rid>2</Rid>
<EntryPointRuntimeFunctionId>2</EntryPointRuntimeFunctionId>
</R2RMethod>
<GcInfo>
<GcInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>73</Size>
<Offset>3276</Offset>
<CodeLength>29</CodeLength>
<Version>2</Version>
<ReturnKind>RT_Scalar</ReturnKind>
<ValidRangeStart>0</ValidRangeStart>
<ValidRangeEnd>0</ValidRangeEnd>
<SecurityObjectStackSlot>-1</SecurityObjectStackSlot>
<GSCookieStackSlot>-1</GSCookieStackSlot>
<PSPSymStackSlot>-1</PSPSymStackSlot>
<GenericsInstContextStackSlot>-1</GenericsInstContextStackSlot>
<StackBaseRegister>5</StackBaseRegister>
<SizeOfEditAndContinuePreservedArea>4294967295</SizeOfEditAndContinuePreservedArea>
<ReversePInvokeFrameStackSlot>-1</ReversePInvokeFrameStackSlot>
<SizeOfStackOutgoingAndScratchArea>32</SizeOfStackOutgoingAndScratchArea>
<NumSafePoints>0</NumSafePoints>
<NumInterruptibleRanges>1</NumInterruptibleRanges>
<SafePointOffsets />
<InterruptibleRanges>
<InterruptibleRange Index="0">
<StartOffset>14</StartOffset>
<StopOffset>23</StopOffset>
</InterruptibleRange>
</InterruptibleRanges>
<SlotTable>
<NumRegisters>0</NumRegisters>
<NumStackSlots>0</NumStackSlots>
<NumUntracked>1</NumUntracked>
<NumSlots>1</NumSlots>
<GcSlots>
<GcSlot Index="0">
<RegisterNumber>-1</RegisterNumber>
<StackSlot>
<SpOffset>16</SpOffset>
<Base>GC_FRAMEREG_REL</Base>
</StackSlot>
<Flags>GC_SLOT_UNTRACKED</Flags>
</GcSlot>
</GcSlots>
</SlotTable>
</GcInfo>
</GcInfo>
<RuntimeFunctions>
<RuntimeFunction Index="2">
<MethodRid>2</MethodRid>
<RuntimeFunction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="2">
<Size>29</Size>
<CodeOffset>0</CodeOffset>
</RuntimeFunction>
<UnwindInfo>
<UnwindInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>12</Size>
<Version>1</Version>
<Flags>3</Flags>
<SizeOfProlog>5</SizeOfProlog>
<CountOfUnwindCodes>2</CountOfUnwindCodes>
<FrameRegister>EAX</FrameRegister>
<FrameOffset>0</FrameOffset>
<UnwindCode>
<UnwindCode Index="0">
<CodeOffset>5</CodeOffset>
<UnwindOp>UWOP_ALLOC_SMALL</UnwindOp>
<OpInfo>3</OpInfo>
<OffsetLow>5</OffsetLow>
<OffsetHigh>3</OffsetHigh>
<FrameOffset>12805</FrameOffset>
</UnwindCode>
<UnwindCode Index="1">
<CodeOffset>1</CodeOffset>
<UnwindOp>UWOP_PUSH_NONVOL</UnwindOp>
<OpInfo>5</OpInfo>
<OffsetLow>1</OffsetLow>
<OffsetHigh>5</OffsetHigh>
<FrameOffset>20481</FrameOffset>
</UnwindCode>
</UnwindCode>
<PersonalityRoutineRVA>9339</PersonalityRoutineRVA>
</UnwindInfo>
</UnwindInfo>
</RuntimeFunction>
</RuntimeFunctions>
</Method>
<Method Index="2">
<R2RMethod xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="2">
<Name>.ctor</Name>
<SignatureString>MultipleRuntimeFunctions.MultipleRuntimeFunctions..ctor()</SignatureString>
<IsGeneric>false</IsGeneric>
<DeclaringType>MultipleRuntimeFunctions.MultipleRuntimeFunctions</DeclaringType>
<Token>100663299</Token>
<Rid>3</Rid>
<EntryPointRuntimeFunctionId>3</EntryPointRuntimeFunctionId>
</R2RMethod>
<GcInfo>
<GcInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>106</Size>
<Offset>3300</Offset>
<CodeLength>32</CodeLength>
<Version>2</Version>
<ReturnKind>RT_Scalar</ReturnKind>
<ValidRangeStart>0</ValidRangeStart>
<ValidRangeEnd>0</ValidRangeEnd>
<SecurityObjectStackSlot>-1</SecurityObjectStackSlot>
<GSCookieStackSlot>-1</GSCookieStackSlot>
<PSPSymStackSlot>-1</PSPSymStackSlot>
<GenericsInstContextStackSlot>-1</GenericsInstContextStackSlot>
<StackBaseRegister>5</StackBaseRegister>
<SizeOfEditAndContinuePreservedArea>4294967295</SizeOfEditAndContinuePreservedArea>
<ReversePInvokeFrameStackSlot>-1</ReversePInvokeFrameStackSlot>
<SizeOfStackOutgoingAndScratchArea>32</SizeOfStackOutgoingAndScratchArea>
<NumSafePoints>0</NumSafePoints>
<NumInterruptibleRanges>1</NumInterruptibleRanges>
<SafePointOffsets />
<InterruptibleRanges>
<InterruptibleRange Index="0">
<StartOffset>14</StartOffset>
<StopOffset>26</StopOffset>
</InterruptibleRange>
</InterruptibleRanges>
<SlotTable>
<NumRegisters>1</NumRegisters>
<NumStackSlots>0</NumStackSlots>
<NumUntracked>1</NumUntracked>
<NumSlots>2</NumSlots>
<GcSlots>
<GcSlot Index="0">
<RegisterNumber>1</RegisterNumber>
<Flags>GC_SLOT_BASE</Flags>
</GcSlot>
<GcSlot Index="1">
<RegisterNumber>-1</RegisterNumber>
<StackSlot>
<SpOffset>16</SpOffset>
<Base>GC_FRAMEREG_REL</Base>
</StackSlot>
<Flags>GC_SLOT_UNTRACKED</Flags>
</GcSlot>
</GcSlots>
</SlotTable>
</GcInfo>
<GcTransition xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="18">
<SlotId>0</SlotId>
<IsLive>true</IsLive>
<ChunkId>0</ChunkId>
<SlotState>ECX is live</SlotState>
</GcTransition>
<GcTransition xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="24">
<SlotId>0</SlotId>
<IsLive>false</IsLive>
<ChunkId>0</ChunkId>
<SlotState>ECX is dead</SlotState>
</GcTransition>
</GcInfo>
<RuntimeFunctions>
<RuntimeFunction Index="3">
<MethodRid>3</MethodRid>
<RuntimeFunction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" Index="3">
<Size>32</Size>
<CodeOffset>0</CodeOffset>
</RuntimeFunction>
<UnwindInfo>
<UnwindInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<Size>12</Size>
<Version>1</Version>
<Flags>3</Flags>
<SizeOfProlog>5</SizeOfProlog>
<CountOfUnwindCodes>2</CountOfUnwindCodes>
<FrameRegister>EAX</FrameRegister>
<FrameOffset>0</FrameOffset>
<UnwindCode>
<UnwindCode Index="0">
<CodeOffset>5</CodeOffset>
<UnwindOp>UWOP_ALLOC_SMALL</UnwindOp>
<OpInfo>3</OpInfo>
<OffsetLow>5</OffsetLow>
<OffsetHigh>3</OffsetHigh>
<FrameOffset>12805</FrameOffset>
</UnwindCode>
<UnwindCode Index="1">
<CodeOffset>1</CodeOffset>
<UnwindOp>UWOP_PUSH_NONVOL</UnwindOp>
<OpInfo>5</OpInfo>
<OffsetLow>1</OffsetLow>
<OffsetHigh>5</OffsetHigh>
<FrameOffset>20481</FrameOffset>
</UnwindCode>
</UnwindCode>
<PersonalityRoutineRVA>9339</PersonalityRoutineRVA>
</UnwindInfo>
</UnwindInfo>
</RuntimeFunction>
</RuntimeFunctions>
</Method>
</Methods>
</R2RDump>
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Private.Xml/tests/XmlReader/ReadContentAs/ReadAsBooleanTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace System.Xml.Tests
{
public class BooleanTests
{
[Fact]
public static void ReadContentAsBoolean1()
{
var reader = Utils.CreateFragmentReader("<Root> <?a?><!-- Comment inbetween--><![CDATA[0]]> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.False(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean10()
{
var reader = Utils.CreateFragmentReader("<Root> t<!-- Comment inbetween-->ru<?a?>e </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)true, reader.ReadContentAs(typeof(bool), null));
}
[Fact]
public static void ReadContentAsBoolean11()
{
var reader = Utils.CreateFragmentReader("<Root> <?a?><!-- Comment inbetween-->0 </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)false, reader.ReadContentAs(typeof(bool), null));
}
[Fact]
public static void ReadContentAsBoolean12()
{
var reader = Utils.CreateFragmentReader("<Root><![CDATA[1]]><?a?></Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)true, reader.ReadContentAs(typeof(bool), null));
}
[Fact]
public static void ReadContentAsBoolean2()
{
var reader = Utils.CreateFragmentReader("<Root> <!-- Comment inbetween--><![CDATA[1]]><?a?> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.True(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean3()
{
var reader = Utils.CreateFragmentReader("<Root> f<!-- Comment inbetween-->a<?a?>lse<!-- Comment inbetween--> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.False(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean4()
{
var reader = Utils.CreateFragmentReader("<Root> t<!-- Comment inbetween-->ru<?a?>e </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.True(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean5()
{
var reader = Utils.CreateFragmentReader("<Root> <?a?><!-- Comment inbetween-->0 </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.False(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean6()
{
var reader = Utils.CreateFragmentReader("<Root><![CDATA[1]]><?a?></Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.True(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean7()
{
var reader = Utils.CreateFragmentReader("<Root> <?a?><!-- Comment inbetween--><![CDATA[0]]> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)false, reader.ReadContentAs(typeof(bool), null));
}
[Fact]
public static void ReadContentAsBoolean8()
{
var reader = Utils.CreateFragmentReader("<Root> <!-- Comment inbetween--><![CDATA[1]]><?a?> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)true, reader.ReadContentAs(typeof(bool), null));
}
[Fact]
public static void ReadContentAsBoolean9()
{
var reader = Utils.CreateFragmentReader("<Root> f<!-- Comment inbetween-->a<?a?>lse<!-- Comment inbetween--> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)false, reader.ReadContentAs(typeof(bool), null));
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace System.Xml.Tests
{
public class BooleanTests
{
[Fact]
public static void ReadContentAsBoolean1()
{
var reader = Utils.CreateFragmentReader("<Root> <?a?><!-- Comment inbetween--><![CDATA[0]]> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.False(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean10()
{
var reader = Utils.CreateFragmentReader("<Root> t<!-- Comment inbetween-->ru<?a?>e </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)true, reader.ReadContentAs(typeof(bool), null));
}
[Fact]
public static void ReadContentAsBoolean11()
{
var reader = Utils.CreateFragmentReader("<Root> <?a?><!-- Comment inbetween-->0 </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)false, reader.ReadContentAs(typeof(bool), null));
}
[Fact]
public static void ReadContentAsBoolean12()
{
var reader = Utils.CreateFragmentReader("<Root><![CDATA[1]]><?a?></Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)true, reader.ReadContentAs(typeof(bool), null));
}
[Fact]
public static void ReadContentAsBoolean2()
{
var reader = Utils.CreateFragmentReader("<Root> <!-- Comment inbetween--><![CDATA[1]]><?a?> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.True(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean3()
{
var reader = Utils.CreateFragmentReader("<Root> f<!-- Comment inbetween-->a<?a?>lse<!-- Comment inbetween--> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.False(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean4()
{
var reader = Utils.CreateFragmentReader("<Root> t<!-- Comment inbetween-->ru<?a?>e </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.True(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean5()
{
var reader = Utils.CreateFragmentReader("<Root> <?a?><!-- Comment inbetween-->0 </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.False(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean6()
{
var reader = Utils.CreateFragmentReader("<Root><![CDATA[1]]><?a?></Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.True(reader.ReadContentAsBoolean());
}
[Fact]
public static void ReadContentAsBoolean7()
{
var reader = Utils.CreateFragmentReader("<Root> <?a?><!-- Comment inbetween--><![CDATA[0]]> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)false, reader.ReadContentAs(typeof(bool), null));
}
[Fact]
public static void ReadContentAsBoolean8()
{
var reader = Utils.CreateFragmentReader("<Root> <!-- Comment inbetween--><![CDATA[1]]><?a?> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)true, reader.ReadContentAs(typeof(bool), null));
}
[Fact]
public static void ReadContentAsBoolean9()
{
var reader = Utils.CreateFragmentReader("<Root> f<!-- Comment inbetween-->a<?a?>lse<!-- Comment inbetween--> </Root>");
reader.PositionOnElement("Root");
reader.Read();
Assert.Equal((object)false, reader.ReadContentAs(typeof(bool), null));
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Runtime.InteropServices/tests/System.Runtime.InteropServices.UnitTests/System/Runtime/InteropServices/Marshal/ReAllocCoTaskMemTests.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace System.Runtime.InteropServices.Tests
{
public class ReAllocCoTaskMemTests
{
[Fact]
public void ReAllocCoTaskMem_Invoke_DataCopied()
{
const int Size = 3;
IntPtr p1 = Marshal.AllocCoTaskMem(Size);
IntPtr p2 = p1;
try
{
for (int i = 0; i < Size; i++)
{
Marshal.WriteByte(p1 + i, (byte)i);
}
int add = 1;
do
{
p2 = Marshal.ReAllocCoTaskMem(p2, Size + add);
for (int i = 0; i < Size; i++)
{
Assert.Equal((byte)i, Marshal.ReadByte(p2 + i));
}
add++;
}
while (p2 == p1); // stop once we've validated moved case
}
finally
{
Marshal.FreeCoTaskMem(p2);
}
}
[InlineData(0)]
[InlineData(1)]
[InlineData(100)]
[Theory]
public void ReAllocCoTaskMem_PositiveSize(int size)
{
IntPtr p = Marshal.ReAllocCoTaskMem(IntPtr.Zero, size);
Assert.NotEqual(IntPtr.Zero, p);
IntPtr p1 = Marshal.ReAllocCoTaskMem(p, size + 1);
Assert.NotEqual(IntPtr.Zero, p1);
IntPtr p2 = Marshal.ReAllocCoTaskMem(p1, 0);
Assert.Equal(IntPtr.Zero, p2);
}
[Fact]
[OuterLoop]
public void ReAllocCoTaskMem_NegativeSize_ThrowsOutOfMemoryException()
{
// -1 is treated as (uint)-1 by ReAllocCoTaskMem. The allocation may succeed on 64-bit machines.
try
{
IntPtr p1 = Marshal.ReAllocCoTaskMem(IntPtr.Zero, -1);
Assert.NotEqual(IntPtr.Zero, p1);
Marshal.FreeCoTaskMem(p1);
}
catch (OutOfMemoryException)
{
}
IntPtr p2 = Marshal.AllocCoTaskMem(1);
try
{
p2 = Marshal.ReAllocCoTaskMem(p2, -1);
Assert.NotEqual(IntPtr.Zero, p2);
}
catch (OutOfMemoryException)
{
}
Marshal.FreeCoTaskMem(p2);
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace System.Runtime.InteropServices.Tests
{
public class ReAllocCoTaskMemTests
{
[Fact]
public void ReAllocCoTaskMem_Invoke_DataCopied()
{
const int Size = 3;
IntPtr p1 = Marshal.AllocCoTaskMem(Size);
IntPtr p2 = p1;
try
{
for (int i = 0; i < Size; i++)
{
Marshal.WriteByte(p1 + i, (byte)i);
}
int add = 1;
do
{
p2 = Marshal.ReAllocCoTaskMem(p2, Size + add);
for (int i = 0; i < Size; i++)
{
Assert.Equal((byte)i, Marshal.ReadByte(p2 + i));
}
add++;
}
while (p2 == p1); // stop once we've validated moved case
}
finally
{
Marshal.FreeCoTaskMem(p2);
}
}
[InlineData(0)]
[InlineData(1)]
[InlineData(100)]
[Theory]
public void ReAllocCoTaskMem_PositiveSize(int size)
{
IntPtr p = Marshal.ReAllocCoTaskMem(IntPtr.Zero, size);
Assert.NotEqual(IntPtr.Zero, p);
IntPtr p1 = Marshal.ReAllocCoTaskMem(p, size + 1);
Assert.NotEqual(IntPtr.Zero, p1);
IntPtr p2 = Marshal.ReAllocCoTaskMem(p1, 0);
Assert.Equal(IntPtr.Zero, p2);
}
[Fact]
[OuterLoop]
public void ReAllocCoTaskMem_NegativeSize_ThrowsOutOfMemoryException()
{
// -1 is treated as (uint)-1 by ReAllocCoTaskMem. The allocation may succeed on 64-bit machines.
try
{
IntPtr p1 = Marshal.ReAllocCoTaskMem(IntPtr.Zero, -1);
Assert.NotEqual(IntPtr.Zero, p1);
Marshal.FreeCoTaskMem(p1);
}
catch (OutOfMemoryException)
{
}
IntPtr p2 = Marshal.AllocCoTaskMem(1);
try
{
p2 = Marshal.ReAllocCoTaskMem(p2, -1);
Assert.NotEqual(IntPtr.Zero, p2);
}
catch (OutOfMemoryException)
{
}
Marshal.FreeCoTaskMem(p2);
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/HardwareIntrinsics/Arm/AdvSimd/Or.Vector64.Byte.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void Or_Vector64_Byte()
{
var test = new SimpleBinaryOpTest__Or_Vector64_Byte();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__Or_Vector64_Byte
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Byte[] inArray1, Byte[] inArray2, Byte[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Byte>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Byte>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Byte>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Byte, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Byte, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
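            // Rounds the pinned buffer address up to the next multiple of expectedAlignment (always a power of two here).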
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector64<Byte> _fld1;
public Vector64<Byte> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref testStruct._fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref testStruct._fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__Or_Vector64_Byte testClass)
{
var result = AdvSimd.Or(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__Or_Vector64_Byte testClass)
{
fixed (Vector64<Byte>* pFld1 = &_fld1)
fixed (Vector64<Byte>* pFld2 = &_fld2)
{
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(pFld1)),
AdvSimd.LoadVector64((Byte*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 8;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte);
private static Byte[] _data1 = new Byte[Op1ElementCount];
private static Byte[] _data2 = new Byte[Op2ElementCount];
private static Vector64<Byte> _clsVar1;
private static Vector64<Byte> _clsVar2;
private Vector64<Byte> _fld1;
private Vector64<Byte> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__Or_Vector64_Byte()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _clsVar1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _clsVar2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
}
public SimpleBinaryOpTest__Or_Vector64_Byte()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
_dataTable = new DataTable(_data1, _data2, new Byte[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.Or(
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Byte*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.Or), new Type[] { typeof(Vector64<Byte>), typeof(Vector64<Byte>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Byte>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.Or), new Type[] { typeof(Vector64<Byte>), typeof(Vector64<Byte>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector64((Byte*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Byte*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Byte>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.Or(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector64<Byte>* pClsVar1 = &_clsVar1)
fixed (Vector64<Byte>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(pClsVar1)),
AdvSimd.LoadVector64((Byte*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr);
var result = AdvSimd.Or(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector64((Byte*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector64((Byte*)(_dataTable.inArray2Ptr));
var result = AdvSimd.Or(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__Or_Vector64_Byte();
var result = AdvSimd.Or(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__Or_Vector64_Byte();
fixed (Vector64<Byte>* pFld1 = &test._fld1)
fixed (Vector64<Byte>* pFld2 = &test._fld2)
{
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(pFld1)),
AdvSimd.LoadVector64((Byte*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.Or(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector64<Byte>* pFld1 = &_fld1)
fixed (Vector64<Byte>* pFld2 = &_fld2)
{
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(pFld1)),
AdvSimd.LoadVector64((Byte*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.Or(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(&test._fld1)),
AdvSimd.LoadVector64((Byte*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector64<Byte> op1, Vector64<Byte> op2, void* result, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
Byte[] inArray2 = new Byte[Op2ElementCount];
Byte[] outArray = new Byte[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Byte>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
Byte[] inArray2 = new Byte[Op2ElementCount];
Byte[] outArray = new Byte[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector64<Byte>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector64<Byte>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Byte>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Byte[] left, Byte[] right, Byte[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
for (var i = 0; i < RetElementCount; i++)
{
if (Helpers.Or(left[i], right[i]) != result[i])
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd)}.{nameof(AdvSimd.Or)}<Byte>(Vector64<Byte>, Vector64<Byte>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void Or_Vector64_Byte()
{
var test = new SimpleBinaryOpTest__Or_Vector64_Byte();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__Or_Vector64_Byte
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Byte[] inArray1, Byte[] inArray2, Byte[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Byte>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Byte>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Byte>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Byte, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Byte, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
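            // Rounds the pinned buffer address up to the next multiple of expectedAlignment (always a power of two here).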
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector64<Byte> _fld1;
public Vector64<Byte> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref testStruct._fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref testStruct._fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__Or_Vector64_Byte testClass)
{
var result = AdvSimd.Or(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__Or_Vector64_Byte testClass)
{
fixed (Vector64<Byte>* pFld1 = &_fld1)
fixed (Vector64<Byte>* pFld2 = &_fld2)
{
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(pFld1)),
AdvSimd.LoadVector64((Byte*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 8;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte);
private static Byte[] _data1 = new Byte[Op1ElementCount];
private static Byte[] _data2 = new Byte[Op2ElementCount];
private static Vector64<Byte> _clsVar1;
private static Vector64<Byte> _clsVar2;
private Vector64<Byte> _fld1;
private Vector64<Byte> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__Or_Vector64_Byte()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _clsVar1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _clsVar2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
}
public SimpleBinaryOpTest__Or_Vector64_Byte()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
_dataTable = new DataTable(_data1, _data2, new Byte[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.Or(
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Byte*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.Or), new Type[] { typeof(Vector64<Byte>), typeof(Vector64<Byte>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Byte>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.Or), new Type[] { typeof(Vector64<Byte>), typeof(Vector64<Byte>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector64((Byte*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector64((Byte*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Byte>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.Or(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector64<Byte>* pClsVar1 = &_clsVar1)
fixed (Vector64<Byte>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(pClsVar1)),
AdvSimd.LoadVector64((Byte*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr);
var result = AdvSimd.Or(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector64((Byte*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector64((Byte*)(_dataTable.inArray2Ptr));
var result = AdvSimd.Or(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__Or_Vector64_Byte();
var result = AdvSimd.Or(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__Or_Vector64_Byte();
fixed (Vector64<Byte>* pFld1 = &test._fld1)
fixed (Vector64<Byte>* pFld2 = &test._fld2)
{
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(pFld1)),
AdvSimd.LoadVector64((Byte*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.Or(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector64<Byte>* pFld1 = &_fld1)
fixed (Vector64<Byte>* pFld2 = &_fld2)
{
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(pFld1)),
AdvSimd.LoadVector64((Byte*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.Or(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.Or(
AdvSimd.LoadVector64((Byte*)(&test._fld1)),
AdvSimd.LoadVector64((Byte*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector64<Byte> op1, Vector64<Byte> op2, void* result, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
Byte[] inArray2 = new Byte[Op2ElementCount];
Byte[] outArray = new Byte[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Byte>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
Byte[] inArray2 = new Byte[Op2ElementCount];
Byte[] outArray = new Byte[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector64<Byte>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector64<Byte>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Byte>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Byte[] left, Byte[] right, Byte[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
for (var i = 0; i < RetElementCount; i++)
{
if (Helpers.Or(left[i], right[i]) != result[i])
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd)}.{nameof(AdvSimd.Or)}<Byte>(Vector64<Byte>, Vector64<Byte>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/IL_Conformance/Old/Conformance_Base/shr_u4.ilproj
|
<Project Sdk="Microsoft.NET.Sdk.IL">
<PropertyGroup>
<OutputType>Exe</OutputType>
<RestorePackages>true</RestorePackages>
<CLRTestPriority>1</CLRTestPriority>
</PropertyGroup>
<PropertyGroup>
<DebugType>PdbOnly</DebugType>
<Optimize>True</Optimize>
</PropertyGroup>
<ItemGroup>
<Compile Include="shr_u4.il" />
</ItemGroup>
</Project>
|
<Project Sdk="Microsoft.NET.Sdk.IL">
<PropertyGroup>
<OutputType>Exe</OutputType>
<RestorePackages>true</RestorePackages>
<CLRTestPriority>1</CLRTestPriority>
</PropertyGroup>
<PropertyGroup>
<DebugType>PdbOnly</DebugType>
<Optimize>True</Optimize>
</PropertyGroup>
<ItemGroup>
<Compile Include="shr_u4.il" />
</ItemGroup>
</Project>
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state into the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added a lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in the current directory and also by frameworks in the user's directory. I left a basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/HardwareIntrinsics/General/Vector64/EqualsAll.Byte.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void EqualsAllByte()
{
var test = new VectorBooleanBinaryOpTest__EqualsAllByte();
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class VectorBooleanBinaryOpTest__EqualsAllByte
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private GCHandle inHandle1;
private GCHandle inHandle2;
private ulong alignment;
public DataTable(Byte[] inArray1, Byte[] inArray2, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Byte>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Byte>();
if ((alignment != 32 && alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Byte, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Byte, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
}
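            // Rounds the pinned buffer address up to the next multiple of expectedAlignment (always a power of two here).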
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector64<Byte> _fld1;
public Vector64<Byte> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref testStruct._fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref testStruct._fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
return testStruct;
}
public void RunStructFldScenario(VectorBooleanBinaryOpTest__EqualsAllByte testClass)
{
var result = Vector64.EqualsAll(_fld1, _fld2);
testClass.ValidateResult(_fld1, _fld2, result);
}
}
private static readonly int LargestVectorSize = 8;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte);
private static Byte[] _data1 = new Byte[Op1ElementCount];
private static Byte[] _data2 = new Byte[Op2ElementCount];
private static Vector64<Byte> _clsVar1;
private static Vector64<Byte> _clsVar2;
private Vector64<Byte> _fld1;
private Vector64<Byte> _fld2;
private DataTable _dataTable;
static VectorBooleanBinaryOpTest__EqualsAllByte()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _clsVar1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _clsVar2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
}
public VectorBooleanBinaryOpTest__EqualsAllByte()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
_dataTable = new DataTable(_data1, _data2, LargestVectorSize);
}
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Vector64.EqualsAll(
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr)
);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var method = typeof(Vector64).GetMethod(nameof(Vector64.EqualsAll), new Type[] {
typeof(Vector64<Byte>),
typeof(Vector64<Byte>)
});
if (method is null)
{
method = typeof(Vector64).GetMethod(nameof(Vector64.EqualsAll), 1, new Type[] {
typeof(Vector64<>).MakeGenericType(Type.MakeGenericMethodParameter(0)),
typeof(Vector64<>).MakeGenericType(Type.MakeGenericMethodParameter(0))
});
}
if (method.IsGenericMethodDefinition)
{
method = method.MakeGenericMethod(typeof(Byte));
}
var result = method.Invoke(null, new object[] {
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr)
});
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Vector64.EqualsAll(
_clsVar1,
_clsVar2
);
ValidateResult(_clsVar1, _clsVar2, result);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr);
var result = Vector64.EqualsAll(op1, op2);
ValidateResult(op1, op2, result);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new VectorBooleanBinaryOpTest__EqualsAllByte();
var result = Vector64.EqualsAll(test._fld1, test._fld2);
ValidateResult(test._fld1, test._fld2, result);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Vector64.EqualsAll(_fld1, _fld2);
ValidateResult(_fld1, _fld2, result);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Vector64.EqualsAll(test._fld1, test._fld2);
ValidateResult(test._fld1, test._fld2, result);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
private void ValidateResult(Vector64<Byte> op1, Vector64<Byte> op2, bool result, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
Byte[] inArray2 = new Byte[Op2ElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), op2);
ValidateResult(inArray1, inArray2, result, method);
}
private void ValidateResult(void* op1, void* op2, bool result, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
Byte[] inArray2 = new Byte[Op2ElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector64<Byte>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector64<Byte>>());
ValidateResult(inArray1, inArray2, result, method);
}
private void ValidateResult(Byte[] left, Byte[] right, bool result, [CallerMemberName] string method = "")
{
bool succeeded = true;
var expectedResult = true;
for (var i = 0; i < Op1ElementCount; i++)
{
expectedResult &= (left[i] == right[i]);
}
succeeded = (expectedResult == result);
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Vector64)}.{nameof(Vector64.EqualsAll)}<Byte>(Vector64<Byte>, Vector64<Byte>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({result})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
namespace JIT.HardwareIntrinsics.General
{
public static partial class Program
{
private static void EqualsAllByte()
{
var test = new VectorBooleanBinaryOpTest__EqualsAllByte();
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class VectorBooleanBinaryOpTest__EqualsAllByte
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private GCHandle inHandle1;
private GCHandle inHandle2;
private ulong alignment;
public DataTable(Byte[] inArray1, Byte[] inArray2, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Byte>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Byte>();
if ((alignment != 32 && alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Byte, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Byte, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
}
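// Rounds the pinned buffer pointer up to the next multiple of the requested power-of-two alignment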
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector64<Byte> _fld1;
public Vector64<Byte> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
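// The nested struct reuses the containing class's static _data buffers, refilled with fresh random bytes, to initialize its fields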
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref testStruct._fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref testStruct._fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
return testStruct;
}
public void RunStructFldScenario(VectorBooleanBinaryOpTest__EqualsAllByte testClass)
{
var result = Vector64.EqualsAll(_fld1, _fld2);
testClass.ValidateResult(_fld1, _fld2, result);
}
}
private static readonly int LargestVectorSize = 8;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte);
private static Byte[] _data1 = new Byte[Op1ElementCount];
private static Byte[] _data2 = new Byte[Op2ElementCount];
private static Vector64<Byte> _clsVar1;
private static Vector64<Byte> _clsVar2;
private Vector64<Byte> _fld1;
private Vector64<Byte> _fld2;
private DataTable _dataTable;
static VectorBooleanBinaryOpTest__EqualsAllByte()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _clsVar1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _clsVar2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
}
public VectorBooleanBinaryOpTest__EqualsAllByte()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); }
_dataTable = new DataTable(_data1, _data2, LargestVectorSize);
}
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Vector64.EqualsAll(
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr)
);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var method = typeof(Vector64).GetMethod(nameof(Vector64.EqualsAll), new Type[] {
typeof(Vector64<Byte>),
typeof(Vector64<Byte>)
});
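// If a Byte-specific overload is not exposed, fall back to the open generic EqualsAll<T> definition and close it over Byte below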
if (method is null)
{
method = typeof(Vector64).GetMethod(nameof(Vector64.EqualsAll), 1, new Type[] {
typeof(Vector64<>).MakeGenericType(Type.MakeGenericMethodParameter(0)),
typeof(Vector64<>).MakeGenericType(Type.MakeGenericMethodParameter(0))
});
}
if (method.IsGenericMethodDefinition)
{
method = method.MakeGenericMethod(typeof(Byte));
}
var result = method.Invoke(null, new object[] {
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr)
});
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Vector64.EqualsAll(
_clsVar1,
_clsVar2
);
ValidateResult(_clsVar1, _clsVar2, result);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr);
var result = Vector64.EqualsAll(op1, op2);
ValidateResult(op1, op2, result);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new VectorBooleanBinaryOpTest__EqualsAllByte();
var result = Vector64.EqualsAll(test._fld1, test._fld2);
ValidateResult(test._fld1, test._fld2, result);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Vector64.EqualsAll(_fld1, _fld2);
ValidateResult(_fld1, _fld2, result);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Vector64.EqualsAll(test._fld1, test._fld2);
ValidateResult(test._fld1, test._fld2, result);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
private void ValidateResult(Vector64<Byte> op1, Vector64<Byte> op2, bool result, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
Byte[] inArray2 = new Byte[Op2ElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), op2);
ValidateResult(inArray1, inArray2, result, method);
}
private void ValidateResult(void* op1, void* op2, bool result, [CallerMemberName] string method = "")
{
Byte[] inArray1 = new Byte[Op1ElementCount];
Byte[] inArray2 = new Byte[Op2ElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector64<Byte>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector64<Byte>>());
ValidateResult(inArray1, inArray2, result, method);
}
private void ValidateResult(Byte[] left, Byte[] right, bool result, [CallerMemberName] string method = "")
{
bool succeeded = true;
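// EqualsAll should report true only when every corresponding element pair matches, so AND the per-element comparisons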
var expectedResult = true;
for (var i = 0; i < Op1ElementCount; i++)
{
expectedResult &= (left[i] == right[i]);
}
succeeded = (expectedResult == result);
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Vector64)}.{nameof(Vector64.EqualsAll)}<Byte>(Vector64<Byte>, Vector64<Byte>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({result})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/Regression/CLR-x86-JIT/V1-M12-Beta2/b37238/b37238.il
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
.assembly extern legacy library mscorlib {}
.assembly extern System.Console
{
.publickeytoken = (B0 3F 5F 7F 11 D5 0A 3A )
.ver 4:0:0:0
}
.assembly 'b37238' {}
.assembly extern xunit.core {}
.class ILGEN_0x7fee1ca2 {
.method static float32 Method_0x54eb(int64 Arg_0x0, int16 Arg_0x1, unsigned int32 Arg_0x2, unsigned int16 Arg_0x3, int8 Arg_0x4, int16 Arg_0x5, int8 Arg_0x6) {
.maxstack 15
.locals (int64[] local_0x0,int32 local_0x1,unsigned int64[] local_0x2,unsigned int8 local_0x3,unsigned int64[] local_0x4,int64 local_0x5,unsigned int8[] local_0x6,unsigned int32 local_0x7,int32[] local_0x8)
ldc.i4 255
newarr [mscorlib]System.Int64
stloc local_0x0
ldc.i4 0xb44784d
stloc local_0x1
ldc.i8 0x54ac6d6f10d26fa
stloc local_0x5
ldc.i4 0x3e7b537
stloc local_0x7
Start_Orphan_5:
ldarg Arg_0x0
conv.i4
stloc local_0x7
End_Orphan_5:
ldloc local_0x1
conv.ovf.i1.un
ldarg Arg_0x0
ldloc local_0x5
clt.un
cgt
ldloc local_0x0
ldloc local_0x7
ldelem.i8
conv.i1
mul
conv.r8
ret
}
.method static int32 Main() {
.custom instance void [xunit.core]Xunit.FactAttribute::.ctor() = (
01 00 00 00
)
.entrypoint
.maxstack 20
.try {
ldc.i8 0x20c16ffc86d5015
ldc.i4 0x71924c8b
ldc.i4 0x2e2c213c
ldc.i4 0x234ab01
ldc.i4 0x1b3d6105
ldc.i4 0x28233ccd
ldc.i4 0x2d0873ff
call float32 ILGEN_0x7fee1ca2::Method_0x54eb(int64 Arg_0x0, int16 Arg_0x1, unsigned int32 Arg_0x2, unsigned int16 Arg_0x3, int8 Arg_0x4, int16 Arg_0x5, int8 Arg_0x6)
conv.i4
pop
leave sss
} catch [mscorlib]System.OverflowException {
pop
leave sss
}
sss:
ldc.i4 100
ret
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
.assembly extern legacy library mscorlib {}
.assembly extern System.Console
{
.publickeytoken = (B0 3F 5F 7F 11 D5 0A 3A )
.ver 4:0:0:0
}
.assembly 'b37238' {}
.assembly extern xunit.core {}
.class ILGEN_0x7fee1ca2 {
.method static float32 Method_0x54eb(int64 Arg_0x0, int16 Arg_0x1, unsigned int32 Arg_0x2, unsigned int16 Arg_0x3, int8 Arg_0x4, int16 Arg_0x5, int8 Arg_0x6) {
.maxstack 15
.locals (int64[] local_0x0,int32 local_0x1,unsigned int64[] local_0x2,unsigned int8 local_0x3,unsigned int64[] local_0x4,int64 local_0x5,unsigned int8[] local_0x6,unsigned int32 local_0x7,int32[] local_0x8)
ldc.i4 255
newarr [mscorlib]System.Int64
stloc local_0x0
ldc.i4 0xb44784d
stloc local_0x1
ldc.i8 0x54ac6d6f10d26fa
stloc local_0x5
ldc.i4 0x3e7b537
stloc local_0x7
Start_Orphan_5:
ldarg Arg_0x0
conv.i4
stloc local_0x7
End_Orphan_5:
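// conv.ovf.i1.un below narrows local_0x1 (0xb44784d) to int8 with an overflow check; Main is expected to catch the resulting OverflowException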
ldloc local_0x1
conv.ovf.i1.un
ldarg Arg_0x0
ldloc local_0x5
clt.un
cgt
ldloc local_0x0
ldloc local_0x7
ldelem.i8
conv.i1
mul
conv.r8
ret
}
.method static int32 Main() {
.custom instance void [xunit.core]Xunit.FactAttribute::.ctor() = (
01 00 00 00
)
.entrypoint
.maxstack 20
.try {
ldc.i8 0x20c16ffc86d5015
ldc.i4 0x71924c8b
ldc.i4 0x2e2c213c
ldc.i4 0x234ab01
ldc.i4 0x1b3d6105
ldc.i4 0x28233ccd
ldc.i4 0x2d0873ff
call float32 ILGEN_0x7fee1ca2::Method_0x54eb(int64 Arg_0x0, int16 Arg_0x1, unsigned int32 Arg_0x2, unsigned int16 Arg_0x3, int8 Arg_0x4, int16 Arg_0x5, int8 Arg_0x6)
conv.i4
pop
leave sss
} catch [mscorlib]System.OverflowException {
pop
leave sss
}
sss:
ldc.i4 100
ret
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/libraries/System.Private.Xml/src/System/Xml/XPath/Internal/StringFunctions.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Text;
using System.Xml;
using System.Xml.XPath;
using System.Xml.Xsl;
namespace MS.Internal.Xml.XPath
{
internal sealed class StringFunctions : ValueQuery
{
private readonly Function.FunctionType _funcType;
private readonly IList<Query> _argList;
public StringFunctions(Function.FunctionType funcType, IList<Query> argList)
{
Debug.Assert(argList != null, "Use 'new Query[]{}' instead.");
_funcType = funcType;
_argList = argList;
}
private StringFunctions(StringFunctions other) : base(other)
{
_funcType = other._funcType;
Query[] tmp = new Query[other._argList.Count];
{
for (int i = 0; i < tmp.Length; i++)
{
tmp[i] = Clone(other._argList[i]);
}
}
_argList = tmp;
}
public override void SetXsltContext(XsltContext context)
{
for (int i = 0; i < _argList.Count; i++)
{
_argList[i].SetXsltContext(context);
}
}
public override object Evaluate(XPathNodeIterator nodeIterator) =>
_funcType switch
{
Function.FunctionType.FuncString => toString(nodeIterator),
Function.FunctionType.FuncConcat => Concat(nodeIterator),
Function.FunctionType.FuncStartsWith => StartsWith(nodeIterator),
Function.FunctionType.FuncContains => Contains(nodeIterator),
Function.FunctionType.FuncSubstringBefore => SubstringBefore(nodeIterator),
Function.FunctionType.FuncSubstringAfter => SubstringAfter(nodeIterator),
Function.FunctionType.FuncSubstring => Substring(nodeIterator),
Function.FunctionType.FuncStringLength => StringLength(nodeIterator),
Function.FunctionType.FuncNormalize => Normalize(nodeIterator),
Function.FunctionType.FuncTranslate => Translate(nodeIterator),
_ => string.Empty,
};
internal static string toString(double num)
{
return num.ToString("R", NumberFormatInfo.InvariantInfo);
}
internal static string toString(bool b)
{
return b ? "true" : "false";
}
private string toString(XPathNodeIterator nodeIterator)
{
if (_argList.Count > 0)
{
object argVal = _argList[0].Evaluate(nodeIterator);
switch (GetXPathType(argVal))
{
case XPathResultType.NodeSet:
XPathNavigator? value = _argList[0].Advance();
return value != null ? value.Value : string.Empty;
case XPathResultType.String:
return (string)argVal;
case XPathResultType.Boolean:
return ((bool)argVal) ? "true" : "false";
case XPathResultType_Navigator:
return ((XPathNavigator)argVal).Value;
default:
Debug.Assert(GetXPathType(argVal) == XPathResultType.Number);
return toString((double)argVal);
}
}
Debug.Assert(nodeIterator.Current != null);
return nodeIterator.Current.Value;
}
public override XPathResultType StaticType
{
get
{
if (_funcType == Function.FunctionType.FuncStringLength)
{
return XPathResultType.Number;
}
if (
_funcType == Function.FunctionType.FuncStartsWith ||
_funcType == Function.FunctionType.FuncContains
)
{
return XPathResultType.Boolean;
}
return XPathResultType.String;
}
}
private string Concat(XPathNodeIterator nodeIterator)
{
int count = 0;
StringBuilder s = new StringBuilder();
while (count < _argList.Count)
{
s.Append(_argList[count++].Evaluate(nodeIterator).ToString());
}
return s.ToString();
}
private bool StartsWith(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 1);
string s1 = _argList[0].Evaluate(nodeIterator).ToString()!;
string s2 = _argList[1].Evaluate(nodeIterator).ToString()!;
return s1.Length >= s2.Length && string.CompareOrdinal(s1, 0, s2, 0, s2.Length) == 0;
}
private static readonly CompareInfo s_compareInfo = CultureInfo.InvariantCulture.CompareInfo;
private bool Contains(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 1);
string s1 = _argList[0].Evaluate(nodeIterator).ToString()!;
string s2 = _argList[1].Evaluate(nodeIterator).ToString()!;
return s_compareInfo.IndexOf(s1, s2, CompareOptions.Ordinal) >= 0;
}
private string SubstringBefore(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 1);
string s1 = _argList[0].Evaluate(nodeIterator).ToString()!;
string s2 = _argList[1].Evaluate(nodeIterator).ToString()!;
if (s2.Length == 0) { return s2; }
int idx = s_compareInfo.IndexOf(s1, s2, CompareOptions.Ordinal);
return (idx < 1) ? string.Empty : s1.Substring(0, idx);
}
private string SubstringAfter(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 1);
string s1 = _argList[0].Evaluate(nodeIterator).ToString()!;
string s2 = _argList[1].Evaluate(nodeIterator).ToString()!;
if (s2.Length == 0) { return s1; }
int idx = s_compareInfo.IndexOf(s1, s2, CompareOptions.Ordinal);
return (idx < 0) ? string.Empty : s1.Substring(idx + s2.Length);
}
private string Substring(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 0);
string str1 = _argList[0].Evaluate(nodeIterator).ToString()!;
double num = XmlConvert.XPathRound(XmlConvert.ToXPathDouble(_argList[1].Evaluate(nodeIterator))) - 1;
if (double.IsNaN(num) || str1.Length <= num)
{
return string.Empty;
}
if (_argList.Count == 3)
{
double num1 = XmlConvert.XPathRound(XmlConvert.ToXPathDouble(_argList[2].Evaluate(nodeIterator)));
if (double.IsNaN(num1))
{
return string.Empty;
}
if (num < 0 || num1 < 0)
{
num1 = num + num1;
// NOTE: condition is true for NaN
if (!(num1 > 0))
{
return string.Empty;
}
num = 0;
}
double maxlength = str1.Length - num;
if (num1 > maxlength)
{
num1 = maxlength;
}
return str1.Substring((int)num, (int)num1);
}
if (num < 0)
{
num = 0;
}
return str1.Substring((int)num);
}
private double StringLength(XPathNodeIterator nodeIterator)
{
if (_argList.Count > 0)
{
return _argList[0].Evaluate(nodeIterator).ToString()!.Length;
}
Debug.Assert(nodeIterator!.Current != null);
return nodeIterator.Current.Value.Length;
}
private string Normalize(XPathNodeIterator nodeIterator)
{
string value;
if (_argList.Count > 0)
{
value = _argList[0].Evaluate(nodeIterator).ToString()!;
}
else
{
Debug.Assert(nodeIterator!.Current != null);
value = nodeIterator.Current.Value;
}
int modifyPos = -1;
char[] chars = value.ToCharArray();
bool firstSpace = false; // Start false to trim the beginning
for (int comparePos = 0; comparePos < chars.Length; comparePos++)
{
if (!XmlCharType.IsWhiteSpace(chars[comparePos]))
{
firstSpace = true;
modifyPos++;
chars[modifyPos] = chars[comparePos];
}
else if (firstSpace)
{
firstSpace = false;
modifyPos++;
chars[modifyPos] = ' ';
}
}
// Trim end
if (modifyPos > -1 && chars[modifyPos] == ' ')
modifyPos--;
return new string(chars, 0, modifyPos + 1);
}
private string Translate(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 2);
string value = _argList[0].Evaluate(nodeIterator).ToString()!;
string mapFrom = _argList[1].Evaluate(nodeIterator).ToString()!;
string mapTo = _argList[2].Evaluate(nodeIterator).ToString()!;
int modifyPos = -1;
char[] chars = value.ToCharArray();
for (int comparePos = 0; comparePos < chars.Length; comparePos++)
{
int index = mapFrom.IndexOf(chars[comparePos]);
if (index != -1)
{
if (index < mapTo.Length)
{
modifyPos++;
chars[modifyPos] = mapTo[index];
}
}
else
{
modifyPos++;
chars[modifyPos] = chars[comparePos];
}
}
return new string(chars, 0, modifyPos + 1);
}
public override XPathNodeIterator Clone() { return new StringFunctions(this); }
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Text;
using System.Xml;
using System.Xml.XPath;
using System.Xml.Xsl;
namespace MS.Internal.Xml.XPath
{
internal sealed class StringFunctions : ValueQuery
{
private readonly Function.FunctionType _funcType;
private readonly IList<Query> _argList;
public StringFunctions(Function.FunctionType funcType, IList<Query> argList)
{
Debug.Assert(argList != null, "Use 'new Query[]{}' instead.");
_funcType = funcType;
_argList = argList;
}
private StringFunctions(StringFunctions other) : base(other)
{
_funcType = other._funcType;
Query[] tmp = new Query[other._argList.Count];
{
for (int i = 0; i < tmp.Length; i++)
{
tmp[i] = Clone(other._argList[i]);
}
}
_argList = tmp;
}
public override void SetXsltContext(XsltContext context)
{
for (int i = 0; i < _argList.Count; i++)
{
_argList[i].SetXsltContext(context);
}
}
public override object Evaluate(XPathNodeIterator nodeIterator) =>
_funcType switch
{
Function.FunctionType.FuncString => toString(nodeIterator),
Function.FunctionType.FuncConcat => Concat(nodeIterator),
Function.FunctionType.FuncStartsWith => StartsWith(nodeIterator),
Function.FunctionType.FuncContains => Contains(nodeIterator),
Function.FunctionType.FuncSubstringBefore => SubstringBefore(nodeIterator),
Function.FunctionType.FuncSubstringAfter => SubstringAfter(nodeIterator),
Function.FunctionType.FuncSubstring => Substring(nodeIterator),
Function.FunctionType.FuncStringLength => StringLength(nodeIterator),
Function.FunctionType.FuncNormalize => Normalize(nodeIterator),
Function.FunctionType.FuncTranslate => Translate(nodeIterator),
_ => string.Empty,
};
internal static string toString(double num)
{
return num.ToString("R", NumberFormatInfo.InvariantInfo);
}
internal static string toString(bool b)
{
return b ? "true" : "false";
}
private string toString(XPathNodeIterator nodeIterator)
{
if (_argList.Count > 0)
{
object argVal = _argList[0].Evaluate(nodeIterator);
switch (GetXPathType(argVal))
{
case XPathResultType.NodeSet:
XPathNavigator? value = _argList[0].Advance();
return value != null ? value.Value : string.Empty;
case XPathResultType.String:
return (string)argVal;
case XPathResultType.Boolean:
return ((bool)argVal) ? "true" : "false";
case XPathResultType_Navigator:
return ((XPathNavigator)argVal).Value;
default:
Debug.Assert(GetXPathType(argVal) == XPathResultType.Number);
return toString((double)argVal);
}
}
Debug.Assert(nodeIterator.Current != null);
return nodeIterator.Current.Value;
}
public override XPathResultType StaticType
{
get
{
if (_funcType == Function.FunctionType.FuncStringLength)
{
return XPathResultType.Number;
}
if (
_funcType == Function.FunctionType.FuncStartsWith ||
_funcType == Function.FunctionType.FuncContains
)
{
return XPathResultType.Boolean;
}
return XPathResultType.String;
}
}
private string Concat(XPathNodeIterator nodeIterator)
{
int count = 0;
StringBuilder s = new StringBuilder();
while (count < _argList.Count)
{
s.Append(_argList[count++].Evaluate(nodeIterator).ToString());
}
return s.ToString();
}
private bool StartsWith(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 1);
string s1 = _argList[0].Evaluate(nodeIterator).ToString()!;
string s2 = _argList[1].Evaluate(nodeIterator).ToString()!;
return s1.Length >= s2.Length && string.CompareOrdinal(s1, 0, s2, 0, s2.Length) == 0;
}
private static readonly CompareInfo s_compareInfo = CultureInfo.InvariantCulture.CompareInfo;
private bool Contains(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 1);
string s1 = _argList[0].Evaluate(nodeIterator).ToString()!;
string s2 = _argList[1].Evaluate(nodeIterator).ToString()!;
return s_compareInfo.IndexOf(s1, s2, CompareOptions.Ordinal) >= 0;
}
private string SubstringBefore(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 1);
string s1 = _argList[0].Evaluate(nodeIterator).ToString()!;
string s2 = _argList[1].Evaluate(nodeIterator).ToString()!;
if (s2.Length == 0) { return s2; }
int idx = s_compareInfo.IndexOf(s1, s2, CompareOptions.Ordinal);
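// idx < 1 covers both "not found" (-1) and a match at position 0, where the substring before the match is empty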
return (idx < 1) ? string.Empty : s1.Substring(0, idx);
}
private string SubstringAfter(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 1);
string s1 = _argList[0].Evaluate(nodeIterator).ToString()!;
string s2 = _argList[1].Evaluate(nodeIterator).ToString()!;
if (s2.Length == 0) { return s1; }
int idx = s_compareInfo.IndexOf(s1, s2, CompareOptions.Ordinal);
return (idx < 0) ? string.Empty : s1.Substring(idx + s2.Length);
}
private string Substring(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 0);
string str1 = _argList[0].Evaluate(nodeIterator).ToString()!;
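// XPath substring() positions are 1-based: round the start argument per the spec, then convert it to a 0-based index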
double num = XmlConvert.XPathRound(XmlConvert.ToXPathDouble(_argList[1].Evaluate(nodeIterator))) - 1;
if (double.IsNaN(num) || str1.Length <= num)
{
return string.Empty;
}
if (_argList.Count == 3)
{
double num1 = XmlConvert.XPathRound(XmlConvert.ToXPathDouble(_argList[2].Evaluate(nodeIterator)));
if (double.IsNaN(num1))
{
return string.Empty;
}
if (num < 0 || num1 < 0)
{
num1 = num + num1;
// NOTE: condition is true for NaN
if (!(num1 > 0))
{
return string.Empty;
}
num = 0;
}
double maxlength = str1.Length - num;
if (num1 > maxlength)
{
num1 = maxlength;
}
return str1.Substring((int)num, (int)num1);
}
if (num < 0)
{
num = 0;
}
return str1.Substring((int)num);
}
private double StringLength(XPathNodeIterator nodeIterator)
{
if (_argList.Count > 0)
{
return _argList[0].Evaluate(nodeIterator).ToString()!.Length;
}
Debug.Assert(nodeIterator!.Current != null);
return nodeIterator.Current.Value.Length;
}
private string Normalize(XPathNodeIterator nodeIterator)
{
string value;
if (_argList.Count > 0)
{
value = _argList[0].Evaluate(nodeIterator).ToString()!;
}
else
{
Debug.Assert(nodeIterator!.Current != null);
value = nodeIterator.Current.Value;
}
int modifyPos = -1;
char[] chars = value.ToCharArray();
bool firstSpace = false; // Start false to trim the beginning
for (int comparePos = 0; comparePos < chars.Length; comparePos++)
{
if (!XmlCharType.IsWhiteSpace(chars[comparePos]))
{
firstSpace = true;
modifyPos++;
chars[modifyPos] = chars[comparePos];
}
else if (firstSpace)
{
firstSpace = false;
modifyPos++;
chars[modifyPos] = ' ';
}
}
// Trim end
if (modifyPos > -1 && chars[modifyPos] == ' ')
modifyPos--;
return new string(chars, 0, modifyPos + 1);
}
private string Translate(XPathNodeIterator nodeIterator)
{
Debug.Assert(_argList.Count > 2);
string value = _argList[0].Evaluate(nodeIterator).ToString()!;
string mapFrom = _argList[1].Evaluate(nodeIterator).ToString()!;
string mapTo = _argList[2].Evaluate(nodeIterator).ToString()!;
int modifyPos = -1;
char[] chars = value.ToCharArray();
for (int comparePos = 0; comparePos < chars.Length; comparePos++)
{
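// A character found in mapFrom is replaced by the character at the same index in mapTo; if mapTo is shorter, the character is dropped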
int index = mapFrom.IndexOf(chars[comparePos]);
if (index != -1)
{
if (index < mapTo.Length)
{
modifyPos++;
chars[modifyPos] = mapTo[index];
}
}
else
{
modifyPos++;
chars[modifyPos] = chars[comparePos];
}
}
return new string(chars, 0, modifyPos + 1);
}
public override XPathNodeIterator Clone() { return new StringFunctions(this); }
}
}
| -1 |
dotnet/runtime
| 66,332 |
Refactor and improve MLL tests
|
* Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
vitek-karas
| 2022-03-08T13:26:28Z | 2022-03-11T08:34:45Z |
5ce2b9f860f7a84c3059650bb67817d59d8f4953
|
f82fe1d83f1a0e22025e186fdc6f4d86de0a83a1
|
Refactor and improve MLL tests. * Remove duplicated code - I know that tests should be descriptive, but repeating 100 times that we want to capture output doesn't feel necessary.
* For some of them move more stuff into the shared test state - this improves perf as we avoid repeating the setup (copying files around) for each test case, we do it once for the entire class
I also changed some of the tests to "Theory" as it's easier to read that way.
* `SDKLookup.cs` - moved most of the state in to the shared state to speed up the tests.
* Adding more cases into the "theory" tests
* Most notably for the framework resolution I added variations on the TFM (which will be needed when we implement disable of MLL)
* Adding new tests mostly around "list runtimes" (various forms), "list sdks" (various forms) and errors (which also list runtimes or sdks)
* Ported all of the `MultiLevelLookupFramework` tests over to the `FrameworkResolution` and `DependencyResolutions` suites which have a more robust test infra and can run the same tests much faster. Along the way I added lot more variations on top of the existing tests:
* `PerAssemblyVersionResolutionMultipleFrameworks.cs` - this is actually not an MLL test, but I moved it to the new infra to make it much faster
* `MultipleHives.cs` - MLL framework resolution tests
For SDK resolution I kept the `MultiLevelSDKLookup.cs` just removed code duplication and added new variants.
For the core reviewers: I promise I didn't remove any single test case in spirit with these exceptions:
* We had tests which validated that framework resolution is not affected by frameworks in current directory and also by frameworks in the user's directory. I left some basic test for the current directory check, but I completely removed the user's directory variant as the product simply doesn't have any code around that anymore.
|
./src/tests/JIT/HardwareIntrinsics/Arm/AdvSimd/FusedSubtractHalving.Vector128.Int16.cs
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void FusedSubtractHalving_Vector128_Int16()
{
var test = new SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Int16[] inArray1, Int16[] inArray2, Int16[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Int16>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Int16>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Int16>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
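// Over-allocate to twice the alignment so the aligned pointers returned by Align remain inside the pinned buffers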
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Int16, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Int16, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<Int16> _fld1;
public Vector128<Int16> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref testStruct._fld1), ref Unsafe.As<Int16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref testStruct._fld2), ref Unsafe.As<Int16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16 testClass)
{
var result = AdvSimd.FusedSubtractHalving(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16 testClass)
{
fixed (Vector128<Int16>* pFld1 = &_fld1)
fixed (Vector128<Int16>* pFld2 = &_fld2)
{
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(pFld1)),
AdvSimd.LoadVector128((Int16*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Int16>>() / sizeof(Int16);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Int16>>() / sizeof(Int16);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Int16>>() / sizeof(Int16);
private static Int16[] _data1 = new Int16[Op1ElementCount];
private static Int16[] _data2 = new Int16[Op2ElementCount];
private static Vector128<Int16> _clsVar1;
private static Vector128<Int16> _clsVar2;
private Vector128<Int16> _fld1;
private Vector128<Int16> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _clsVar1), ref Unsafe.As<Int16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _clsVar2), ref Unsafe.As<Int16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
}
public SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _fld1), ref Unsafe.As<Int16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _fld2), ref Unsafe.As<Int16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt16(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt16(); }
_dataTable = new DataTable(_data1, _data2, new Int16[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.FusedSubtractHalving(
Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector128((Int16*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.FusedSubtractHalving), new Type[] { typeof(Vector128<Int16>), typeof(Vector128<Int16>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int16>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.FusedSubtractHalving), new Type[] { typeof(Vector128<Int16>), typeof(Vector128<Int16>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector128((Int16*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector128((Int16*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int16>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.FusedSubtractHalving(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<Int16>* pClsVar1 = &_clsVar1)
fixed (Vector128<Int16>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(pClsVar1)),
AdvSimd.LoadVector128((Int16*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr);
var result = AdvSimd.FusedSubtractHalving(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector128((Int16*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector128((Int16*)(_dataTable.inArray2Ptr));
var result = AdvSimd.FusedSubtractHalving(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16();
var result = AdvSimd.FusedSubtractHalving(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16();
fixed (Vector128<Int16>* pFld1 = &test._fld1)
fixed (Vector128<Int16>* pFld2 = &test._fld2)
{
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(pFld1)),
AdvSimd.LoadVector128((Int16*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.FusedSubtractHalving(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<Int16>* pFld1 = &_fld1)
fixed (Vector128<Int16>* pFld2 = &_fld2)
{
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(pFld1)),
AdvSimd.LoadVector128((Int16*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.FusedSubtractHalving(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(&test._fld1)),
AdvSimd.LoadVector128((Int16*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
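// On hardware without AdvSimd support the intrinsic call must throw PlatformNotSupportedException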
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<Int16> op1, Vector128<Int16> op2, void* result, [CallerMemberName] string method = "")
{
Int16[] inArray1 = new Int16[Op1ElementCount];
Int16[] inArray2 = new Int16[Op2ElementCount];
Int16[] outArray = new Int16[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Int16, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Int16, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int16>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Int16[] inArray1 = new Int16[Op1ElementCount];
Int16[] inArray2 = new Int16[Op2ElementCount];
Int16[] outArray = new Int16[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Int16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Int16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int16>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Int16[] left, Int16[] right, Int16[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
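// Helpers.FusedSubtractHalving is the scalar reference: (left - right) >> 1 computed without narrowing the intermediate difference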
for (var i = 0; i < RetElementCount; i++)
{
if (Helpers.FusedSubtractHalving(left[i], right[i]) != result[i])
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd)}.{nameof(AdvSimd.FusedSubtractHalving)}<Int16>(Vector128<Int16>, Vector128<Int16>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void FusedSubtractHalving_Vector128_Int16()
{
var test = new SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16
{
private struct DataTable
{
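            // Backing buffers are pinned via GCHandle so the raw pointers stay valid for the duration of the test;
            // the *Ptr properties return addresses rounded up to the requested alignment.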
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Int16[] inArray1, Int16[] inArray2, Int16[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Int16>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Int16>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Int16>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
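                // Over-allocate to twice the alignment so an aligned slot is guaranteed to fit inside each buffer.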
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Int16, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Int16, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
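            // Rounds the buffer pointer up to the next multiple of expectedAlignment (assumes a power-of-two alignment).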
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<Int16> _fld1;
public Vector128<Int16> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref testStruct._fld1), ref Unsafe.As<Int16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref testStruct._fld2), ref Unsafe.As<Int16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16 testClass)
{
var result = AdvSimd.FusedSubtractHalving(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16 testClass)
{
fixed (Vector128<Int16>* pFld1 = &_fld1)
fixed (Vector128<Int16>* pFld2 = &_fld2)
{
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(pFld1)),
AdvSimd.LoadVector128((Int16*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Int16>>() / sizeof(Int16);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Int16>>() / sizeof(Int16);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Int16>>() / sizeof(Int16);
private static Int16[] _data1 = new Int16[Op1ElementCount];
private static Int16[] _data2 = new Int16[Op2ElementCount];
private static Vector128<Int16> _clsVar1;
private static Vector128<Int16> _clsVar2;
private Vector128<Int16> _fld1;
private Vector128<Int16> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16()
{
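            // Seed the shared static vectors once; these back the ClsVar scenarios.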
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _clsVar1), ref Unsafe.As<Int16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _clsVar2), ref Unsafe.As<Int16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
}
public SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _fld1), ref Unsafe.As<Int16, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt16(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _fld2), ref Unsafe.As<Int16, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Int16>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt16(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt16(); }
_dataTable = new DataTable(_data1, _data2, new Int16[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.FusedSubtractHalving(
Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector128((Int16*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.FusedSubtractHalving), new Type[] { typeof(Vector128<Int16>), typeof(Vector128<Int16>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int16>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.FusedSubtractHalving), new Type[] { typeof(Vector128<Int16>), typeof(Vector128<Int16>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector128((Int16*)(_dataTable.inArray1Ptr)),
AdvSimd.LoadVector128((Int16*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int16>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.FusedSubtractHalving(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<Int16>* pClsVar1 = &_clsVar1)
fixed (Vector128<Int16>* pClsVar2 = &_clsVar2)
{
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(pClsVar1)),
AdvSimd.LoadVector128((Int16*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr);
var result = AdvSimd.FusedSubtractHalving(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector128((Int16*)(_dataTable.inArray1Ptr));
var op2 = AdvSimd.LoadVector128((Int16*)(_dataTable.inArray2Ptr));
var result = AdvSimd.FusedSubtractHalving(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16();
var result = AdvSimd.FusedSubtractHalving(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__FusedSubtractHalving_Vector128_Int16();
fixed (Vector128<Int16>* pFld1 = &test._fld1)
fixed (Vector128<Int16>* pFld2 = &test._fld2)
{
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(pFld1)),
AdvSimd.LoadVector128((Int16*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.FusedSubtractHalving(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<Int16>* pFld1 = &_fld1)
fixed (Vector128<Int16>* pFld2 = &_fld2)
{
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(pFld1)),
AdvSimd.LoadVector128((Int16*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.FusedSubtractHalving(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.FusedSubtractHalving(
AdvSimd.LoadVector128((Int16*)(&test._fld1)),
AdvSimd.LoadVector128((Int16*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
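            // On hardware without AdvSimd support the intrinsic call is expected to throw PlatformNotSupportedException.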
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<Int16> op1, Vector128<Int16> op2, void* result, [CallerMemberName] string method = "")
{
Int16[] inArray1 = new Int16[Op1ElementCount];
Int16[] inArray2 = new Int16[Op2ElementCount];
Int16[] outArray = new Int16[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Int16, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Int16, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int16>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Int16[] inArray1 = new Int16[Op1ElementCount];
Int16[] inArray2 = new Int16[Op2ElementCount];
Int16[] outArray = new Int16[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Int16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Int16>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int16>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Int16[] left, Int16[] right, Int16[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
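            // Helpers.FusedSubtractHalving models the expected per-lane result; presumably the Arm halving subtract,
            // i.e. (Int16)((left - right) >> 1), though the exact helper implementation lives outside this file.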
for (var i = 0; i < RetElementCount; i++)
{
if (Helpers.FusedSubtractHalving(left[i], right[i]) != result[i])
{
succeeded = false;
break;
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd)}.{nameof(AdvSimd.FusedSubtractHalving)}<Int16>(Vector128<Int16>, Vector128<Int16>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}