diff --git a/src/coreclr/nativeaot/Runtime/startup.cpp b/src/coreclr/nativeaot/Runtime/startup.cpp
index f87bc947d970ac..74793ecd0268a8 100644
--- a/src/coreclr/nativeaot/Runtime/startup.cpp
+++ b/src/coreclr/nativeaot/Runtime/startup.cpp
@@ -50,11 +50,11 @@ extern RhConfig * g_pRhConfig;
#if defined(HOST_X86) || defined(HOST_AMD64) || defined(HOST_ARM64)
// This field is inspected from the generated code to determine what intrinsics are available.
-EXTERN_C int g_cpuFeatures;
-int g_cpuFeatures = 0;
+EXTERN_C HardwareIntrinsicConstants g_cpuFeatures;
+HardwareIntrinsicConstants g_cpuFeatures = {0};
// This field is defined in the generated code and sets the ISA expectations.
-EXTERN_C int g_requiredCpuFeatures;
+EXTERN_C HardwareIntrinsicConstants g_requiredCpuFeatures;
#endif
#ifdef TARGET_UNIX
@@ -180,7 +180,7 @@ bool DetectCPUFeatures()
#if defined(HOST_X86) || defined(HOST_AMD64) || defined(HOST_ARM64)
g_cpuFeatures = minipal_getcpufeatures();
- if ((g_cpuFeatures & g_requiredCpuFeatures) != g_requiredCpuFeatures)
+ // The CPU must support at least the ISAs the generated code requires.
+ if (!containsAllFlags(&g_cpuFeatures, &g_requiredCpuFeatures))
{
PalPrintFatalError("\nThe required instruction sets are not supported by the current CPU.\n");
RhFailFast();
diff --git a/src/coreclr/tools/Common/Compiler/HardwareIntrinsicHelpers.cs b/src/coreclr/tools/Common/Compiler/HardwareIntrinsicHelpers.cs
index 98d06568878dd4..6381897c51a7d3 100644
--- a/src/coreclr/tools/Common/Compiler/HardwareIntrinsicHelpers.cs
+++ b/src/coreclr/tools/Common/Compiler/HardwareIntrinsicHelpers.cs
@@ -11,6 +11,8 @@ namespace ILCompiler
{
public static partial class HardwareIntrinsicHelpers
{
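+ // Number of 32-bit words used to store the feature flags; keep in sync with NUM_PARTS in src/native/minipal/cpufeatures.h.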
+ public const int NUM_PARTS = 3;
+
/// <summary>
/// Gets a value indicating whether this is a hardware intrinsic on the platform that we're compiling for.
/// </summary>
@@ -34,16 +36,16 @@ public static bool IsHardwareIntrinsic(MethodDesc method)
return false;
}
- public static void AddRuntimeRequiredIsaFlagsToBuilder(InstructionSetSupportBuilder builder, int flags)
+ public static void AddRuntimeRequiredIsaFlagsToBuilder(InstructionSetSupportBuilder builder, HardwareIntrinsicConstants flags)
{
switch (builder.Architecture)
{
case TargetArchitecture.X86:
case TargetArchitecture.X64:
- XArchIntrinsicConstants.AddToBuilder(builder, flags);
+ XArchIntrinsicFeatures.AddToBuilder(builder, flags);
break;
case TargetArchitecture.ARM64:
- Arm64IntrinsicConstants.AddToBuilder(builder, flags);
+ Arm64IntrinsicFeatures.AddToBuilder(builder, flags);
break;
default:
Debug.Fail("Probably unimplemented");
@@ -52,101 +54,281 @@ public static void AddRuntimeRequiredIsaFlagsToBuilder(InstructionSetSupportBuil
}
// Keep these enumerations in sync with cpufeatures.h in the minipal.
- private static class XArchIntrinsicConstants
+
+ public class HardwareIntrinsicConstants
+ {
+ public uint[] parts = new uint[NUM_PARTS];
+ }
+
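+ // The helpers below address a flag by its bit index: word parts[flag / 32], bit (flag % 32).
+ // For example, IsFlagSet(ref features, XArchIntrinsicFeatures.Avx2) tests bit 9 of parts[0].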
+ public static void SetFlag(ref HardwareIntrinsicConstants constants, int flag)
{
- // SSE and SSE2 are baseline ISAs - they're always available
- public const int Aes = 0x0001;
- public const int Pclmulqdq = 0x0002;
- public const int Sse3 = 0x0004;
- public const int Ssse3 = 0x0008;
- public const int Sse41 = 0x0010;
- public const int Sse42 = 0x0020;
- public const int Popcnt = 0x0040;
- public const int Avx = 0x0080;
- public const int Fma = 0x0100;
- public const int Avx2 = 0x0200;
- public const int Bmi1 = 0x0400;
- public const int Bmi2 = 0x0800;
- public const int Lzcnt = 0x1000;
- public const int AvxVnni = 0x2000;
- public const int Movbe = 0x4000;
- public const int Avx512f = 0x8000;
- public const int Avx512f_vl = 0x10000;
- public const int Avx512bw = 0x20000;
- public const int Avx512bw_vl = 0x40000;
- public const int Avx512cd = 0x80000;
- public const int Avx512cd_vl = 0x100000;
- public const int Avx512dq = 0x200000;
- public const int Avx512dq_vl = 0x400000;
- public const int Avx512Vbmi = 0x800000;
- public const int Avx512Vbmi_vl = 0x1000000;
- public const int Serialize = 0x2000000;
- public const int VectorT128 = 0x4000000;
- public const int VectorT256 = 0x8000000;
- public const int VectorT512 = 0x10000000;
- public const int Avx10v1 = 0x20000000;
- public const int Avx10v1_v256 = 0x40000000;
- public const int Avx10v1_v512 = unchecked((int)0x80000000);
-
- public static void AddToBuilder(InstructionSetSupportBuilder builder, int flags)
+ constants.parts[flag / 32] |= (uint)(1 << (flag % 32));
+ }
+
+ public static void ClearFlag(ref HardwareIntrinsicConstants constants, int flag)
+ {
+ constants.parts[flag / 32] &= (uint)~(1 << (flag % 32));
+ }
+
+ public static bool IsFlagSet(ref HardwareIntrinsicConstants constants, int flag)
+ {
+ return (constants.parts[flag / 32] & (1u << (flag % 32))) != 0;
+ }
+
+ public static class XArchIntrinsicFeatures
+ {
+
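+ // Flag bit indices. SSE and SSE2 are baseline ISAs - they're always available, so they need no flag here.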
+ public const int Aes = 0;
+ public const int Pclmulqdq = 1;
+ public const int Sse3 = 2;
+ public const int Ssse3 = 3;
+ public const int Sse41 = 4;
+ public const int Sse42 = 5;
+ public const int Popcnt = 6;
+ public const int Avx = 7;
+ public const int Fma = 8;
+ public const int Avx2 = 9;
+ public const int Bmi1 = 10;
+ public const int Bmi2 = 11;
+ public const int Lzcnt = 12;
+ public const int AvxVnni = 13;
+ public const int Movbe = 14;
+ public const int Avx512f = 15;
+ public const int Avx512f_vl = 16;
+ public const int Avx512bw = 17;
+ public const int Avx512bw_vl = 18;
+ public const int Avx512cd = 19;
+ public const int Avx512cd_vl = 20;
+ public const int Avx512dq = 21;
+ public const int Avx512dq_vl = 22;
+ public const int Avx512Vbmi = 23;
+ public const int Avx512Vbmi_vl = 24;
+ public const int Serialize = 25;
+ public const int VectorT128 = 26;
+ public const int VectorT256 = 27;
+ public const int VectorT512 = 28;
+ public const int Avx10v1 = 29;
+ public const int Avx10v1_v256 = 30;
+ public const int Avx10v1_v512 = 31;
+
+ public static void AddToBuilder(InstructionSetSupportBuilder builder, HardwareIntrinsicConstants flags)
{
- if ((flags & Aes) != 0)
+ if (IsFlagSet(ref flags, Aes))
builder.AddSupportedInstructionSet("aes");
- if ((flags & Pclmulqdq) != 0)
+ if (IsFlagSet(ref flags, Pclmulqdq))
builder.AddSupportedInstructionSet("pclmul");
- if ((flags & Sse3) != 0)
+ if (IsFlagSet(ref flags, Sse3))
builder.AddSupportedInstructionSet("sse3");
- if ((flags & Ssse3) != 0)
+ if (IsFlagSet(ref flags, Ssse3))
builder.AddSupportedInstructionSet("ssse3");
- if ((flags & Sse41) != 0)
+ if (IsFlagSet(ref flags, Sse41))
builder.AddSupportedInstructionSet("sse4.1");
- if ((flags & Sse42) != 0)
+ if (IsFlagSet(ref flags, Sse42))
builder.AddSupportedInstructionSet("sse4.2");
- if ((flags & Popcnt) != 0)
+ if (IsFlagSet(ref flags, Popcnt))
builder.AddSupportedInstructionSet("popcnt");
- if ((flags & Avx) != 0)
+ if (IsFlagSet(ref flags, Avx))
builder.AddSupportedInstructionSet("avx");
- if ((flags & Fma) != 0)
+ if (IsFlagSet(ref flags, Fma))
builder.AddSupportedInstructionSet("fma");
- if ((flags & Avx2) != 0)
+ if (IsFlagSet(ref flags, Avx2))
builder.AddSupportedInstructionSet("avx2");
- if ((flags & Bmi1) != 0)
+ if (IsFlagSet(ref flags, Bmi1))
builder.AddSupportedInstructionSet("bmi");
- if ((flags & Bmi2) != 0)
+ if (IsFlagSet(ref flags, Bmi2))
builder.AddSupportedInstructionSet("bmi2");
- if ((flags & Lzcnt) != 0)
+ if (IsFlagSet(ref flags, Lzcnt))
builder.AddSupportedInstructionSet("lzcnt");
- if ((flags & AvxVnni) != 0)
+ if (IsFlagSet(ref flags, AvxVnni))
builder.AddSupportedInstructionSet("avxvnni");
- if ((flags & Movbe) != 0)
+ if (IsFlagSet(ref flags, Movbe))
builder.AddSupportedInstructionSet("movbe");
- if ((flags & Avx512f) != 0)
+ if (IsFlagSet(ref flags, Avx512f))
builder.AddSupportedInstructionSet("avx512f");
- if ((flags & Avx512f_vl) != 0)
+ if (IsFlagSet(ref flags, Avx512f_vl))
builder.AddSupportedInstructionSet("avx512f_vl");
- if ((flags & Avx512bw) != 0)
+ if (IsFlagSet(ref flags, Avx512bw))
builder.AddSupportedInstructionSet("avx512bw");
- if ((flags & Avx512bw_vl) != 0)
+ if (IsFlagSet(ref flags, Avx512bw_vl))
builder.AddSupportedInstructionSet("avx512bw_vl");
- if ((flags & Avx512cd) != 0)
+ if (IsFlagSet(ref flags, Avx512cd))
builder.AddSupportedInstructionSet("avx512cd");
- if ((flags & Avx512cd_vl) != 0)
+ if (IsFlagSet(ref flags, Avx512cd_vl))
builder.AddSupportedInstructionSet("avx512cd_vl");
- if ((flags & Avx512dq) != 0)
+ if (IsFlagSet(ref flags, Avx512dq))
builder.AddSupportedInstructionSet("avx512dq");
- if ((flags & Avx512dq_vl) != 0)
+ if (IsFlagSet(ref flags, Avx512dq_vl))
builder.AddSupportedInstructionSet("avx512dq_vl");
- if ((flags & Avx512Vbmi) != 0)
+ if (IsFlagSet(ref flags, Avx512Vbmi))
builder.AddSupportedInstructionSet("avx512vbmi");
- if ((flags & Avx512Vbmi_vl) != 0)
+ if (IsFlagSet(ref flags, Avx512Vbmi_vl))
builder.AddSupportedInstructionSet("avx512vbmi_vl");
- if ((flags & Serialize) != 0)
+ if (IsFlagSet(ref flags, Serialize))
builder.AddSupportedInstructionSet("serialize");
- if ((flags & Avx10v1) != 0)
+ if (IsFlagSet(ref flags, VectorT128))
+ builder.AddSupportedInstructionSet("vectort128");
+ if (IsFlagSet(ref flags, VectorT256))
+ builder.AddSupportedInstructionSet("vectort256");
+ if (IsFlagSet(ref flags, VectorT512))
+ builder.AddSupportedInstructionSet("vectort512");
+ if (IsFlagSet(ref flags, Avx10v1))
builder.AddSupportedInstructionSet("avx10v1");
- if ((flags & Avx10v1_v256) != 0)
+ if (IsFlagSet(ref flags, Avx10v1_v256))
builder.AddSupportedInstructionSet("avx10v1_v256");
- if ((flags & Avx10v1_v512) != 0)
+ if (IsFlagSet(ref flags, Avx10v1_v512))
builder.AddSupportedInstructionSet("avx10v1_v512");
}
@@ -232,50 +414,50 @@ public static int FromInstructionSet(InstructionSet instructionSet)
InstructionSet.X64_VectorT256 => VectorT256,
InstructionSet.X64_VectorT512 => VectorT512,
- _ => throw new NotSupportedException(((InstructionSet_X64)instructionSet).ToString())
+ _ => throw new NotSupportedException(((InstructionSet)instructionSet).ToString())
};
}
}
// Keep these enumerations in sync with cpufeatures.h in the minipal.
- private static class Arm64IntrinsicConstants
+ private static class Arm64IntrinsicFeatures
{
- public const int AdvSimd = 0x0001;
- public const int Aes = 0x0002;
- public const int Crc32 = 0x0004;
- public const int Dp = 0x0008;
- public const int Rdm = 0x0010;
- public const int Sha1 = 0x0020;
- public const int Sha256 = 0x0040;
- public const int Atomics = 0x0080;
- public const int Rcpc = 0x0100;
- public const int VectorT128 = 0x0200;
- public const int Rcpc2 = 0x0400;
- public const int Sve = 0x0800;
-
- public static void AddToBuilder(InstructionSetSupportBuilder builder, int flags)
+ public const int AdvSimd = 0;
+ public const int Aes = 1;
+ public const int Crc32 = 2;
+ public const int Dp = 3;
+ public const int Rdm = 4;
+ public const int Sha1 = 5;
+ public const int Sha256 = 6;
+ public const int Atomics = 7;
+ public const int Rcpc = 8;
+ public const int VectorT128 = 9;
+ public const int Rcpc2 = 10;
+ public const int Sve = 11;
+
+ public static void AddToBuilder(InstructionSetSupportBuilder builder, HardwareIntrinsicConstants flags)
{
- if ((flags & AdvSimd) != 0)
+ if (IsFlagSet(ref flags, AdvSimd))
builder.AddSupportedInstructionSet("neon");
- if ((flags & Aes) != 0)
+ if (IsFlagSet(ref flags, Aes))
builder.AddSupportedInstructionSet("aes");
- if ((flags & Crc32) != 0)
+ if (IsFlagSet(ref flags, Crc32))
builder.AddSupportedInstructionSet("crc");
- if ((flags & Dp) != 0)
+ if (IsFlagSet(ref flags, Dp))
builder.AddSupportedInstructionSet("dotprod");
- if ((flags & Rdm) != 0)
+ if (IsFlagSet(ref flags, Rdm))
builder.AddSupportedInstructionSet("rdma");
- if ((flags & Sha1) != 0)
+ if (IsFlagSet(ref flags, Sha1))
builder.AddSupportedInstructionSet("sha1");
- if ((flags & Sha256) != 0)
+ if (IsFlagSet(ref flags, Sha256))
builder.AddSupportedInstructionSet("sha2");
- if ((flags & Atomics) != 0)
+ if (IsFlagSet(ref flags, Atomics))
builder.AddSupportedInstructionSet("lse");
- if ((flags & Rcpc) != 0)
+ if (IsFlagSet(ref flags, Rcpc))
builder.AddSupportedInstructionSet("rcpc");
- if ((flags & Rcpc2) != 0)
+ if (IsFlagSet(ref flags, Rcpc2))
builder.AddSupportedInstructionSet("rcpc2");
- if ((flags & Sve) != 0)
+ if (IsFlagSet(ref flags, Sve))
builder.AddSupportedInstructionSet("sve");
}
diff --git a/src/coreclr/tools/Common/InstructionSetHelpers.cs b/src/coreclr/tools/Common/InstructionSetHelpers.cs
index 8a7303f15f3711..45cc7b70f4e4f3 100644
--- a/src/coreclr/tools/Common/InstructionSetHelpers.cs
+++ b/src/coreclr/tools/Common/InstructionSetHelpers.cs
@@ -57,10 +57,10 @@ public static InstructionSetSupport ConfigureInstructionSetSupport(string instru
string jitInterfaceLibrary = "jitinterface_" + RuntimeInformation.ProcessArchitecture.ToString().ToLowerInvariant();
nint libHandle = NativeLibrary.Load(jitInterfaceLibrary, System.Reflection.Assembly.GetExecutingAssembly(), DllImportSearchPath.ApplicationDirectory);
- int cpuFeatures;
+ HardwareIntrinsicHelpers.HardwareIntrinsicConstants cpuFeatures = new HardwareIntrinsicHelpers.HardwareIntrinsicConstants();
unsafe
{
- var getCpuFeatures = (delegate* unmanaged<int>)NativeLibrary.GetExport(libHandle, "JitGetProcessorFeatures");
+ var getCpuFeatures = (delegate* unmanaged<HardwareIntrinsicHelpers.HardwareIntrinsicConstants>)NativeLibrary.GetExport(libHandle, "JitGetProcessorFeatures");
cpuFeatures = getCpuFeatures();
}
HardwareIntrinsicHelpers.AddRuntimeRequiredIsaFlagsToBuilder(instructionSetSupportBuilder, cpuFeatures);
diff --git a/src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/HardwareIntrinsicHelpers.Aot.cs b/src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/HardwareIntrinsicHelpers.Aot.cs
index ee355b964404d7..49184e7aaaceed 100644
--- a/src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/HardwareIntrinsicHelpers.Aot.cs
+++ b/src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/HardwareIntrinsicHelpers.Aot.cs
@@ -34,11 +34,11 @@ public static MethodIL EmitIsSupportedIL(MethodDesc method, FieldDesc isSupporte
{
case TargetArchitecture.X86:
case TargetArchitecture.X64:
- flag = XArchIntrinsicConstants.FromInstructionSet(instructionSet);
+ flag = XArchIntrinsicFeatures.FromInstructionSet(instructionSet);
break;
case TargetArchitecture.ARM64:
- flag = Arm64IntrinsicConstants.FromInstructionSet(instructionSet);
+ flag = Arm64IntrinsicFeatures.FromInstructionSet(instructionSet);
break;
default:
@@ -67,12 +67,12 @@ public static int GetRuntimeRequiredIsaFlags(InstructionSetSupport instructionSe
case TargetArchitecture.X86:
case TargetArchitecture.X64:
foreach (InstructionSet instructionSet in instructionSetSupport.SupportedFlags)
- result |= XArchIntrinsicConstants.FromInstructionSet(instructionSet);
+ result |= XArchIntrinsicFeatures.FromInstructionSet(instructionSet);
break;
case TargetArchitecture.ARM64:
foreach (InstructionSet instructionSet in instructionSetSupport.SupportedFlags)
- result |= Arm64IntrinsicConstants.FromInstructionSet(instructionSet);
+ result |= Arm64IntrinsicFeatures.FromInstructionSet(instructionSet);
break;
default:
diff --git a/src/coreclr/tools/aot/jitinterface/jitwrapper.cpp b/src/coreclr/tools/aot/jitinterface/jitwrapper.cpp
index 8fd38d192f84e9..c34c587957a440 100644
--- a/src/coreclr/tools/aot/jitinterface/jitwrapper.cpp
+++ b/src/coreclr/tools/aot/jitinterface/jitwrapper.cpp
@@ -53,7 +53,7 @@ DLL_EXPORT void JitProcessShutdownWork(ICorJitCompiler * pJit)
return pJit->ProcessShutdownWork(nullptr);
}
-DLL_EXPORT int JitGetProcessorFeatures()
+DLL_EXPORT HardwareIntrinsicConstants JitGetProcessorFeatures()
{
return minipal_getcpufeatures();
}
diff --git a/src/coreclr/vm/codeman.cpp b/src/coreclr/vm/codeman.cpp
index 0885ec6d3b8ede..9672893b916224 100644
--- a/src/coreclr/vm/codeman.cpp
+++ b/src/coreclr/vm/codeman.cpp
@@ -1255,18 +1255,18 @@ void EEJitManager::SetCpuInfo()
CORJIT_FLAGS CPUCompileFlags;
- int cpuFeatures = minipal_getcpufeatures();
+ HardwareIntrinsicConstants cpuFeatures = minipal_getcpufeatures();
#if defined(TARGET_X86) || defined(TARGET_AMD64)
#if defined(TARGET_X86) && !defined(TARGET_WINDOWS)
// Linux may still support no SSE/SSE2 for 32-bit
- if ((cpuFeatures & XArchIntrinsicConstants_VectorT128) == 0)
+ if (!isFlagSet(&cpuFeatures, XArchIntrinsicConstants_VectorT128))
{
EEPOLICY_HANDLE_FATAL_ERROR_WITH_MESSAGE(COR_E_EXECUTIONENGINE, W("SSE and SSE2 processor support required."));
}
#else
- _ASSERTE((cpuFeatures & XArchIntrinsicConstants_VectorT128) != 0);
+ _ASSERTE(isFlagSet(&cpuFeatures, XArchIntrinsicConstants_VectorT128));
#endif
CPUCompileFlags.Set(InstructionSet_VectorT128);
@@ -1274,13 +1274,13 @@ void EEJitManager::SetCpuInfo()
// Get the maximum bitwidth of Vector, rounding down to the nearest multiple of 128-bits
uint32_t maxVectorTBitWidth = (CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_MaxVectorTBitWidth) / 128) * 128;
- if (((cpuFeatures & XArchIntrinsicConstants_VectorT256) != 0) && ((maxVectorTBitWidth == 0) || (maxVectorTBitWidth >= 256)))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_VectorT256) && ((maxVectorTBitWidth == 0) || (maxVectorTBitWidth >= 256)))
{
// We allow 256-bit Vector by default
CPUCompileFlags.Set(InstructionSet_VectorT256);
}
- if (((cpuFeatures & XArchIntrinsicConstants_VectorT512) != 0) && (maxVectorTBitWidth >= 512))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_VectorT512) && (maxVectorTBitWidth >= 512))
{
// We require 512-bit Vector to be opt-in
CPUCompileFlags.Set(InstructionSet_VectorT512);
@@ -1301,136 +1301,136 @@ void EEJitManager::SetCpuInfo()
CPUCompileFlags.Set(InstructionSet_SSE2);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Aes) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAES))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Aes) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAES))
{
CPUCompileFlags.Set(InstructionSet_AES);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX))
{
CPUCompileFlags.Set(InstructionSet_AVX);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx2) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX2))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx2) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX2))
{
CPUCompileFlags.Set(InstructionSet_AVX2);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx512f) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512F))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx512f) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512F))
{
CPUCompileFlags.Set(InstructionSet_AVX512F);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx512f_vl) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512F_VL))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx512f_vl) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512F_VL))
{
CPUCompileFlags.Set(InstructionSet_AVX512F_VL);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx512bw) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512BW))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx512bw) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512BW))
{
CPUCompileFlags.Set(InstructionSet_AVX512BW);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx512bw_vl) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512BW_VL))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx512bw_vl) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512BW_VL))
{
CPUCompileFlags.Set(InstructionSet_AVX512BW_VL);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx512cd) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512CD))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx512cd) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512CD))
{
CPUCompileFlags.Set(InstructionSet_AVX512CD);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx512cd_vl) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512CD_VL))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx512cd_vl) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512CD_VL))
{
CPUCompileFlags.Set(InstructionSet_AVX512CD_VL);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx512dq) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512DQ))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx512dq) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512DQ))
{
CPUCompileFlags.Set(InstructionSet_AVX512DQ);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx512dq_vl) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512DQ_VL))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx512dq_vl) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512DQ_VL))
{
CPUCompileFlags.Set(InstructionSet_AVX512DQ_VL);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx512Vbmi) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512VBMI))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx512Vbmi) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512VBMI))
{
CPUCompileFlags.Set(InstructionSet_AVX512VBMI);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx512Vbmi_vl) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512VBMI_VL))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx512Vbmi_vl) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX512VBMI_VL))
{
CPUCompileFlags.Set(InstructionSet_AVX512VBMI_VL);
}
- if (((cpuFeatures & XArchIntrinsicConstants_AvxVnni) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVXVNNI))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_AvxVnni) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVXVNNI))
{
CPUCompileFlags.Set(InstructionSet_AVXVNNI);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Bmi1) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableBMI1))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Bmi1) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableBMI1))
{
CPUCompileFlags.Set(InstructionSet_BMI1);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Bmi2) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableBMI2))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Bmi2) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableBMI2))
{
CPUCompileFlags.Set(InstructionSet_BMI2);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Fma) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableFMA))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Fma) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableFMA))
{
CPUCompileFlags.Set(InstructionSet_FMA);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Lzcnt) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableLZCNT))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Lzcnt) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableLZCNT))
{
CPUCompileFlags.Set(InstructionSet_LZCNT);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Pclmulqdq) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnablePCLMULQDQ))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Pclmulqdq) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnablePCLMULQDQ))
{
CPUCompileFlags.Set(InstructionSet_PCLMULQDQ);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Movbe) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableMOVBE))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Movbe) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableMOVBE))
{
CPUCompileFlags.Set(InstructionSet_MOVBE);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Popcnt) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnablePOPCNT))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Popcnt) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnablePOPCNT))
{
CPUCompileFlags.Set(InstructionSet_POPCNT);
}
// We need to additionally check that EXTERNAL_EnableSSE3_4 is set, as that
// is a prexisting config flag that controls the SSE3+ ISAs
- if (((cpuFeatures & XArchIntrinsicConstants_Sse3) != 0) &&
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Sse3) &&
CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableSSE3) &&
CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableSSE3_4))
{
CPUCompileFlags.Set(InstructionSet_SSE3);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Sse41) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableSSE41))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Sse41) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableSSE41))
{
CPUCompileFlags.Set(InstructionSet_SSE41);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Sse42) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableSSE42))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Sse42) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableSSE42))
{
CPUCompileFlags.Set(InstructionSet_SSE42);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Ssse3) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableSSSE3))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Ssse3) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableSSSE3))
{
CPUCompileFlags.Set(InstructionSet_SSSE3);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Serialize) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableX86Serialize))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Serialize) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableX86Serialize))
{
CPUCompileFlags.Set(InstructionSet_X86Serialize);
}
@@ -1440,17 +1440,17 @@ void EEJitManager::SetCpuInfo()
// `EnusreValidInstructionSetSupport` to handle the illegal combination.
// To ensure `EnusreValidInstructionSetSupport` handle the dependency correctly, the implication
// defined in InstructionSetDesc.txt should be explicit, no transitive implication should be assumed.
- if (((cpuFeatures & XArchIntrinsicConstants_Avx10v1) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX10v1))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx10v1) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX10v1))
{
CPUCompileFlags.Set(InstructionSet_AVX10v1);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx10v1_V256) != 0))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx10v1_V256))
{
CPUCompileFlags.Set(InstructionSet_AVX10v1_V256);
}
- if (((cpuFeatures & XArchIntrinsicConstants_Avx10v1_V512) != 0))
+ if (isFlagSet(&cpuFeatures, XArchIntrinsicConstants_Avx10v1_V512))
{
CPUCompileFlags.Set(InstructionSet_AVX10v1_V512);
}
@@ -1458,12 +1458,12 @@ void EEJitManager::SetCpuInfo()
#if !defined(TARGET_WINDOWS)
// Linux may still support no AdvSimd
- if ((cpuFeatures & ARM64IntrinsicConstants_VectorT128) == 0)
+ if (!isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_VectorT128))
{
EEPOLICY_HANDLE_FATAL_ERROR_WITH_MESSAGE(COR_E_EXECUTIONENGINE, W("AdvSimd processor support required."));
}
#else
- _ASSERTE((cpuFeatures & ARM64IntrinsicConstants_VectorT128) != 0);
+ _ASSERTE(isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_VectorT128));
#endif
CPUCompileFlags.Set(InstructionSet_VectorT128);
@@ -1473,57 +1473,57 @@ void EEJitManager::SetCpuInfo()
CPUCompileFlags.Set(InstructionSet_ArmBase);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_AdvSimd) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64AdvSimd))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_AdvSimd) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64AdvSimd))
{
CPUCompileFlags.Set(InstructionSet_AdvSimd);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_Aes) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Aes))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Aes) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Aes))
{
CPUCompileFlags.Set(InstructionSet_Aes);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_Atomics) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Atomics))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Atomics) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Atomics))
{
CPUCompileFlags.Set(InstructionSet_Atomics);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_Rcpc) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Rcpc))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Rcpc) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Rcpc))
{
CPUCompileFlags.Set(InstructionSet_Rcpc);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_Rcpc2) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Rcpc2))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Rcpc2) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Rcpc2))
{
CPUCompileFlags.Set(InstructionSet_Rcpc2);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_Crc32) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Crc32))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Crc32) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Crc32))
{
CPUCompileFlags.Set(InstructionSet_Crc32);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_Dp) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Dp))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Dp) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Dp))
{
CPUCompileFlags.Set(InstructionSet_Dp);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_Rdm) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Rdm))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Rdm) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Rdm))
{
CPUCompileFlags.Set(InstructionSet_Rdm);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_Sha1) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Sha1))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Sha1) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Sha1))
{
CPUCompileFlags.Set(InstructionSet_Sha1);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_Sha256) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Sha256))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Sha256) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Sha256))
{
CPUCompileFlags.Set(InstructionSet_Sha256);
}
- if (((cpuFeatures & ARM64IntrinsicConstants_Sve) != 0) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Sve))
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Sve) && CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableArm64Sve))
{
CPUCompileFlags.Set(InstructionSet_Sve);
}
@@ -1537,7 +1537,7 @@ void EEJitManager::SetCpuInfo()
CPUCompileFlags.Set(InstructionSet_Dczva);
}
- if ((cpuFeatures & ARM64IntrinsicConstants_Atomics) != 0)
+ if (isFlagSet(&cpuFeatures, ARM64IntrinsicConstants_Atomics))
{
g_arm64_atomics_present = true;
}
diff --git a/src/native/minipal/cpufeatures.c b/src/native/minipal/cpufeatures.c
index a2ff83222140cd..890c1eab7a87ec 100644
--- a/src/native/minipal/cpufeatures.c
+++ b/src/native/minipal/cpufeatures.c
@@ -139,9 +139,9 @@ static bool IsAvx512Enabled()
#endif // defined(HOST_X86) || defined(HOST_AMD64)
#endif // HOST_WINDOWS
-int minipal_getcpufeatures(void)
+HardwareIntrinsicConstants minipal_getcpufeatures(void)
{
- int result = 0;
+ HardwareIntrinsicConstants result = {0};
#if defined(HOST_X86) || defined(HOST_AMD64)
@@ -164,42 +164,51 @@ int minipal_getcpufeatures(void)
if ((cpuidInfo[CPUID_EDX] & requiredBaselineEdxFlags) == requiredBaselineEdxFlags)
{
- result |= XArchIntrinsicConstants_VectorT128;
+ setFlag(&result, XArchIntrinsicConstants_VectorT128);
if ((cpuidInfo[CPUID_ECX] & (1 << 25)) != 0) // AESNI
{
- result |= XArchIntrinsicConstants_Aes;
+ setFlag(&result, XArchIntrinsicConstants_Aes);
}
if ((cpuidInfo[CPUID_ECX] & (1 << 1)) != 0) // PCLMULQDQ
{
- result |= XArchIntrinsicConstants_Pclmulqdq;
+ setFlag(&result, XArchIntrinsicConstants_Pclmulqdq);
}
if ((cpuidInfo[CPUID_ECX] & (1 << 0)) != 0) // SSE3
{
- result |= XArchIntrinsicConstants_Sse3;
+ setFlag(&result, XArchIntrinsicConstants_Sse3);
if ((cpuidInfo[CPUID_ECX] & (1 << 9)) != 0) // SSSE3
{
- result |= XArchIntrinsicConstants_Ssse3;
+ setFlag(&result, XArchIntrinsicConstants_Ssse3);
if ((cpuidInfo[CPUID_ECX] & (1 << 19)) != 0) // SSE4.1
{
- result |= XArchIntrinsicConstants_Sse41;
+ setFlag(&result, XArchIntrinsicConstants_Sse41);
if ((cpuidInfo[CPUID_ECX] & (1 << 20)) != 0) // SSE4.2
{
- result |= XArchIntrinsicConstants_Sse42;
+ setFlag(&result, XArchIntrinsicConstants_Sse42);
if ((cpuidInfo[CPUID_ECX] & (1 << 22)) != 0) // MOVBE
{
- result |= XArchIntrinsicConstants_Movbe;
+ setFlag(&result, XArchIntrinsicConstants_Movbe);
}
if ((cpuidInfo[CPUID_ECX] & (1 << 23)) != 0) // POPCNT
{
- result |= XArchIntrinsicConstants_Popcnt;
+ setFlag(&result, XArchIntrinsicConstants_Popcnt);
}
const int requiredAvxEcxFlags = (1 << 27) // OSXSAVE
@@ -209,11 +218,13 @@ int minipal_getcpufeatures(void)
{
if (IsAvxEnabled() && (xmmYmmStateSupport() == 1)) // XGETBV == 11
{
- result |= XArchIntrinsicConstants_Avx;
+ setFlag(&result, XArchIntrinsicConstants_Avx);
if ((cpuidInfo[CPUID_ECX] & (1 << 12)) != 0) // FMA
{
- result |= XArchIntrinsicConstants_Fma;
+ setFlag(&result, XArchIntrinsicConstants_Fma);
}
if (maxCpuId >= 0x07)
@@ -222,56 +233,69 @@ int minipal_getcpufeatures(void)
if ((cpuidInfo[CPUID_EBX] & (1 << 5)) != 0) // AVX2
{
- result |= XArchIntrinsicConstants_Avx2;
- result |= XArchIntrinsicConstants_VectorT256;
+ setFlag(&result, XArchIntrinsicConstants_Avx2);
+ setFlag(&result, XArchIntrinsicConstants_VectorT256);
if (IsAvx512Enabled() && (avx512StateSupport() == 1)) // XGETBV XRC0[7:5] == 111
{
if ((cpuidInfo[CPUID_EBX] & (1 << 16)) != 0) // AVX512F
{
- result |= XArchIntrinsicConstants_Avx512f;
- result |= XArchIntrinsicConstants_VectorT512;
+ setFlag(&result, XArchIntrinsicConstants_Avx512f);
+ setFlag(&result, XArchIntrinsicConstants_VectorT512);
bool isAVX512_VLSupported = false;
if ((cpuidInfo[CPUID_EBX] & (1 << 31)) != 0) // AVX512VL
{
- result |= XArchIntrinsicConstants_Avx512f_vl;
+ setFlag(&result, XArchIntrinsicConstants_Avx512f_vl);
isAVX512_VLSupported = true;
}
if ((cpuidInfo[CPUID_EBX] & (1 << 30)) != 0) // AVX512BW
{
- result |= XArchIntrinsicConstants_Avx512bw;
+ setFlag(&result, XArchIntrinsicConstants_Avx512bw);
if (isAVX512_VLSupported) // AVX512BW_VL
{
- result |= XArchIntrinsicConstants_Avx512bw_vl;
+ setFlag(&result, XArchIntrinsicConstants_Avx512bw_vl);
}
}
if ((cpuidInfo[CPUID_EBX] & (1 << 28)) != 0) // AVX512CD
{
- result |= XArchIntrinsicConstants_Avx512cd;
+ setFlag(&result, XArchIntrinsicConstants_Avx512cd);
if (isAVX512_VLSupported) // AVX512CD_VL
{
- result |= XArchIntrinsicConstants_Avx512cd_vl;
+ setFlag(&result, XArchIntrinsicConstants_Avx512cd_vl);
}
}
if ((cpuidInfo[CPUID_EBX] & (1 << 17)) != 0) // AVX512DQ
{
- result |= XArchIntrinsicConstants_Avx512dq;
+ setFlag(&result, XArchIntrinsicConstants_Avx512dq);
if (isAVX512_VLSupported) // AVX512DQ_VL
{
- result |= XArchIntrinsicConstants_Avx512dq_vl;
+ setFlag(&result, XArchIntrinsicConstants_Avx512dq_vl);
}
}
if ((cpuidInfo[CPUID_ECX] & (1 << 1)) != 0) // AVX512VBMI
{
- result |= XArchIntrinsicConstants_Avx512Vbmi;
+ setFlag(&result, XArchIntrinsicConstants_Avx512Vbmi);
if (isAVX512_VLSupported) // AVX512VBMI_VL
{
- result |= XArchIntrinsicConstants_Avx512Vbmi_vl;
+ setFlag(&result, XArchIntrinsicConstants_Avx512Vbmi_vl);
}
}
}
@@ -281,7 +305,8 @@ int minipal_getcpufeatures(void)
if ((cpuidInfo[CPUID_EAX] & (1 << 4)) != 0) // AVX-VNNI
{
- result |= XArchIntrinsicConstants_AvxVnni;
+ setFlag(&result, XArchIntrinsicConstants_AvxVnni);
}
if ((cpuidInfo[CPUID_EDX] & (1 << 19)) != 0) // Avx10
@@ -291,17 +316,20 @@ int minipal_getcpufeatures(void)
{
if ((cpuidInfo[CPUID_EBX] & (1 << 16)) != 0)
{
- result |= XArchIntrinsicConstants_Avx10v1;
+ setFlag(&result, XArchIntrinsicConstants_Avx10v1);
}
if ((cpuidInfo[CPUID_EBX] & (1 << 17)) != 0)
{
- result |= XArchIntrinsicConstants_Avx10v1_V256;
+ setFlag(&result, XArchIntrinsicConstants_Avx10v1_V256);
}
if ((cpuidInfo[CPUID_EBX] & (1 << 18)) != 0)
{
- result |= XArchIntrinsicConstants_Avx10v1_V512;
+ setFlag(&result, XArchIntrinsicConstants_Avx10v1_V512);
}
}
}
@@ -321,17 +349,20 @@ int minipal_getcpufeatures(void)
if ((cpuidInfo[CPUID_EBX] & (1 << 3)) != 0) // BMI1
{
- result |= XArchIntrinsicConstants_Bmi1;
+ setFlag(&result, XArchIntrinsicConstants_Bmi1);
}
if ((cpuidInfo[CPUID_EBX] & (1 << 8)) != 0) // BMI2
{
- result |= XArchIntrinsicConstants_Bmi2;
+ setFlag(&result, XArchIntrinsicConstants_Bmi2);
}
if ((cpuidInfo[CPUID_EDX] & (1 << 14)) != 0)
{
- result |= XArchIntrinsicConstants_Serialize; // SERIALIZE
+ setFlag(&result, XArchIntrinsicConstants_Serialize);
}
}
}
@@ -345,7 +376,8 @@ int minipal_getcpufeatures(void)
if ((cpuidInfo[CPUID_ECX] & (1 << 5)) != 0) // LZCNT
{
- result |= XArchIntrinsicConstants_Lzcnt;
+ setFlag(&result, XArchIntrinsicConstants_Lzcnt);
}
}
@@ -358,37 +390,48 @@ int minipal_getcpufeatures(void)
unsigned long hwCap = getauxval(AT_HWCAP);
if (hwCap & HWCAP_AES)
- result |= ARM64IntrinsicConstants_Aes;
+ setFlag(&result, ARM64IntrinsicConstants_Aes);
if (hwCap & HWCAP_ATOMICS)
- result |= ARM64IntrinsicConstants_Atomics;
+ setFlag(&result, ARM64IntrinsicConstants_Atomics);
if (hwCap & HWCAP_CRC32)
- result |= ARM64IntrinsicConstants_Crc32;
+ setFlag(&result, ARM64IntrinsicConstants_Crc32);
if (hwCap & HWCAP_ASIMDDP)
- result |= ARM64IntrinsicConstants_Dp;
+ setFlag(&result, ARM64IntrinsicConstants_Dp);
if (hwCap & HWCAP_LRCPC)
- result |= ARM64IntrinsicConstants_Rcpc;
+ setFlag(&result, ARM64IntrinsicConstants_Rcpc);
if (hwCap & HWCAP_ILRCPC)
- result |= ARM64IntrinsicConstants_Rcpc2;
+ setFlag(&result, ARM64IntrinsicConstants_Rcpc2);
if (hwCap & HWCAP_SHA1)
- result |= ARM64IntrinsicConstants_Sha1;
+ setFlag(&result, ARM64IntrinsicConstants_Sha1);
if (hwCap & HWCAP_SHA2)
- result |= ARM64IntrinsicConstants_Sha256;
+ setFlag(&result, ARM64IntrinsicConstants_Sha256);
if (hwCap & HWCAP_ASIMD)
- result |= ARM64IntrinsicConstants_AdvSimd | ARM64IntrinsicConstants_VectorT128;
+ {
+ setFlag(&result, ARM64IntrinsicConstants_AdvSimd);
+ setFlag(&result, ARM64IntrinsicConstants_VectorT128);
+ }
if (hwCap & HWCAP_ASIMDRDM)
- result |= ARM64IntrinsicConstants_Rdm;
+ setFlag(&result, ARM64IntrinsicConstants_Rdm);
if (hwCap & HWCAP_SVE)
- result |= ARM64IntrinsicConstants_Sve;
+ setFlag(&result, ARM64IntrinsicConstants_Sve);
#else // !HAVE_AUXV_HWCAP_H
@@ -397,76 +440,95 @@ int minipal_getcpufeatures(void)
size_t sz = sizeof(valueFromSysctl);
if ((sysctlbyname("hw.optional.arm.FEAT_AES", &valueFromSysctl, &sz, NULL, 0) == 0) && (valueFromSysctl != 0))
- result |= ARM64IntrinsicConstants_Aes;
+ setFlag(&result, ARM64IntrinsicConstants_Aes);
if ((sysctlbyname("hw.optional.armv8_crc32", &valueFromSysctl, &sz, NULL, 0) == 0) && (valueFromSysctl != 0))
- result |= ARM64IntrinsicConstants_Crc32;
+ setFlag(&result, ARM64IntrinsicConstants_Crc32);
if ((sysctlbyname("hw.optional.arm.FEAT_DotProd", &valueFromSysctl, &sz, NULL, 0) == 0) && (valueFromSysctl != 0))
- result |= ARM64IntrinsicConstants_Dp;
+ setFlag(&result, ARM64IntrinsicConstants_Dp);
if ((sysctlbyname("hw.optional.arm.FEAT_RDM", &valueFromSysctl, &sz, NULL, 0) == 0) && (valueFromSysctl != 0))
- result |= ARM64IntrinsicConstants_Rdm;
+ setFlag(&result, ARM64IntrinsicConstants_Rdm);
if ((sysctlbyname("hw.optional.arm.FEAT_SHA1", &valueFromSysctl, &sz, NULL, 0) == 0) && (valueFromSysctl != 0))
- result |= ARM64IntrinsicConstants_Sha1;
+ setFlag(&result, ARM64IntrinsicConstants_Sha1);
if ((sysctlbyname("hw.optional.arm.FEAT_SHA256", &valueFromSysctl, &sz, NULL, 0) == 0) && (valueFromSysctl != 0))
- result |= ARM64IntrinsicConstants_Sha256;
+ setFlag(&result, ARM64IntrinsicConstants_Sha256);
if ((sysctlbyname("hw.optional.armv8_1_atomics", &valueFromSysctl, &sz, NULL, 0) == 0) && (valueFromSysctl != 0))
- result |= ARM64IntrinsicConstants_Atomics;
+ setFlag(&result, ARM64IntrinsicConstants_Atomics);
if ((sysctlbyname("hw.optional.arm.FEAT_LRCPC", &valueFromSysctl, &sz, NULL, 0) == 0) && (valueFromSysctl != 0))
- result |= ARM64IntrinsicConstants_Rcpc;
+ setFlag(&result, ARM64IntrinsicConstants_Rcpc);
if ((sysctlbyname("hw.optional.arm.FEAT_LRCPC2", &valueFromSysctl, &sz, NULL, 0) == 0) && (valueFromSysctl != 0))
- result |= ARM64IntrinsicConstants_Rcpc2;
+ setFlag(&result, ARM64IntrinsicConstants_Rcpc2);
#endif // HAVE_SYSCTLBYNAME
// Every ARM64 CPU should support SIMD and FP
// If the OS have no function to query for CPU capabilities we set just these
- result |= ARM64IntrinsicConstants_AdvSimd | ARM64IntrinsicConstants_VectorT128;
+ setFlag(&result, ARM64IntrinsicConstants_AdvSimd);
+ setFlag(&result, ARM64IntrinsicConstants_VectorT128);
#endif // HAVE_AUXV_HWCAP_H
#endif // HOST_UNIX
#if defined(HOST_WINDOWS)
// FP and SIMD support are enabled by default
- result |= ARM64IntrinsicConstants_AdvSimd | ARM64IntrinsicConstants_VectorT128;
+ setFlag(&result, ARM64IntrinsicConstants_AdvSimd);
+ setFlag(&result, ARM64IntrinsicConstants_VectorT128);
if (IsProcessorFeaturePresent(PF_ARM_V8_CRYPTO_INSTRUCTIONS_AVAILABLE))
{
- result |= ARM64IntrinsicConstants_Aes;
- result |= ARM64IntrinsicConstants_Sha1;
- result |= ARM64IntrinsicConstants_Sha256;
+ setFlag(&result, ARM64IntrinsicConstants_Aes);
+ setFlag(&result, ARM64IntrinsicConstants_Sha1);
+ setFlag(&result, ARM64IntrinsicConstants_Sha256);
}
if (IsProcessorFeaturePresent(PF_ARM_V8_CRC32_INSTRUCTIONS_AVAILABLE))
{
- result |= ARM64IntrinsicConstants_Crc32;
+ setFlag(&result, ARM64IntrinsicConstants_Crc32);
}
if (IsProcessorFeaturePresent(PF_ARM_V81_ATOMIC_INSTRUCTIONS_AVAILABLE))
{
- result |= ARM64IntrinsicConstants_Atomics;
+ setFlag(&result, ARM64IntrinsicConstants_Atomics);
}
if (IsProcessorFeaturePresent(PF_ARM_V82_DP_INSTRUCTIONS_AVAILABLE))
{
- result |= ARM64IntrinsicConstants_Dp;
+ setFlag(&result, ARM64IntrinsicConstants_Dp);
}
if (IsProcessorFeaturePresent(PF_ARM_V83_LRCPC_INSTRUCTIONS_AVAILABLE))
{
- result |= ARM64IntrinsicConstants_Rcpc;
+ setFlag(&result, ARM64IntrinsicConstants_Rcpc);
}
// TODO: IsProcessorFeaturePresent doesn't support LRCPC2 yet.
if (IsProcessorFeaturePresent(PF_ARM_SVE_INSTRUCTIONS_AVAILABLE))
{
- result |= ARM64IntrinsicConstants_Sve;
+ setFlag(&result, ARM64IntrinsicConstants_Sve);
}
#endif // HOST_WINDOWS
diff --git a/src/native/minipal/cpufeatures.h b/src/native/minipal/cpufeatures.h
index 472ce178339613..8463248ca41820 100644
--- a/src/native/minipal/cpufeatures.h
+++ b/src/native/minipal/cpufeatures.h
@@ -3,71 +3,96 @@
#ifndef HAVE_MINIPAL_CPUFEATURES_H
#define HAVE_MINIPAL_CPUFEATURES_H
-
+#include <stdint.h>
+
+#define NUM_PARTS 3
//
// Should match the constants defined in the compiler in HardwareIntrinsicHelpers.cs
//
+typedef struct {
+ uint32_t parts[NUM_PARTS];
+} HardwareIntrinsicConstants;
+
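+// Each flag is identified by its bit index: it is stored in parts[flag / 32] at bit (flag % 32).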
+static inline bool areEqualHardwareIntrinsicConstants(const HardwareIntrinsicConstants *constants1, const HardwareIntrinsicConstants *constants2) {
+    for (int i = 0; i < NUM_PARTS; i++) {
+        if (constants1->parts[i] != constants2->parts[i]) {
+            return false;
+        }
+    }
+    return true;
+}
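+
+// Returns true when every flag set in 'required' is also set in 'available'
+// (the required-ISA startup check mirrors the original "(cpuFeatures & requiredCpuFeatures) == requiredCpuFeatures" test).
+static inline bool containsAllFlags(const HardwareIntrinsicConstants *available, const HardwareIntrinsicConstants *required) {
+    for (int i = 0; i < NUM_PARTS; i++) {
+        if ((available->parts[i] & required->parts[i]) != required->parts[i]) {
+            return false;
+        }
+    }
+    return true;
+}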
+
+static inline void setFlag(HardwareIntrinsicConstants *constants, int flag) {
+    constants->parts[flag / 32] |= (1u << (flag % 32));
+}
+
+static inline void clearFlag(HardwareIntrinsicConstants *constants, int flag) {
+    constants->parts[flag / 32] &= ~(1u << (flag % 32));
+}
+
+static inline bool isFlagSet(const HardwareIntrinsicConstants *constants, int flag) {
+    return (constants->parts[flag / 32] & (1u << (flag % 32))) != 0;
+}
+
#if defined(HOST_X86) || defined(HOST_AMD64)
-enum XArchIntrinsicConstants
+enum XArchIntrinsicFeatures
{
- XArchIntrinsicConstants_Aes = 0x0001,
- XArchIntrinsicConstants_Pclmulqdq = 0x0002,
- XArchIntrinsicConstants_Sse3 = 0x0004,
- XArchIntrinsicConstants_Ssse3 = 0x0008,
- XArchIntrinsicConstants_Sse41 = 0x0010,
- XArchIntrinsicConstants_Sse42 = 0x0020,
- XArchIntrinsicConstants_Popcnt = 0x0040,
- XArchIntrinsicConstants_Avx = 0x0080,
- XArchIntrinsicConstants_Fma = 0x0100,
- XArchIntrinsicConstants_Avx2 = 0x0200,
- XArchIntrinsicConstants_Bmi1 = 0x0400,
- XArchIntrinsicConstants_Bmi2 = 0x0800,
- XArchIntrinsicConstants_Lzcnt = 0x1000,
- XArchIntrinsicConstants_AvxVnni = 0x2000,
- XArchIntrinsicConstants_Movbe = 0x4000,
- XArchIntrinsicConstants_Avx512f = 0x8000,
- XArchIntrinsicConstants_Avx512f_vl = 0x10000,
- XArchIntrinsicConstants_Avx512bw = 0x20000,
- XArchIntrinsicConstants_Avx512bw_vl = 0x40000,
- XArchIntrinsicConstants_Avx512cd = 0x80000,
- XArchIntrinsicConstants_Avx512cd_vl = 0x100000,
- XArchIntrinsicConstants_Avx512dq = 0x200000,
- XArchIntrinsicConstants_Avx512dq_vl = 0x400000,
- XArchIntrinsicConstants_Avx512Vbmi = 0x800000,
- XArchIntrinsicConstants_Avx512Vbmi_vl = 0x1000000,
- XArchIntrinsicConstants_Serialize = 0x2000000,
- XArchIntrinsicConstants_VectorT128 = 0x4000000,
- XArchIntrinsicConstants_VectorT256 = 0x8000000,
- XArchIntrinsicConstants_VectorT512 = 0x10000000,
- XArchIntrinsicConstants_Avx10v1 = 0x20000000,
- XArchIntrinsicConstants_Avx10v1_V256 = 0x40000000,
- XArchIntrinsicConstants_Avx10v1_V512 = 0x80000000,
+ XArchIntrinsicConstants_Aes = 0,
+ XArchIntrinsicConstants_Pclmulqdq = 1,
+ XArchIntrinsicConstants_Sse3 = 2,
+ XArchIntrinsicConstants_Ssse3 = 3,
+ XArchIntrinsicConstants_Sse41 = 4,
+ XArchIntrinsicConstants_Sse42 = 5,
+ XArchIntrinsicConstants_Popcnt = 6,
+ XArchIntrinsicConstants_Avx = 7,
+ XArchIntrinsicConstants_Fma = 8,
+ XArchIntrinsicConstants_Avx2 = 9,
+ XArchIntrinsicConstants_Bmi1 = 10,
+ XArchIntrinsicConstants_Bmi2 = 11,
+ XArchIntrinsicConstants_Lzcnt = 12,
+ XArchIntrinsicConstants_AvxVnni = 13,
+ XArchIntrinsicConstants_Movbe = 14,
+ XArchIntrinsicConstants_Avx512f = 15,
+ XArchIntrinsicConstants_Avx512f_vl = 16,
+ XArchIntrinsicConstants_Avx512bw = 17,
+ XArchIntrinsicConstants_Avx512bw_vl = 18,
+ XArchIntrinsicConstants_Avx512cd = 19,
+ XArchIntrinsicConstants_Avx512cd_vl = 20,
+ XArchIntrinsicConstants_Avx512dq = 21,
+ XArchIntrinsicConstants_Avx512dq_vl = 22,
+ XArchIntrinsicConstants_Avx512Vbmi = 23,
+ XArchIntrinsicConstants_Avx512Vbmi_vl = 24,
+ XArchIntrinsicConstants_Serialize = 25,
+ XArchIntrinsicConstants_VectorT128 = 26,
+ XArchIntrinsicConstants_VectorT256 = 27,
+ XArchIntrinsicConstants_VectorT512 = 28,
+ XArchIntrinsicConstants_Avx10v1 = 29,
+ XArchIntrinsicConstants_Avx10v1_V256 = 30,
+ XArchIntrinsicConstants_Avx10v1_V512 = 31,
};
#endif // HOST_X86 || HOST_AMD64
#if defined(HOST_ARM64)
enum ARM64IntrinsicConstants
{
- ARM64IntrinsicConstants_AdvSimd = 0x0001,
- ARM64IntrinsicConstants_Aes = 0x0002,
- ARM64IntrinsicConstants_Crc32 = 0x0004,
- ARM64IntrinsicConstants_Dp = 0x0008,
- ARM64IntrinsicConstants_Rdm = 0x0010,
- ARM64IntrinsicConstants_Sha1 = 0x0020,
- ARM64IntrinsicConstants_Sha256 = 0x0040,
- ARM64IntrinsicConstants_Atomics = 0x0080,
- ARM64IntrinsicConstants_Rcpc = 0x0100,
- ARM64IntrinsicConstants_VectorT128 = 0x0200,
- ARM64IntrinsicConstants_Rcpc2 = 0x0400,
- ARM64IntrinsicConstants_Sve = 0x0800,
+ ARM64IntrinsicConstants_AdvSimd = 0,
+ ARM64IntrinsicConstants_Aes = 1,
+ ARM64IntrinsicConstants_Crc32 = 2,
+ ARM64IntrinsicConstants_Dp = 3,
+ ARM64IntrinsicConstants_Rdm = 4,
+ ARM64IntrinsicConstants_Sha1 = 5,
+ ARM64IntrinsicConstants_Sha256 = 6,
+ ARM64IntrinsicConstants_Atomics = 7,
+ ARM64IntrinsicConstants_Rcpc = 8,
+ ARM64IntrinsicConstants_VectorT128 = 9,
+ ARM64IntrinsicConstants_Rcpc2 = 10,
+ ARM64IntrinsicConstants_Sve = 11,
};
#include
// Bit position for the ARM64IntrinsicConstants_Atomics flags, to be used with tbz / tbnz instructions
-#define ARM64_ATOMICS_FEATURE_FLAG_BIT 7
-static_assert((1 << ARM64_ATOMICS_FEATURE_FLAG_BIT) == ARM64IntrinsicConstants_Atomics, "ARM64_ATOMICS_FEATURE_FLAG_BIT must match with ARM64IntrinsicConstants_Atomics");
+#define ARM64_ATOMICS_FEATURE_FLAG_VALUE 7
+static_assert(ARM64_ATOMICS_FEATURE_FLAG_VALUE == ARM64IntrinsicConstants_Atomics, "ARM64_ATOMICS_FEATURE_FLAG_VALUE must match ARM64IntrinsicConstants_Atomics");
#endif // HOST_ARM64
@@ -76,7 +101,7 @@ extern "C"
{
#endif // __cplusplus
-int minipal_getcpufeatures(void);
+HardwareIntrinsicConstants minipal_getcpufeatures(void);
bool minipal_detect_rosetta(void);
#ifdef __cplusplus