/*
 * CPU microarchitecture lookup tables.
 *
 * Each table is expanded with a caller-supplied `_` macro:
 *   x86:     _(family, model, uarch-name, product-name)
 *   aarch64: _(implementer, primary-part-number, vendor, product-name)
 *
 * The strings are display text only; they are emitted verbatim by the
 * format_cpu_uarch expansion below.
 *
 * Fix: removed the stray ')' that was embedded in the 0x1a entry's product
 * string ("Nehalem EP,Bloomfield)") -- no other entry carries a trailing
 * paren, so it leaked into the formatted output.
 */
#define foreach_x86_cpu_uarch \
  _(0x06, 0x9e, "Kaby Lake", "Kaby Lake DT/H/S/X") \
  _(0x06, 0x8e, "Kaby Lake", "Kaby Lake Y/U") \
  _(0x06, 0x85, "Knights Mill", "Knights Mill") \
  _(0x06, 0x5f, "Goldmont", "Denverton") \
  _(0x06, 0x5e, "Skylake", "Skylake DT/H/S") \
  _(0x06, 0x5c, "Goldmont", "Apollo Lake") \
  _(0x06, 0x5a, "Silvermont", "Moorefield") \
  _(0x06, 0x57, "Knights Landing", "Knights Landing") \
  _(0x06, 0x56, "Broadwell", "Broadwell DE") \
  _(0x06, 0x55, "Skylake", "Skylake X/SP") \
  _(0x06, 0x4f, "Broadwell", "Broadwell EP/EX") \
  _(0x06, 0x4e, "Skylake", "Skylake Y/U") \
  _(0x06, 0x4d, "Silvermont", "Rangeley") \
  _(0x06, 0x4c, "Airmont", "Braswell") \
  _(0x06, 0x47, "Broadwell", "Broadwell H") \
  _(0x06, 0x46, "Haswell", "Crystalwell") \
  _(0x06, 0x45, "Haswell", "Haswell ULT") \
  _(0x06, 0x3f, "Haswell", "Haswell E") \
  _(0x06, 0x3e, "Ivy Bridge", "Ivy Bridge E/EN/EP") \
  _(0x06, 0x3d, "Broadwell", "Broadwell U") \
  _(0x06, 0x3c, "Haswell", "Haswell") \
  _(0x06, 0x3a, "Ivy Bridge", "IvyBridge") \
  _(0x06, 0x37, "Silvermont", "BayTrail") \
  _(0x06, 0x36, "Saltwell", "Cedarview,Centerton") \
  _(0x06, 0x35, "Saltwell", "Cloverview") \
  _(0x06, 0x2f, "Westmere", "Westmere EX") \
  _(0x06, 0x2e, "Nehalem", "Nehalem EX") \
  _(0x06, 0x2d, "Sandy Bridge", "SandyBridge E/EN/EP") \
  _(0x06, 0x2c, "Westmere", "Westmere EP/EX,Gulftown") \
  _(0x06, 0x2a, "Sandy Bridge", "Sandy Bridge") \
  _(0x06, 0x27, "Saltwell", "Medfield") \
  _(0x06, 0x26, "Bonnell", "Tunnel Creek") \
  _(0x06, 0x25, "Westmere", "Arrandale,Clarksdale") \
  _(0x06, 0x1e, "Nehalem", "Clarksfield,Lynnfield,Jasper Forest") \
  _(0x06, 0x1d, "Penryn", "Dunnington") \
  _(0x06, 0x1c, "Bonnell", "Pineview,Silverthorne") \
  _(0x06, 0x1a, "Nehalem", "Nehalem EP,Bloomfield") \
  _(0x06, 0x17, "Penryn", "Yorkfield,Wolfdale,Penryn,Harpertown")

#define foreach_aarch64_cpu_uarch \
  _(0x41, 0xd03, "ARM", "Cortex-A53") \
  _(0x41, 0xd07, "ARM", "Cortex-A57") \
  _(0x41, 0xd08, "ARM", "Cortex-A72") \
  _(0x41, 0xd09, "ARM", "Cortex-A73") \
  _(0x43, 0x0a1, "Cavium", "ThunderX CN88XX") \
  _(0x43, 0x0a2, "Cavium", "Octeon TX CN81XX") \
  _(0x43, 0x0a3, "Cavium", "Octeon TX CN83XX") \
  _(0x43, 0x0af, "Cavium", "ThunderX2 CN99XX") \
  _(0x43, 0x0b1, "Cavium", "Octeon TX2 CN98XX") \
  _(0x43, 0x0b2, "Cavium", "Octeon TX2 CN93XX")

/* NOTE(review): in the original extraction this span also carried the
 * opening scratch declaration of format_cpu_uarch
 * ("u32 __attribute__ ((unused)) eax, ebx, ecx, edx;"); that declaration
 * belongs to the function body that follows these tables in the file. */
/* NOTE(review): garbled extraction -- original source line numbers are
 * fused into the text and many glue lines (function signature, braces,
 * preprocessor conditionals, the foreach_*_cpu_uarch invocations,
 * `#undef _`, the /proc/cpuinfo read loop) were not captured.  Per the
 * symbol index at the end of the chunk this appears to be the body of
 * format_cpu_uarch (u8 *s, va_list *args).  Code is left byte-identical;
 * comments only are added. */
/* x86 path: CPUID leaf 1 puts family/model fields in EAX; bail out with a
 * diagnostic string if cpuid is unavailable. */
79 if (__get_cpuid (1, &eax, &ebx, &ecx, &edx) == 0)
80 return format (s,
"unknown (missing cpuid)");
/* Combine the base model nibble (EAX[7:4]) with the extended model nibble
 * (EAX[19:16], shifted down to the high nibble); family is EAX[11:8]. */
82 model = ((eax >> 4) & 0x0f) | ((eax >> 12) & 0xf0);
83 family = (eax >> 8) & 0x0f;
/* Per-entry expansion: the first matching (family, model) pair in
 * foreach_x86_cpu_uarch returns "<uarch> (<product>)"; when none matches,
 * fall through to the "unknown (family ... model ...)" diagnostic. */
85 #define _(f,m,a,c) if ((model == m) && (family == f)) return format(s, "%s (%s)", a, c); 88 return format (s,
"unknown (family 0x%02x model 0x%02x)", family, model);
/* aarch64 path: parse implementer / part / variant / revision out of
 * /proc/cpuinfo using unformat patterns. */
93 u32 implementer, primary_part_number, variant, revision;
95 fd = open (
"/proc/cpuinfo", 0);
/* NOTE(review): the condition guarding this early return was not captured
 * here -- presumably `if (fd < 0)`; confirm against the full source. */
97 return format (s,
"unknown");
/* Each matched "CPU ..." line fills one field; the empty-statement bodies
 * of these if/else arms and the surrounding read loop are missing from
 * this extraction. */
102 if (
unformat (&input,
"CPU implementer%_: 0x%x", &implementer))
104 else if (
unformat (&input,
"CPU part%_: 0x%x", &primary_part_number))
106 else if (
unformat (&input,
"CPU variant%_: 0x%x", &variant))
108 else if (
unformat (&input,
"CPU revision%_: %u", &revision))
/* 0x43 is the Cavium implementer id -- all foreach_aarch64_cpu_uarch
 * entries with that implementer are Cavium parts, so other implementers
 * skip straight to the "unknown (...)" formatting below. */
/* NOTE(review): implementer/variant/revision are consumed below even if
 * /proc/cpuinfo lacked the matching lines; verify they are initialized in
 * the uncaptured code, otherwise indeterminate values are formatted. */
117 if (implementer == 0x43)
120 #define _(i,p,a,c) if ((implementer == i) && (primary_part_number == p)) \ 121 return format(s, "%s (%s PASS %u.%u)", a, c, variant, revision); 124 return format (s,
"unknown (implementer 0x%02x part 0x%03x PASS %u.%u)",
125 implementer, primary_part_number, variant, revision);
/* Fallback for architectures with neither path compiled in. */
128 return format (s,
"unknown");
/* NOTE(review): garbled extraction -- per the symbol index at the end of
 * the chunk this appears to be the body of
 * format_cpu_model_name (u8 *s, va_list *args).  The signature, the
 * `name[]` byte-buffer declaration, and the stores of eax..edx into
 * name_u32[] were not captured.  Code left byte-identical; comments only
 * are added. */
/* CPUID leaf 1 is probed only to verify cpuid works at all; the register
 * values from this call are otherwise unused (hence the unused attribute). */
136 u32 __attribute__ ((unused)) eax, ebx, ecx, edx;
140 if (__get_cpuid (1, &eax, &ebx, &ecx, &edx) == 0)
141 return format (s,
"unknown (missing cpuid)");
/* Leaf 0x80000000 returns the highest supported extended leaf in EAX; the
 * processor brand string requires leaves 0x80000002..0x80000004. */
143 __get_cpuid (0x80000000, &eax, &ebx, &ecx, &edx);
144 if (eax < 0x80000004)
145 return format (s,
"unknown (missing ext feature)");
/* NOTE(review): viewing the byte buffer through a u32 * for register-sized
 * stores is a type pun -- safe only if `name` is suitably aligned; confirm
 * the buffer's declaration in the full source. */
148 name_u32 = (
u32 *) name;
/* Each of the three extended leaves yields 16 bytes (eax..edx) of the
 * 48-byte brand string; the stores into name_u32[] were not captured in
 * this extraction. */
150 __get_cpuid (0x80000002, &eax, &ebx, &ecx, &edx);
156 __get_cpuid (0x80000003, &eax, &ebx, &ecx, &edx);
162 __get_cpuid (0x80000004, &eax, &ebx, &ecx, &edx);
/* Emit the assembled, NUL-terminated brand string. */
168 s =
format (s,
"%s", name);
/* aarch64 exposes no cpuid brand string; report the architecture only. */
172 #elif defined(__aarch64__) 173 return format (s,
"armv8");
/* Fallback for other architectures. */
175 return format (s,
"unknown");
/* Strip the architecture prefix ("x86_" or "aarch64_") from a stringified
 * feature-flag name so the user-visible flag list is architecture-neutral.
 *
 * @param flag  NUL-terminated flag name, e.g. "x86_avx2" or "aarch64_sve".
 * @return      Pointer into `flag` just past the matched prefix, or `flag`
 *              unchanged when no known prefix matches.
 *
 * Fix: use strncmp instead of memcmp.  memcmp unconditionally reads the
 * full prefix length, which is an out-of-bounds read whenever `flag` is
 * shorter than the prefix; strncmp stops at the first differing byte
 * (including the NUL terminator), so short inputs are safe.  Results are
 * identical for all inputs at least as long as the prefix.
 */
static inline char const *
flag_skip_prefix (char const *flag)
{
  if (strncmp (flag, "x86_", sizeof ("x86_") - 1) == 0)
    return flag + sizeof ("x86_") - 1;
  if (strncmp (flag, "aarch64_", sizeof ("aarch64_") - 1) == 0)
    return flag + sizeof ("aarch64_") - 1;
  /* NOTE(review): the original tail of this function was not captured in
   * this chunk; returning the flag unchanged is the only coherent default
   * for unprefixed names -- confirm against the full source. */
  return flag;
}
/* NOTE(review): garbled extraction -- per the symbol index at the end of
 * the chunk this appears to be the body of
 * format_cpu_flags (u8 *s, va_list *args).  The signature and the
 * foreach_x86_64_flags / foreach_aarch64_flags invocations that expand the
 * `_` macros were not captured.  For each flag whose
 * clib_cpu_supports_<flag>() probe is true at runtime, the prefix-stripped
 * flag name (via flag_skip_prefix) plus a trailing space is appended to s.
 * Code left byte-identical; comments only are added. */
193 #if defined(__x86_64__) 194 #define _(flag, func, reg, bit) \ 195 if (clib_cpu_supports_ ## flag()) \ 196 s = format (s, "%s ", flag_skip_prefix(#flag)); 199 #elif defined(__aarch64__) 200 #define _(flag, bit) \ 201 if (clib_cpu_supports_ ## flag()) \ 202 s = format (s, "%s ", flag_skip_prefix(#flag)); 206 return format (s,
"unknown");
#define vec_validate(V, I)
Make sure vector is long enough for given index (no header, unspecified alignment) ...
static char const * flag_skip_prefix(char const *flag)
u8 * format_cpu_uarch(u8 *s, va_list *args)
#define foreach_aarch64_flags
#define foreach_aarch64_cpu_uarch
u8 * format_cpu_model_name(u8 *s, va_list *args)
#define vec_free(V)
Free vector's memory (no header).
u8 * format_cpu_flags(u8 *s, va_list *args)
#define foreach_x86_64_flags
#define foreach_x86_cpu_uarch