Fix flags field decoding in ACPI_NFIT_CONTROL_REGION.

It looked like an incomplete copy/paste, printing absolute garbage.

While there, print the ValidFields field as hex, since it is a bitmask.

MFC after:	3 days
This commit is contained in:
Alexander Motin 2017-08-31 09:08:06 +00:00
parent b290ac7ba4
commit d4c2de2e0b
Notes: svn2git 2020-12-20 02:59:44 +00:00
svn path=/head/; revision=323045

View File

@ -1277,7 +1277,7 @@ acpi_print_nfit(ACPI_NFIT_HEADER *nfit)
(u_int)ctlreg->SubsystemDeviceId);
printf("\tSubsystemRevisionId=%u\n",
(u_int)ctlreg->SubsystemRevisionId);
printf("\tValidFields=%u\n", (u_int)ctlreg->ValidFields);
printf("\tValidFields=0x%02x\n", (u_int)ctlreg->ValidFields);
printf("\tManufacturingLocation=%u\n",
(u_int)ctlreg->ManufacturingLocation);
printf("\tManufacturingDate=%u\n",
@ -1300,8 +1300,7 @@ acpi_print_nfit(ACPI_NFIT_HEADER *nfit)
#define PRINTFLAG(var, flag) printflag((var), ACPI_NFIT_## flag, #flag)
printf("\tFlags=");
PRINTFLAG(mmap->Flags, ADD_ONLINE_ONLY);
PRINTFLAG(mmap->Flags, PROXIMITY_VALID);
PRINTFLAG(ctlreg->Flags, CONTROL_BUFFERED);
PRINTFLAG_END();
#undef PRINTFLAG