Use #ifdef APPLE_MAC instead of #ifdef MAC to conditionalize Apple-specific
behavior for unicode support in UDF so as not to conflict with the MAC
Framework.  Note that Apple's XNU kernel also uses #ifdef MAC for the MAC
Framework.

Suggested by:	pjd
MFC after:	3 days
commit dde155e95a
parent 322ef7cc60
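The rename avoids a name collision: both FreeBSD's MAC Framework and Apple's
XNU compile their mandatory access control code under #ifdef MAC, so an
unrelated #ifdef MAC guarding Apple-specific unicode behavior in UDF could be
switched on by the framework's define. The small, compilable sketch below
illustrates the hazard; it is not taken from the tree, and the #define simply
stands in for the MAC Framework's option define.

/*
 * Illustrative only: the old guard fires for the MAC Framework as well,
 * while the renamed guard fires only when APPLE_MAC is defined on purpose.
 */
#include <stdio.h>

#define MAC 1			/* stands in for the MAC Framework's option define */

int
main(void)
{
#ifdef MAC			/* old guard: also true for the MAC Framework */
	printf("Apple-specific UDF path compiled by accident\n");
#endif
#ifdef APPLE_MAC		/* new guard: only defined for genuine Apple builds */
	printf("Apple-specific UDF path compiled intentionally\n");
#endif
	return (0);
}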
@@ -280,7 +280,7 @@ main()
  * Define MAXLEN = 255
  *
  * Macintosh:
- * Define MAC.
+ * Define APPLE_MAC.
  * Define MAXLEN = 31.
  *
  * UNIX
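This hunk only touches documentation; for readers who want to see what the
documented settings would look like in code, a hypothetical configuration
block matching the comment might read as follows (the values come from the
comment, the placement and exact form are assumptions):

/* Hypothetical configuration mirroring the comment above. */
#ifdef APPLE_MAC
#define	MAXLEN	31	/* classic Mac OS file name length limit */
#else
#define	MAXLEN	255	/* limit documented for the other platforms */
#endif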
@@ -484,7 +484,7 @@ int UnicodeInString(
  */
 int IsIllegal(unicode_t ch)
 {
-#ifdef MAC
+#ifdef APPLE_MAC
 	/* Only illegal character on the MAC is the colon. */
 	if (ch == 0x003A) {
 		return(1);
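The hunk ends mid-function; a rough sketch of how IsIllegal() reads with the
renamed guard is shown below. Only the APPLE_MAC branch and its colon check
come from the diff above; the typedef and the #else branch are assumed
placeholders, not the actual source.

/* Sketch only: the typedef and the #else branch are assumed, not from the commit. */
typedef unsigned short unicode_t;	/* assumed 16-bit unicode character type */

int
IsIllegal(unicode_t ch)
{
#ifdef APPLE_MAC
	/* Only illegal character on the Mac is the colon. */
	if (ch == 0x003A) {
		return (1);
	}
	return (0);
#else
	/* Assumed generic rule: reject NUL and the path separator '/'. */
	if (ch == 0x0000 || ch == 0x002F) {
		return (1);
	}
	return (0);
#endif
}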