1 //===-- AArch64ELFObjectWriter.cpp - AArch64 ELF Writer -------------------===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file handles ELF-specific object emission, converting LLVM's internal
11 // fixups into the appropriate relocations.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "MCTargetDesc/AArch64FixupKinds.h"
16 #include "MCTargetDesc/AArch64MCExpr.h"
17 #include "MCTargetDesc/AArch64MCTargetDesc.h"
18 #include "llvm/BinaryFormat/ELF.h"
19 #include "llvm/MC/MCContext.h"
20 #include "llvm/MC/MCELFObjectWriter.h"
21 #include "llvm/MC/MCFixup.h"
22 #include "llvm/MC/MCValue.h"
23 #include "llvm/Support/ErrorHandling.h"
24 #include <cassert>
25 #include <cstdint>
26 
27 using namespace llvm;
28 
29 namespace {
30 
// Target-specific ELF writer for AArch64.  Its sole job is translating
// LLVM fixups (plus the AArch64MCExpr modifier attached to the target
// expression) into ELF relocation types, for both the LP64 and ILP32 ABIs.
class AArch64ELFObjectWriter : public MCELFObjectTargetWriter {
public:
  AArch64ELFObjectWriter(uint8_t OSABI, bool IsLittleEndian, bool IsILP32);

  ~AArch64ELFObjectWriter() override = default;

protected:
  // Maps a fixup + expression modifier onto an ELF::R_AARCH64[_P32]_*
  // relocation code, reporting errors through Ctx for invalid combinations.
  unsigned getRelocType(MCContext &Ctx, const MCValue &Target,
                        const MCFixup &Fixup, bool IsPCRel) const override;
  // True when targeting the ILP32 variant of the AArch64 ELF ABI; selects
  // the R_AARCH64_P32_* relocation space (see the R_CLS macro below).
  bool IsILP32;
};
42 
43 } // end anonymous namespace
44 
// AArch64 is always a 64-bit ELF target (even under ILP32) and uses RELA
// (explicit addend) relocations.  IsLittleEndian is accepted for interface
// symmetry but not stored here — presumably endianness is handled by the
// generic ELF writer created in createAArch64ELFObjectWriter below; the
// parameter is otherwise unused in this constructor.
AArch64ELFObjectWriter::AArch64ELFObjectWriter(uint8_t OSABI,
                                               bool IsLittleEndian,
                                               bool IsILP32)
    : MCELFObjectTargetWriter(/*Is64Bit*/ true, OSABI, ELF::EM_AARCH64,
                              /*HasRelocationAddend*/ true),
      IsILP32(IsILP32) {}
51 
52 #define R_CLS(rtype)                                                           \
53   IsILP32 ? ELF::R_AARCH64_P32_##rtype : ELF::R_AARCH64_##rtype
54 #define BAD_ILP32_MOV(lp64rtype)                                               \
55   "ILP32 absolute MOV relocation not "                                         \
56   "supported (LP64 eqv: " #lp64rtype ")"
57 
58 // assumes IsILP32 is true
59 static bool isNonILP32reloc(const MCFixup &Fixup,
60                             AArch64MCExpr::VariantKind RefKind,
61                             MCContext &Ctx) {
62   if ((unsigned)Fixup.getKind() != AArch64::fixup_aarch64_movw)
63     return false;
64   switch (RefKind) {
65   case AArch64MCExpr::VK_ABS_G3:
66     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G3));
67     return true;
68   case AArch64MCExpr::VK_ABS_G2:
69     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2));
70     return true;
71   case AArch64MCExpr::VK_ABS_G2_S:
72     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G2));
73     return true;
74   case AArch64MCExpr::VK_ABS_G2_NC:
75     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2_NC));
76     return true;
77   case AArch64MCExpr::VK_ABS_G1_S:
78     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G1));
79     return true;
80   case AArch64MCExpr::VK_ABS_G1_NC:
81     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G1_NC));
82     return true;
83   case AArch64MCExpr::VK_DTPREL_G2:
84     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G2));
85     return true;
86   case AArch64MCExpr::VK_DTPREL_G1_NC:
87     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G1_NC));
88     return true;
89   case AArch64MCExpr::VK_TPREL_G2:
90     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G2));
91     return true;
92   case AArch64MCExpr::VK_TPREL_G1_NC:
93     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G1_NC));
94     return true;
95   case AArch64MCExpr::VK_GOTTPREL_G1:
96     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G1));
97     return true;
98   case AArch64MCExpr::VK_GOTTPREL_G0_NC:
99     Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G0_NC));
100     return true;
101   default:
102     return false;
103   }
104   return false;
105 }
106 
107 unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
108                                               const MCValue &Target,
109                                               const MCFixup &Fixup,
110                                               bool IsPCRel) const {
111   AArch64MCExpr::VariantKind RefKind =
112       static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind());
113   AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind);
114   bool IsNC = AArch64MCExpr::isNotChecked(RefKind);
115 
116   assert((!Target.getSymA() ||
117           Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None) &&
118          "Should only be expression-level modifiers here");
119 
120   assert((!Target.getSymB() ||
121           Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) &&
122          "Should only be expression-level modifiers here");
123 
124   if (IsPCRel) {
125     switch ((unsigned)Fixup.getKind()) {
126     case FK_Data_1:
127       Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
128       return ELF::R_AARCH64_NONE;
129     case FK_Data_2:
130       return R_CLS(PREL16);
131     case FK_Data_4:
132       return R_CLS(PREL32);
133     case FK_Data_8:
134       if (IsILP32) {
135         Ctx.reportError(Fixup.getLoc(),
136                         "ILP32 8 byte PC relative data "
137                         "relocation not supported (LP64 eqv: PREL64)");
138         return ELF::R_AARCH64_NONE;
139       } else
140         return ELF::R_AARCH64_PREL64;
141     case AArch64::fixup_aarch64_pcrel_adr_imm21:
142       assert(SymLoc == AArch64MCExpr::VK_NONE && "unexpected ADR relocation");
143       return R_CLS(ADR_PREL_LO21);
144     case AArch64::fixup_aarch64_pcrel_adrp_imm21:
145       if (SymLoc == AArch64MCExpr::VK_ABS && !IsNC)
146         return R_CLS(ADR_PREL_PG_HI21);
147       if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) {
148         if (IsILP32) {
149           Ctx.reportError(Fixup.getLoc(),
150                           "invalid fixup for 32-bit pcrel ADRP instruction "
151                           "VK_ABS VK_NC");
152           return ELF::R_AARCH64_NONE;
153         } else {
154           return ELF::R_AARCH64_ADR_PREL_PG_HI21_NC;
155         }
156       }
157       if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC)
158         return R_CLS(ADR_GOT_PAGE);
159       if (SymLoc == AArch64MCExpr::VK_GOTTPREL && !IsNC)
160         return R_CLS(TLSIE_ADR_GOTTPREL_PAGE21);
161       if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC)
162         return R_CLS(TLSDESC_ADR_PAGE21);
163       Ctx.reportError(Fixup.getLoc(),
164                       "invalid symbol kind for ADRP relocation");
165       return ELF::R_AARCH64_NONE;
166     case AArch64::fixup_aarch64_pcrel_branch26:
167       return R_CLS(JUMP26);
168     case AArch64::fixup_aarch64_pcrel_call26:
169       return R_CLS(CALL26);
170     case AArch64::fixup_aarch64_ldr_pcrel_imm19:
171       if (SymLoc == AArch64MCExpr::VK_GOTTPREL)
172         return R_CLS(TLSIE_LD_GOTTPREL_PREL19);
173       return R_CLS(LD_PREL_LO19);
174     case AArch64::fixup_aarch64_pcrel_branch14:
175       return R_CLS(TSTBR14);
176     case AArch64::fixup_aarch64_pcrel_branch19:
177       return R_CLS(CONDBR19);
178     default:
179       Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind");
180       return ELF::R_AARCH64_NONE;
181     }
182   } else {
183     if (IsILP32 && isNonILP32reloc(Fixup, RefKind, Ctx))
184       return ELF::R_AARCH64_NONE;
185     switch ((unsigned)Fixup.getKind()) {
186     case FK_Data_1:
187       Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
188       return ELF::R_AARCH64_NONE;
189     case FK_Data_2:
190       return R_CLS(ABS16);
191     case FK_Data_4:
192       return R_CLS(ABS32);
193     case FK_Data_8:
194       if (IsILP32) {
195         Ctx.reportError(Fixup.getLoc(),
196                         "ILP32 8 byte absolute data "
197                         "relocation not supported (LP64 eqv: ABS64)");
198         return ELF::R_AARCH64_NONE;
199       } else
200         return ELF::R_AARCH64_ABS64;
201     case AArch64::fixup_aarch64_add_imm12:
202       if (RefKind == AArch64MCExpr::VK_DTPREL_HI12)
203         return R_CLS(TLSLD_ADD_DTPREL_HI12);
204       if (RefKind == AArch64MCExpr::VK_TPREL_HI12)
205         return R_CLS(TLSLE_ADD_TPREL_HI12);
206       if (RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC)
207         return R_CLS(TLSLD_ADD_DTPREL_LO12_NC);
208       if (RefKind == AArch64MCExpr::VK_DTPREL_LO12)
209         return R_CLS(TLSLD_ADD_DTPREL_LO12);
210       if (RefKind == AArch64MCExpr::VK_TPREL_LO12_NC)
211         return R_CLS(TLSLE_ADD_TPREL_LO12_NC);
212       if (RefKind == AArch64MCExpr::VK_TPREL_LO12)
213         return R_CLS(TLSLE_ADD_TPREL_LO12);
214       if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12)
215         return R_CLS(TLSDESC_ADD_LO12);
216       if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
217         return R_CLS(ADD_ABS_LO12_NC);
218 
219       Ctx.reportError(Fixup.getLoc(),
220                       "invalid fixup for add (uimm12) instruction");
221       return ELF::R_AARCH64_NONE;
222     case AArch64::fixup_aarch64_ldst_imm12_scale1:
223       if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
224         return R_CLS(LDST8_ABS_LO12_NC);
225       if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
226         return R_CLS(TLSLD_LDST8_DTPREL_LO12);
227       if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
228         return R_CLS(TLSLD_LDST8_DTPREL_LO12_NC);
229       if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
230         return R_CLS(TLSLE_LDST8_TPREL_LO12);
231       if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
232         return R_CLS(TLSLE_LDST8_TPREL_LO12_NC);
233 
234       Ctx.reportError(Fixup.getLoc(),
235                       "invalid fixup for 8-bit load/store instruction");
236       return ELF::R_AARCH64_NONE;
237     case AArch64::fixup_aarch64_ldst_imm12_scale2:
238       if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
239         return R_CLS(LDST16_ABS_LO12_NC);
240       if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
241         return R_CLS(TLSLD_LDST16_DTPREL_LO12);
242       if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
243         return R_CLS(TLSLD_LDST16_DTPREL_LO12_NC);
244       if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
245         return R_CLS(TLSLE_LDST16_TPREL_LO12);
246       if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
247         return R_CLS(TLSLE_LDST16_TPREL_LO12_NC);
248 
249       Ctx.reportError(Fixup.getLoc(),
250                       "invalid fixup for 16-bit load/store instruction");
251       return ELF::R_AARCH64_NONE;
252     case AArch64::fixup_aarch64_ldst_imm12_scale4:
253       if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
254         return R_CLS(LDST32_ABS_LO12_NC);
255       if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
256         return R_CLS(TLSLD_LDST32_DTPREL_LO12);
257       if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
258         return R_CLS(TLSLD_LDST32_DTPREL_LO12_NC);
259       if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
260         return R_CLS(TLSLE_LDST32_TPREL_LO12);
261       if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
262         return R_CLS(TLSLE_LDST32_TPREL_LO12_NC);
263       if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
264         if (IsILP32) {
265           return ELF::R_AARCH64_P32_LD32_GOT_LO12_NC;
266         } else {
267           Ctx.reportError(Fixup.getLoc(),
268                           "LP64 4 byte unchecked GOT load/store relocation "
269                           "not supported (ILP32 eqv: LD32_GOT_LO12_NC");
270           return ELF::R_AARCH64_NONE;
271         }
272       }
273       if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC) {
274         if (IsILP32) {
275           Ctx.reportError(Fixup.getLoc(),
276                           "ILP32 4 byte checked GOT load/store relocation "
277                           "not supported (unchecked eqv: LD32_GOT_LO12_NC)");
278         } else {
279           Ctx.reportError(Fixup.getLoc(),
280                           "LP64 4 byte checked GOT load/store relocation "
281                           "not supported (unchecked/ILP32 eqv: "
282                           "LD32_GOT_LO12_NC)");
283         }
284         return ELF::R_AARCH64_NONE;
285       }
286       if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
287         if (IsILP32) {
288           return ELF::R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC;
289         } else {
290           Ctx.reportError(Fixup.getLoc(),
291                           "LP64 32-bit load/store "
292                           "relocation not supported (ILP32 eqv: "
293                           "TLSIE_LD32_GOTTPREL_LO12_NC)");
294           return ELF::R_AARCH64_NONE;
295         }
296       }
297       if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC) {
298         if (IsILP32) {
299           return ELF::R_AARCH64_P32_TLSDESC_LD32_LO12;
300         } else {
301           Ctx.reportError(Fixup.getLoc(),
302                           "LP64 4 byte TLSDESC load/store relocation "
303                           "not supported (ILP32 eqv: TLSDESC_LD64_LO12)");
304           return ELF::R_AARCH64_NONE;
305         }
306       }
307 
308       Ctx.reportError(Fixup.getLoc(),
309                       "invalid fixup for 32-bit load/store instruction "
310                       "fixup_aarch64_ldst_imm12_scale4");
311       return ELF::R_AARCH64_NONE;
312     case AArch64::fixup_aarch64_ldst_imm12_scale8:
313       if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
314         return R_CLS(LDST64_ABS_LO12_NC);
315       if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
316         if (!IsILP32) {
317           return ELF::R_AARCH64_LD64_GOT_LO12_NC;
318         } else {
319           Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
320                                           "relocation not supported (LP64 eqv: "
321                                           "LD64_GOT_LO12_NC)");
322           return ELF::R_AARCH64_NONE;
323         }
324       }
325       if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
326         return R_CLS(TLSLD_LDST64_DTPREL_LO12);
327       if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
328         return R_CLS(TLSLD_LDST64_DTPREL_LO12_NC);
329       if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
330         return R_CLS(TLSLE_LDST64_TPREL_LO12);
331       if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
332         return R_CLS(TLSLE_LDST64_TPREL_LO12_NC);
333       if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
334         if (!IsILP32) {
335           return ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC;
336         } else {
337           Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
338                                           "relocation not supported (LP64 eqv: "
339                                           "TLSIE_LD64_GOTTPREL_LO12_NC)");
340           return ELF::R_AARCH64_NONE;
341         }
342       }
343       if (SymLoc == AArch64MCExpr::VK_TLSDESC) {
344         if (!IsILP32) {
345           return ELF::R_AARCH64_TLSDESC_LD64_LO12;
346         } else {
347           Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
348                                           "relocation not supported (LP64 eqv: "
349                                           "TLSDESC_LD64_LO12)");
350           return ELF::R_AARCH64_NONE;
351         }
352       }
353       Ctx.reportError(Fixup.getLoc(),
354                       "invalid fixup for 64-bit load/store instruction");
355       return ELF::R_AARCH64_NONE;
356     case AArch64::fixup_aarch64_ldst_imm12_scale16:
357       if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
358         return R_CLS(LDST128_ABS_LO12_NC);
359       if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
360         return R_CLS(TLSLD_LDST128_DTPREL_LO12);
361       if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
362         return R_CLS(TLSLD_LDST128_DTPREL_LO12_NC);
363       if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
364         return R_CLS(TLSLE_LDST128_TPREL_LO12);
365       if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
366         return R_CLS(TLSLE_LDST128_TPREL_LO12_NC);
367 
368       Ctx.reportError(Fixup.getLoc(),
369                       "invalid fixup for 128-bit load/store instruction");
370       return ELF::R_AARCH64_NONE;
371     // ILP32 case not reached here, tested with isNonILP32reloc
372     case AArch64::fixup_aarch64_movw:
373       if (RefKind == AArch64MCExpr::VK_ABS_G3)
374         return ELF::R_AARCH64_MOVW_UABS_G3;
375       if (RefKind == AArch64MCExpr::VK_ABS_G2)
376         return ELF::R_AARCH64_MOVW_UABS_G2;
377       if (RefKind == AArch64MCExpr::VK_ABS_G2_S)
378         return ELF::R_AARCH64_MOVW_SABS_G2;
379       if (RefKind == AArch64MCExpr::VK_ABS_G2_NC)
380         return ELF::R_AARCH64_MOVW_UABS_G2_NC;
381       if (RefKind == AArch64MCExpr::VK_ABS_G1)
382         return R_CLS(MOVW_UABS_G1);
383       if (RefKind == AArch64MCExpr::VK_ABS_G1_S)
384         return ELF::R_AARCH64_MOVW_SABS_G1;
385       if (RefKind == AArch64MCExpr::VK_ABS_G1_NC)
386         return ELF::R_AARCH64_MOVW_UABS_G1_NC;
387       if (RefKind == AArch64MCExpr::VK_ABS_G0)
388         return R_CLS(MOVW_UABS_G0);
389       if (RefKind == AArch64MCExpr::VK_ABS_G0_S)
390         return R_CLS(MOVW_SABS_G0);
391       if (RefKind == AArch64MCExpr::VK_ABS_G0_NC)
392         return R_CLS(MOVW_UABS_G0_NC);
393       if (RefKind == AArch64MCExpr::VK_DTPREL_G2)
394         return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2;
395       if (RefKind == AArch64MCExpr::VK_DTPREL_G1)
396         return R_CLS(TLSLD_MOVW_DTPREL_G1);
397       if (RefKind == AArch64MCExpr::VK_DTPREL_G1_NC)
398         return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC;
399       if (RefKind == AArch64MCExpr::VK_DTPREL_G0)
400         return R_CLS(TLSLD_MOVW_DTPREL_G0);
401       if (RefKind == AArch64MCExpr::VK_DTPREL_G0_NC)
402         return R_CLS(TLSLD_MOVW_DTPREL_G0_NC);
403       if (RefKind == AArch64MCExpr::VK_TPREL_G2)
404         return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2;
405       if (RefKind == AArch64MCExpr::VK_TPREL_G1)
406         return R_CLS(TLSLE_MOVW_TPREL_G1);
407       if (RefKind == AArch64MCExpr::VK_TPREL_G1_NC)
408         return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC;
409       if (RefKind == AArch64MCExpr::VK_TPREL_G0)
410         return R_CLS(TLSLE_MOVW_TPREL_G0);
411       if (RefKind == AArch64MCExpr::VK_TPREL_G0_NC)
412         return R_CLS(TLSLE_MOVW_TPREL_G0_NC);
413       if (RefKind == AArch64MCExpr::VK_GOTTPREL_G1)
414         return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1;
415       if (RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC)
416         return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC;
417       Ctx.reportError(Fixup.getLoc(),
418                       "invalid fixup for movz/movk instruction");
419       return ELF::R_AARCH64_NONE;
420     case AArch64::fixup_aarch64_tlsdesc_call:
421       return R_CLS(TLSDESC_CALL);
422     default:
423       Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type");
424       return ELF::R_AARCH64_NONE;
425     }
426   }
427 
428   llvm_unreachable("Unimplemented fixup -> relocation");
429 }
430 
431 MCObjectWriter *llvm::createAArch64ELFObjectWriter(raw_pwrite_stream &OS,
432                                                    uint8_t OSABI,
433                                                    bool IsLittleEndian,
434                                                    bool IsILP32) {
435   MCELFObjectTargetWriter *MOTW =
436       new AArch64ELFObjectWriter(OSABI, IsLittleEndian, IsILP32);
437   return createELFObjectWriter(MOTW, OS, IsLittleEndian);
438 }
439