IR.h 42 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. #include "Language/JavascriptNativeOperators.h"
  7. class Func;
  8. class BasicBlock;
  9. class Region;
  10. class Lowerer;
  11. class IRBuilder;
  12. class IRBuilderAsmJs;
  13. class FlowGraph;
  14. class GlobOpt;
  15. class BailOutInfo;
  16. struct LazyBailOutRecord;
  17. typedef JsUtil::KeyValuePair<StackSym *, BailoutConstantValue> ConstantStackSymValue;
  18. typedef JsUtil::KeyValuePair<StackSym *, StackSym*> CopyPropSyms;
// Values captured by the global optimizer at a bailout point so the bailout
// code can restore the interpreter's view of the byte-code registers.
struct CapturedValues
{
    SListBase<ConstantStackSymValue> constantValues;   // Captured constant values during glob opt
    SListBase<CopyPropSyms> copyPropSyms;              // Captured copy prop values during glob opt
    BVSparse<JitArenaAllocator> * argObjSyms;          // Captured arg object symbols during glob opt
};
  25. class LoweredBasicBlock;
  26. class BranchJumpTableWrapper
  27. {
  28. public:
  29. BranchJumpTableWrapper(uint tableSize) : defaultTarget(nullptr), labelInstr(nullptr), tableSize(tableSize)
  30. {
  31. }
  32. void** jmpTable;
  33. void* defaultTarget;
  34. IR::LabelInstr * labelInstr;
  35. int tableSize;
  36. static BranchJumpTableWrapper* New(JitArenaAllocator * allocator, uint tableSize)
  37. {
  38. BranchJumpTableWrapper * branchTargets = JitAnew(allocator, BranchJumpTableWrapper, tableSize);
  39. //Create the jump table for integers
  40. void* * jmpTable = JitAnewArrayZ(allocator, void*, tableSize);
  41. branchTargets->jmpTable = jmpTable;
  42. return branchTargets;
  43. }
  44. static void Delete(JitArenaAllocator * allocator, BranchJumpTableWrapper * branchTargets)
  45. {
  46. Assert(allocator != nullptr && branchTargets != nullptr);
  47. JitAdeleteArray(allocator, branchTargets->tableSize, branchTargets->jmpTable);
  48. JitAdelete(allocator, branchTargets);
  49. }
  50. };
  51. namespace IR {
  52. class EntryInstr;
  53. class ExitInstr;
  54. class BranchInstr;
  55. class LabelInstr;
  56. class JitProfilingInstr;
  57. class ProfiledInstr;
  58. class ProfiledLabelInstr;
  59. class MultiBranchInstr;
  60. class PragmaInstr;
  61. class ByteCodeUsesInstr;
  62. class Opnd;
  63. class RegOpnd;
  64. class IndirOpnd;
  65. class SymOpnd;
  66. class MemRefOpnd;
  67. class PropertySymOpnd;
  68. enum AddrOpndKind : BYTE;
// Runtime type tag stored on every Instr (m_kind); discriminates which
// concrete Instr subclass an instruction is, so the Is*/As* queries can
// check and downcast without RTTI.
enum IRKind : BYTE {
    InstrKindInvalid,
    InstrKindInstr,          // plain Instr
    InstrKindBranch,         // BranchInstr / MultiBranchInstr
    InstrKindLabel,          // LabelInstr
    InstrKindProfiled,       // ProfiledInstr
    InstrKindProfiledLabel,  // ProfiledLabelInstr
    InstrKindEntry,          // EntryInstr
    InstrKindExit,           // ExitInstr
    InstrKindPragma,         // PragmaInstr
    InstrKindByteCodeUses,   // ByteCodeUsesInstr
    InstrKindJitProfiling,   // JitProfilingInstr
};

// Sentinel meaning "no valid instruction layout".
const int32 InvalidInstrLayout = -1;
  83. ///---------------------------------------------------------------------------
  84. ///
  85. /// class Instr
  86. /// BranchInstr
  87. /// MultiBranchInstr
  88. /// LabelInstr
  89. /// JitProfilingInstr
  90. /// ProfiledInstr
  91. /// EntryInstr
  92. /// ExitInstr
  93. /// PragmaInstr
  94. /// BailoutInstr
  95. ///             ByteCodeUsesInstr
  96. ///
  97. ///---------------------------------------------------------------------------
// Base class for all IR instructions. An Instr is a node in a doubly-linked
// list (m_next/m_prev) owned by a Func, carrying an opcode, up to one dst and
// two src operands, and a large set of flag bits that the optimizer phases use
// to record per-instruction facts.
class Instr
{
protected:
    // Constructs an "empty" instruction: opcode is the sentinel
    // MaxByteSizedOpcodes and all links/operands are null until Init()/a New()
    // factory fills them in. NOTE(review): m_kind is not initialized here -
    // presumably Init() always assigns it; confirm before using a raw-constructed Instr.
    Instr(bool hasBailOutInfo = false) :
        m_next(nullptr),
        m_prev(nullptr),
        m_opcode(Js::OpCode::MaxByteSizedOpcodes),
        m_func(nullptr),
        m_number(Js::Constants::NoByteCodeOffset),
        m_dst(nullptr),
        m_src1(nullptr),
        m_src2(nullptr),
#if DBG_DUMP
        globOptInstrString(nullptr),
#endif
        dstIsTempNumber(false),
        dstIsTempNumberTransferred(false),
        dstIsTempObject(false),
        isCloned(false),
        hasBailOutInfo(hasBailOutInfo),
        hasAuxBailOut(false),
        forcePreOpBailOutIfNeeded(false),
        usesStackArgumentsObject(false),
        isInlineeEntryInstr(false),
        ignoreNegativeZero(false),
        dstIsAlwaysConvertedToInt32(false),
        dstIsAlwaysConvertedToNumber(false),
        ignoreIntOverflow(false),
        ignoreIntOverflowInRange(false),
        loadedArrayHeadSegment(false),
        loadedArrayHeadSegmentLength(false),
        extractedUpperBoundCheckWithoutHoisting(false),
        ignoreOverflowBitCount(32),
        isCtorCall(false),
        isCallInstrProtectedByNoProfileBailout(false)
    {
    }

public:
    // Factories: allocate an Instr in func's arena with 0/1/2 sources.
    static Instr * New(Js::OpCode opcode, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func *func);
    static Instr* NewConstantLoad(IR::RegOpnd* dstOpnd, Js::Var varConst, Func *func);

public:
    // --- Kind queries and checked downcasts (driven by m_kind) ---
    bool IsPlainInstr() const;
    bool IsEntryInstr() const;
    EntryInstr * AsEntryInstr();
    bool IsExitInstr() const;
    ExitInstr * AsExitInstr();
    bool IsBranchInstr() const;
    BranchInstr * AsBranchInstr();
    bool IsLabelInstr() const;
    LabelInstr * AsLabelInstr();
    bool IsJitProfilingInstr() const;
    JitProfilingInstr * AsJitProfilingInstr();
    bool IsProfiledInstr() const;
    ProfiledInstr * AsProfiledInstr();
    bool IsProfiledLabelInstr() const;
    ProfiledLabelInstr * AsProfiledLabelInstr();
    bool IsPragmaInstr() const;
    PragmaInstr * AsPragmaInstr();
    bool IsByteCodeUsesInstr() const;
    ByteCodeUsesInstr * AsByteCodeUsesInstr();

    // --- Miscellaneous predicates ---
    bool IsLowered() const;
    bool IsRealInstr() const;
    bool IsInlined() const;
    bool IsNewScObjectInstr() const;
    bool IsInvalidInstr() const;
    Instr* GetInvalidInstr();

    // Linked iff it is currently threaded into an instruction list.
    bool IsLinked() const { return this->m_prev != nullptr || this->m_next != nullptr; }

    bool StartsBasicBlock() const;
    bool EndsBasicBlock() const;
    bool HasFallThrough() const;
    bool DoStackArgsOpt(Func *topFunc) const;
    bool IsEqual(IR::Instr *instr) const;

    bool IsCloned() const { return isCloned; }
    void SetIsCloned(bool isCloned) { this->isCloned = isCloned; }

    // --- Bailout state queries ---
    bool HasBailOutInfo() const { return hasBailOutInfo; }
    bool HasAuxBailOut() const { return hasAuxBailOut; }
    bool HasTypeCheckBailOut() const;
    bool HasEquivalentTypeCheckBailOut() const;
    void ClearBailOutInfo();

    bool IsDstNotAlwaysConvertedToInt32() const;
    bool IsDstNotAlwaysConvertedToNumber() const;
    bool ShouldCheckForNegativeZero() const;
    bool ShouldCheckForIntOverflow() const;
    bool ShouldCheckFor32BitOverflow() const;
    bool ShouldCheckForNon32BitOverflow() const;
    bool HasAnyImplicitCalls() const;
    bool HasAnySideEffects() const;

    IRKind GetKind() const;

    // --- Destination operand management ---
    Opnd * GetDst() const;
    Opnd * SetDst(Opnd * newDst);
    Opnd * SetFakeDst(Opnd * newDst);
    Opnd * UnlinkDst();
    void FreeDst();
    Opnd * Replace(Opnd *oldOpnd, Opnd *newOpnd);
    Opnd * DeepReplace(Opnd *const oldOpnd, Opnd *const newOpnd);
    Opnd * ReplaceDst(Opnd * newDst);
    Instr * SinkDst(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, IR::Instr *insertAfterInstr = nullptr);
    Instr * SinkDst(Js::OpCode assignOpcode, StackSym * stackSym, RegNum regNum = RegNOREG, IR::Instr *insertAfterInstr = nullptr);
    Instr * SinkInstrBefore(IR::Instr * instrTarget);

    // --- Source operand management ---
    Opnd * GetSrc1() const;
    Opnd * SetSrc1(Opnd * newSrc);
    Opnd * UnlinkSrc1();
    void FreeSrc1();
    Opnd * ReplaceSrc1(Opnd * newSrc);
    Instr * HoistSrc1(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, StackSym *newSym = nullptr);
    bool IsSrc1FunctionObject();
    Opnd * GetSrc2() const;
    Opnd * SetSrc2(Opnd * newSrc);
    Opnd * UnlinkSrc2();
    void FreeSrc2();
    Opnd * ReplaceSrc2(Opnd * newSrc);
    Instr * HoistSrc2(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, StackSym *newSym = nullptr);
    Instr * HoistIndirOffset(IndirOpnd *indirOpnd, RegNum regNum = RegNOREG);
    Instr * HoistSymOffset(SymOpnd *symOpnd, RegNum baseReg, uint32 offset, RegNum regNum = RegNOREG);
    Instr * HoistIndirOffsetAsAdd(IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum);
    Instr * HoistSymOffsetAsAdd(SymOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum);
    Instr * HoistIndirIndexOpndAsAdd(IR::IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, IR::Opnd *indexOpnd, RegNum regNum);
    IndirOpnd * HoistMemRefAddress(MemRefOpnd *const memRefOpnd, const Js::OpCode loadOpCode);
    Opnd * UnlinkSrc(Opnd *src);
    Opnd * ReplaceSrc(Opnd *oldSrc, Opnd * newSrc);

    // --- List manipulation (insert/unlink/free relative to this instr) ---
    void InsertBefore(Instr *instr);
    void InsertAfter(Instr *instr);
    void InsertRangeBefore(Instr *startInstr, Instr *endInstr);
    void InsertMultipleBefore(Instr *endInstr);
    void InsertRangeAfter(Instr *startInstr, Instr *endInstr);
    void InsertMultipleAfter(Instr *endInstr);
    void Unlink();
    void Free();
    void Remove();
    void SwapOpnds();
    void TransferTo(Instr * instr);
    void TransferDstAttributesTo(Instr * instr);
    IR::Instr * Copy();
    IR::Instr * Clone();

    // Converts this instruction into a bailout-carrying instruction.
    IR::Instr * ConvertToBailOutInstr(IR::Instr * bailOutTarget, BailOutKind kind);
    IR::Instr * ConvertToBailOutInstr(BailOutInfo * bailOutInfo, BailOutKind kind, bool useAuxBailout = false);

    // --- Neighborhood navigation ---
    IR::Instr * GetNextRealInstr() const;
    IR::Instr * GetNextRealInstrOrLabel() const;
    IR::Instr * GetNextBranchOrLabel() const;
    IR::Instr * GetPrevRealInstr() const;
    IR::Instr * GetPrevRealInstrOrLabel() const;
    IR::Instr * GetInsertBeforeByteCodeUsesInstr();
    IR::LabelInstr *GetOrCreateContinueLabel(const bool isHelper = false);
    RegOpnd * FindRegUse(StackSym *sym);
    static RegOpnd *FindRegUseInRange(StackSym *sym, Instr *instrBegin, Instr *instrEnd);
    RegOpnd * FindRegDef(StackSym *sym);
    BranchInstr * ChangeCmCCToBranchInstr(LabelInstr *targetInstr);
    static void MoveRangeAfter(Instr * instrStart, Instr * instrLast, Instr * instrAfter);
    static IR::Instr * CloneRange(Instr * instrStart, Instr * instrLast, Instr * instrInsert, Lowerer *lowerer, JitArenaAllocator *alloc, bool (*fMapTest)(IR::Instr*), bool clonedInstrGetOrigArgSlot);

    bool CanHaveArgOutChain() const;
    bool HasEmptyArgOutChain(IR::Instr** startCallInstrOut = nullptr);
    bool HasFixedFunctionAddressTarget() const;

#if ENABLE_DEBUG_CONFIG_OPTIONS
    const char * GetBailOutKindName() const;
#endif

#if DBG_DUMP
    // --- Debug dumping helpers ---
    virtual void Dump(IRDumpFlags flags);
    void Dump();
    void DumpSimple();
    wchar_t* DumpString();
    void DumpGlobOptInstrString();
    void Dump(int window);
    void DumpRange(Instr *instrEnd);
    void DumpByteCodeOffset();
#endif
#if ENABLE_DEBUG_CONFIG_OPTIONS
    void DumpTestTrace();
    void DumpFieldCopyPropTestTrace();
#endif

    uint32 GetByteCodeOffset() const;
    uint32 GetNumber() const;
    void SetByteCodeOffset(IR::Instr * instr);
    void ClearByteCodeOffset();

    // --- Bailout info access ---
    BailOutInfo * GetBailOutInfo() const;
    BailOutInfo * UnlinkBailOutInfo();
    void ReplaceBailOutInfo(BailOutInfo *newBailOutInfo);
    IR::Instr * ShareBailOut();
    BailOutKind GetBailOutKind() const;
    BailOutKind GetBailOutKindNoBits() const;
    BailOutKind GetAuxBailOutKind() const;
    void SetBailOutKind(const IR::BailOutKind bailOutKind);
    void SetAuxBailOutKind(const IR::BailOutKind bailOutKind);
    void PromoteAuxBailOut();
    void ResetAuxBailOut();
    void UnlinkStartCallFromBailOutInfo(IR::Instr *endInstr) const;
    void ChangeEquivalentToMonoTypeCheckBailOut();

    Js::Var TryOptimizeInstrWithFixedDataProperty(IR::Instr ** pInstr, GlobOpt* globopt);
    Opnd * FindCallArgumentOpnd(const Js::ArgSlot argSlot, IR::Instr * *const ownerInstrRef = nullptr);

    // Copies the instruction number from 'instr' onto this instruction.
    void CopyNumber(IR::Instr *instr) { this->SetNumber(instr->GetNumber()); }

    bool FetchOperands(_Out_writes_(argsOpndLength) IR::Opnd **argsOpnd, uint argsOpndLength);
    template <typename Fn>
    bool ForEachCallDirectArgOutInstrBackward(Fn fn, uint argsOpndLength) const;
    bool IsCmCC_A();
    bool IsCmCC_R8();
    bool IsCmCC_I4();
    bool BinaryCalculator(IntConstType src1Const, IntConstType src2Const, IntConstType *pResult);
    bool UnaryCalculator(IntConstType src1Const, IntConstType *pResult);
    IR::Instr* GetNextArg();

    // Iterates argument chain
    // Walks the ArgOut chain linked through src2 (from this call instruction back
    // to its StartCall), invoking 'callback' on each real ArgOut. Returns true if
    // the callback ever returns true (early exit), false otherwise.
    template<class Fn>
    bool IterateArgInstrs(Fn callback)
    {
        StackSym* linkSym = this->GetSrc2()->GetStackSym();
        Assert(linkSym->IsSingleDef());
        IR::Instr *argInstr = linkSym->m_instrDef;
        IR::Instr* nextArg = nullptr;
        do
        {
            // Get the next instr before calling 'callback' since callback might modify the IR.
            if (argInstr->GetSrc2() && argInstr->GetSrc2()->IsSymOpnd())
            {
                linkSym = argInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
                Assert(linkSym->IsArgSlotSym());
                // Due to dead code elimination in FGPeeps, it is possible for the definitions of
                // the instructions that we are visiting during FG to have been freed. In this case,
                // the ArgSlot, even though it was a single def, will report IsSingleDef() as false
                // since instrDef is reset to nullptr when the def instr is freed
                Assert(linkSym->IsSingleDef() ||
                    (m_func->IsInPhase(Js::Phase::FGPeepsPhase) || m_func->IsInPhase(Js::Phase::FGBuildPhase)));
                nextArg = linkSym->GetInstrDef();
            }
            else
            {
                nextArg = nullptr;
            }
            if(argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized)
            {
                argInstr = nextArg;
                // This is a fake ArgOut, skip it
                continue;
            }
            if (argInstr->m_opcode == Js::OpCode::StartCall)
            {
                Assert(nextArg == nullptr);
                break;
            }
            if(callback(argInstr))
            {
                return true;
            }
            argInstr = nextArg;
        } while(argInstr && !argInstr->IsInvalidInstr());

        // If an instr in the call sequence is invalid (0xFDFDFDFD), it must have been freed.
        // This is possible if some dead-code-removal/peeps code removed only part of the call sequence, while the whole sequence was dead (TH Bug 594245).
        // We allow this possibility here, while relying on the more involved dead-code-removal to remove the rest of the call sequence.
        // Inserting the opcode InvalidOpCode, with no lowering, here to safeguard against the possibility of a dead part of the call sequence not being removed. The lowerer would assert then.
        if (argInstr && argInstr->IsInvalidInstr())
        {
            this->InsertBefore(Instr::New(Js::OpCode::InvalidOpCode, this->m_func));
        }
        return false;
    }

    // Iterates all meta args for inlinee
    // 'this' must be an InlineeStart; scans backwards to the first InlineeMetaArg,
    // then walks them forward invoking 'callback'. Returns true on early exit.
    template<class Fn>
    bool IterateMetaArgs(Fn callback)
    {
        Assert(this->m_opcode == Js::OpCode::InlineeStart);
        Instr* currentInstr = this;
        while(currentInstr->m_opcode != Js::OpCode::InlineeMetaArg)
        {
            currentInstr = currentInstr->m_prev;
        }

        // backward iteration
        while (currentInstr->m_prev->m_opcode == Js::OpCode::InlineeMetaArg)
        {
            currentInstr = currentInstr->m_prev;
        }

        // forward iteration
        while(currentInstr->m_opcode == Js::OpCode::InlineeMetaArg)
        {
            // cache next instr as callback might move meta arg.
            IR::Instr* nextInstr = currentInstr->m_next;
            if(callback(currentInstr))
            {
                return true;
            }
            currentInstr = nextInstr;
        }
        return false;
    }

    IR::Instr* GetBytecodeArgOutCapture();
    void GenerateBytecodeArgOutCapture();
    bool HasByteCodeArgOutCapture();
    void GenerateArgOutSnapshot();
    IR::Instr* GetArgOutSnapshot();
    Js::JavascriptFunction* GetFixedFunction() const;
    uint GetArgOutCount(bool getInterpreterArgOutCount);
    IR::PropertySymOpnd *GetPropertySymOpnd() const;
    bool CallsAccessor(IR::PropertySymOpnd* methodOpnd = nullptr);
    bool CallsGetter(IR::PropertySymOpnd* methodOpnd = nullptr);
    bool CallsSetter(IR::PropertySymOpnd* methodOpnd = nullptr);
    bool UsesAllFields();
    void MoveArgs(bool generateByteCodeCapture = false);
    void Move(IR::Instr* insertInstr);

private:
    void ClearNumber() { this->m_number = 0; }
    void SetNumber(uint32 number);
    friend class Func;
    friend class Lowerer;
    void SetByteCodeOffset(uint32 number);
    friend class IRBuilder;
    friend class IRBuilderAsmJs;
    friend class FlowGraph;
    void SetBailOutKind_NoAssert(const IR::BailOutKind bailOutKind);

public:
    // used only for SIMD Ld/St from typed arrays.
    // we keep these here to avoid increase in number of opcodes and to not use ExtendedArgs
    uint8 dataWidth;

#ifdef BAILOUT_INJECTION
    uint bailOutByteCodeLocation;
#endif
    Instr * m_next;
    Instr * m_prev;
    Func * m_func;
#if DBG_DUMP
    wchar_t * globOptInstrString;
#endif

    // These should be together to pack into a uint32
    Js::OpCode m_opcode;
    uint8 ignoreOverflowBitCount; // Number of bits after which ovf matters. Currently used for MULs.

    bool dstIsTempNumber : 1;
    bool dstIsTempNumberTransferred : 1;
    bool dstIsTempObject : 1;
    bool usesStackArgumentsObject : 1;
    // An inlinee entry instruction initializes the InlineeCallInfo on the frame.
    bool isInlineeEntryInstr : 1;
    bool ignoreNegativeZero : 1;
    bool ignoreIntOverflow : 1;
    bool ignoreIntOverflowInRange : 1;
    bool forcePreOpBailOutIfNeeded : 1;
    bool loadedArrayHeadSegment : 1;
    bool loadedArrayHeadSegmentLength : 1;
    bool extractedUpperBoundCheckWithoutHoisting : 1;
    bool isCtorCall : 1;
    bool dstIsAlwaysConvertedToInt32 : 1;
    bool dstIsAlwaysConvertedToNumber : 1;
    bool isCallInstrProtectedByNoProfileBailout : 1;

protected:
    bool isCloned : 1;
    bool hasBailOutInfo : 1;
    // Used for aux bail out. We are using same bailOutInfo, just different boolean to hide regular bail out.
    // Refer to ConvertToBailOutInstr implementation for details.
    bool hasAuxBailOut : 1;

    IRKind m_kind;       // subclass discriminator; see IRKind
    uint32 m_number;     // instruction number / byte-code offset (see Get/SetNumber)
    Opnd * m_dst;
    Opnd * m_src1;
    Opnd * m_src2;

    void Init(Js::OpCode opcode, IRKind kind, Func * func);
    IR::Instr * CloneInstr() const;
};
// Pseudo-instruction recording which byte-code symbols are upward-exposed
// (still needed for bailout) at this point, without generating any code.
class ByteCodeUsesInstr : public Instr
{
public:
    static ByteCodeUsesInstr * New(Func * func);
    static ByteCodeUsesInstr* New(IR::Instr* originalBytecodeInstr, SymID symid);

    BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed; // set of used byte-code sym ids
    PropertySym * propertySymUse;                            // property sym used, if any

    // Adds 'symId' to the used set.
    void Set(uint symId);
};
// Instruction that carries profiling metadata for profiling-instrumented
// ("simple") JIT code generation.
// NOTE(review): no constructor is declared here, so the members below are
// presumably initialized by the New() factories - confirm in IR.cpp before
// constructing one directly.
class JitProfilingInstr : public Instr
{
public:
    static JitProfilingInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func * func);
    static JitProfilingInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func * func);
    JitProfilingInstr* CloneJitProfiling() const;
    JitProfilingInstr* CopyJitProfiling() const;

    Js::ProfileId profileId;
    Js::ProfileId arrayProfileId;
    union
    {
        Js::InlineCacheIndex inlineCacheIndex;
        uint loopNumber;
    };
    bool isProfiledReturnCall : 1;
    bool isBeginSwitch : 1;
    bool isNewArray : 1;
    bool isLoopHelper : 1;
};
// Instruction annotated with dynamic profile data collected by the
// interpreter/profiler; the union holds whichever kind of profile info
// applies to this opcode (element load/store info, field info, or a raw id).
class ProfiledInstr : public Instr
{
protected:
    ProfiledInstr(bool hasBailOutInfo = false) : Instr(hasBailOutInfo) {}

public:
    static ProfiledInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func * func);
    static ProfiledInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func * func);
    ProfiledInstr *CloneProfiledInstr() const;
    ProfiledInstr *CopyProfiledInstr() const;

    union
    {
    public:
        uint profileId;
        const Js::LdElemInfo * ldElemInfo;
        const Js::StElemInfo * stElemInfo;
    private:
        Js::FldInfo::TSize fldInfoData; // raw storage reinterpreted by FldInfo()
    public:
        // View of the field-info storage as a Js::FldInfo.
        Js::FldInfo &FldInfo()
        {
            return reinterpret_cast<Js::FldInfo &>(fldInfoData);
        }
    } u;

    static const uint InvalidProfileId = (uint)-1;
};
  505. ///---------------------------------------------------------------------------
  506. ///
  507. /// class EntryInstr
  508. ///
  509. ///---------------------------------------------------------------------------
// Marker instruction at the head of a function's instruction list
// (presumably; the factory is defined in IR.cpp).
class EntryInstr : public Instr
{
public:
    static EntryInstr * New(Js::OpCode opcode, Func *func);
};
  515. ///---------------------------------------------------------------------------
  516. ///
  517. /// class ExitInstr
  518. ///
  519. ///---------------------------------------------------------------------------
// Marker instruction at the tail of a function's instruction list
// (presumably; the factory is defined in IR.cpp).
class ExitInstr : public Instr
{
public:
    static ExitInstr * New(Js::OpCode opcode, Func *func);
};
  525. ///---------------------------------------------------------------------------
  526. ///
  527. /// class LabelInstr
  528. ///
  529. ///---------------------------------------------------------------------------
  530. class LabelInstr : public Instr
  531. {
  532. friend class BranchInstr;
  533. friend class IRBuilder;
  534. friend class IRBuilderAsmJs;
  535. friend class MultiBranchInstr;
  536. public:
  537. LabelInstr(JitArenaAllocator * allocator) : Instr(), labelRefs(allocator), m_isLoopTop(false), m_block(nullptr), isOpHelper(false),
  538. m_hasNonBranchRef(false), m_region(nullptr), m_loweredBasicBlock(nullptr), m_isDataLabel(false)
  539. #if DBG
  540. , m_noHelperAssert(false)
  541. #endif
  542. {
  543. #if DBG_DUMP
  544. m_id = 0;
  545. #endif
  546. m_pc.pc = nullptr;
  547. }
  548. static LabelInstr * New(Js::OpCode opcode, Func *func, bool isOpHelper = false);
  549. public:
  550. SListCounted<BranchInstr *> labelRefs;
  551. Lifetime ** m_regContent;
  552. BYTE m_isLoopTop : 1;
  553. BYTE isOpHelper : 1;
  554. BYTE m_hasNonBranchRef : 1;
  555. BYTE m_isDataLabel : 1;
  556. #if DBG
  557. BYTE m_noHelperAssert : 1;
  558. #endif
  559. unsigned int m_id;
  560. LoweredBasicBlock* m_loweredBasicBlock;
  561. private:
  562. union labelLocation
  563. {
  564. BYTE * pc; // Used by encoder and is the real pc offset
  565. uint32 offset; // Used by preEncoder and is an estimation pc offset, not accurate
  566. } m_pc;
  567. BasicBlock * m_block;
  568. Loop * m_loop;
  569. Region * m_region;
  570. public:
  571. inline void SetPC(BYTE * pc);
  572. inline BYTE * GetPC(void) const;
  573. inline void SetOffset(uint32 offset);
  574. inline void ResetOffset(uint32 offset);
  575. inline uint32 GetOffset(void) const;
  576. inline void SetBasicBlock(BasicBlock * block);
  577. inline BasicBlock * GetBasicBlock(void) const;
  578. inline void SetLoop(Loop *loop);
  579. inline Loop * GetLoop(void) const;
  580. inline void UnlinkBasicBlock(void);
  581. inline void SetRegion(Region *);
  582. inline Region * GetRegion(void) const;
  583. inline BOOL IsUnreferenced(void) const;
  584. LabelInstr * CloneLabel(BOOL fCreate);
  585. #if DBG_DUMP
  586. virtual void Dump(IRDumpFlags flags) override;
  587. #endif
  588. private:
  589. void AddLabelRef(BranchInstr *branchRef);
  590. void RemoveLabelRef(BranchInstr *branchRef);
  591. protected:
  592. void Init(Js::OpCode opcode, IRKind kind, Func *func, bool isOpHelper);
  593. };
// Loop-top label that additionally carries the loop's profiled implicit-call
// and loop flags.
class ProfiledLabelInstr : public LabelInstr
{
private:
    ProfiledLabelInstr(JitArenaAllocator * allocator);

public:
    static ProfiledLabelInstr * New(Js::OpCode opcode, Func *func, Js::ImplicitCallFlags flags, Js::LoopFlags loopFlags);

    Js::ImplicitCallFlags loopImplicitCallFlags;
    Js::LoopFlags loopFlags;
#if DBG_DUMP
    uint loopNum;
#endif
};
  606. ///---------------------------------------------------------------------------
  607. ///
  608. /// class BranchInstr
  609. ///
  610. ///---------------------------------------------------------------------------
  611. class BranchInstr : public Instr
  612. {
  613. public:
  614. bool m_isAirlock : 1;
  615. bool m_isSwitchBr : 1;
  616. bool m_isOrphanedLeave : 1; // A Leave in a loop body in a try, most likely generated because of a return statement.
  617. #if DBG
  618. bool m_isMultiBranch;
  619. bool m_isHelperToNonHelperBranch;
  620. #endif
  621. public:
  622. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Func *func);
  623. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Opnd *srcOpnd, Func *func);
  624. static BranchInstr * New(Js::OpCode opcode, Opnd* destOpnd, LabelInstr * branchTarget, Opnd *srcOpnd, Func *func);
  625. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Opnd *src1Opnd, Opnd *src2Opnd, Func *func);
  626. BranchInstr(bool hasBailOutInfo = false) : Instr(hasBailOutInfo), m_branchTarget(nullptr), m_isAirlock(false), m_isSwitchBr(false), m_isOrphanedLeave(false)
  627. {
  628. #if DBG
  629. m_isMultiBranch = false;
  630. #endif
  631. }
  632. void SetTarget(LabelInstr *labelInstr); // Only used for non-multi-branch
  633. bool ReplaceTarget(LabelInstr * oldLabelInstr, LabelInstr * newLabelInstr);
  634. void ClearTarget();
  635. LabelInstr * GetTarget() const;
  636. bool IsConditional() const;
  637. bool IsUnconditional() const;
  638. void Invert();
  639. void RetargetClonedBranch();
  640. BranchInstr * CloneBranchInstr() const;
  641. bool IsMultiBranch() const;
  642. MultiBranchInstr * AsMultiBrInstr();
  643. void SetByteCodeReg(Js::RegSlot reg) { m_byteCodeReg = reg; }
  644. Js::RegSlot GetByteCodeReg() { return m_byteCodeReg; }
  645. bool HasByteCodeReg() { return m_byteCodeReg != Js::Constants::NoRegister; }
  646. bool IsLoopTail(Func * func);
  647. public:
  648. Lifetime ** m_regContent;
  649. private:
  650. LabelInstr * m_branchTarget;
  651. Js::RegSlot m_byteCodeReg;
  652. };
  653. ///---------------------------------------------------------------------------
  654. ///
  655. /// class MultiBranchInstr
  656. ///
  657. ///---------------------------------------------------------------------------
  658. class MultiBranchInstr : public BranchInstr
  659. {
  660. private:
  661. /*
  662. The value field in the dictionary has different semantics at different points of time. Hence the 'value' field is implemented as a void *.
  663. In IR Layer:
  664. Offset is stored in the dictionary until we generate the Labels in InsertLabels().
  665. LabelInstr is stored in the dictionary, after we generate the LabelInstrs.
  666. In Encoder:
  667. After the fixup, actual machine address corresponding to the LabelInstr is stored as the 'value'.
  668. */
  669. private:
  670. typedef Js::JavascriptString* TBranchKey;
  671. typedef Js::BranchDictionaryWrapper<TBranchKey> BranchDictionaryWrapper;
  672. typedef BranchDictionaryWrapper::BranchDictionary BranchDictionary;
  673. typedef BranchJumpTableWrapper BranchJumpTable;
  674. void * m_branchTargets; // can point to a dictionary or a jump table
  675. public:
  676. static MultiBranchInstr * New(Js::OpCode opcode, IR::Opnd * srcOpnd, Func *func);
  677. static MultiBranchInstr * New(Js::OpCode opcode, Func *func);
  678. enum Kind
  679. {
  680. IntJumpTable,
  681. StrDictionary,
  682. SingleCharStrJumpTable,
  683. };
  684. Kind m_kind;
  685. IntConstType m_baseCaseValue;
  686. IntConstType m_lastCaseValue;
  687. MultiBranchInstr() :
  688. m_branchTargets(nullptr)
  689. {
  690. #if DBG
  691. m_isMultiBranch = true;
  692. #endif
  693. }
  694. void AddtoDictionary(uint32 offset, TBranchKey key);
  695. void AddtoJumpTable(uint32 offset, uint32 jmpIndex);
  696. void CreateBranchTargetsAndSetDefaultTarget(int dictionarySize, Kind kind, uint defaultTargetOffset);
  697. void ChangeLabelRef(LabelInstr * oldTarget, LabelInstr * newTarget);
  698. bool ReplaceTarget(IR::LabelInstr * oldLabelInstr, IR::LabelInstr * newLabelInstr);
  699. void MultiBranchInstr::FixMultiBrDefaultTarget(uint32 targetOffset);
  700. void ClearTarget();
  701. BranchDictionaryWrapper * GetBranchDictionary();
  702. BranchJumpTable * GetBranchJumpTable();
  703. ///---------------------------------------------------------------------------
  704. ///
  705. /// template MapMultiBrLabels
  706. /// - Maps through the branchTargets dictionary for all the labelInstrs
  707. ///---------------------------------------------------------------------------
  708. template<class Fn>
  709. void MapMultiBrLabels(Fn fn)
  710. {
  711. MapMultiBrTargetByAddress([fn](void ** value) -> void
  712. {
  713. fn((LabelInstr*) *value);
  714. });
  715. }
  716. ///---------------------------------------------------------------------------
  717. ///
  718. /// template MapUniqueMultiBrLabels
  719. /// - Maps through the branchTargets dictionary for all unique labelInstrs
  720. ///---------------------------------------------------------------------------
  721. template<class Fn>
  722. void MapUniqueMultiBrLabels(Fn fn)
  723. {
  724. BVSparse<JitArenaAllocator> visitedTargets(m_func->m_alloc);
  725. MapMultiBrLabels([&](IR::LabelInstr *const targetLabel)
  726. {
  727. if(visitedTargets.Test(targetLabel->m_id))
  728. {
  729. return;
  730. }
  731. visitedTargets.Set(targetLabel->m_id);
  732. fn(targetLabel);
  733. });
  734. }
  735. ///--------------------------------------------------------------------------------------------
  736. ///
  737. /// template UpdateMultiBrTargetOffsets
  738. /// - Maps through the branchTargets dictionary for updating the target offset by returning the target offset.
  739. ///--------------------------------------------------------------------------------------------
  740. template<class Fn>
  741. void UpdateMultiBrTargetOffsets(Fn fn)
  742. {
  743. MapMultiBrTargetByAddress([fn](void ** value) -> void
  744. {
  745. *value = (void*)fn(::Math::PointerCastToIntegral<uint32>(*value));
  746. });
  747. }
  748. ///--------------------------------------------------------------------------------------------
  749. ///
  750. /// template UpdateMultiBrLabels
  751. /// - Maps through the branchDictionary for updating the labelInstr
  752. ///--------------------------------------------------------------------------------------------
  753. template<class Fn>
  754. void UpdateMultiBrLabels(Fn fn)
  755. {
  756. MapMultiBrTargetByAddress([fn](void ** value) -> void
  757. {
  758. IR::LabelInstr * oldLabelInstr = (LabelInstr*)*value;
  759. IR::LabelInstr * newLabelInstr = fn(oldLabelInstr);
  760. *value = (void*)newLabelInstr;
  761. });
  762. }
  763. ///-------------------------------------------------------------------------------------------------------------
  764. ///
  765. /// template MapMultiBrTargetByAddress
  766. /// - Maps through the branchDictionary accessing the address of the 'value'
  767. ///-------------------------------------------------------------------------------------------------------------
  768. template<class Fn>
  769. void MapMultiBrTargetByAddress(Fn fn)
  770. {
  771. if(!m_branchTargets)
  772. {
  773. return;
  774. }
  775. void ** defaultTarget = nullptr;
  776. switch (m_kind)
  777. {
  778. case StrDictionary:
  779. {
  780. BranchDictionary& branchDictionary = GetBranchDictionary()->dictionary;
  781. defaultTarget = &(((MultiBranchInstr::BranchDictionaryWrapper*)(m_branchTargets))->defaultTarget);
  782. branchDictionary.MapAddress([fn](TBranchKey key, void ** value)
  783. {
  784. fn(value);
  785. });
  786. break;
  787. }
  788. case IntJumpTable:
  789. case SingleCharStrJumpTable:
  790. {
  791. void ** branchJumpTable = GetBranchJumpTable()->jmpTable;
  792. defaultTarget = &(GetBranchJumpTable()->defaultTarget);
  793. for (IntConstType i = m_baseCaseValue; i <= m_lastCaseValue; i++)
  794. {
  795. fn(&branchJumpTable[i - m_baseCaseValue]);
  796. }
  797. break;
  798. }
  799. default:
  800. Assert(false);
  801. };
  802. fn(defaultTarget);
  803. }
  804. };
  805. ///---------------------------------------------------------------------------
  806. ///
  807. /// class PragmaInstr
  808. ///
  809. ///---------------------------------------------------------------------------
  810. class PragmaInstr : public Instr
  811. {
  812. public:
  813. uint32 m_statementIndex;
  814. uint32 m_offsetInBuffer; // offset in the binary code buffer
  815. public:
  816. static PragmaInstr * New(Js::OpCode opcode, uint32 index, Func *func);
  817. PragmaInstr() : Instr(), m_statementIndex(0)
  818. {
  819. }
  820. #if DBG_DUMP
  821. virtual void Dump(IRDumpFlags flags) override;
  822. #endif
  823. #if DBG_DUMP | defined(VTUNE_PROFILING)
  824. void Record(uint32 nativeBufferOffset);
  825. #endif
  826. void RecordThrowMap(Js::SmallSpanSequenceIter& iter, uint32 nativeBufferOffset);
  827. PragmaInstr * ClonePragma();
  828. PragmaInstr * CopyPragma();
  829. };
// Mixin template that extends any instruction type (Instr, ProfiledInstr,
// BranchInstr) with bailout information. Instantiated via the typedefs below.
template <typename InstrType>
class BailOutInstrTemplate : public InstrType
{
private:
    // Only constructible through the New factories; forwards the
    // "has bailout" flag to the base instruction constructor.
    BailOutInstrTemplate() : InstrType(true) {}
public:
    static BailOutInstrTemplate * New(Js::OpCode opcode, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, IR::Opnd *src1, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, IR::Opnd *src1, IR::Opnd *src2, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, BailOutKind kind, BailOutInfo * bailOutInfo, Func * func);
    BailOutInstrTemplate * CloneBailOut() const;
    BailOutInfo * bailOutInfo;      // shared bailout record (captured state)
    BailOutKind bailOutKind;        // primary bailout kind
    // Auxiliary bailout kind.
    // This is kind of a decoration on top of main bail out kind and is not used for runtime bail out logic (in globopt, etc).
    // It's added when we convert instr to bailout instr for which there is already bailout,
    // and is not used/just preserved until lowerer, in the beginning of lowerer we split it out.
    // Currently used for debugger bailout when it is shared with main bailout.
    BailOutKind auxBailOutKind;
};

// Concrete bailout-capable instruction flavors.
typedef BailOutInstrTemplate<Instr> BailOutInstr;
typedef BailOutInstrTemplate<ProfiledInstr> ProfiledBailOutInstr;
typedef BailOutInstrTemplate<BranchInstr> BranchBailOutInstr;
//
// FOREACH_INSTR iterators
//

// Debug-only sanity checks for the non-EDITING iterators: remember the
// previously visited instruction and assert that it still links to the
// current one, catching list mutation while iterating.
#ifdef DBG
# define INIT_PREV IR::Instr * __prevInstrCheck = nullptr
# define CHECK_PREV(instr)\
    AssertMsg(__prevInstrCheck == nullptr || __prevInstrCheck->m_next == instr, \
        "Modifying instr list but not using EDITING iterator!"); \
    __prevInstrCheck = instr;
#else
# define INIT_PREV
# define CHECK_PREV(instr)
#endif

// Backward-iteration flavor of the same check, validated through m_prev.
#ifdef DBG
# define INIT_NEXT IR::Instr * __nextInstrCheck = nullptr
# define CHECK_NEXT(instr)\
    AssertMsg(__nextInstrCheck == nullptr || __nextInstrCheck->m_prev == instr, \
        "Modifying instr list but not using EDITING iterator!"); \
    __nextInstrCheck = instr;
#else
# define INIT_NEXT
# define CHECK_NEXT(instr)
#endif
// Iterate [instrList, instrLast] inclusive, forward. 'instrLast == nullptr'
// means run to the end of the list. Must be closed with NEXT_INSTR_IN_RANGE.
#define FOREACH_INSTR_IN_RANGE(instr, instrList, instrLast)\
    {\
        INIT_PREV;\
        IR::Instr *instrStop = instrLast ? ((IR::Instr*)instrLast)->m_next : nullptr; \
        for ( IR::Instr *instr = instrList;\
              instr != instrStop;\
              instr = instr->m_next)\
        {\
            CHECK_PREV(instr);
#define NEXT_INSTR_IN_RANGE }}

// Same as above but skips instructions for which IsRealInstr() is false.
#define FOREACH_REAL_INSTR_IN_RANGE(instr, instrList, instrLast)\
    FOREACH_INSTR_IN_RANGE(instr, instrList, instrLast)\
    {\
        if (!instr->IsRealInstr())\
        {\
            continue;\
        }
#define NEXT_REAL_INSTR_IN_RANGE NEXT_INSTR_IN_RANGE }

// Backward iteration from instrList down to instrLast inclusive.
#define FOREACH_INSTR_BACKWARD_IN_RANGE(instr, instrList, instrLast)\
    {\
        INIT_NEXT;\
        IR::Instr *instrStop = instrLast ? ((IR::Instr*)instrLast)->m_prev : nullptr; \
        for ( IR::Instr *instr = instrList;\
              instr != instrStop;\
              instr = instr->m_prev)\
        {\
            CHECK_NEXT(instr);
#define NEXT_INSTR_BACKWARD_IN_RANGE }}

// EDITING variant: caches instr->m_next in 'instrNext' before the body runs,
// so the body may unlink/replace 'instr' safely.
#define FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
    {\
        IR::Instr * instrNext;\
        IR::Instr *instrStop = instrLast ? ((IR::Instr*)instrLast)->m_next : nullptr; \
        for ( IR::Instr *instr = instrList;\
              instr != instrStop;\
              instr = instrNext)\
        {\
            instrNext = instr->m_next;
#define NEXT_INSTR_EDITING_IN_RANGE }}

// EDITING + real-instruction filter.
#define FOREACH_REAL_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
    FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
    {\
        if (!instr->IsRealInstr())\
        {\
            continue;\
        }
#define NEXT_REAL_INSTR_EDITING_IN_RANGE NEXT_INSTR_EDITING_IN_RANGE }

// Backward EDITING variant: caches m_prev so the body may delete 'instr'.
#define FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrList, instrLast)\
    {\
        IR::Instr * instrPrev;\
        IR::Instr *instrStop = instrLast ? ((IR::Instr*)instrLast)->m_prev : nullptr; \
        for ( IR::Instr *instr = instrList;\
              instr != instrStop;\
              instr = instrPrev)\
        {\
            instrPrev = instr->m_prev;
#define NEXT_INSTR_BACKWARD_EDITING_IN_RANGE }}
  933. #define FOREACH_INSTR_EDITING(instr, instrNext, instrList)\
  934. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  935. #define NEXT_INSTR_EDITING NEXT_INSTR_EDITING_IN_RANGE
  936. #define FOREACH_INSTR(instr, instrList)\
  937. FOREACH_INSTR_IN_RANGE(instr, instrList, nullptr)
  938. #define NEXT_INSTR NEXT_INSTR_IN_RANGE
  939. #define FOREACH_REAL_INSTR(instr, instrList)\
  940. FOREACH_REAL_INSTR_IN_RANGE(instr, instrList, nullptr)
  941. #define NEXT_REAL_INSTR NEXT_REAL_INSTR_IN_RANGE
  942. #define FOREACH_INSTR_BACKWARD(instr, instrList)\
  943. FOREACH_INSTR_BACKWARD_IN_RANGE(instr, instrList, nullptr)
  944. #define NEXT_INSTR_BACKWARD NEXT_INSTR_BACKWARD_IN_RANGE
  945. #define FOREACH_INSTR_EDITING(instr, instrNext, instrList)\
  946. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  947. #define NEXT_INSTR_EDITING NEXT_INSTR_EDITING_IN_RANGE
  948. #define FOREACH_REAL_INSTR_EDITING(instr, instrNext, instrList)\
  949. FOREACH_REAL_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  950. #define NEXT_REAL_INSTR_EDITING NEXT_REAL_INSTR_EDITING_IN_RANGE
  951. #define FOREACH_INSTR_BACKWARD_EDITING(instr, instrPrev, instrList)\
  952. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrList, nullptr)
  953. #define NEXT_INSTR_BACKWARD_EDITING NEXT_INSTR_BACKWARD_EDITING_IN_RANGE
// Function-level conveniences: iterate the whole instruction list of a Func,
// from m_headInstr forward or m_tailInstr backward.
#define FOREACH_INSTR_IN_FUNC(instr, func)\
    FOREACH_INSTR(instr, func->m_headInstr)
#define NEXT_INSTR_IN_FUNC NEXT_INSTR

#define FOREACH_REAL_INSTR_IN_FUNC(instr, func)\
    FOREACH_REAL_INSTR(instr, func->m_headInstr)
#define NEXT_REAL_INSTR_IN_FUNC NEXT_REAL_INSTR

#define FOREACH_INSTR_IN_FUNC_BACKWARD(instr, func)\
    FOREACH_INSTR_BACKWARD(instr, func->m_tailInstr)
#define NEXT_INSTR_IN_FUNC_BACKWARD NEXT_INSTR_BACKWARD

#define FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, func)\
    FOREACH_INSTR_EDITING(instr, instrNext, func->m_headInstr)
#define NEXT_INSTR_IN_FUNC_EDITING NEXT_INSTR_EDITING

#define FOREACH_REAL_INSTR_IN_FUNC_EDITING(instr, instrNext, func)\
    FOREACH_REAL_INSTR_EDITING(instr, instrNext, func->m_headInstr)
#define NEXT_REAL_INSTR_IN_FUNC_EDITING NEXT_REAL_INSTR_EDITING

#define FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)\
    FOREACH_INSTR_BACKWARD_EDITING(instr, instrPrev, func->m_tailInstr)
#define NEXT_INSTR_IN_FUNC_BACKWARD_EDITING NEXT_INSTR_BACKWARD_EDITING

// Basic-block conveniences: iterate [GetFirstInstr(), GetLastInstr()] of a block.
#define FOREACH_INSTR_IN_BLOCK(instr, block)\
    FOREACH_INSTR_IN_RANGE(instr, block->GetFirstInstr(), block->GetLastInstr())
#define NEXT_INSTR_IN_BLOCK\
    NEXT_INSTR_IN_RANGE

#define FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)\
    FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, block->GetFirstInstr(), block->GetLastInstr())
#define NEXT_INSTR_IN_BLOCK_EDITING \
    NEXT_INSTR_EDITING_IN_RANGE

#define FOREACH_INSTR_BACKWARD_IN_BLOCK(instr, block)\
    FOREACH_INSTR_BACKWARD_IN_RANGE(instr, block->GetLastInstr(), block->GetFirstInstr())
#define NEXT_INSTR_BACKWARD_IN_BLOCK\
    NEXT_INSTR_BACKWARD_IN_RANGE

#define FOREACH_INSTR_BACKWARD_IN_BLOCK_EDITING(instr, instrPrev, block)\
    FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, block->GetLastInstr(), block->GetFirstInstr())
#define NEXT_INSTR_BACKWARD_IN_BLOCK_EDITING\
    NEXT_INSTR_BACKWARD_EDITING_IN_RANGE
} // namespace IR

// Instruction-to-instruction map (e.g. for cloning/relinking passes), arena-allocated.
typedef JsUtil::BaseDictionary<IR::Instr*, IR::Instr*, JitArenaAllocator, PrimeSizePolicy> InstrMap;