IR.h 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511661167116811691170117111721173117411751176
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. #include "Language/JavascriptNativeOperators.h"
  7. class Func;
  8. class BasicBlock;
  9. class Region;
  10. class Lowerer;
  11. class IRBuilder;
  12. class IRBuilderAsmJs;
  13. class FlowGraph;
  14. class GlobOpt;
  15. class BailOutInfo;
  16. struct LazyBailOutRecord;
  17. typedef JsUtil::KeyValuePair<StackSym *, BailoutConstantValue> ConstantStackSymValue;
  18. typedef JsUtil::KeyValuePair<StackSym *, StackSym*> CopyPropSyms;
// Values captured by the global optimizer for bailout purposes: constant
// values, copy-propagated syms, and arguments-object syms that must be
// restorable when a bailout occurs.
struct CapturedValues
{
    SListBase<ConstantStackSymValue> constantValues;   // Captured constant values during glob opt
    SListBase<CopyPropSyms> copyPropSyms;              // Captured copy prop values during glob opt
    BVSparse<JitArenaAllocator> * argObjSyms;          // Captured arg object symbols during glob opt

    ~CapturedValues()
    {
        // Reset SListBase to be exception safe. Captured values are from GlobOpt->func->alloc
        // in normal case the 2 SListBase are empty so no Clear needed, also no need to Clear in exception case
        constantValues.Reset();
        copyPropSyms.Reset();
    }
};
  32. class LoweredBasicBlock;
  33. class BranchJumpTableWrapper
  34. {
  35. public:
  36. BranchJumpTableWrapper(uint tableSize) : defaultTarget(nullptr), labelInstr(nullptr), tableSize(tableSize)
  37. {
  38. }
  39. void** jmpTable;
  40. void* defaultTarget;
  41. IR::LabelInstr * labelInstr;
  42. int tableSize;
  43. static BranchJumpTableWrapper* New(JitArenaAllocator * allocator, uint tableSize)
  44. {
  45. BranchJumpTableWrapper * branchTargets = JitAnew(allocator, BranchJumpTableWrapper, tableSize);
  46. //Create the jump table for integers
  47. void* * jmpTable = JitAnewArrayZ(allocator, void*, tableSize);
  48. branchTargets->jmpTable = jmpTable;
  49. return branchTargets;
  50. }
  51. };
  52. namespace IR {
  53. class EntryInstr;
  54. class ExitInstr;
  55. class BranchInstr;
  56. class LabelInstr;
  57. class JitProfilingInstr;
  58. class ProfiledInstr;
  59. class ProfiledLabelInstr;
  60. class MultiBranchInstr;
  61. class PragmaInstr;
  62. class ByteCodeUsesInstr;
  63. class Opnd;
  64. class RegOpnd;
  65. class IndirOpnd;
  66. class SymOpnd;
  67. class MemRefOpnd;
  68. class PropertySymOpnd;
  69. enum AddrOpndKind : BYTE;
// Discriminator for the concrete Instr subclass. Stored in Instr::m_kind and
// consulted by the Is*/As* predicates and checked downcasts instead of RTTI.
enum IRKind : BYTE {
    InstrKindInvalid,
    InstrKindInstr,
    InstrKindBranch,
    InstrKindLabel,
    InstrKindProfiled,
    InstrKindProfiledLabel,
    InstrKindEntry,
    InstrKindExit,
    InstrKindPragma,
    InstrKindByteCodeUses,
    InstrKindJitProfiling,
};

// Sentinel meaning "no valid instruction layout".
const int32 InvalidInstrLayout = -1;
///---------------------------------------------------------------------------
///
/// class Instr
///        BranchInstr
///        MultiBranchInstr
///        LabelInstr
///        JitProfilingInstr
///        ProfiledInstr
///        EntryInstr
///        ExitInstr
///        PragmaInstr
///        BailoutInstr
///        ByteCodeUsesInstr
///
/// Base class of the IR instruction hierarchy: a doubly-linked list node
/// (m_next/m_prev) carrying an opcode, up to one dst and two src operands,
/// and the flag bits consumed by the optimizer, lowerer and encoder.
///
///---------------------------------------------------------------------------
class Instr
{
protected:
    // Every field starts in its neutral state: opcode is the
    // MaxByteSizedOpcodes sentinel, the number is NoByteCodeOffset, all
    // operand pointers are null and all flags are false (ignoreOverflowBitCount
    // defaults to 32, i.e. full-width overflow checking).
    Instr(bool hasBailOutInfo = false) :
        m_next(nullptr),
        m_prev(nullptr),
        m_opcode(Js::OpCode::MaxByteSizedOpcodes),
        m_func(nullptr),
        m_number(Js::Constants::NoByteCodeOffset),
        m_dst(nullptr),
        m_src1(nullptr),
        m_src2(nullptr),
#if DBG_DUMP
        globOptInstrString(nullptr),
#endif
#if _CONTROL_FLOW_GUARD_SHADOW_STACK
        isFsBased(false),
#endif
        dstIsTempNumber(false),
        dstIsTempNumberTransferred(false),
        dstIsTempObject(false),
        isCloned(false),
        hasBailOutInfo(hasBailOutInfo),
        hasAuxBailOut(false),
        forcePreOpBailOutIfNeeded(false),
        usesStackArgumentsObject(false),
        isInlineeEntryInstr(false),
        ignoreNegativeZero(false),
        dstIsAlwaysConvertedToInt32(false),
        dstIsAlwaysConvertedToNumber(false),
        ignoreIntOverflow(false),
        ignoreIntOverflowInRange(false),
        loadedArrayHeadSegment(false),
        loadedArrayHeadSegmentLength(false),
        extractedUpperBoundCheckWithoutHoisting(false),
        ignoreOverflowBitCount(32),
        isCtorCall(false),
        isCallInstrProtectedByNoProfileBailout(false),
        hasSideEffects(false)
    {
    }

public:
    // Factories for instructions with 0-3 operands.
    static Instr * New(Js::OpCode opcode, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func *func);
    static Instr* NewConstantLoad(IR::RegOpnd* dstOpnd, intptr_t varConst, ValueType type, Func *func, Js::Var varLocal = nullptr);

public:
    // --- Kind tests and checked downcasts (driven by m_kind, see IRKind) ---
    bool IsPlainInstr() const;
    bool IsEntryInstr() const;
    EntryInstr * AsEntryInstr();
    bool IsExitInstr() const;
    ExitInstr * AsExitInstr();
    bool IsBranchInstr() const;
    BranchInstr * AsBranchInstr();
    bool IsLabelInstr() const;
    LabelInstr * AsLabelInstr();
    bool IsJitProfilingInstr() const;
    JitProfilingInstr * AsJitProfilingInstr();
    bool IsProfiledInstr() const;
    ProfiledInstr * AsProfiledInstr();
    bool IsProfiledLabelInstr() const;
    ProfiledLabelInstr * AsProfiledLabelInstr();
    bool IsPragmaInstr() const;
    PragmaInstr * AsPragmaInstr();
    bool IsByteCodeUsesInstr() const;
    ByteCodeUsesInstr * AsByteCodeUsesInstr();

    bool IsLowered() const;
    bool IsRealInstr() const;
    bool IsInlined() const;
    bool IsNewScObjectInstr() const;
    bool IsInvalidInstr() const;
    Instr* GetInvalidInstr();

    // An instruction is "linked" once it has at least one list neighbor.
    bool IsLinked() const { return this->m_prev != nullptr || this->m_next != nullptr; }

    bool StartsBasicBlock() const;
    bool EndsBasicBlock() const;
    bool HasFallThrough() const;
    bool DoStackArgsOpt(Func *topFunc) const;
    bool HasAnyLoadHeapArgsOpCode();
    bool IsEqual(IR::Instr *instr) const;

    // --- Clone / bailout flag accessors ---
    bool IsCloned() const { return isCloned; }
    void SetIsCloned(bool isCloned) { this->isCloned = isCloned; }
    bool HasBailOutInfo() const { return hasBailOutInfo; }
    bool HasAuxBailOut() const { return hasAuxBailOut; }
    bool HasTypeCheckBailOut() const;
    bool HasEquivalentTypeCheckBailOut() const;
    void ClearBailOutInfo();

    bool IsDstNotAlwaysConvertedToInt32() const;
    bool IsDstNotAlwaysConvertedToNumber() const;
    bool ShouldCheckForNegativeZero() const;
    bool ShouldCheckForIntOverflow() const;
    bool ShouldCheckFor32BitOverflow() const;
    bool ShouldCheckForNon32BitOverflow() const;
    bool HasAnyImplicitCalls() const;
    bool HasAnySideEffects() const;
    bool AreAllOpndInt64() const;

    IRKind GetKind() const;

    // --- dst operand management ---
    Opnd * GetDst() const;
    Opnd * SetDst(Opnd * newDst);
    Opnd * SetFakeDst(Opnd * newDst);
    Opnd * UnlinkDst();
    void FreeDst();
    Opnd * Replace(Opnd *oldOpnd, Opnd *newOpnd);
    Opnd * DeepReplace(Opnd *const oldOpnd, Opnd *const newOpnd);
    Opnd * ReplaceDst(Opnd * newDst);
    Instr * SinkDst(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, IR::Instr *insertAfterInstr = nullptr);
    Instr * SinkDst(Js::OpCode assignOpcode, StackSym * stackSym, RegNum regNum = RegNOREG, IR::Instr *insertAfterInstr = nullptr);
    Instr * SinkInstrBefore(IR::Instr * instrTarget);

    // --- src operand management and hoisting helpers ---
    Opnd * GetSrc1() const;
    Opnd * SetSrc1(Opnd * newSrc);
    Opnd * UnlinkSrc1();
    void FreeSrc1();
    Opnd * ReplaceSrc1(Opnd * newSrc);
    Instr * HoistSrc1(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, StackSym *newSym = nullptr);
    Opnd * GetSrc2() const;
    Opnd * SetSrc2(Opnd * newSrc);
    Opnd * UnlinkSrc2();
    void FreeSrc2();
    Opnd * ReplaceSrc2(Opnd * newSrc);
    Instr * HoistSrc2(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, StackSym *newSym = nullptr);
    Instr * HoistIndirOffset(IndirOpnd *indirOpnd, RegNum regNum = RegNOREG);
    Instr * HoistSymOffset(SymOpnd *symOpnd, RegNum baseReg, uint32 offset, RegNum regNum = RegNOREG);
    Instr * HoistIndirOffsetAsAdd(IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum);
    Instr * HoistSymOffsetAsAdd(SymOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum);
    Instr * HoistIndirIndexOpndAsAdd(IR::IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, IR::Opnd *indexOpnd, RegNum regNum);
    IndirOpnd * HoistMemRefAddress(MemRefOpnd *const memRefOpnd, const Js::OpCode loadOpCode);
    Opnd * UnlinkSrc(Opnd *src);
    Opnd * ReplaceSrc(Opnd *oldSrc, Opnd * newSrc);

    // --- instruction-list surgery ---
    void InsertBefore(Instr *instr);
    void InsertAfter(Instr *instr);
    void InsertRangeBefore(Instr *startInstr, Instr *endInstr);
    void InsertMultipleBefore(Instr *endInstr);
    void InsertRangeAfter(Instr *startInstr, Instr *endInstr);
    void InsertMultipleAfter(Instr *endInstr);
    void Unlink();
    void Free();
    void Remove();
    void SwapOpnds();
    void TransferTo(Instr * instr);
    void TransferDstAttributesTo(Instr * instr);

    // --- copy / clone / bailout conversion ---
    IR::Instr * Copy();
    IR::Instr * Clone();
    IR::Instr * ConvertToBailOutInstr(IR::Instr * bailOutTarget, BailOutKind kind, uint32 bailOutOffset = Js::Constants::NoByteCodeOffset);
    IR::Instr * ConvertToBailOutInstr(BailOutInfo * bailOutInfo, BailOutKind kind, bool useAuxBailout = false);

    // --- neighborhood navigation ---
    IR::Instr * GetNextRealInstr() const;
    IR::Instr * GetNextRealInstrOrLabel() const;
    IR::Instr * GetNextBranchOrLabel() const;
    IR::Instr * GetPrevRealInstr() const;
    IR::Instr * GetPrevRealInstrOrLabel() const;
    IR::Instr * GetInsertBeforeByteCodeUsesInstr();
    IR::LabelInstr *GetOrCreateContinueLabel(const bool isHelper = false);

    RegOpnd * FindRegUse(StackSym *sym);
    static RegOpnd *FindRegUseInRange(StackSym *sym, Instr *instrBegin, Instr *instrEnd);
    RegOpnd * FindRegDef(StackSym *sym);
    static Instr* FindSingleDefInstr(Js::OpCode opCode, Opnd* src);
    BranchInstr * ChangeCmCCToBranchInstr(LabelInstr *targetInstr);
    static void MoveRangeAfter(Instr * instrStart, Instr * instrLast, Instr * instrAfter);
    static IR::Instr * CloneRange(Instr * instrStart, Instr * instrLast, Instr * instrInsert, Lowerer *lowerer, JitArenaAllocator *alloc, bool (*fMapTest)(IR::Instr*), bool clonedInstrGetOrigArgSlot);

    bool CanHaveArgOutChain() const;
    bool HasEmptyArgOutChain(IR::Instr** startCallInstrOut = nullptr);
    bool HasFixedFunctionAddressTarget() const;

#if ENABLE_DEBUG_CONFIG_OPTIONS
    const char * GetBailOutKindName() const;
#endif

#if DBG_DUMP
    // --- debug dumping helpers ---
    virtual void Dump(IRDumpFlags flags);
    void Dump();
    void DumpSimple();
    char16* DumpString();
    void DumpGlobOptInstrString();
    void Dump(int window);
    void DumpRange(Instr *instrEnd);
    void DumpByteCodeOffset();
#endif
#if ENABLE_DEBUG_CONFIG_OPTIONS
    void DumpTestTrace();
    void DumpFieldCopyPropTestTrace();
#endif

    // --- byte-code offset / instruction number ---
    uint32 GetByteCodeOffset() const;
    uint32 GetNumber() const;
    void SetByteCodeOffset(IR::Instr * instr);
    void ClearByteCodeOffset();

    // --- bailout info accessors ---
    BailOutInfo * GetBailOutInfo() const;
    BailOutInfo * UnlinkBailOutInfo();
    bool ReplaceBailOutInfo(BailOutInfo *newBailOutInfo);
    IR::Instr * ShareBailOut();
    BailOutKind GetBailOutKind() const;
    BailOutKind GetBailOutKindNoBits() const;
    BailOutKind GetAuxBailOutKind() const;
    void SetBailOutKind(const IR::BailOutKind bailOutKind);
    void SetAuxBailOutKind(const IR::BailOutKind bailOutKind);
    void PromoteAuxBailOut();
    void ResetAuxBailOut();
    void UnlinkStartCallFromBailOutInfo(IR::Instr *endInstr) const;
    void ChangeEquivalentToMonoTypeCheckBailOut();

    intptr_t TryOptimizeInstrWithFixedDataProperty(IR::Instr ** pInstr, GlobOpt* globopt);
    Opnd * FindCallArgumentOpnd(const Js::ArgSlot argSlot, IR::Instr * *const ownerInstrRef = nullptr);
    void CopyNumber(IR::Instr *instr) { this->SetNumber(instr->GetNumber()); }
    bool FetchOperands(_Out_writes_(argsOpndLength) IR::Opnd **argsOpnd, uint argsOpndLength);
    template <typename Fn>
    bool ForEachCallDirectArgOutInstrBackward(Fn fn, uint argsOpndLength) const;
    bool IsCmCC_A();
    bool IsCmCC_R8();
    bool IsCmCC_I4();
    bool BinaryCalculator(IntConstType src1Const, IntConstType src2Const, IntConstType *pResult);
    bool UnaryCalculator(IntConstType src1Const, IntConstType *pResult);
    IR::Instr* GetNextArg();

    // Iterates argument chain: walks the ArgOut instructions linked through
    // src2 back to the StartCall, invoking 'callback' on each real ArgOut.
    // Returns true if the callback asked to stop early, false otherwise.
    template<class Fn>
    bool IterateArgInstrs(Fn callback)
    {
        StackSym* linkSym = this->GetSrc2()->GetStackSym();
        Assert(linkSym->IsSingleDef());
        IR::Instr *argInstr = linkSym->m_instrDef;
        IR::Instr* nextArg = nullptr;
        do
        {
            // Get the next instr before calling 'callback' since callback might modify the IR.
            if (argInstr->GetSrc2() && argInstr->GetSrc2()->IsSymOpnd())
            {
                linkSym = argInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
                Assert(linkSym->IsArgSlotSym());
                // Due to dead code elimination in FGPeeps, it is possible for the definitions of the
                // the instructions that we are visiting during FG to have been freed. In this case,
                // the ArgSlot, even though its was a single def, will report IsSingleDef() as false
                // since instrDef is reset to nullptr when the def instr is freed
                Assert(linkSym->IsSingleDef() ||
                    (m_func->IsInPhase(Js::Phase::FGPeepsPhase) || m_func->IsInPhase(Js::Phase::FGBuildPhase)));
                nextArg = linkSym->GetInstrDef();
            }
            else
            {
                nextArg = nullptr;
            }
            if (argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized)
            {
                argInstr = nextArg;
                // This is a fake ArgOut, skip it
                continue;
            }
            if (argInstr->m_opcode == Js::OpCode::StartCall)
            {
                Assert(nextArg == nullptr);
                break;
            }
            if (callback(argInstr))
            {
                return true;
            }
            argInstr = nextArg;
        } while (argInstr && !argInstr->IsInvalidInstr());
        // If an instr in the call sequence is invalid (0xFDFDFDFD), it must have been freed.
        // This is possible if some dead-code-removal/peeps code removed only part of the call sequence, while the whole sequence was dead (TH Bug 594245).
        // We allow this possibility here, while relying on the more involved dead-code-removal to remove the rest of the call sequence.
        // Inserting the opcode InvalidOpCode, with no lowering, here to safeguard against the possibility of a dead part of the call sequence not being removed. The lowerer would assert then.
        if (argInstr && argInstr->IsInvalidInstr())
        {
            this->InsertBefore(Instr::New(Js::OpCode::InvalidOpCode, this->m_func));
        }
        return false;
    }

    // Iterates all meta args for inlinee: rewinds from this InlineeStart to
    // the first InlineeMetaArg, then visits each one in forward order.
    // Returns true if 'callback' asked to stop early, false otherwise.
    template<class Fn>
    bool IterateMetaArgs(Fn callback)
    {
        Assert(this->m_opcode == Js::OpCode::InlineeStart);
        Instr* currentInstr = this;
        while (currentInstr->m_opcode != Js::OpCode::InlineeMetaArg)
        {
            currentInstr = currentInstr->m_prev;
        }
        // backward iteration
        while (currentInstr->m_prev->m_opcode == Js::OpCode::InlineeMetaArg)
        {
            currentInstr = currentInstr->m_prev;
        }
        // forward iteration
        while (currentInstr->m_opcode == Js::OpCode::InlineeMetaArg)
        {
            // cache next instr as callback might move meta arg.
            IR::Instr* nextInstr = currentInstr->m_next;
            if (callback(currentInstr))
            {
                return true;
            }
            currentInstr = nextInstr;
        }
        return false;
    }

    IR::Instr* GetBytecodeArgOutCapture();
    void GenerateBytecodeArgOutCapture();
    bool HasByteCodeArgOutCapture();
    void GenerateArgOutSnapshot();
    IR::Instr* GetArgOutSnapshot();
    JITTimeFixedField* GetFixedFunction() const;
    uint GetArgOutCount(bool getInterpreterArgOutCount);
    IR::PropertySymOpnd *GetPropertySymOpnd() const;
    bool CallsAccessor(IR::PropertySymOpnd* methodOpnd = nullptr);
    bool CallsGetter(IR::PropertySymOpnd* methodOpnd = nullptr);
    bool CallsSetter(IR::PropertySymOpnd* methodOpnd = nullptr);
    bool UsesAllFields();
    void MoveArgs(bool generateByteCodeCapture = false);
    void Move(IR::Instr* insertInstr);

private:
    void ClearNumber() { this->m_number = 0; }
    void SetNumber(uint32 number);
    friend class ::Func;
    friend class ::Lowerer;
    friend class IR::ByteCodeUsesInstr;
    void SetByteCodeOffset(uint32 number);
    friend class ::IRBuilder;
    friend class ::IRBuilderAsmJs;
    friend class ::FlowGraph;
    void SetBailOutKind_NoAssert(const IR::BailOutKind bailOutKind);

public:
    // used only for SIMD Ld/St from typed arrays.
    // we keep these here to avoid increase in number of opcodes and to not use ExtendedArgs
    uint8 dataWidth;

#ifdef BAILOUT_INJECTION
    uint bailOutByteCodeLocation;
#endif
    Instr * m_next;   // next instruction in the function's list
    Instr * m_prev;   // previous instruction in the function's list
    Func * m_func;    // owning function
#if DBG_DUMP
    char16 * globOptInstrString;
#endif

    // These should be together to pack into a uint32
    Js::OpCode m_opcode;
    uint8 ignoreOverflowBitCount; // Number of bits after which ovf matters. Currently used for MULs.

    // NOTE(review): isFsBased is only initialized in the constructor under
    // _CONTROL_FLOW_GUARD_SHADOW_STACK; confirm it is never read in builds
    // where that macro is undefined.
    bool isFsBased : 1; // TEMP : just for BS testing
    bool dstIsTempNumber : 1;
    bool dstIsTempNumberTransferred : 1;
    bool dstIsTempObject : 1;
    bool usesStackArgumentsObject : 1;
    // An inlinee entry instruction initializes the InlineeCallInfo on the frame.
    bool isInlineeEntryInstr : 1;
    bool ignoreNegativeZero : 1;
    bool ignoreIntOverflow : 1;
    bool ignoreIntOverflowInRange : 1;
    bool forcePreOpBailOutIfNeeded : 1;
    bool loadedArrayHeadSegment : 1;
    bool loadedArrayHeadSegmentLength : 1;
    bool extractedUpperBoundCheckWithoutHoisting : 1;
    bool isCtorCall : 1;
    bool dstIsAlwaysConvertedToInt32 : 1;
    bool dstIsAlwaysConvertedToNumber : 1;
    bool isCallInstrProtectedByNoProfileBailout : 1;
    bool hasSideEffects : 1; // The instruction cannot be dead stored
    bool isNonFastPathFrameDisplay : 1;

protected:
    bool isCloned : 1;
    bool hasBailOutInfo : 1;
    // Used for aux bail out. We are using same bailOutInfo, just different boolean to hide regular bail out.
    // Refer to ConvertToBailOutInstr implementation for details.
    bool hasAuxBailOut : 1;

    IRKind m_kind;     // discriminator for the Is*/As* accessors above
    uint32 m_number;   // instruction number; starts as NoByteCodeOffset
    Opnd * m_dst;
    Opnd * m_src1;
    Opnd * m_src2;

    void Init(Js::OpCode opcode, IRKind kind, Func * func);
    IR::Instr * CloneInstr() const;
};
// Records which byte-code symbols are still upward-exposed (needed for
// bailout restoration) at the point where the original instruction was
// optimized away or transformed.
class ByteCodeUsesInstr : public Instr
{
private:
    BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed;   // sym ids still live for bailout

public:
    static ByteCodeUsesInstr * New(IR::Instr * originalBytecodeInstr);
    static ByteCodeUsesInstr * New(Func * containingFunction, uint32 offset);

    const BVSparse<JitArenaAllocator> * GetByteCodeUpwardExposedUsed() const;

    PropertySym * propertySymUse;

    // In the case of instances where you would like to add a ByteCodeUses to some sym,
    // which doesn't have an operand associated with it (like a block closure sym), use
    // this to set it without needing to pass the check for JIT-Optimized registers.
    void SetNonOpndSymbol(uint symId);
    // In cases where the operand you're working on may be changed between when you get
    // access to it and when you determine that you can set it in the ByteCodeUsesInstr
    // set method, cache the values and use this caller.
    void SetRemovedOpndSymbol(bool isJITOptimizedReg, uint symId);
    void Set(IR::Opnd * originalOperand);
    void Clear(uint symId);
    // Set the byteCodeUpwardExposedUsed bitvector on a new ByteCodeUses instruction.
    void SetBV(BVSparse<JitArenaAllocator>* newbv);
    // If possible, we want to aggregate with subsequent ByteCodeUses Instructions, so
    // that we can do some optimizations in other places where we can simplify args in
    // a compare, but still need to generate them for bailouts. Without this, we cause
    // problems because we end up with an instruction losing atomicity in terms of its
    // bytecode use and generation lifetimes.
    void Aggregate();
};
// Instruction variant used when jitting in profiling mode: carries the
// profile ids / cache index needed to record runtime profile data.
class JitProfilingInstr : public Instr
{
public:
    static JitProfilingInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func * func);
    static JitProfilingInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func * func);

    JitProfilingInstr* CloneJitProfiling() const;
    JitProfilingInstr* CopyJitProfiling() const;

    Js::ProfileId profileId;
    Js::ProfileId arrayProfileId;

    // NOTE(review): which union member is active appears to depend on the
    // instruction being profiled (field access vs. loop) — confirm at uses.
    union
    {
        Js::InlineCacheIndex inlineCacheIndex;
        uint loopNumber;
    };

    bool isProfiledReturnCall : 1;
    bool isBeginSwitch : 1;
    bool isNewArray : 1;
    bool isLoopHelper : 1;
};
// Instruction variant carrying dynamic profile information collected for the
// original byte-code operation.
class ProfiledInstr : public Instr
{
protected:
    ProfiledInstr(bool hasBailOutInfo = false) : Instr(hasBailOutInfo) {}

public:
    static ProfiledInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func * func);
    static ProfiledInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func * func);

    ProfiledInstr *CloneProfiledInstr() const;
    ProfiledInstr *CopyProfiledInstr() const;

    // Profile payload. NOTE(review): which member is meaningful appears to
    // depend on the opcode — confirm at the use sites.
    union
    {
    public:
        uint profileId;
        const Js::LdElemInfo * ldElemInfo;
        const Js::StElemInfo * stElemInfo;
    private:
        Js::FldInfo::TSize fldInfoData;   // raw storage, viewed through FldInfo()
    public:
        // Reinterpret the raw fldInfoData storage as a Js::FldInfo.
        Js::FldInfo &FldInfo()
        {
            return reinterpret_cast<Js::FldInfo &>(fldInfoData);
        }
    } u;

    static const uint InvalidProfileId = (uint)-1;
};
///---------------------------------------------------------------------------
///
/// class EntryInstr
///
/// Entry marker for a function's instruction stream.
///
///---------------------------------------------------------------------------
class EntryInstr : public Instr
{
public:
    static EntryInstr * New(Js::OpCode opcode, Func *func);
};
///---------------------------------------------------------------------------
///
/// class ExitInstr
///
/// Exit marker for a function's instruction stream.
///
///---------------------------------------------------------------------------
class ExitInstr : public Instr
{
public:
    static ExitInstr * New(Js::OpCode opcode, Func *func);
};
  555. ///---------------------------------------------------------------------------
  556. ///
  557. /// class LabelInstr
  558. ///
  559. ///---------------------------------------------------------------------------
  560. class LabelInstr : public Instr
  561. {
  562. friend class BranchInstr;
  563. friend class IRBuilder;
  564. friend class IRBuilderAsmJs;
  565. friend class MultiBranchInstr;
  566. public:
  567. LabelInstr(JitArenaAllocator * allocator) : Instr(), labelRefs(allocator), m_isLoopTop(false), m_block(nullptr), isOpHelper(false),
  568. m_hasNonBranchRef(false), m_region(nullptr), m_loweredBasicBlock(nullptr), m_isDataLabel(false), m_isForInExit(false)
  569. #if DBG
  570. , m_noHelperAssert(false)
  571. #endif
  572. {
  573. #if DBG_DUMP
  574. m_id = 0;
  575. #endif
  576. m_pc.pc = nullptr;
  577. }
  578. static LabelInstr * New(Js::OpCode opcode, Func *func, bool isOpHelper = false);
  579. public:
  580. SListCounted<BranchInstr *> labelRefs;
  581. Lifetime ** m_regContent;
  582. BYTE m_isLoopTop : 1;
  583. BYTE isOpHelper : 1;
  584. BYTE m_hasNonBranchRef : 1;
  585. BYTE m_isDataLabel : 1;
  586. // Indicate whether the label is the target of a for in loop exit (BrOnEmpty or BrOnNotEmpty)
  587. // It is used by Inliner to track inlinee for in loop level to assign stack allocated for in
  588. // This bit has unknown validity outside of inliner
  589. BYTE m_isForInExit : 1;
  590. #if DBG
  591. BYTE m_noHelperAssert : 1;
  592. #endif
  593. unsigned int m_id;
  594. LoweredBasicBlock* m_loweredBasicBlock;
  595. private:
  596. union labelLocation
  597. {
  598. BYTE * pc; // Used by encoder and is the real pc offset
  599. uint32 offset; // Used by preEncoder and is an estimation pc offset, not accurate
  600. } m_pc;
  601. BasicBlock * m_block;
  602. Loop * m_loop;
  603. Region * m_region;
  604. public:
  605. inline void SetPC(BYTE * pc);
  606. inline BYTE * GetPC(void) const;
  607. inline void SetOffset(uint32 offset);
  608. inline void ResetOffset(uint32 offset);
  609. inline uint32 GetOffset(void) const;
  610. inline void SetBasicBlock(BasicBlock * block);
  611. inline BasicBlock * GetBasicBlock(void) const;
  612. inline void SetLoop(Loop *loop);
  613. inline Loop * GetLoop(void) const;
  614. inline void UnlinkBasicBlock(void);
  615. inline void SetRegion(Region *);
  616. inline Region * GetRegion(void) const;
  617. inline BOOL IsUnreferenced(void) const;
  618. LabelInstr * CloneLabel(BOOL fCreate);
  619. #if DBG_DUMP
  620. virtual void Dump(IRDumpFlags flags) override;
  621. #endif
  622. private:
  623. void AddLabelRef(BranchInstr *branchRef);
  624. void RemoveLabelRef(BranchInstr *branchRef);
  625. protected:
  626. void Init(Js::OpCode opcode, IRKind kind, Func *func, bool isOpHelper);
  627. };
// Label carrying loop profile data: the loop's implicit-call flags and loop
// flags from the dynamic profile.
class ProfiledLabelInstr : public LabelInstr
{
private:
    ProfiledLabelInstr(JitArenaAllocator * allocator);

public:
    static ProfiledLabelInstr * New(Js::OpCode opcode, Func *func, Js::ImplicitCallFlags flags, Js::LoopFlags loopFlags);

    Js::ImplicitCallFlags loopImplicitCallFlags;
    Js::LoopFlags loopFlags;
#if DBG_DUMP
    uint loopNum;   // loop number, for dumps only
#endif
};
  640. ///---------------------------------------------------------------------------
  641. ///
  642. /// class BranchInstr
  643. ///
  644. ///---------------------------------------------------------------------------
  645. class BranchInstr : public Instr
  646. {
  647. public:
  648. bool m_isAirlock : 1;
  649. bool m_isSwitchBr : 1;
  650. bool m_isOrphanedLeave : 1; // A Leave in a loop body in a try, most likely generated because of a return statement.
  651. #if DBG
  652. bool m_isMultiBranch;
  653. bool m_isHelperToNonHelperBranch;
  654. #endif
  655. public:
  656. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Func *func);
  657. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Opnd *srcOpnd, Func *func);
  658. static BranchInstr * New(Js::OpCode opcode, Opnd* destOpnd, LabelInstr * branchTarget, Opnd *srcOpnd, Func *func);
  659. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Opnd *src1Opnd, Opnd *src2Opnd, Func *func);
  660. BranchInstr(bool hasBailOutInfo = false) : Instr(hasBailOutInfo), m_branchTarget(nullptr), m_isAirlock(false), m_isSwitchBr(false), m_isOrphanedLeave(false)
  661. {
  662. #if DBG
  663. m_isMultiBranch = false;
  664. #endif
  665. }
  666. void SetTarget(LabelInstr *labelInstr); // Only used for non-multi-branch
  667. bool ReplaceTarget(LabelInstr * oldLabelInstr, LabelInstr * newLabelInstr);
  668. void ClearTarget();
  669. LabelInstr * GetTarget() const;
  670. bool IsConditional() const;
  671. bool IsUnconditional() const;
  672. void Invert();
  673. void RetargetClonedBranch();
  674. BranchInstr * CloneBranchInstr() const;
  675. bool IsMultiBranch() const;
  676. MultiBranchInstr * AsMultiBrInstr();
  677. void SetByteCodeReg(Js::RegSlot reg) { m_byteCodeReg = reg; }
  678. Js::RegSlot GetByteCodeReg() { return m_byteCodeReg; }
  679. bool HasByteCodeReg() { return m_byteCodeReg != Js::Constants::NoRegister; }
  680. bool IsLoopTail(Func * func);
  681. public:
  682. Lifetime ** m_regContent;
  683. private:
  684. LabelInstr * m_branchTarget;
  685. Js::RegSlot m_byteCodeReg;
  686. };
  687. ///---------------------------------------------------------------------------
  688. ///
  689. /// class MultiBranchInstr
  690. ///
  691. ///---------------------------------------------------------------------------
  692. class MultiBranchInstr : public BranchInstr
  693. {
  694. private:
  695. /*
  696. The value field in the dictionary has different semantics at different points of time. Hence the 'value' field is implemented as a void *.
  697. In IR Layer:
  698. Offset is stored in the dictionary until we generate the Labels in InsertLabels().
  699. LabelInstr is stored in the dictionary, after we generate the LabelInstrs.
  700. In Encoder:
  701. After the fixup, actual machine address corresponding to the LabelInstr is stored as the 'value'.
  702. */
  703. private:
  704. typedef JITJavascriptString* TBranchKey;
  705. typedef Js::BranchDictionaryWrapper<TBranchKey> BranchDictionaryWrapper;
  706. typedef BranchDictionaryWrapper::BranchDictionary BranchDictionary;
  707. typedef BranchJumpTableWrapper BranchJumpTable;
  708. void * m_branchTargets; // can point to a dictionary or a jump table
  709. public:
  710. static MultiBranchInstr * New(Js::OpCode opcode, IR::Opnd * srcOpnd, Func *func);
  711. static MultiBranchInstr * New(Js::OpCode opcode, Func *func);
  712. enum Kind
  713. {
  714. IntJumpTable,
  715. StrDictionary,
  716. SingleCharStrJumpTable,
  717. };
  718. Kind m_kind;
  719. IntConstType m_baseCaseValue;
  720. IntConstType m_lastCaseValue;
  721. MultiBranchInstr() :
  722. m_branchTargets(nullptr)
  723. {
  724. #if DBG
  725. m_isMultiBranch = true;
  726. #endif
  727. }
  728. void AddtoDictionary(uint32 offset, TBranchKey key, void* remoteVar);
  729. void AddtoJumpTable(uint32 offset, uint32 jmpIndex);
  730. void CreateBranchTargetsAndSetDefaultTarget(int dictionarySize, Kind kind, uint defaultTargetOffset);
  731. void ChangeLabelRef(LabelInstr * oldTarget, LabelInstr * newTarget);
  732. bool ReplaceTarget(IR::LabelInstr * oldLabelInstr, IR::LabelInstr * newLabelInstr);
  733. void FixMultiBrDefaultTarget(uint32 targetOffset);
  734. void ClearTarget();
  735. BranchDictionaryWrapper * GetBranchDictionary();
  736. BranchJumpTable * GetBranchJumpTable();
  737. ///---------------------------------------------------------------------------
  738. ///
  739. /// template MapMultiBrLabels
  740. /// - Maps through the branchTargets dictionary for all the labelInstrs
  741. ///---------------------------------------------------------------------------
  742. template<class Fn>
  743. void MapMultiBrLabels(Fn fn)
  744. {
  745. MapMultiBrTargetByAddress([fn](void ** value) -> void
  746. {
  747. fn((LabelInstr*) *value);
  748. });
  749. }
  750. ///---------------------------------------------------------------------------
  751. ///
  752. /// template MapUniqueMultiBrLabels
  753. /// - Maps through the branchTargets dictionary for all unique labelInstrs
  754. ///---------------------------------------------------------------------------
  755. template<class Fn>
  756. void MapUniqueMultiBrLabels(Fn fn)
  757. {
  758. BVSparse<JitArenaAllocator> visitedTargets(m_func->m_alloc);
  759. MapMultiBrLabels([&](IR::LabelInstr *const targetLabel)
  760. {
  761. if(visitedTargets.Test(targetLabel->m_id))
  762. {
  763. return;
  764. }
  765. visitedTargets.Set(targetLabel->m_id);
  766. fn(targetLabel);
  767. });
  768. }
  769. ///--------------------------------------------------------------------------------------------
  770. ///
  771. /// template UpdateMultiBrTargetOffsets
  772. /// - Maps through the branchTargets dictionary for updating the target offset by returning the target offset.
  773. ///--------------------------------------------------------------------------------------------
  774. template<class Fn>
  775. void UpdateMultiBrTargetOffsets(Fn fn)
  776. {
  777. MapMultiBrTargetByAddress([fn](void ** value) -> void
  778. {
  779. *value = (void*)fn(::Math::PointerCastToIntegral<uint32>(*value));
  780. });
  781. }
  782. ///--------------------------------------------------------------------------------------------
  783. ///
  784. /// template UpdateMultiBrLabels
  785. /// - Maps through the branchDictionary for updating the labelInstr
  786. ///--------------------------------------------------------------------------------------------
  787. template<class Fn>
  788. void UpdateMultiBrLabels(Fn fn)
  789. {
  790. MapMultiBrTargetByAddress([fn](void ** value) -> void
  791. {
  792. IR::LabelInstr * oldLabelInstr = (LabelInstr*)*value;
  793. IR::LabelInstr * newLabelInstr = fn(oldLabelInstr);
  794. *value = (void*)newLabelInstr;
  795. });
  796. }
  797. ///-------------------------------------------------------------------------------------------------------------
  798. ///
  799. /// template MapMultiBrTargetByAddress
  800. /// - Maps through the branchDictionary accessing the address of the 'value'
  801. ///-------------------------------------------------------------------------------------------------------------
  802. template<class Fn>
  803. void MapMultiBrTargetByAddress(Fn fn)
  804. {
  805. if(!m_branchTargets)
  806. {
  807. return;
  808. }
  809. void ** defaultTarget = nullptr;
  810. switch (m_kind)
  811. {
  812. case StrDictionary:
  813. {
  814. BranchDictionary& branchDictionary = GetBranchDictionary()->dictionary;
  815. defaultTarget = &(((MultiBranchInstr::BranchDictionaryWrapper*)(m_branchTargets))->defaultTarget);
  816. branchDictionary.MapAddress([fn](TBranchKey key, void ** value)
  817. {
  818. fn(value);
  819. });
  820. break;
  821. }
  822. case IntJumpTable:
  823. case SingleCharStrJumpTable:
  824. {
  825. void ** branchJumpTable = GetBranchJumpTable()->jmpTable;
  826. defaultTarget = &(GetBranchJumpTable()->defaultTarget);
  827. for (IntConstType i = m_baseCaseValue; i <= m_lastCaseValue; i++)
  828. {
  829. fn(&branchJumpTable[i - m_baseCaseValue]);
  830. }
  831. break;
  832. }
  833. default:
  834. Assert(false);
  835. };
  836. fn(defaultTarget);
  837. }
  838. };
  839. ///---------------------------------------------------------------------------
  840. ///
  841. /// class PragmaInstr
  842. ///
  843. ///---------------------------------------------------------------------------
  844. class PragmaInstr : public Instr
  845. {
  846. public:
  847. uint32 m_statementIndex;
  848. uint32 m_offsetInBuffer; // offset in the binary code buffer
  849. public:
  850. static PragmaInstr * New(Js::OpCode opcode, uint32 index, Func *func);
  851. PragmaInstr() : Instr(), m_statementIndex(0)
  852. {
  853. }
  854. #if DBG_DUMP
  855. virtual void Dump(IRDumpFlags flags) override;
  856. #endif
  857. #if DBG_DUMP | defined(VTUNE_PROFILING)
  858. void Record(uint32 nativeBufferOffset);
  859. #endif
  860. PragmaInstr * ClonePragma();
  861. PragmaInstr * CopyPragma();
  862. };
// Mixes bail-out data into an instruction class: InstrType is the base
// instruction type (Instr, ProfiledInstr, BranchInstr — see the typedefs
// below). The private constructor passes 'true' to the base, i.e. the
// hasBailOutInfo flag of Instr's constructor.
template <typename InstrType>
class BailOutInstrTemplate : public InstrType
{
private:
BailOutInstrTemplate() : InstrType(true) {}
public:
// Factory overloads; 'bailOutTarget' variants take the instruction to bail
// out to, the last variant takes an already-built BailOutInfo.
static BailOutInstrTemplate * New(Js::OpCode opcode, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, IR::Opnd *src1, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, IR::Opnd *src1, IR::Opnd *src2, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
static BailOutInstrTemplate * New(Js::OpCode opcode, BailOutKind kind, BailOutInfo * bailOutInfo, Func * func);
BailOutInstrTemplate * CloneBailOut() const;
BailOutInfo * bailOutInfo;
BailOutKind bailOutKind;
// Auxiliary bailout kind.
// This is kind of a decoration on top of main bail out kind and is not used for runtime bail out logic (in globopt, etc).
// It's added when we convert instr to bailout instr for which there is already bailout,
// and is not used/just preserved until lowerer, in the beginning of lowerer we split it out.
// Currently used for debugger bailout when it is shared with main bailout.
BailOutKind auxBailOutKind;
};
// Common instantiations of the bail-out mix-in.
typedef BailOutInstrTemplate<Instr> BailOutInstr;
typedef BailOutInstrTemplate<ProfiledInstr> ProfiledBailOutInstr;
typedef BailOutInstrTemplate<BranchInstr> BranchBailOutInstr;
  887. //
  888. // FOREACH_INSTR iterators
  889. //
  890. #ifdef DBG
  891. # define INIT_PREV IR::Instr * __prevInstrCheck = nullptr
  892. # define CHECK_PREV(instr)\
  893. AssertMsg(__prevInstrCheck == nullptr || __prevInstrCheck->m_next == instr, \
  894. "Modifying instr list but not using EDITING iterator!"); \
  895. __prevInstrCheck = instr;
  896. #else
  897. # define INIT_PREV
  898. # define CHECK_PREV(instr)
  899. #endif
  900. #ifdef DBG
  901. # define INIT_NEXT IR::Instr * __nextInstrCheck = nullptr
  902. # define CHECK_NEXT(instr)\
  903. AssertMsg(__nextInstrCheck == nullptr || __nextInstrCheck->m_prev == instr, \
  904. "Modifying instr list but not using EDITING iterator!"); \
  905. __nextInstrCheck = instr;
  906. #else
  907. # define INIT_NEXT
  908. # define CHECK_NEXT(instr)
  909. #endif
// Iterate [instrList, instrLast] inclusive, forward via m_next. Each FOREACH
// macro opens two scopes that the matching NEXT_* macro closes; the loop
// variable 'instr' is declared by the macro. instrLast may be nullptr to run
// to the end of the list.
#define FOREACH_INSTR_IN_RANGE(instr, instrList, instrLast)\
{\
INIT_PREV;\
IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_next : nullptr; \
for ( IR::Instr *instr = instrList;\
instr != instr##Stop;\
instr = instr->m_next)\
{\
CHECK_PREV(instr);
#define NEXT_INSTR_IN_RANGE }}
// As above, but skips instructions for which IsRealInstr() is false.
#define FOREACH_REAL_INSTR_IN_RANGE(instr, instrList, instrLast)\
FOREACH_INSTR_IN_RANGE(instr, instrList, instrLast)\
{\
if (!instr->IsRealInstr())\
{\
continue;\
}
#define NEXT_REAL_INSTR_IN_RANGE NEXT_INSTR_IN_RANGE }
// Iterate [instrList, instrLast] inclusive, backward via m_prev.
#define FOREACH_INSTR_BACKWARD_IN_RANGE(instr, instrList, instrLast)\
{\
INIT_NEXT;\
IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_prev : nullptr; \
for ( IR::Instr *instr = instrList;\
instr != instr##Stop;\
instr = instr->m_prev)\
{\
CHECK_NEXT(instr);
#define NEXT_INSTR_BACKWARD_IN_RANGE }}
// EDITING variant: caches instr->m_next in 'instrNext' before the body runs,
// so the body may unlink/replace the current instruction safely.
#define FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
{\
IR::Instr * instrNext;\
IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_next : nullptr; \
for ( IR::Instr *instr = instrList;\
instr != instr##Stop;\
instr = instrNext)\
{\
instrNext = instr->m_next;
#define NEXT_INSTR_EDITING_IN_RANGE }}
// EDITING variant that also skips non-real instructions.
#define FOREACH_REAL_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
{\
if (!instr->IsRealInstr())\
{\
continue;\
}
#define NEXT_REAL_INSTR_EDITING_IN_RANGE NEXT_INSTR_EDITING_IN_RANGE }
// Backward EDITING variant: caches instr->m_prev in 'instrPrev'.
#define FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrList, instrLast)\
{\
IR::Instr * instrPrev;\
IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_prev : nullptr; \
for ( IR::Instr *instr = instrList;\
instr != instr##Stop;\
instr = instrPrev)\
{\
instrPrev = instr->m_prev;
#define NEXT_INSTR_BACKWARD_EDITING_IN_RANGE }}
  966. #define FOREACH_INSTR_EDITING(instr, instrNext, instrList)\
  967. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  968. #define NEXT_INSTR_EDITING NEXT_INSTR_EDITING_IN_RANGE
  969. #define FOREACH_INSTR(instr, instrList)\
  970. FOREACH_INSTR_IN_RANGE(instr, instrList, nullptr)
  971. #define NEXT_INSTR NEXT_INSTR_IN_RANGE
  972. #define FOREACH_REAL_INSTR(instr, instrList)\
  973. FOREACH_REAL_INSTR_IN_RANGE(instr, instrList, nullptr)
  974. #define NEXT_REAL_INSTR NEXT_REAL_INSTR_IN_RANGE
  975. #define FOREACH_INSTR_BACKWARD(instr, instrList)\
  976. FOREACH_INSTR_BACKWARD_IN_RANGE(instr, instrList, nullptr)
  977. #define NEXT_INSTR_BACKWARD NEXT_INSTR_BACKWARD_IN_RANGE
  978. #define FOREACH_INSTR_EDITING(instr, instrNext, instrList)\
  979. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  980. #define NEXT_INSTR_EDITING NEXT_INSTR_EDITING_IN_RANGE
  981. #define FOREACH_REAL_INSTR_EDITING(instr, instrNext, instrList)\
  982. FOREACH_REAL_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  983. #define NEXT_REAL_INSTR_EDITING NEXT_REAL_INSTR_EDITING_IN_RANGE
  984. #define FOREACH_INSTR_BACKWARD_EDITING(instr, instrPrev, instrList)\
  985. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrList, nullptr)
  986. #define NEXT_INSTR_BACKWARD_EDITING NEXT_INSTR_BACKWARD_EDITING_IN_RANGE
  987. #define FOREACH_INSTR_IN_FUNC(instr, func)\
  988. FOREACH_INSTR(instr, func->m_headInstr)
  989. #define NEXT_INSTR_IN_FUNC NEXT_INSTR
  990. #define FOREACH_REAL_INSTR_IN_FUNC(instr, func)\
  991. FOREACH_REAL_INSTR(instr, func->m_headInstr)
  992. #define NEXT_REAL_INSTR_IN_FUNC NEXT_REAL_INSTR
  993. #define FOREACH_INSTR_IN_FUNC_BACKWARD(instr, func)\
  994. FOREACH_INSTR_BACKWARD(instr, func->m_tailInstr)
  995. #define NEXT_INSTR_IN_FUNC_BACKWARD NEXT_INSTR_BACKWARD
  996. #define FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, func)\
  997. FOREACH_INSTR_EDITING(instr, instrNext, func->m_headInstr)
  998. #define NEXT_INSTR_IN_FUNC_EDITING NEXT_INSTR_EDITING
  999. #define FOREACH_REAL_INSTR_IN_FUNC_EDITING(instr, instrNext, func)\
  1000. FOREACH_REAL_INSTR_EDITING(instr, instrNext, func->m_headInstr)
  1001. #define NEXT_REAL_INSTR_IN_FUNC_EDITING NEXT_REAL_INSTR_EDITING
  1002. #define FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)\
  1003. FOREACH_INSTR_BACKWARD_EDITING(instr, instrPrev, func->m_tailInstr)
  1004. #define NEXT_INSTR_IN_FUNC_BACKWARD_EDITING NEXT_INSTR_BACKWARD_EDITING
  1005. #define FOREACH_INSTR_IN_BLOCK(instr, block)\
  1006. FOREACH_INSTR_IN_RANGE(instr, block->GetFirstInstr(), block->GetLastInstr())
  1007. #define NEXT_INSTR_IN_BLOCK\
  1008. NEXT_INSTR_IN_RANGE
  1009. #define FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)\
  1010. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, block->GetFirstInstr(), block->GetLastInstr())
  1011. #define NEXT_INSTR_IN_BLOCK_EDITING \
  1012. NEXT_INSTR_EDITING_IN_RANGE
  1013. #define FOREACH_INSTR_BACKWARD_IN_BLOCK(instr, block)\
  1014. FOREACH_INSTR_BACKWARD_IN_RANGE(instr, block->GetLastInstr(), block->GetFirstInstr())
  1015. #define NEXT_INSTR_BACKWARD_IN_BLOCK\
  1016. NEXT_INSTR_BACKWARD_IN_RANGE
  1017. #define FOREACH_INSTR_BACKWARD_IN_BLOCK_EDITING(instr, instrPrev, block)\
  1018. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, block->GetLastInstr(), block->GetFirstInstr())
  1019. #define NEXT_INSTR_BACKWARD_IN_BLOCK_EDITING\
  1020. NEXT_INSTR_BACKWARD_EDITING_IN_RANGE
  1021. } // namespace IR
// Arena-allocated dictionary mapping IR::Instr* -> IR::Instr*.
typedef JsUtil::BaseDictionary<IR::Instr*, IR::Instr*, JitArenaAllocator, PrimeSizePolicy> InstrMap;