IR.h 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. #include "Language/JavascriptNativeOperators.h"
  7. class Func;
  8. class BasicBlock;
  9. class Region;
  10. class Lowerer;
  11. class IRBuilder;
  12. class IRBuilderAsmJs;
  13. class FlowGraph;
  14. class GlobOpt;
  15. class BailOutInfo;
  16. struct LazyBailOutRecord;
  17. typedef JsUtil::KeyValuePair<StackSym *, BailoutConstantValue> ConstantStackSymValue;
  18. typedef JsUtil::KeyValuePair<StackSym *, StackSym*> CopyPropSyms;
// Values captured by the global optimizer (GlobOpt) so they can be restored on
// bailout: constants, copy-propagated symbol pairs, and argument-object syms.
struct CapturedValues
{
    SListBase<ConstantStackSymValue> constantValues; // Captured constant values during glob opt
    SListBase<CopyPropSyms> copyPropSyms;            // Captured copy prop values during glob opt
    BVSparse<JitArenaAllocator> * argObjSyms;        // Captured arg object symbols during glob opt

    ~CapturedValues()
    {
        // Reset SListBase to be exception safe. Captured values are from GlobOpt->func->alloc;
        // in the normal case the 2 SListBase are empty so no Clear needed, also no need to
        // Clear in the exception case.
        constantValues.Reset();
        copyPropSyms.Reset();
        // NOTE(review): argObjSyms is neither initialized nor released here —
        // presumably owned by GlobOpt / the arena; confirm against callers.
    }
};
  32. class LoweredBasicBlock;
  33. class BranchJumpTableWrapper
  34. {
  35. public:
  36. BranchJumpTableWrapper(uint tableSize) : defaultTarget(nullptr), labelInstr(nullptr), tableSize(tableSize)
  37. {
  38. }
  39. void** jmpTable;
  40. void* defaultTarget;
  41. IR::LabelInstr * labelInstr;
  42. int tableSize;
  43. static BranchJumpTableWrapper* New(JitArenaAllocator * allocator, uint tableSize)
  44. {
  45. BranchJumpTableWrapper * branchTargets = JitAnew(allocator, BranchJumpTableWrapper, tableSize);
  46. //Create the jump table for integers
  47. void* * jmpTable = JitAnewArrayZ(allocator, void*, tableSize);
  48. branchTargets->jmpTable = jmpTable;
  49. return branchTargets;
  50. }
  51. };
  52. namespace IR {
  53. class EntryInstr;
  54. class ExitInstr;
  55. class BranchInstr;
  56. class LabelInstr;
  57. class JitProfilingInstr;
  58. class ProfiledInstr;
  59. class ProfiledLabelInstr;
  60. class MultiBranchInstr;
  61. class PragmaInstr;
  62. class ByteCodeUsesInstr;
  63. class Opnd;
  64. class RegOpnd;
  65. class IndirOpnd;
  66. class SymOpnd;
  67. class MemRefOpnd;
  68. class PropertySymOpnd;
  69. enum AddrOpndKind : BYTE;
// Discriminator for the Instr class hierarchy. Stored in Instr::m_kind,
// queried via Instr::GetKind()/Is*Instr() before the As*Instr() downcasts.
// Do not reorder: values are implicit and BYTE-sized.
enum IRKind : BYTE {
    InstrKindInvalid,
    InstrKindInstr,
    InstrKindBranch,
    InstrKindLabel,
    InstrKindProfiled,
    InstrKindProfiledLabel,
    InstrKindEntry,
    InstrKindExit,
    InstrKindPragma,
    InstrKindByteCodeUses,
    InstrKindJitProfiling,
};
// Sentinel meaning "no valid instruction layout" (-1).
const int32 InvalidInstrLayout = -1;
  84. ///---------------------------------------------------------------------------
  85. ///
  86. /// class Instr
  87. /// BranchInstr
  88. /// MultiBranchInstr
  89. /// LabelInstr
  90. /// JitProfilingInstr
  91. /// ProfiledInstr
  92. /// EntryInstr
  93. /// ExitInstr
  94. /// PragmaInstr
  95. /// BailoutInstr
/// ByteCodeUsesInstr
  97. ///
  98. ///---------------------------------------------------------------------------
// Base class for all IR instructions. Holds the doubly-linked instruction
// list pointers, the opcode, up to one dst and two src operands, and a large
// set of per-instruction flags used by the optimizer and lowerer. Subclasses
// (BranchInstr, LabelInstr, ProfiledInstr, ...) are discriminated by m_kind.
class Instr
{
protected:
    // All pointers start null and all flags false; Init() assigns the real
    // opcode/kind/func after construction. m_opcode starts at the
    // out-of-range marker MaxByteSizedOpcodes, m_number at NoByteCodeOffset.
    Instr(bool hasBailOutInfo = false) :
        m_next(nullptr),
        m_prev(nullptr),
        m_opcode(Js::OpCode::MaxByteSizedOpcodes),
        m_func(nullptr),
        m_number(Js::Constants::NoByteCodeOffset),
        m_dst(nullptr),
        m_src1(nullptr),
        m_src2(nullptr),
#if DBG_DUMP
        globOptInstrString(nullptr),
#endif
        dstIsTempNumber(false),
        dstIsTempNumberTransferred(false),
        dstIsTempObject(false),
        isCloned(false),
        hasBailOutInfo(hasBailOutInfo),
        hasAuxBailOut(false),
        forcePreOpBailOutIfNeeded(false),
        usesStackArgumentsObject(false),
        isInlineeEntryInstr(false),
        ignoreNegativeZero(false),
        dstIsAlwaysConvertedToInt32(false),
        dstIsAlwaysConvertedToNumber(false),
        ignoreIntOverflow(false),
        ignoreIntOverflowInRange(false),
        loadedArrayHeadSegment(false),
        loadedArrayHeadSegmentLength(false),
        extractedUpperBoundCheckWithoutHoisting(false),
        ignoreOverflowBitCount(32),
        isCtorCall(false),
        isCallInstrProtectedByNoProfileBailout(false),
        hasSideEffects(false),
        isNonFastPathFrameDisplay(false)
    {
        // NOTE(review): isFsBased is not in this init list and stays
        // uninitialized until assigned — confirm against its users.
    }

public:
    // --- Factories ---
    static Instr * New(Js::OpCode opcode, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func *func);
    static Instr* NewConstantLoad(IR::RegOpnd* dstOpnd, intptr_t varConst, ValueType type, Func *func, Js::Var varLocal = nullptr);

public:
    // --- Kind predicates and checked downcasts (see IRKind) ---
    bool IsPlainInstr() const;
    bool IsEntryInstr() const;
    EntryInstr * AsEntryInstr();
    bool IsExitInstr() const;
    ExitInstr * AsExitInstr();
    bool IsBranchInstr() const;
    BranchInstr * AsBranchInstr();
    bool IsLabelInstr() const;
    LabelInstr * AsLabelInstr();
    bool IsJitProfilingInstr() const;
    JitProfilingInstr * AsJitProfilingInstr();
    bool IsProfiledInstr() const;
    ProfiledInstr * AsProfiledInstr();
    bool IsProfiledLabelInstr() const;
    ProfiledLabelInstr * AsProfiledLabelInstr();
    bool IsPragmaInstr() const;
    PragmaInstr * AsPragmaInstr();
    bool IsByteCodeUsesInstr() const;
    ByteCodeUsesInstr * AsByteCodeUsesInstr();

    // --- Misc queries ---
    bool IsLowered() const;
    bool IsRealInstr() const;
    bool IsInlined() const;
    bool IsNewScObjectInstr() const;
    bool IsInvalidInstr() const;
    Instr* GetInvalidInstr();

    // Linked iff either neighbor pointer is set.
    bool IsLinked() const { return this->m_prev != nullptr || this->m_next != nullptr; }

    bool StartsBasicBlock() const;
    bool EndsBasicBlock() const;
    bool HasFallThrough() const;
    bool DoStackArgsOpt(Func *topFunc) const;
    bool HasAnyLoadHeapArgsOpCode();
    bool IsEqual(IR::Instr *instr) const;

    bool IsCloned() const { return isCloned; }
    void SetIsCloned(bool isCloned) { this->isCloned = isCloned; }
    bool HasBailOutInfo() const { return hasBailOutInfo; }
    bool HasAuxBailOut() const { return hasAuxBailOut; }
    bool HasTypeCheckBailOut() const;
    bool HasEquivalentTypeCheckBailOut() const;
    bool HasBailOnNoProfile() const;
    void ClearBailOutInfo();
    bool IsDstNotAlwaysConvertedToInt32() const;
    bool IsDstNotAlwaysConvertedToNumber() const;
    bool ShouldCheckForNegativeZero() const;
    bool ShouldCheckForIntOverflow() const;
    bool ShouldCheckFor32BitOverflow() const;
    bool ShouldCheckForNon32BitOverflow() const;
    bool HasAnyImplicitCalls() const;
    bool HasAnySideEffects() const;
    bool AreAllOpndInt64() const;
    IRKind GetKind() const;

    // --- Operand accessors: dst / src1 / src2 ---
    Opnd * GetDst() const;
    Opnd * SetDst(Opnd * newDst);
    Opnd * SetFakeDst(Opnd * newDst);
    Opnd * UnlinkDst();
    void FreeDst();
    Opnd * Replace(Opnd *oldOpnd, Opnd *newOpnd);
    Opnd * DeepReplace(Opnd *const oldOpnd, Opnd *const newOpnd);
    Opnd * ReplaceDst(Opnd * newDst);
    Instr * SinkDst(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, IR::Instr *insertAfterInstr = nullptr);
    Instr * SinkDst(Js::OpCode assignOpcode, StackSym * stackSym, RegNum regNum = RegNOREG, IR::Instr *insertAfterInstr = nullptr);
    Instr * SinkInstrBefore(IR::Instr * instrTarget);
    Opnd * GetSrc1() const;
    Opnd * SetSrc1(Opnd * newSrc);
    Opnd * UnlinkSrc1();
    void FreeSrc1();
    Opnd * ReplaceSrc1(Opnd * newSrc);
    Instr * HoistSrc1(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, StackSym *newSym = nullptr);
    Opnd * GetSrc2() const;
    Opnd * SetSrc2(Opnd * newSrc);
    Opnd * UnlinkSrc2();
    void FreeSrc2();
    Opnd * ReplaceSrc2(Opnd * newSrc);
    Instr * HoistSrc2(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, StackSym *newSym = nullptr);
    Instr * HoistIndirOffset(IndirOpnd *indirOpnd, RegNum regNum = RegNOREG);
    Instr * HoistSymOffset(SymOpnd *symOpnd, RegNum baseReg, uint32 offset, RegNum regNum = RegNOREG);
    Instr * HoistIndirOffsetAsAdd(IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum);
    Instr * HoistSymOffsetAsAdd(SymOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum);
    Instr * HoistIndirIndexOpndAsAdd(IR::IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, IR::Opnd *indexOpnd, RegNum regNum);
    IndirOpnd * HoistMemRefAddress(MemRefOpnd *const memRefOpnd, const Js::OpCode loadOpCode);
    Opnd * UnlinkSrc(Opnd *src);
    Opnd * ReplaceSrc(Opnd *oldSrc, Opnd * newSrc);

    // --- Instruction-list surgery ---
    void InsertBefore(Instr *instr);
    void InsertAfter(Instr *instr);
    void InsertRangeBefore(Instr *startInstr, Instr *endInstr);
    void InsertMultipleBefore(Instr *endInstr);
    void InsertRangeAfter(Instr *startInstr, Instr *endInstr);
    void InsertMultipleAfter(Instr *endInstr);
    void Unlink();
    void Free();
    void Remove();
    void SwapOpnds();
    void TransferTo(Instr * instr);
    void TransferDstAttributesTo(Instr * instr);
    IR::Instr * Copy();
    IR::Instr * Clone();
    IR::Instr * ConvertToBailOutInstr(IR::Instr * bailOutTarget, BailOutKind kind, uint32 bailOutOffset = Js::Constants::NoByteCodeOffset);
    IR::Instr * ConvertToBailOutInstr(BailOutInfo * bailOutInfo, BailOutKind kind, bool useAuxBailout = false);

    // --- Navigation over the instruction list ---
    IR::Instr * GetNextRealInstr() const;
    IR::Instr * GetNextRealInstrOrLabel() const;
    IR::Instr * GetNextBranchOrLabel() const;
    IR::Instr * GetPrevRealInstr() const;
    IR::Instr * GetPrevRealInstrOrLabel() const;
    IR::Instr * GetInsertBeforeByteCodeUsesInstr();
    IR::LabelInstr *GetOrCreateContinueLabel(const bool isHelper = false);

    // --- Register-use/def searches ---
    RegOpnd * FindRegUse(StackSym *sym);
    static RegOpnd *FindRegUseInRange(StackSym *sym, Instr *instrBegin, Instr *instrEnd);
    RegOpnd * FindRegDef(StackSym *sym);
    static Instr* FindSingleDefInstr(Js::OpCode opCode, Opnd* src);
    BranchInstr * ChangeCmCCToBranchInstr(LabelInstr *targetInstr);
    static void MoveRangeAfter(Instr * instrStart, Instr * instrLast, Instr * instrAfter);
    static IR::Instr * CloneRange(Instr * instrStart, Instr * instrLast, Instr * instrInsert, Lowerer *lowerer, JitArenaAllocator *alloc, bool (*fMapTest)(IR::Instr*), bool clonedInstrGetOrigArgSlot);

    bool CanHaveArgOutChain() const;
    bool HasEmptyArgOutChain(IR::Instr** startCallInstrOut = nullptr);
    bool HasFixedFunctionAddressTarget() const;

    // Return whether the instruction transfer value from the src to the dst for copy prop
    bool TransfersSrcValue();

#if ENABLE_DEBUG_CONFIG_OPTIONS
    const char * GetBailOutKindName() const;
#endif
#if DBG_DUMP
    // --- Debug dumping ---
    virtual void Dump(IRDumpFlags flags);
    void Dump();
    void DumpSimple();
    char16* DumpString();
    void DumpGlobOptInstrString();
    void Dump(int window);
    void DumpRange(Instr *instrEnd);
    void DumpByteCodeOffset();
#endif
#if ENABLE_DEBUG_CONFIG_OPTIONS
    void DumpTestTrace();
    void DumpFieldCopyPropTestTrace();
#endif

    // --- Byte-code offset / instruction number ---
    uint32 GetByteCodeOffset() const;
    uint32 GetNumber() const;
    void SetByteCodeOffset(IR::Instr * instr);
    void ClearByteCodeOffset();

    // --- Bailout info management ---
    BailOutInfo * GetBailOutInfo() const;
    BailOutInfo * UnlinkBailOutInfo();
    bool ReplaceBailOutInfo(BailOutInfo *newBailOutInfo);
    IR::Instr * ShareBailOut();
    BailOutKind GetBailOutKind() const;
    BailOutKind GetBailOutKindNoBits() const;
    BailOutKind GetAuxBailOutKind() const;
    void SetBailOutKind(const IR::BailOutKind bailOutKind);
    void SetAuxBailOutKind(const IR::BailOutKind bailOutKind);
    void PromoteAuxBailOut();
    void ResetAuxBailOut();
    void UnlinkStartCallFromBailOutInfo(IR::Instr *endInstr) const;
    void ChangeEquivalentToMonoTypeCheckBailOut();

    intptr_t TryOptimizeInstrWithFixedDataProperty(IR::Instr ** pInstr, GlobOpt* globopt);
    Opnd * FindCallArgumentOpnd(const Js::ArgSlot argSlot, IR::Instr * *const ownerInstrRef = nullptr);
    void CopyNumber(IR::Instr *instr) { this->SetNumber(instr->GetNumber()); }
    bool FetchOperands(_Out_writes_(argsOpndLength) IR::Opnd **argsOpnd, uint argsOpndLength);
    template <typename Fn>
    bool ForEachCallDirectArgOutInstrBackward(Fn fn, uint argsOpndLength) const;
    bool IsCmCC_A();
    bool IsCmCC_R8();
    bool IsCmCC_I4();
    bool IsNeq();
    bool BinaryCalculator(IntConstType src1Const, IntConstType src2Const, IntConstType *pResult, IRType type);
    template <typename T>
    bool BinaryCalculatorT(T src1Const, T src2Const, int64 *pResult, bool checkWouldTrap);
    bool UnaryCalculator(IntConstType src1Const, IntConstType *pResult, IRType type);
    IR::Instr* GetNextArg();

    // Iterates argument chain: walks backward from this call's src2 link
    // through ArgOut/StartCall instructions, invoking 'callback' on each real
    // ArgOut. Returns true if the callback returned true (early exit),
    // false once StartCall (or a freed instr) terminates the walk.
    template<class Fn>
    bool IterateArgInstrs(Fn callback)
    {
        StackSym* linkSym = this->GetSrc2()->GetStackSym();
        Assert(linkSym->IsSingleDef());
        IR::Instr *argInstr = linkSym->m_instrDef;
        IR::Instr* nextArg = nullptr;
        do
        {
            // Get the next instr before calling 'callback' since callback might modify the IR.
            if (argInstr->GetSrc2() && argInstr->GetSrc2()->IsSymOpnd())
            {
                linkSym = argInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
                Assert(linkSym->IsArgSlotSym());
                // Due to dead code elimination in FGPeeps, it is possible for the definitions of the
                // the instructions that we are visiting during FG to have been freed. In this case,
                // the ArgSlot, even though its was a single def, will report IsSingleDef() as false
                // since instrDef is reset to nullptr when the def instr is freed
                Assert(linkSym->IsSingleDef() ||
                    (m_func->IsInPhase(Js::Phase::FGPeepsPhase) || m_func->IsInPhase(Js::Phase::FGBuildPhase)));
                nextArg = linkSym->GetInstrDef();
            }
            else
            {
                nextArg = nullptr;
            }
            if (argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized)
            {
                argInstr = nextArg;
                // This is a fake ArgOut, skip it
                continue;
            }
            if (argInstr->m_opcode == Js::OpCode::StartCall)
            {
                Assert(nextArg == nullptr);
                break;
            }
            if (callback(argInstr))
            {
                return true;
            }
            argInstr = nextArg;
        } while (argInstr && !argInstr->IsInvalidInstr());
        // If an instr in the call sequence is invalid (0xFDFDFDFD), it must have been freed.
        // This is possible if some dead-code-removal/peeps code removed only part of the call sequence, while the whole sequence was dead (TH Bug 594245).
        // We allow this possibility here, while relying on the more involved dead-code-removal to remove the rest of the call sequence.
        // Inserting the opcode InvalidOpCode, with no lowering, here to safeguard against the possibility of a dead part of the call sequence not being removed. The lowerer would assert then.
        if (argInstr && argInstr->IsInvalidInstr())
        {
            this->InsertBefore(Instr::New(Js::OpCode::InvalidOpCode, this->m_func));
        }
        return false;
    }

    // Iterates all meta args for inlinee: from an InlineeStart, scans backward
    // to the first InlineeMetaArg, then walks them forward invoking 'callback'
    // on each. Returns true if the callback returned true (early exit).
    template<class Fn>
    bool IterateMetaArgs(Fn callback)
    {
        Assert(this->m_opcode == Js::OpCode::InlineeStart);
        Instr* currentInstr = this;
        while (currentInstr->m_opcode != Js::OpCode::InlineeMetaArg)
        {
            currentInstr = currentInstr->m_prev;
        }
        // backward iteration
        while (currentInstr->m_prev->m_opcode == Js::OpCode::InlineeMetaArg)
        {
            currentInstr = currentInstr->m_prev;
        }
        // forward iteration
        while (currentInstr->m_opcode == Js::OpCode::InlineeMetaArg)
        {
            // cache next instr as callback might move meta arg.
            IR::Instr* nextInstr = currentInstr->m_next;
            if (callback(currentInstr))
            {
                return true;
            }
            currentInstr = nextInstr;
        }
        return false;
    }

    IR::Instr* GetBytecodeArgOutCapture();
    void GenerateBytecodeArgOutCapture();
    bool HasByteCodeArgOutCapture();
    void GenerateArgOutSnapshot();
    IR::Instr* GetArgOutSnapshot();
    FixedFieldInfo* GetFixedFunction() const;
    uint GetArgOutCount(bool getInterpreterArgOutCount);
    IR::PropertySymOpnd *GetPropertySymOpnd() const;
    bool CallsAccessor(IR::PropertySymOpnd * methodOpnd = nullptr);
    bool CallsGetter();
    bool CallsSetter();
    bool UsesAllFields();
    void MoveArgs(bool generateByteCodeCapture = false);
    void Move(IR::Instr* insertInstr);

private:
    void ClearNumber() { this->m_number = 0; }
    void SetNumber(uint32 number);
    friend class ::Func;
    friend class ::Lowerer;
    friend class IR::ByteCodeUsesInstr;
    void SetByteCodeOffset(uint32 number);
    friend class ::IRBuilder;
    friend class ::IRBuilderAsmJs;
    friend class ::FlowGraph;
    void SetBailOutKind_NoAssert(const IR::BailOutKind bailOutKind);

public:
    // used only for SIMD Ld/St from typed arrays.
    // we keep these here to avoid increase in number of opcodes and to not use ExtendedArgs
    uint8 dataWidth;
#ifdef BAILOUT_INJECTION
    uint bailOutByteCodeLocation;
#endif
    Instr * m_next;  // next instruction in the list (nullptr when unlinked)
    Instr * m_prev;  // previous instruction in the list (nullptr when unlinked)
    Func * m_func;   // function this instruction belongs to
#if DBG_DUMP
    char16 * globOptInstrString;
#endif
    // These should be together to pack into a uint32
    Js::OpCode m_opcode;
    uint8 ignoreOverflowBitCount; // Number of bits after which ovf matters. Currently used for MULs.
    bool isFsBased : 1; // TEMP : just for BS testing -- NOTE(review): not set by the constructor
    bool dstIsTempNumber : 1;
    bool dstIsTempNumberTransferred : 1;
    bool dstIsTempObject : 1;
    bool usesStackArgumentsObject : 1;
    // An inlinee entry instruction initializes the InlineeCallInfo on the frame.
    bool isInlineeEntryInstr : 1;
    bool ignoreNegativeZero : 1;
    bool ignoreIntOverflow : 1;
    bool ignoreIntOverflowInRange : 1;
    bool forcePreOpBailOutIfNeeded : 1;
    bool loadedArrayHeadSegment : 1;
    bool loadedArrayHeadSegmentLength : 1;
    bool extractedUpperBoundCheckWithoutHoisting : 1;
    bool isCtorCall : 1;
    bool dstIsAlwaysConvertedToInt32 : 1;
    bool dstIsAlwaysConvertedToNumber : 1;
    bool isCallInstrProtectedByNoProfileBailout : 1;
    bool hasSideEffects : 1; // The instruction cannot be dead stored
    bool isNonFastPathFrameDisplay : 1;

protected:
    bool isCloned : 1;
    bool hasBailOutInfo : 1;
    // Used for aux bail out. We are using same bailOutInfo, just different boolean to hide regular bail out.
    // Refer to ConvertToBailOutInstr implementation for details.
    bool hasAuxBailOut : 1;
    IRKind m_kind;    // discriminator for the As*Instr() downcasts
    uint32 m_number;  // instruction number / byte-code offset bookkeeping
    Opnd * m_dst;
    Opnd * m_src1;
    Opnd * m_src2;

    void Init(Js::OpCode opcode, IRKind kind, Func * func);
    IR::Instr * CloneInstr() const;
};
// Records which byte-code symbols are upward-exposed (still needed for
// bailout) at a point in the IR, as a sparse bit-vector of sym ids.
class ByteCodeUsesInstr : public Instr
{
private:
    BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed; // sym ids used by byte code at this point

public:
    static ByteCodeUsesInstr * New(IR::Instr * originalBytecodeInstr);
    static ByteCodeUsesInstr * New(Func * containingFunction, uint32 offset);

    const BVSparse<JitArenaAllocator> * GetByteCodeUpwardExposedUsed() const;

    PropertySym * propertySymUse;

    // In the case of instances where you would like to add a ByteCodeUses to some sym,
    // which doesn't have an operand associated with it (like a block closure sym), use
    // this to set it without needing to pass the check for JIT-Optimized registers.
    void SetNonOpndSymbol(uint symId);

    // In cases where the operand you're working on may be changed between when you get
    // access to it and when you determine that you can set it in the ByteCodeUsesInstr
    // set method, cache the values and use this caller.
    void SetRemovedOpndSymbol(bool isJITOptimizedReg, uint symId);

    void Set(IR::Opnd * originalOperand);
    void Clear(uint symId);

    // Set the byteCodeUpwardExposedUsed bitvector on a new ByteCodeUses instruction.
    void SetBV(BVSparse<JitArenaAllocator>* newbv);

    // If possible, we want to aggregate with subsequent ByteCodeUses Instructions, so
    // that we can do some optimizations in other places where we can simplify args in
    // a compare, but still need to generate them for bailouts. Without this, we cause
    // problems because we end up with an instruction losing atomicity in terms of its
    // bytecode use and generation lifetimes.
    void Aggregate();
};
// Instruction variant carrying profiling identifiers used when the JIT itself
// collects profile data (profileId / arrayProfileId plus either an inline
// cache index or a loop number, discriminated by the flag bits below).
class JitProfilingInstr : public Instr
{
public:
    static JitProfilingInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func * func);
    static JitProfilingInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func * func);

    JitProfilingInstr* CloneJitProfiling() const;
    JitProfilingInstr* CopyJitProfiling() const;

    Js::ProfileId profileId;
    Js::ProfileId arrayProfileId;
    union
    {
        Js::InlineCacheIndex inlineCacheIndex; // valid for cache-based profiling sites
        uint loopNumber;                       // valid when isLoopHelper is set — TODO confirm
    };
    bool isProfiledReturnCall : 1;
    bool isBeginSwitch : 1;
    bool isNewArray : 1;
    bool isLoopHelper : 1;
};
// Instruction variant carrying interpreter-collected profile data. The union
// 'u' holds whichever payload matches the opcode: a profile id, element
// load/store info, or raw field-info bits (accessed via FldInfo()).
class ProfiledInstr : public Instr
{
protected:
    ProfiledInstr(bool hasBailOutInfo = false) : Instr(hasBailOutInfo) {}

public:
    static ProfiledInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func * func);
    static ProfiledInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func * func);

    ProfiledInstr *CloneProfiledInstr() const;
    ProfiledInstr *CopyProfiledInstr() const;

    union
    {
    public:
        uint profileId;
        const Js::LdElemInfo * ldElemInfo;
        const Js::StElemInfo * stElemInfo;
    private:
        Js::FldInfo::TSize fldInfoData; // raw storage; exposed through FldInfo()
    public:
        // Reinterprets the raw field-info bits as a Js::FldInfo reference.
        Js::FldInfo &FldInfo()
        {
            return reinterpret_cast<Js::FldInfo &>(fldInfoData);
        }
    } u;

    static const uint InvalidProfileId = (uint)-1;
};
  539. ///---------------------------------------------------------------------------
  540. ///
  541. /// class EntryInstr
  542. ///
  543. ///---------------------------------------------------------------------------
// Marker instruction for a function's entry point (IRKind InstrKindEntry).
class EntryInstr : public Instr
{
public:
    static EntryInstr * New(Js::OpCode opcode, Func *func);
};
  549. ///---------------------------------------------------------------------------
  550. ///
  551. /// class ExitInstr
  552. ///
  553. ///---------------------------------------------------------------------------
// Marker instruction for a function's exit point (IRKind InstrKindExit).
class ExitInstr : public Instr
{
public:
    static ExitInstr * New(Js::OpCode opcode, Func *func);
};
  559. ///---------------------------------------------------------------------------
  560. ///
  561. /// class LabelInstr
  562. ///
  563. ///---------------------------------------------------------------------------
  564. class LabelInstr : public Instr
  565. {
  566. friend class BranchInstr;
  567. friend class IRBuilder;
  568. friend class IRBuilderAsmJs;
  569. friend class MultiBranchInstr;
  570. public:
  571. LabelInstr(JitArenaAllocator * allocator) : Instr(), labelRefs(allocator), m_isLoopTop(false), m_block(nullptr), isOpHelper(false),
  572. m_hasNonBranchRef(false), m_region(nullptr), m_loweredBasicBlock(nullptr), m_isDataLabel(false), m_isForInExit(false)
  573. #if DBG
  574. , m_noHelperAssert(false)
  575. #endif
  576. {
  577. #if DBG_DUMP
  578. m_id = 0;
  579. #endif
  580. m_pc.pc = nullptr;
  581. }
  582. static LabelInstr * New(Js::OpCode opcode, Func *func, bool isOpHelper = false);
  583. public:
  584. SListCounted<BranchInstr *> labelRefs;
  585. Lifetime ** m_regContent;
  586. BYTE m_isLoopTop : 1;
  587. BYTE isOpHelper : 1;
  588. BYTE m_hasNonBranchRef : 1;
  589. BYTE m_isDataLabel : 1;
  590. // Indicate whether the label is the target of a for in loop exit (BrOnEmpty or BrOnNotEmpty)
  591. // It is used by Inliner to track inlinee for in loop level to assign stack allocated for in
  592. // This bit has unknown validity outside of inliner
  593. BYTE m_isForInExit : 1;
  594. #if DBG
  595. BYTE m_noHelperAssert : 1;
  596. #endif
  597. unsigned int m_id;
  598. LoweredBasicBlock* m_loweredBasicBlock;
  599. private:
  600. union labelLocation
  601. {
  602. BYTE * pc; // Used by encoder and is the real pc offset
  603. uint32 offset; // Used by preEncoder and is an estimation pc offset, not accurate
  604. } m_pc;
  605. BasicBlock * m_block;
  606. Loop * m_loop;
  607. Region * m_region;
  608. public:
  609. inline void SetPC(BYTE * pc);
  610. inline BYTE * GetPC(void) const;
  611. inline void SetOffset(uint32 offset);
  612. inline void ResetOffset(uint32 offset);
  613. inline uint32 GetOffset(void) const;
  614. inline void SetBasicBlock(BasicBlock * block);
  615. inline BasicBlock * GetBasicBlock(void) const;
  616. inline void SetLoop(Loop *loop);
  617. inline Loop * GetLoop(void) const;
  618. inline void UnlinkBasicBlock(void);
  619. inline void SetRegion(Region *);
  620. inline Region * GetRegion(void) const;
  621. inline BOOL IsUnreferenced(void) const;
  622. LabelInstr * CloneLabel(BOOL fCreate);
  623. #if DBG_DUMP
  624. virtual void Dump(IRDumpFlags flags) override;
  625. #endif
  626. private:
  627. void AddLabelRef(BranchInstr *branchRef);
  628. void RemoveLabelRef(BranchInstr *branchRef);
  629. protected:
  630. void Init(Js::OpCode opcode, IRKind kind, Func *func, bool isOpHelper);
  631. };
// Label variant for loop tops carrying the loop's profiled implicit-call and
// loop flags.
class ProfiledLabelInstr : public LabelInstr
{
private:
    ProfiledLabelInstr(JitArenaAllocator * allocator);

public:
    static ProfiledLabelInstr * New(Js::OpCode opcode, Func *func, Js::ImplicitCallFlags flags, Js::LoopFlags loopFlags);

    Js::ImplicitCallFlags loopImplicitCallFlags;
    Js::LoopFlags loopFlags;
#if DBG_DUMP
    uint loopNum;
#endif
};
  644. ///---------------------------------------------------------------------------
  645. ///
  646. /// class BranchInstr
  647. ///
  648. ///---------------------------------------------------------------------------
  649. class BranchInstr : public Instr
  650. {
  651. public:
  652. bool m_isAirlock : 1;
  653. bool m_isSwitchBr : 1;
  654. bool m_isOrphanedLeave : 1; // A Leave in a loop body in a try, most likely generated because of a return statement.
  655. bool m_areCmpRegisterFlagsUsedLater : 1; // Indicate that this branch is not the only instr using the register flags set by cmp
  656. bool m_brFinallyToEarlyExit : 1; // BrOnException from finally to early exit, can be turned into BrOnNoException on break blocks removal
  657. #if DBG
  658. bool m_isMultiBranch;
  659. bool m_isHelperToNonHelperBranch;
  660. bool m_leaveConvToBr;
  661. #endif
  662. public:
  663. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Func *func);
  664. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Opnd *srcOpnd, Func *func);
  665. static BranchInstr * New(Js::OpCode opcode, Opnd* destOpnd, LabelInstr * branchTarget, Opnd *srcOpnd, Func *func);
  666. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Opnd *src1Opnd, Opnd *src2Opnd, Func *func);
  667. BranchInstr(bool hasBailOutInfo = false) : Instr(hasBailOutInfo), m_branchTarget(nullptr), m_isAirlock(false), m_isSwitchBr(false), m_isOrphanedLeave(false), m_areCmpRegisterFlagsUsedLater(false), m_brFinallyToEarlyExit(false)
  668. {
  669. #if DBG
  670. m_isMultiBranch = false;
  671. m_leaveConvToBr = false;
  672. #endif
  673. }
  674. void SetTarget(LabelInstr *labelInstr); // Only used for non-multi-branch
  675. bool ReplaceTarget(LabelInstr * oldLabelInstr, LabelInstr * newLabelInstr);
  676. void ClearTarget();
  677. LabelInstr * GetTarget() const;
  678. bool IsConditional() const;
  679. bool IsUnconditional() const;
  680. void Invert();
  681. void RetargetClonedBranch();
  682. BranchInstr * CloneBranchInstr() const;
  683. bool IsMultiBranch() const;
  684. MultiBranchInstr * AsMultiBrInstr();
  685. void SetByteCodeReg(Js::RegSlot reg) { m_byteCodeReg = reg; }
  686. Js::RegSlot GetByteCodeReg() { return m_byteCodeReg; }
  687. bool HasByteCodeReg() { return m_byteCodeReg != Js::Constants::NoRegister; }
  688. bool IsLoopTail(Func * func);
  689. public:
  690. Lifetime ** m_regContent;
  691. private:
  692. LabelInstr * m_branchTarget;
  693. Js::RegSlot m_byteCodeReg;
  694. };
  695. ///---------------------------------------------------------------------------
  696. ///
  697. /// class MultiBranchInstr
  698. ///
  699. ///---------------------------------------------------------------------------
  700. class MultiBranchInstr : public BranchInstr
  701. {
  702. private:
  703. /*
  704. The value field in the dictionary has different semantics at different points of time. Hence the 'value' field is implemented as a void *.
  705. In IR Layer:
  706. Offset is stored in the dictionary until we generate the Labels in InsertLabels().
  707. LabelInstr is stored in the dictionary, after we generate the LabelInstrs.
  708. In Encoder:
  709. After the fixup, actual machine address corresponding to the LabelInstr is stored as the 'value'.
  710. */
  711. private:
  712. typedef JITJavascriptString* TBranchKey;
  713. typedef Js::BranchDictionaryWrapper<TBranchKey> BranchDictionaryWrapper;
  714. typedef BranchDictionaryWrapper::BranchDictionary BranchDictionary;
  715. typedef BranchJumpTableWrapper BranchJumpTable;
  716. void * m_branchTargets; // can point to a dictionary or a jump table
  717. public:
  718. static MultiBranchInstr * New(Js::OpCode opcode, IR::Opnd * srcOpnd, Func *func);
  719. static MultiBranchInstr * New(Js::OpCode opcode, Func *func);
  720. enum Kind
  721. {
  722. IntJumpTable,
  723. StrDictionary,
  724. SingleCharStrJumpTable,
  725. };
  726. Kind m_kind;
  727. IntConstType m_baseCaseValue;
  728. IntConstType m_lastCaseValue;
  729. MultiBranchInstr() :
  730. m_branchTargets(nullptr)
  731. {
  732. #if DBG
  733. m_isMultiBranch = true;
  734. #endif
  735. }
  736. void AddtoDictionary(uint32 offset, TBranchKey key, void* remoteVar);
  737. void AddtoJumpTable(uint32 offset, uint32 jmpIndex);
  738. void CreateBranchTargetsAndSetDefaultTarget(int dictionarySize, Kind kind, uint defaultTargetOffset);
  739. void ChangeLabelRef(LabelInstr * oldTarget, LabelInstr * newTarget);
  740. bool ReplaceTarget(IR::LabelInstr * oldLabelInstr, IR::LabelInstr * newLabelInstr);
  741. void FixMultiBrDefaultTarget(uint32 targetOffset);
  742. void ClearTarget();
  743. BranchDictionaryWrapper * GetBranchDictionary();
  744. BranchJumpTable * GetBranchJumpTable();
  745. ///---------------------------------------------------------------------------
  746. ///
  747. /// template MapMultiBrLabels
  748. /// - Maps through the branchTargets dictionary for all the labelInstrs
  749. ///---------------------------------------------------------------------------
  750. template<class Fn>
  751. void MapMultiBrLabels(Fn fn)
  752. {
  753. MapMultiBrTargetByAddress([fn](void ** value) -> void
  754. {
  755. fn((LabelInstr*) *value);
  756. });
  757. }
  758. ///---------------------------------------------------------------------------
  759. ///
  760. /// template MapUniqueMultiBrLabels
  761. /// - Maps through the branchTargets dictionary for all unique labelInstrs
  762. ///---------------------------------------------------------------------------
  763. template<class Fn>
  764. void MapUniqueMultiBrLabels(Fn fn)
  765. {
  766. BVSparse<JitArenaAllocator> visitedTargets(m_func->m_alloc);
  767. MapMultiBrLabels([&](IR::LabelInstr *const targetLabel)
  768. {
  769. if(visitedTargets.Test(targetLabel->m_id))
  770. {
  771. return;
  772. }
  773. visitedTargets.Set(targetLabel->m_id);
  774. fn(targetLabel);
  775. });
  776. }
  777. ///--------------------------------------------------------------------------------------------
  778. ///
  779. /// template UpdateMultiBrTargetOffsets
  780. /// - Maps through the branchTargets dictionary for updating the target offset by returning the target offset.
  781. ///--------------------------------------------------------------------------------------------
  782. template<class Fn>
  783. void UpdateMultiBrTargetOffsets(Fn fn)
  784. {
  785. MapMultiBrTargetByAddress([fn](void ** value) -> void
  786. {
  787. *value = (void*)fn(::Math::PointerCastToIntegral<uint32>(*value));
  788. });
  789. }
  790. ///--------------------------------------------------------------------------------------------
  791. ///
  792. /// template UpdateMultiBrLabels
  793. /// - Maps through the branchDictionary for updating the labelInstr
  794. ///--------------------------------------------------------------------------------------------
  795. template<class Fn>
  796. void UpdateMultiBrLabels(Fn fn)
  797. {
  798. MapMultiBrTargetByAddress([fn](void ** value) -> void
  799. {
  800. IR::LabelInstr * oldLabelInstr = (LabelInstr*)*value;
  801. IR::LabelInstr * newLabelInstr = fn(oldLabelInstr);
  802. *value = (void*)newLabelInstr;
  803. });
  804. }
  805. ///-------------------------------------------------------------------------------------------------------------
  806. ///
  807. /// template MapMultiBrTargetByAddress
  808. /// - Maps through the branchDictionary accessing the address of the 'value'
  809. ///-------------------------------------------------------------------------------------------------------------
  810. template<class Fn>
  811. void MapMultiBrTargetByAddress(Fn fn)
  812. {
  813. if(!m_branchTargets)
  814. {
  815. return;
  816. }
  817. void ** defaultTarget = nullptr;
  818. switch (m_kind)
  819. {
  820. case StrDictionary:
  821. {
  822. BranchDictionary& branchDictionary = GetBranchDictionary()->dictionary;
  823. defaultTarget = &(((MultiBranchInstr::BranchDictionaryWrapper*)(m_branchTargets))->defaultTarget);
  824. branchDictionary.MapAddress([fn](TBranchKey key, void ** value)
  825. {
  826. fn(value);
  827. });
  828. break;
  829. }
  830. case IntJumpTable:
  831. case SingleCharStrJumpTable:
  832. {
  833. void ** branchJumpTable = GetBranchJumpTable()->jmpTable;
  834. defaultTarget = &(GetBranchJumpTable()->defaultTarget);
  835. for (IntConstType i = m_baseCaseValue; i <= m_lastCaseValue; i++)
  836. {
  837. fn(&branchJumpTable[i - m_baseCaseValue]);
  838. }
  839. break;
  840. }
  841. default:
  842. Assert(false);
  843. };
  844. fn(defaultTarget);
  845. }
  846. };
  847. ///---------------------------------------------------------------------------
  848. ///
  849. /// class PragmaInstr
  850. ///
  851. ///---------------------------------------------------------------------------
  852. class PragmaInstr : public Instr
  853. {
  854. public:
  855. uint32 m_statementIndex;
  856. uint32 m_offsetInBuffer; // offset in the binary code buffer
  857. public:
  858. static PragmaInstr * New(Js::OpCode opcode, uint32 index, Func *func);
  859. PragmaInstr() : Instr(), m_statementIndex(0)
  860. {
  861. }
  862. #if DBG_DUMP
  863. virtual void Dump(IRDumpFlags flags) override;
  864. #endif
  865. #if DBG_DUMP | defined(VTUNE_PROFILING)
  866. void Record(uint32 nativeBufferOffset);
  867. #endif
  868. PragmaInstr * ClonePragma();
  869. PragmaInstr * CopyPragma();
  870. };
// Mixes bailout bookkeeping into any Instr subclass (plain Instr, ProfiledInstr,
// BranchInstr) — see the typedefs below.
template <typename InstrType>
class BailOutInstrTemplate : public InstrType
{
private:
    // Passes true so the InstrType base is constructed in has-bailout mode;
    // bailOutInfo/bailOutKind are presumably filled in by New() — not visible here.
    BailOutInstrTemplate() : InstrType(true) {}
public:
    static BailOutInstrTemplate * New(Js::OpCode opcode, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, IR::Opnd *src1, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, IR::Opnd *src1, IR::Opnd *src2, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, BailOutKind kind, BailOutInfo * bailOutInfo, Func * func);

    BailOutInstrTemplate * CloneBailOut() const;

    BailOutInfo * bailOutInfo;      // Shared bailout record (captured state needed to resume in the interpreter).
    BailOutKind bailOutKind;        // Main bailout kind driving runtime bailout logic.
    // Auxiliary bailout kind.
    // This is kind of a decoration on top of main bail out kind and is not used for runtime bail out logic (in globopt, etc).
    // It's added when we convert instr to bailout instr for which there is already bailout,
    // and is not used/just preserved until lowerer, in the beginning of lowerer we split it out.
    // Currently used for debugger bailout when it is shared with main bailout.
    BailOutKind auxBailOutKind;
};

// Concrete bailout-capable instruction flavors.
typedef BailOutInstrTemplate<Instr> BailOutInstr;
typedef BailOutInstrTemplate<ProfiledInstr> ProfiledBailOutInstr;
typedef BailOutInstrTemplate<BranchInstr> BranchBailOutInstr;
//
// FOREACH_INSTR iterators
//
// Debug-build iteration checks. INIT_PREV / CHECK_PREV verify that a forward
// (non-EDITING) walk does not mutate the list: each visited instr must be the
// m_next of the previously visited one.
#ifdef DBG
# define INIT_PREV IR::Instr * __prevInstrCheck = nullptr
# define CHECK_PREV(instr)\
    AssertMsg(__prevInstrCheck == nullptr || __prevInstrCheck->m_next == instr, \
        "Modifying instr list but not using EDITING iterator!"); \
    __prevInstrCheck = instr;
#else
# define INIT_PREV
# define CHECK_PREV(instr)
#endif
// INIT_NEXT / CHECK_NEXT: the backward-iteration counterparts (check m_prev links).
#ifdef DBG
# define INIT_NEXT IR::Instr * __nextInstrCheck = nullptr
# define CHECK_NEXT(instr)\
    AssertMsg(__nextInstrCheck == nullptr || __nextInstrCheck->m_prev == instr, \
        "Modifying instr list but not using EDITING iterator!"); \
    __nextInstrCheck = instr;
#else
# define INIT_NEXT
# define CHECK_NEXT(instr)
#endif
  918. #define FOREACH_INSTR_IN_RANGE(instr, instrList, instrLast)\
  919. {\
  920. INIT_PREV;\
  921. IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_next : nullptr; \
  922. for ( IR::Instr *instr = instrList;\
  923. instr != instr##Stop;\
  924. instr = instr->m_next)\
  925. {\
  926. CHECK_PREV(instr);
  927. #define NEXT_INSTR_IN_RANGE }}
  928. #define FOREACH_REAL_INSTR_IN_RANGE(instr, instrList, instrLast)\
  929. FOREACH_INSTR_IN_RANGE(instr, instrList, instrLast)\
  930. {\
  931. if (!instr->IsRealInstr())\
  932. {\
  933. continue;\
  934. }
  935. #define NEXT_REAL_INSTR_IN_RANGE NEXT_INSTR_IN_RANGE }
  936. #define FOREACH_INSTR_BACKWARD_IN_RANGE(instr, instrList, instrLast)\
  937. {\
  938. INIT_NEXT;\
  939. IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_prev : nullptr; \
  940. for ( IR::Instr *instr = instrList;\
  941. instr != instr##Stop;\
  942. instr = instr->m_prev)\
  943. {\
  944. CHECK_NEXT(instr);
  945. #define NEXT_INSTR_BACKWARD_IN_RANGE }}
  946. #define FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
  947. {\
  948. IR::Instr * instrNext;\
  949. IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_next : nullptr; \
  950. for ( IR::Instr *instr = instrList;\
  951. instr != instr##Stop;\
  952. instr = instrNext)\
  953. {\
  954. instrNext = instr->m_next;
  955. #define NEXT_INSTR_EDITING_IN_RANGE }}
  956. #define FOREACH_REAL_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
  957. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
  958. {\
  959. if (!instr->IsRealInstr())\
  960. {\
  961. continue;\
  962. }
  963. #define NEXT_REAL_INSTR_EDITING_IN_RANGE NEXT_INSTR_EDITING_IN_RANGE }
  964. #define FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrList, instrLast)\
  965. {\
  966. IR::Instr * instrPrev;\
  967. IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_prev : nullptr; \
  968. for ( IR::Instr *instr = instrList;\
  969. instr != instr##Stop;\
  970. instr = instrPrev)\
  971. {\
  972. instrPrev = instr->m_prev;
  973. #define NEXT_INSTR_BACKWARD_EDITING_IN_RANGE }}
  974. #define FOREACH_INSTR_EDITING(instr, instrNext, instrList)\
  975. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  976. #define NEXT_INSTR_EDITING NEXT_INSTR_EDITING_IN_RANGE
  977. #define FOREACH_INSTR(instr, instrList)\
  978. FOREACH_INSTR_IN_RANGE(instr, instrList, nullptr)
  979. #define NEXT_INSTR NEXT_INSTR_IN_RANGE
  980. #define FOREACH_REAL_INSTR(instr, instrList)\
  981. FOREACH_REAL_INSTR_IN_RANGE(instr, instrList, nullptr)
  982. #define NEXT_REAL_INSTR NEXT_REAL_INSTR_IN_RANGE
  983. #define FOREACH_INSTR_BACKWARD(instr, instrList)\
  984. FOREACH_INSTR_BACKWARD_IN_RANGE(instr, instrList, nullptr)
  985. #define NEXT_INSTR_BACKWARD NEXT_INSTR_BACKWARD_IN_RANGE
  986. #define FOREACH_INSTR_EDITING(instr, instrNext, instrList)\
  987. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  988. #define NEXT_INSTR_EDITING NEXT_INSTR_EDITING_IN_RANGE
  989. #define FOREACH_REAL_INSTR_EDITING(instr, instrNext, instrList)\
  990. FOREACH_REAL_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  991. #define NEXT_REAL_INSTR_EDITING NEXT_REAL_INSTR_EDITING_IN_RANGE
  992. #define FOREACH_INSTR_BACKWARD_EDITING(instr, instrPrev, instrList)\
  993. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrList, nullptr)
  994. #define NEXT_INSTR_BACKWARD_EDITING NEXT_INSTR_BACKWARD_EDITING_IN_RANGE
// Function-scoped wrappers: walk from func->m_headInstr (forward) or
// func->m_tailInstr (backward) over the whole instruction list.
#define FOREACH_INSTR_IN_FUNC(instr, func)\
    FOREACH_INSTR(instr, func->m_headInstr)
#define NEXT_INSTR_IN_FUNC NEXT_INSTR
#define FOREACH_REAL_INSTR_IN_FUNC(instr, func)\
    FOREACH_REAL_INSTR(instr, func->m_headInstr)
#define NEXT_REAL_INSTR_IN_FUNC NEXT_REAL_INSTR
#define FOREACH_INSTR_IN_FUNC_BACKWARD(instr, func)\
    FOREACH_INSTR_BACKWARD(instr, func->m_tailInstr)
#define NEXT_INSTR_IN_FUNC_BACKWARD NEXT_INSTR_BACKWARD
#define FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, func)\
    FOREACH_INSTR_EDITING(instr, instrNext, func->m_headInstr)
#define NEXT_INSTR_IN_FUNC_EDITING NEXT_INSTR_EDITING
#define FOREACH_REAL_INSTR_IN_FUNC_EDITING(instr, instrNext, func)\
    FOREACH_REAL_INSTR_EDITING(instr, instrNext, func->m_headInstr)
#define NEXT_REAL_INSTR_IN_FUNC_EDITING NEXT_REAL_INSTR_EDITING
#define FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)\
    FOREACH_INSTR_BACKWARD_EDITING(instr, instrPrev, func->m_tailInstr)
#define NEXT_INSTR_IN_FUNC_BACKWARD_EDITING NEXT_INSTR_BACKWARD_EDITING
// Block-scoped wrappers: iterate the inclusive range
// [block->GetFirstInstr(), block->GetLastInstr()].
#define FOREACH_INSTR_IN_BLOCK(instr, block)\
    FOREACH_INSTR_IN_RANGE(instr, block->GetFirstInstr(), block->GetLastInstr())
#define NEXT_INSTR_IN_BLOCK\
    NEXT_INSTR_IN_RANGE
#define FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)\
    FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, block->GetFirstInstr(), block->GetLastInstr())
#define NEXT_INSTR_IN_BLOCK_EDITING \
    NEXT_INSTR_EDITING_IN_RANGE
#define FOREACH_INSTR_BACKWARD_IN_BLOCK(instr, block)\
    FOREACH_INSTR_BACKWARD_IN_RANGE(instr, block->GetLastInstr(), block->GetFirstInstr())
#define NEXT_INSTR_BACKWARD_IN_BLOCK\
    NEXT_INSTR_BACKWARD_IN_RANGE
#define FOREACH_INSTR_BACKWARD_IN_BLOCK_EDITING(instr, instrPrev, block)\
    FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, block->GetLastInstr(), block->GetFirstInstr())
#define NEXT_INSTR_BACKWARD_IN_BLOCK_EDITING\
    NEXT_INSTR_BACKWARD_EDITING_IN_RANGE
} // namespace IR

// Arena-allocated map from one instruction to another (e.g. original -> clone).
typedef JsUtil::BaseDictionary<IR::Instr*, IR::Instr*, JitArenaAllocator, PrimeSizePolicy> InstrMap;