// IR.h
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. #include "JavascriptNativeOperators.h"
  7. class Func;
  8. class BasicBlock;
  9. class Region;
  10. class Lowerer;
  11. class IRBuilder;
  12. class IRBuilderAsmJs;
  13. class FlowGraph;
  14. class GlobOpt;
  15. class BailOutInfo;
  16. class GeneratorBailInInfo;
  17. class SCCLiveness;
  18. struct LazyBailOutRecord;
  19. typedef JsUtil::KeyValuePair<StackSym *, BailoutConstantValue> ConstantStackSymValue;
  20. typedef JsUtil::KeyValuePair<StackSym *, StackSym*> CopyPropSyms;
  21. struct CapturedValues
  22. {
  23. SListBase<ConstantStackSymValue> constantValues; // Captured constant values during glob opt
  24. SListBase<CopyPropSyms> copyPropSyms; // Captured copy prop values during glob opt
  25. BVSparse<JitArenaAllocator> * argObjSyms; // Captured arg object symbols during glob opt
  26. uint refCount;
  27. CapturedValues() : argObjSyms(nullptr), refCount(0) {}
  28. ~CapturedValues()
  29. {
  30. // Reset SListBase to be exception safe. Captured values are from GlobOpt->func->alloc
  31. // in normal case the 2 SListBase are empty so no Clear needed, also no need to Clear in exception case
  32. constantValues.Reset();
  33. copyPropSyms.Reset();
  34. argObjSyms = nullptr;
  35. Assert(refCount == 0);
  36. }
  37. uint DecrementRefCount()
  38. {
  39. Assert(refCount != 0);
  40. return --refCount;
  41. }
  42. void IncrementRefCount()
  43. {
  44. Assert(refCount > 0);
  45. refCount++;
  46. }
  47. void CopyTo(JitArenaAllocator *allocator, CapturedValues *other) const
  48. {
  49. Assert(other != nullptr);
  50. this->constantValues.CopyTo(allocator, other->constantValues);
  51. this->copyPropSyms.CopyTo(allocator, other->copyPropSyms);
  52. if (other->argObjSyms != nullptr)
  53. {
  54. other->argObjSyms->ClearAll();
  55. JitAdelete(allocator, other->argObjSyms);
  56. }
  57. if (this->argObjSyms != nullptr)
  58. {
  59. other->argObjSyms = this->argObjSyms->CopyNew(allocator);
  60. }
  61. else
  62. {
  63. other->argObjSyms = nullptr;
  64. }
  65. // Ignore refCount because other objects might still reference it
  66. }
  67. };
  68. class LoweredBasicBlock;
  69. class BranchJumpTableWrapper
  70. {
  71. public:
  72. BranchJumpTableWrapper(uint tableSize) : jmpTable(nullptr), defaultTarget(nullptr), labelInstr(nullptr), tableSize(tableSize)
  73. {
  74. }
  75. void** jmpTable;
  76. void* defaultTarget;
  77. IR::LabelInstr * labelInstr;
  78. int tableSize;
  79. static BranchJumpTableWrapper* New(JitArenaAllocator * allocator, uint tableSize)
  80. {
  81. BranchJumpTableWrapper * branchTargets = JitAnew(allocator, BranchJumpTableWrapper, tableSize);
  82. //Create the jump table for integers
  83. void* * jmpTable = JitAnewArrayZ(allocator, void*, tableSize);
  84. branchTargets->jmpTable = jmpTable;
  85. return branchTargets;
  86. }
  87. };
  88. namespace IR {
  89. class EntryInstr;
  90. class ExitInstr;
  91. class BranchInstr;
  92. class LabelInstr;
  93. class JitProfilingInstr;
  94. class ProfiledInstr;
  95. class ProfiledLabelInstr;
  96. class MultiBranchInstr;
  97. class PragmaInstr;
  98. class ByteCodeUsesInstr;
  99. class GeneratorBailInInstr;
  100. class Opnd;
  101. class RegOpnd;
  102. class IndirOpnd;
  103. class SymOpnd;
  104. class MemRefOpnd;
  105. class PropertySymOpnd;
  106. enum AddrOpndKind : BYTE;
// Discriminator stored on every Instr (see Instr::m_kind / GetKind) that
// identifies its concrete subclass; the Is*Instr / As*Instr helpers test and
// downcast based on this value.
enum IRKind : BYTE {
    InstrKindInvalid,
    InstrKindInstr,
    InstrKindBranch,
    InstrKindLabel,
    InstrKindProfiled,
    InstrKindProfiledLabel,
    InstrKindEntry,
    InstrKindExit,
    InstrKindPragma,
    InstrKindByteCodeUses,
    InstrKindJitProfiling,
};

// Sentinel (-1) used where an instruction layout index is not valid.
const int32 InvalidInstrLayout = -1;
  121. ///---------------------------------------------------------------------------
  122. ///
  123. /// class Instr
  124. /// BranchInstr
  125. /// MultiBranchInstr
  126. /// LabelInstr
  127. /// JitProfilingInstr
  128. /// ProfiledInstr
  129. /// EntryInstr
  130. /// ExitInstr
  131. /// PragmaInstr
  132. /// BailoutInstr
  133. /// ByteCodeUsesInstr
  134. /// GeneratorBailInInstr
  135. ///---------------------------------------------------------------------------
// Base class for all instructions in the JIT's intermediate representation.
// An Instr is a node in a doubly linked list (m_next/m_prev), carries an
// opcode, up to one destination and two source operands, and a set of
// per-instruction flags used by the optimizer and lowerer. Concrete
// subclasses (BranchInstr, LabelInstr, ProfiledInstr, ...) are discriminated
// by m_kind (see IRKind).
class Instr
{
protected:
    // m_kind is set later by Init(); isFsBased, dataWidth and m_kind are not
    // initialized here. NOTE(review): confirm every creation path initializes
    // them before use.
    Instr(bool hasBailOutInfo = false) :
        m_next(nullptr),
        m_prev(nullptr),
        m_opcode(Js::OpCode::MaxByteSizedOpcodes),
        m_func(nullptr),
        m_number(Js::Constants::NoByteCodeOffset),
        m_dst(nullptr),
        m_src1(nullptr),
        m_src2(nullptr),
#if DBG_DUMP
        globOptInstrString(nullptr),
#endif
        dstIsTempNumber(false),
        dstIsTempNumberTransferred(false),
        dstIsTempObject(false),
        isCloned(false),
        hasBailOutInfo(hasBailOutInfo),
        hasAuxBailOut(false),
        forcePreOpBailOutIfNeeded(false),
        usesStackArgumentsObject(false),
        isInlineeEntryInstr(false),
        ignoreNegativeZero(false),
        dstIsAlwaysConvertedToInt32(false),
        dstIsAlwaysConvertedToNumber(false),
        ignoreIntOverflow(false),
        ignoreIntOverflowInRange(false),
        loadedArrayHeadSegment(false),
        loadedArrayHeadSegmentLength(false),
        extractedUpperBoundCheckWithoutHoisting(false),
        ignoreOverflowBitCount(32),
        isCtorCall(false),
        isCallInstrProtectedByNoProfileBailout(false),
        hasSideEffects(false),
        isNonFastPathFrameDisplay(false),
        isSafeToSpeculate(false)
#if DBG
        , highlight(0)
        , m_noLazyHelperAssert(false)
#endif
    {
    }

public:
    // Factory methods; instructions are allocated from the Func's allocator.
    static Instr * New(Js::OpCode opcode, Func *func);
    static Instr * New(Js::OpCode opcode, Func *func, IR::Instr * bytecodeOffsetInstr);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func *func);
    static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func *func);
    static Instr* NewConstantLoad(IR::RegOpnd* dstOpnd, intptr_t varConst, ValueType type, Func *func, Js::Var varLocal = nullptr);

public:
    // RTTI-style kind tests and checked downcasts (keyed off m_kind).
    bool IsPlainInstr() const;
    bool IsEntryInstr() const;
    EntryInstr * AsEntryInstr();
    bool IsExitInstr() const;
    ExitInstr * AsExitInstr();
    bool IsBranchInstr() const;
    BranchInstr * AsBranchInstr();
    bool IsLabelInstr() const;
    LabelInstr * AsLabelInstr();
    bool IsGeneratorBailInInstr() const;
    GeneratorBailInInstr * AsGeneratorBailInInstr();
    bool IsJitProfilingInstr() const;
    JitProfilingInstr * AsJitProfilingInstr();
    bool IsProfiledInstr() const;
    ProfiledInstr * AsProfiledInstr();
    bool IsProfiledLabelInstr() const;
    ProfiledLabelInstr * AsProfiledLabelInstr();
    bool IsPragmaInstr() const;
    PragmaInstr * AsPragmaInstr();
    bool IsByteCodeUsesInstr() const;
    ByteCodeUsesInstr * AsByteCodeUsesInstr();

    bool IsLowered() const;
    bool IsRealInstr() const;
    bool IsInlined() const;
    bool IsNewScObjectInstr() const;
    bool IsInvalidInstr() const;
    Instr* GetInvalidInstr();

    // An instruction is linked once it is threaded into a Func's instr list.
    bool IsLinked() const { return this->m_prev != nullptr || this->m_next != nullptr; }

    bool StartsBasicBlock() const;
    bool EndsBasicBlock() const;
    bool HasFallThrough() const;
    bool DoStackArgsOpt() const;
    bool HasAnyLoadHeapArgsOpCode();
    bool IsEqual(IR::Instr *instr) const;

    // Flag accessors (the backing bits live in the bitfield section below).
    bool IsCloned() const { return isCloned; }
    void SetIsCloned(bool isCloned) { this->isCloned = isCloned; }
    bool IsSafeToSpeculate() const { return isSafeToSpeculate; }
    void SetIsSafeToSpeculate(bool isSafe) { this->isSafeToSpeculate = isSafe; }
    bool HasBailOutInfo() const { return hasBailOutInfo; }
    bool HasAuxBailOut() const { return hasAuxBailOut; }
    bool HasTypeCheckBailOut() const;
    bool HasEquivalentTypeCheckBailOut() const;
    bool HasBailOnNoProfile() const;
    void ClearBailOutInfo();

    bool IsDstNotAlwaysConvertedToInt32() const;
    bool IsDstNotAlwaysConvertedToNumber() const;
    bool ShouldCheckForNegativeZero() const;
    bool ShouldCheckForIntOverflow() const;
    bool ShouldCheckFor32BitOverflow() const;
    bool ShouldCheckForNon32BitOverflow() const;

    static bool OpndHasAnyImplicitCalls(IR::Opnd* opnd, bool isSrc);
    bool HasAnyImplicitCalls() const;
    bool HasAnySideEffects() const;
    bool AreAllOpndInt64() const;

    IRKind GetKind() const;

    // Destination operand management.
    Opnd * GetDst() const;
    Opnd * SetDst(Opnd * newDst);
    Opnd * SetFakeDst(Opnd * newDst);
    Opnd * UnlinkDst();
    void FreeDst();
    Opnd * Replace(Opnd *oldOpnd, Opnd *newOpnd);
    Opnd * DeepReplace(Opnd *const oldOpnd, Opnd *const newOpnd);
    Opnd * ReplaceDst(Opnd * newDst);
    Instr * SinkDst(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, IR::Instr *insertAfterInstr = nullptr);
    Instr * SinkDst(Js::OpCode assignOpcode, StackSym * stackSym, RegNum regNum = RegNOREG, IR::Instr *insertAfterInstr = nullptr);
    Instr * SinkInstrBefore(IR::Instr * instrTarget);

    // Source operand management.
    Opnd * GetSrc1() const;
    Opnd * SetSrc1(Opnd * newSrc);
    Opnd * UnlinkSrc1();
    void FreeSrc1();
    Opnd * ReplaceSrc1(Opnd * newSrc);
    Instr * HoistSrc1(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, StackSym *newSym = nullptr);
    Opnd * GetSrc2() const;
    Opnd * SetSrc2(Opnd * newSrc);
    Opnd * UnlinkSrc2();
    void FreeSrc2();
    Opnd * ReplaceSrc2(Opnd * newSrc);
    Instr * HoistSrc2(Js::OpCode assignOpcode, RegNum regNum = RegNOREG, StackSym *newSym = nullptr);
    IndirOpnd * HoistMemRefAddress(MemRefOpnd *const memRefOpnd, const Js::OpCode loadOpCode);
    Opnd * UnlinkSrc(Opnd *src);
    Opnd * ReplaceSrc(Opnd *oldSrc, Opnd * newSrc);

    // Linked-list editing of the instruction stream.
    void InsertBefore(Instr *instr);
    void InsertAfter(Instr *instr);
    void InsertRangeBefore(Instr *startInstr, Instr *endInstr);
    void InsertMultipleBefore(Instr *endInstr);
    void InsertRangeAfter(Instr *startInstr, Instr *endInstr);
    void InsertMultipleAfter(Instr *endInstr);
    void Unlink();
    void Free();
    void Remove();

    void SwapOpnds();
    void TransferTo(Instr * instr);
    void TransferDstAttributesTo(Instr * instr);
    IR::Instr * Copy(bool copyDst = true);
    IR::Instr * CopyWithoutDst();
    IR::Instr * Clone();

    // Conversion to a bailout-carrying instruction (see BailOutInfo).
    IR::Instr * ConvertToBailOutInstr(IR::Instr *bailOutTarget, BailOutKind kind, uint32 bailOutOffset = Js::Constants::NoByteCodeOffset);
    IR::Instr * ConvertToBailOutInstr(BailOutInfo *bailOutInfo, BailOutKind kind, bool useAuxBailout = false);
    IR::Instr * ConvertToBailOutInstrWithBailOutInfoCopy(BailOutInfo *bailOutInfo, IR::BailOutKind bailOutKind);

#if DBG
    IR::LabelInstr *GetNextNonEmptyLabel() const;
#endif
    // Navigation over the instruction stream.
    IR::Instr * GetNextRealInstr() const;
    IR::Instr * GetNextRealInstrOrLabel() const;
    IR::Instr * GetNextBranchOrLabel() const;
    IR::Instr * GetNextByteCodeInstr() const;
    IR::Instr * GetPrevRealInstr() const;
    IR::Instr * GetPrevRealInstrOrLabel() const;
    IR::LabelInstr *GetPrevLabelInstr() const;
    IR::Instr * GetBlockStartInstr() const;
    IR::Instr * GetInsertBeforeByteCodeUsesInstr();
    bool IsByteCodeUsesInstrFor(IR::Instr * instr) const;
    IR::LabelInstr *GetOrCreateContinueLabel(const bool isHelper = false);

    // Symbol use queries.
    static bool HasSymUseSrc(StackSym *sym, IR::Opnd*);
    static bool HasSymUseDst(StackSym *sym, IR::Opnd*);
    bool HasSymUse(StackSym *sym);
    static bool HasSymUseInRange(StackSym *sym, Instr *instrBegin, Instr *instrEnd);
    RegOpnd * FindRegDef(StackSym *sym);
    static Instr* FindSingleDefInstr(Js::OpCode opCode, Opnd* src);

    bool CanAggregateByteCodeUsesAcrossInstr(IR::Instr * instr);
    bool DontHoistBailOnNoProfileAboveInGeneratorFunction() const;

    // LazyBailOut
    bool AreAllOpndsTypeSpecialized() const;
    bool IsStFldVariant() const;
    bool IsStElemVariant() const;
    bool CanChangeFieldValueWithoutImplicitCall() const;
    void ClearLazyBailOut();
    bool OnlyHasLazyBailOut() const;
    bool HasLazyBailOut() const;
    bool HasPreOpBailOut() const;
    bool HasPostOpBailOut() const;
#if DBG
    bool m_noLazyHelperAssert;
#endif

    BranchInstr * ChangeCmCCToBranchInstr(LabelInstr *targetInstr);
    static void MoveRangeAfter(Instr * instrStart, Instr * instrLast, Instr * instrAfter);
    static IR::Instr * CloneRange(Instr * instrStart, Instr * instrLast, Instr * instrInsert, Lowerer *lowerer, JitArenaAllocator *alloc, bool (*fMapTest)(IR::Instr*), bool clonedInstrGetOrigArgSlot);

    bool CanHaveArgOutChain() const;
    bool HasEmptyArgOutChain(IR::Instr** startCallInstrOut = nullptr);
    bool HasFixedFunctionAddressTarget() const;

    // Return whether the instruction transfer value from the src to the dst for copy prop
    bool TransfersSrcValue();

#if ENABLE_DEBUG_CONFIG_OPTIONS
    const char * GetBailOutKindName() const;
#endif

#if DBG_DUMP
    // Debug-only dumping of the instruction (and optionally its neighborhood).
    virtual void Dump(IRDumpFlags flags);
    void Dump();
    void DumpSimple();
    char16* DumpString();
    void DumpGlobOptInstrString();
    void Dump(int window);
    void DumpRange(Instr *instrEnd);
    void DumpByteCodeOffset();
#endif
#if ENABLE_DEBUG_CONFIG_OPTIONS
    void DumpTestTrace();
    void DumpFieldCopyPropTestTrace(bool inLandingPad);
#endif

    uint32 GetByteCodeOffset() const;
    uint32 GetNumber() const;

    void SetByteCodeOffset(IR::Instr * instr);
    void ClearByteCodeOffset();

    // Bail-out info management (only meaningful when hasBailOutInfo is set).
    BailOutInfo * GetBailOutInfo() const;
    BailOutInfo * UnlinkBailOutInfo();
    void ReplaceBailOutInfo(BailOutInfo *newBailOutInfo);
    IR::Instr * ShareBailOut();
    BailOutKind GetBailOutKind() const;
    BailOutKind GetBailOutKindNoBits() const;
    BailOutKind GetAuxBailOutKind() const;
    void SetBailOutKind(const IR::BailOutKind bailOutKind);
    void SetAuxBailOutKind(const IR::BailOutKind bailOutKind);
    void PromoteAuxBailOut();
    void ResetAuxBailOut();
    void UnlinkStartCallFromBailOutInfo(IR::Instr *endInstr) const;
    void ChangeEquivalentToMonoTypeCheckBailOut();

    intptr_t TryOptimizeInstrWithFixedDataProperty(IR::Instr ** pInstr, GlobOpt* globopt);
    Opnd * FindCallArgumentOpnd(const Js::ArgSlot argSlot, IR::Instr * *const ownerInstrRef = nullptr);

    void CopyNumber(IR::Instr *instr) { this->SetNumber(instr->GetNumber()); }

    bool FetchOperands(_Out_writes_(argsOpndLength) IR::Opnd **argsOpnd, uint argsOpndLength);
    template <typename Fn>
    bool ForEachCallDirectArgOutInstrBackward(Fn fn, uint argsOpndLength) const;
    bool IsCmCC_A();
    bool IsCmCC_R8();
    bool IsCmCC_I4();
    bool IsNeq();
    bool BinaryCalculator(IntConstType src1Const, IntConstType src2Const, IntConstType *pResult, IRType type);
    template <typename T>
    bool BinaryCalculatorT(T src1Const, T src2Const, int64 *pResult, bool checkWouldTrap);
    bool UnaryCalculator(IntConstType src1Const, IntConstType *pResult, IRType type);
    IR::Instr* GetNextArg();
#if DBG
    bool ShouldEmitIntRangeCheck();
#endif

    // Iterates argument chain
    // Walks backwards over the ArgOut chain feeding this call (linked through
    // src2 stack syms), invoking 'callback' on each real ArgOut until the
    // StartCall is reached. Returns true iff the callback returned true
    // (early stop); fake ArgOut_A_InlineSpecialized entries are skipped.
    template<class Fn>
    bool IterateArgInstrs(Fn callback)
    {
        StackSym* linkSym = this->GetSrc2()->GetStackSym();
        Assert(linkSym->IsSingleDef());
        IR::Instr *argInstr = linkSym->m_instrDef;
        IR::Instr* nextArg = nullptr;
        do
        {
            // Get the next instr before calling 'callback' since callback might modify the IR.
            if (argInstr->GetSrc2() && argInstr->GetSrc2()->IsSymOpnd())
            {
                linkSym = argInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
                Assert(linkSym->IsArgSlotSym());
                // Due to dead code elimination in FGPeeps, it is possible for the definitions of the
                // the instructions that we are visiting during FG to have been freed. In this case,
                // the ArgSlot, even though its was a single def, will report IsSingleDef() as false
                // since instrDef is reset to nullptr when the def instr is freed
                Assert(linkSym->IsSingleDef() ||
                    (m_func->IsInPhase(Js::Phase::FGPeepsPhase) || m_func->IsInPhase(Js::Phase::FGBuildPhase)));
                nextArg = linkSym->GetInstrDef();
            }
            else
            {
                nextArg = nullptr;
            }
            if (argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized)
            {
                argInstr = nextArg;
                // This is a fake ArgOut, skip it
                continue;
            }
            if (argInstr->m_opcode == Js::OpCode::StartCall)
            {
                Assert(nextArg == nullptr);
                break;
            }
            if (callback(argInstr))
            {
                return true;
            }
            argInstr = nextArg;
        } while (argInstr && !argInstr->IsInvalidInstr());
        // If an instr in the call sequence is invalid (0xFDFDFDFD), it must have been freed.
        // This is possible if some dead-code-removal/peeps code removed only part of the call sequence, while the whole sequence was dead (TH Bug 594245).
        // We allow this possibility here, while relying on the more involved dead-code-removal to remove the rest of the call sequence.
        // Inserting the opcode InvalidOpCode, with no lowering, here to safeguard against the possibility of a dead part of the call sequence not being removed. The lowerer would assert then.
        if (argInstr && argInstr->IsInvalidInstr())
        {
            this->InsertBefore(Instr::New(Js::OpCode::InvalidOpCode, this->m_func));
        }
        return false;
    }

    // Iterates all meta args for inlinee
    // First rewinds to the earliest InlineeMetaArg preceding this InlineeStart,
    // then walks forward, invoking 'callback' on each. Returns true iff the
    // callback returned true (early stop).
    template<class Fn>
    bool IterateMetaArgs(Fn callback)
    {
        Assert(this->m_opcode == Js::OpCode::InlineeStart);
        Instr* currentInstr = this;
        while (currentInstr->m_opcode != Js::OpCode::InlineeMetaArg)
        {
            currentInstr = currentInstr->m_prev;
        }
        // backward iteration
        while (currentInstr->m_prev->m_opcode == Js::OpCode::InlineeMetaArg)
        {
            currentInstr = currentInstr->m_prev;
        }
        // forward iteration
        while (currentInstr->m_opcode == Js::OpCode::InlineeMetaArg)
        {
            // cache next instr as callback might move meta arg.
            IR::Instr* nextInstr = currentInstr->m_next;
            if (callback(currentInstr))
            {
                return true;
            }
            currentInstr = nextInstr;
        }
        return false;
    }

    IR::Instr* GetBytecodeArgOutCapture();
    void GenerateBytecodeArgOutCapture();
    bool HasByteCodeArgOutCapture();
    void GenerateArgOutSnapshot();
    IR::Instr* GetArgOutSnapshot();
    FixedFieldInfo* GetFixedFunction() const;
    uint GetArgOutCount(bool getInterpreterArgOutCount);
    uint GetArgOutSize(bool getInterpreterArgOutCount);
    uint GetAsmJsArgOutSize();
    IR::PropertySymOpnd *GetPropertySymOpnd() const;
    bool CallsAccessor(IR::PropertySymOpnd * methodOpnd = nullptr);
    bool CallsGetter();
    bool CallsSetter();
    bool UsesAllFields();
    void MoveArgs(bool generateByteCodeCapture = false);
    void Move(IR::Instr* insertInstr);

private:
    int GetOpndCount() const;
    void ClearNumber() { this->m_number = 0; }
    void SetNumber(uint32 number);
    friend class ::Func;
    friend class ::Lowerer;
    friend class IR::ByteCodeUsesInstr;
    friend class ::SCCLiveness;
    void SetByteCodeOffset(uint32 number);
    friend class ::IRBuilder;
    friend class ::IRBuilderAsmJs;
    friend class ::FlowGraph;
    void SetBailOutKind_NoAssert(const IR::BailOutKind bailOutKind);

public:
#ifdef BAILOUT_INJECTION
    uint bailOutByteCodeLocation;
#endif
    Instr * m_next;
    Instr * m_prev;
    Func * m_func;
#if DBG_DUMP
    char16 * globOptInstrString;
#endif
    // These should be together to pack into a uint32
    Js::OpCode m_opcode;
    uint8 ignoreOverflowBitCount; // Number of bits after which ovf matters. Currently used for MULs.
    // used only for SIMD Ld/St from typed arrays.
    // we keep these here to avoid increase in number of opcodes and to not use ExtendedArgs
    uint8 dataWidth;
#if DBG
    WORD highlight;
#endif

    // Single-bit flags; see the accessor methods above for their meaning.
    bool isFsBased : 1; // TEMP : just for BS testing
    bool dstIsTempNumber : 1;
    bool dstIsTempNumberTransferred : 1;
    bool dstIsTempObject : 1;
    bool usesStackArgumentsObject : 1;
    // An inlinee entry instruction initializes the InlineeCallInfo on the frame.
    bool isInlineeEntryInstr : 1;
    bool ignoreNegativeZero : 1;
    bool ignoreIntOverflow : 1;
    bool ignoreIntOverflowInRange : 1;
    bool forcePreOpBailOutIfNeeded : 1;
    bool loadedArrayHeadSegment : 1;
    bool loadedArrayHeadSegmentLength : 1;
    bool extractedUpperBoundCheckWithoutHoisting : 1;
    bool isCtorCall : 1;
    bool dstIsAlwaysConvertedToInt32 : 1;
    bool dstIsAlwaysConvertedToNumber : 1;
    bool isCallInstrProtectedByNoProfileBailout : 1;
    bool hasSideEffects : 1; // The instruction cannot be dead stored
    bool isNonFastPathFrameDisplay : 1;
protected:
    bool isCloned : 1;
    bool hasBailOutInfo : 1;
    bool isSafeToSpeculate : 1;
    // Used for aux bail out. We are using same bailOutInfo, just different boolean to hide regular bail out.
    // Refer to ConvertToBailOutInstr implementation for details.
    bool hasAuxBailOut : 1;
    IRKind m_kind;      // concrete subclass discriminator (set by Init)
    uint32 m_number;    // instruction number / byte code offset bookkeeping
    Opnd * m_dst;
    Opnd * m_src1;
    Opnd * m_src2;
    void Init(Js::OpCode opcode, IRKind kind, Func * func);
    IR::Instr * CloneInstr() const;
};
// Pseudo-instruction recording which byte-code-visible symbols are still
// upward exposed at this point even though optimized code no longer uses them
// directly; the recorded syms must be kept alive for bailout.
class ByteCodeUsesInstr : public Instr
{
private:
    // Sym ids this instruction keeps alive; exposed read-only via the getter.
    BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed;
public:
    static ByteCodeUsesInstr * New(IR::Instr * originalBytecodeInstr);
    static ByteCodeUsesInstr * New(Func * containingFunction, uint32 offset);

    const BVSparse<JitArenaAllocator> * GetByteCodeUpwardExposedUsed() const;

    PropertySym * propertySymUse;

    // In the case of instances where you would like to add a ByteCodeUses to some sym,
    // which doesn't have an operand associated with it (like a block closure sym), use
    // this to set it without needing to pass the check for JIT-Optimized registers.
    void SetNonOpndSymbol(uint symId);

    // In cases where the operand you're working on may be changed between when you get
    // access to it and when you determine that you can set it in the ByteCodeUsesInstr
    // set method, cache the values and use this caller.
    void SetRemovedOpndSymbol(bool isJITOptimizedReg, uint symId);

    void Set(IR::Opnd * originalOperand);
    void Clear(uint symId);

    // Set the byteCodeUpwardExposedUsed bitvector on a new ByteCodeUses instruction.
    void SetBV(BVSparse<JitArenaAllocator>* newbv);

    // If possible, we want to aggregate with subsequent ByteCodeUses Instructions, so
    // that we can do some optimizations in other places where we can simplify args in
    // a compare, but still need to generate them for bailouts. Without this, we cause
    // problems because we end up with an instruction losing atomicity in terms of its
    // bytecode use and generation lifetimes.
    void AggregateFollowingByteCodeUses();
private:
    // Merges another ByteCodeUsesInstr's recorded uses into this one.
    void Aggregate(ByteCodeUsesInstr * byteCodeUsesInstr);
};
// Instr variant carrying the profile ids and flags needed when code is jitted
// with profiling instrumentation (see IsJitProfilingInstr/AsJitProfilingInstr).
class JitProfilingInstr : public Instr
{
public:
    static JitProfilingInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func * func);
    static JitProfilingInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func * func);

    JitProfilingInstr* CloneJitProfiling() const;
    JitProfilingInstr* CopyJitProfiling() const;

    Js::ProfileId profileId;
    Js::ProfileId arrayProfileId;
    // Which member is active depends on the instruction; presumably
    // loopNumber goes with isLoopHelper and inlineCacheIndex with field
    // accesses — NOTE(review): confirm against the emitters.
    union
    {
        Js::InlineCacheIndex inlineCacheIndex;
        uint loopNumber;
    };
    bool isProfiledReturnCall : 1;
    bool isBeginSwitch : 1;
    bool isNewArray : 1;
    bool isLoopHelper : 1;
};
// Instr variant carrying dynamic profile data for the instruction
// (see IsProfiledInstr/AsProfiledInstr).
class ProfiledInstr : public Instr
{
protected:
    ProfiledInstr(bool hasBailOutInfo = false) : Instr(hasBailOutInfo) {}
public:
    static ProfiledInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func * func);
    static ProfiledInstr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func * func);

    ProfiledInstr *CloneProfiledInstr() const;
    ProfiledInstr *CopyProfiledInstr() const;

    // Per-opcode profile payload; only one interpretation is meaningful for a
    // given instruction. FldInfo()/LdLenInfo() reinterpret the private raw
    // storage in place rather than adding more union alternatives.
    union
    {
    public:
        uint profileId;
        const Js::LdElemInfo * ldElemInfo;
        const Js::StElemInfo * stElemInfo;
    private:
        struct
        {
            Js::FldInfo::TSize fldInfoData;
            Js::LdLenInfo::TSize ldLenInfoData;
        };
    public:
        Js::FldInfo &FldInfo()
        {
            return reinterpret_cast<Js::FldInfo &>(fldInfoData);
        }
        Js::LdLenInfo & LdLenInfo()
        {
            return reinterpret_cast<Js::LdLenInfo &>(ldLenInfoData);
        }
    } u;

    // Sentinel meaning "no profile id".
    static const uint InvalidProfileId = (uint)-1;
};
#if TARGET_64
// Ensure that the size of the union doesn't exceed the size of a 64 bit pointer.
CompileAssert(sizeof(ProfiledInstr::u) <= sizeof(void*));
#endif
  633. ///---------------------------------------------------------------------------
  634. ///
  635. /// class EntryInstr
  636. ///
  637. ///---------------------------------------------------------------------------
// Function-entry marker instruction; adds no state beyond Instr.
class EntryInstr : public Instr
{
public:
    static EntryInstr * New(Js::OpCode opcode, Func *func);
};
  643. ///---------------------------------------------------------------------------
  644. ///
  645. /// class ExitInstr
  646. ///
  647. ///---------------------------------------------------------------------------
// Function-exit marker instruction; adds no state beyond Instr.
class ExitInstr : public Instr
{
public:
    static ExitInstr * New(Js::OpCode opcode, Func *func);
};
  653. ///---------------------------------------------------------------------------
  654. ///
  655. /// class LabelInstr
  656. ///
  657. ///---------------------------------------------------------------------------
  658. class LabelInstr : public Instr
  659. {
  660. friend class BranchInstr;
  661. friend class IRBuilder;
  662. friend class IRBuilderAsmJs;
  663. friend class MultiBranchInstr;
  664. public:
  665. LabelInstr(JitArenaAllocator * allocator) : Instr(), labelRefs(allocator), m_isLoopTop(false), m_block(nullptr), isOpHelper(false),
  666. m_hasNonBranchRef(false), m_region(nullptr), m_loweredBasicBlock(nullptr), m_isDataLabel(false), m_isForInExit(false)
  667. #if DBG
  668. , m_noHelperAssert(false)
  669. , m_name(nullptr)
  670. #endif
  671. {
  672. #if DBG_DUMP
  673. m_id = 0;
  674. #endif
  675. m_pc.pc = nullptr;
  676. }
  677. static LabelInstr * New(Js::OpCode opcode, Func *func, bool isOpHelper = false);
  678. public:
  679. SListCounted<BranchInstr *> labelRefs;
  680. Lifetime ** m_regContent;
  681. BYTE m_isLoopTop : 1;
  682. BYTE isOpHelper : 1;
  683. BYTE m_hasNonBranchRef : 1;
  684. BYTE m_isDataLabel : 1;
  685. // Indicate whether the label is the target of a for in loop exit (BrOnEmpty or BrOnNotEmpty)
  686. // It is used by Inliner to track inlinee for in loop level to assign stack allocated for in
  687. // This bit has unknown validity outside of inliner
  688. BYTE m_isForInExit : 1;
  689. #if DBG
  690. BYTE m_noHelperAssert : 1;
  691. #endif
  692. unsigned int m_id;
  693. LoweredBasicBlock* m_loweredBasicBlock;
  694. #if DBG
  695. const char16* m_name;
  696. #endif
  697. private:
  698. union labelLocation
  699. {
  700. BYTE * pc; // Used by encoder and is the real pc offset
  701. uintptr_t offset; // Used by preEncoder and is an estimation pc offset, not accurate
  702. } m_pc;
  703. BasicBlock * m_block;
  704. Loop * m_loop;
  705. Region * m_region;
  706. public:
  707. inline void SetPC(BYTE * pc);
  708. inline BYTE * GetPC(void) const;
  709. inline void SetOffset(uintptr_t offset);
  710. inline void ResetOffset(uintptr_t offset);
  711. inline uintptr_t GetOffset(void) const;
  712. inline void SetBasicBlock(BasicBlock * block);
  713. inline BasicBlock * GetBasicBlock(void) const;
  714. inline void SetLoop(Loop *loop);
  715. inline Loop * GetLoop(void) const;
  716. inline void UnlinkBasicBlock(void);
  717. inline void SetRegion(Region *);
  718. inline Region * GetRegion(void) const;
  719. inline BOOL IsUnreferenced(void) const;
  720. inline BOOL IsGeneratorEpilogueLabel(void) const;
  721. LabelInstr * CloneLabel(BOOL fCreate);
  722. #if DBG_DUMP
  723. virtual void Dump(IRDumpFlags flags) override;
  724. #endif
  725. private:
  726. void AddLabelRef(BranchInstr *branchRef);
  727. void RemoveLabelRef(BranchInstr *branchRef);
  728. protected:
  729. void Init(Js::OpCode opcode, IRKind kind, Func *func, bool isOpHelper);
  730. };
// Debug helpers to attach a human-readable name to a label for IR dumps.
// LABELNAMESET uses an explicit string; LABELNAME stringizes the label
// variable's own identifier. Both compile away to nothing in non-DBG builds.
#if DBG
#define LABELNAMESET(label, name) do { label->m_name = _u(name); } while(false)
#define LABELNAME(label) do { label->m_name = _u(#label); } while(false)
#else
#define LABELNAMESET(label, name)
#define LABELNAME(label)
#endif
// LabelInstr carrying profile data; field names suggest it heads a loop
// (implicit-call flags and loop flags gathered by the profiler) — confirm at
// the New() call sites.
class ProfiledLabelInstr: public LabelInstr
{
private:
    ProfiledLabelInstr(JitArenaAllocator * allocator);
public:
    static ProfiledLabelInstr * New(Js::OpCode opcode, Func *func, Js::ImplicitCallFlags flags, Js::LoopFlags loopFlags);

    Js::ImplicitCallFlags loopImplicitCallFlags;
    Js::LoopFlags loopFlags;
#if DBG_DUMP
    uint loopNum;   // loop number, for dumps only
#endif
};
  750. ///---------------------------------------------------------------------------
  751. ///
  752. /// class BranchInstr
  753. ///
  754. ///---------------------------------------------------------------------------
  755. class BranchInstr : public Instr
  756. {
  757. public:
  758. bool m_isAirlock : 1;
  759. bool m_isSwitchBr : 1;
  760. bool m_isOrphanedLeave : 1; // A Leave in a loop body in a try, most likely generated because of a return statement.
  761. bool m_areCmpRegisterFlagsUsedLater : 1; // Indicate that this branch is not the only instr using the register flags set by cmp
  762. bool m_brFinallyToEarlyExit : 1; // BrOnException from finally to early exit, can be turned into BrOnNoException on break blocks removal
  763. #if DBG
  764. bool m_isMultiBranch;
  765. bool m_isHelperToNonHelperBranch;
  766. bool m_leaveConvToBr;
  767. #endif
  768. public:
  769. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Func *func);
  770. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Opnd *srcOpnd, Func *func);
  771. static BranchInstr * New(Js::OpCode opcode, Opnd* destOpnd, LabelInstr * branchTarget, Opnd *srcOpnd, Func *func);
  772. static BranchInstr * New(Js::OpCode opcode, LabelInstr * branchTarget, Opnd *src1Opnd, Opnd *src2Opnd, Func *func);
  773. BranchInstr(bool hasBailOutInfo = false) : Instr(hasBailOutInfo), m_branchTarget(nullptr), m_isAirlock(false), m_isSwitchBr(false), m_isOrphanedLeave(false), m_areCmpRegisterFlagsUsedLater(false), m_brFinallyToEarlyExit(false)
  774. {
  775. #if DBG
  776. m_isMultiBranch = false;
  777. m_isHelperToNonHelperBranch = false;
  778. m_leaveConvToBr = false;
  779. #endif
  780. }
  781. void SetTarget(LabelInstr *labelInstr); // Only used for non-multi-branch
  782. bool ReplaceTarget(LabelInstr * oldLabelInstr, LabelInstr * newLabelInstr);
  783. void ClearTarget();
  784. LabelInstr * GetTarget() const;
  785. bool IsConditional() const;
  786. bool IsUnconditional() const;
  787. void Invert();
  788. void RetargetClonedBranch();
  789. BranchInstr * CloneBranchInstr() const;
  790. bool IsMultiBranch() const;
  791. MultiBranchInstr * AsMultiBrInstr();
  792. void SetByteCodeReg(Js::RegSlot reg) { m_byteCodeReg = reg; }
  793. Js::RegSlot GetByteCodeReg() { return m_byteCodeReg; }
  794. bool HasByteCodeReg() { return m_byteCodeReg != Js::Constants::NoRegister; }
  795. bool IsLoopTail(Func * func);
  796. public:
  797. Lifetime ** m_regContent;
  798. private:
  799. LabelInstr * m_branchTarget;
  800. Js::RegSlot m_byteCodeReg;
  801. };
///---------------------------------------------------------------------------
///
/// class MultiBranchInstr
///
/// A branch with multiple possible targets (switch-style), backed either by
/// a string-keyed dictionary or by a dense jump table, plus a default target.
///
///---------------------------------------------------------------------------
class MultiBranchInstr : public BranchInstr
{
private:
    /*
    The value field in the dictionary has different semantics at different points of time. Hence the 'value' field is implemented as a void *.
    In IR Layer:
        Offset is stored in the dictionary until we generate the Labels in InsertLabels().
        LabelInstr is stored in the dictionary, after we generate the LabelInstrs.
    In Encoder:
        After the fixup, actual machine address corresponding to the LabelInstr is stored as the 'value'.
    */
private:
    typedef JITJavascriptString* TBranchKey;
    typedef Js::BranchDictionaryWrapper<TBranchKey> BranchDictionaryWrapper;
    typedef BranchDictionaryWrapper::BranchDictionary BranchDictionary;
    typedef BranchJumpTableWrapper BranchJumpTable;

    void * m_branchTargets; // can point to a dictionary or a jump table

public:
    static MultiBranchInstr * New(Js::OpCode opcode, IR::Opnd * srcOpnd, Func *func);
    static MultiBranchInstr * New(Js::OpCode opcode, Func *func);

    // Storage strategy for m_branchTargets (chosen at
    // CreateBranchTargetsAndSetDefaultTarget time).
    enum Kind
    {
        IntJumpTable,
        StrDictionary,
        SingleCharStrJumpTable,
    };
    Kind m_kind;

    IntConstType m_baseCaseValue;   // first case value covered by the jump table
    IntConstType m_lastCaseValue;   // last case value covered by the jump table (inclusive)

    MultiBranchInstr() :
        m_branchTargets(nullptr),
        m_kind(IntJumpTable),
        m_baseCaseValue(0),
        m_lastCaseValue(0)
    {
#if DBG
        m_isMultiBranch = true;
#endif
    }

    void AddtoDictionary(uint32 offset, TBranchKey key, void* remoteVar);
    void AddtoJumpTable(uint32 offset, uint32 jmpIndex);
    void CreateBranchTargetsAndSetDefaultTarget(int dictionarySize, Kind kind, uint defaultTargetOffset);
    void ChangeLabelRef(LabelInstr * oldTarget, LabelInstr * newTarget);
    bool ReplaceTarget(IR::LabelInstr * oldLabelInstr, IR::LabelInstr * newLabelInstr);
    void FixMultiBrDefaultTarget(uint32 targetOffset);
    void ClearTarget();
    BranchDictionaryWrapper * GetBranchDictionary();
    BranchJumpTable * GetBranchJumpTable();

    ///---------------------------------------------------------------------------
    ///
    /// template MapMultiBrLabels
    /// - Maps through the branchTargets dictionary for all the labelInstrs.
    ///   Only valid while the stored 'value's are LabelInstr* (see the phase
    ///   comment at the top of this class).
    ///---------------------------------------------------------------------------
    template<class Fn>
    void MapMultiBrLabels(Fn fn)
    {
        MapMultiBrTargetByAddress([fn](void ** value) -> void
        {
            fn((LabelInstr*) *value);
        });
    }

    ///---------------------------------------------------------------------------
    ///
    /// template MapUniqueMultiBrLabels
    /// - Maps through the branchTargets dictionary for all unique labelInstrs.
    ///   Deduplicates using a bit-vector keyed by LabelInstr::m_id.
    ///---------------------------------------------------------------------------
    template<class Fn>
    void MapUniqueMultiBrLabels(Fn fn)
    {
        BVSparse<JitArenaAllocator> visitedTargets(m_func->m_alloc);
        MapMultiBrLabels([&](IR::LabelInstr *const targetLabel)
        {
            if(visitedTargets.Test(targetLabel->m_id))
            {
                return;
            }
            visitedTargets.Set(targetLabel->m_id);
            fn(targetLabel);
        });
    }

    ///--------------------------------------------------------------------------------------------
    ///
    /// template UpdateMultiBrTargetOffsets
    /// - Maps through the branchTargets dictionary for updating the target offset by returning the target offset.
    ///   Only valid while the stored 'value's are integral offsets (IR-layer phase).
    ///--------------------------------------------------------------------------------------------
    template<class Fn>
    void UpdateMultiBrTargetOffsets(Fn fn)
    {
        MapMultiBrTargetByAddress([fn](void ** value) -> void
        {
            *value = (void*)fn(::Math::PointerCastToIntegral<uint32>(*value));
        });
    }

    ///--------------------------------------------------------------------------------------------
    ///
    /// template UpdateMultiBrLabels
    /// - Maps through the branchDictionary, replacing each stored LabelInstr
    ///   with whatever fn returns for it.
    ///--------------------------------------------------------------------------------------------
    template<class Fn>
    void UpdateMultiBrLabels(Fn fn)
    {
        MapMultiBrTargetByAddress([fn](void ** value) -> void
        {
            IR::LabelInstr * oldLabelInstr = (LabelInstr*)*value;
            IR::LabelInstr * newLabelInstr = fn(oldLabelInstr);
            *value = (void*)newLabelInstr;
        });
    }

    ///-------------------------------------------------------------------------------------------------------------
    ///
    /// template MapMultiBrTargetByAddress
    /// - Maps through the branchDictionary accessing the address of the 'value'.
    ///   Visits every case target, then the address of the default target last.
    ///   No-op when no targets have been created yet.
    ///-------------------------------------------------------------------------------------------------------------
    template<class Fn>
    void MapMultiBrTargetByAddress(Fn fn)
    {
        if(!m_branchTargets)
        {
            return;
        }
        void ** defaultTarget = nullptr;
        switch (m_kind)
        {
        case StrDictionary:
        {
            BranchDictionary& branchDictionary = GetBranchDictionary()->dictionary;
            defaultTarget = &(((MultiBranchInstr::BranchDictionaryWrapper*)(m_branchTargets))->defaultTarget);
            branchDictionary.MapAddress([fn](TBranchKey key, void ** value)
            {
                fn(value);
            });
            break;
        }
        case IntJumpTable:
        case SingleCharStrJumpTable:
        {
            // Jump table is dense over [m_baseCaseValue, m_lastCaseValue].
            void ** branchJumpTable = GetBranchJumpTable()->jmpTable;
            defaultTarget = &(GetBranchJumpTable()->defaultTarget);
            for (IntConstType i = m_baseCaseValue; i <= m_lastCaseValue; i++)
            {
                fn(&branchJumpTable[i - m_baseCaseValue]);
            }
            break;
        }
        default:
            // NOTE(review): if Assert compiles out (release) an unknown m_kind
            // falls through and passes nullptr to fn below — relies on m_kind
            // always being one of the three enumerators. Confirm.
            Assert(false);
        };
        fn(defaultTarget);
    }
};
  957. ///---------------------------------------------------------------------------
  958. ///
  959. /// class PragmaInstr
  960. ///
  961. ///---------------------------------------------------------------------------
  962. class PragmaInstr : public Instr
  963. {
  964. public:
  965. uint32 m_statementIndex;
  966. uint32 m_offsetInBuffer; // offset in the binary code buffer
  967. public:
  968. static PragmaInstr * New(Js::OpCode opcode, uint32 index, Func *func);
  969. PragmaInstr() : Instr(), m_statementIndex(0)
  970. {
  971. }
  972. #if DBG_DUMP
  973. virtual void Dump(IRDumpFlags flags) override;
  974. #endif
  975. #if DBG_DUMP | defined(VTUNE_PROFILING)
  976. void Record(uint32 nativeBufferOffset);
  977. #endif
  978. PragmaInstr * ClonePragma();
  979. PragmaInstr * CopyPragma();
  980. };
// Label marking the bail-in point associated with a generator Yield (the ctor
// asserts the paired instr's opcode is Js::OpCode::Yield). capturedValues and
// upwardExposedUses record state around the yield — presumably what must be
// restored on resume; confirm in the generator bail-in lowering code.
class GeneratorBailInInstr : public LabelInstr
{
private:
    GeneratorBailInInstr(JitArenaAllocator* allocator, IR::Instr* yieldInstr) :
        LabelInstr(allocator),
        yieldInstr(yieldInstr),
        upwardExposedUses(allocator)
    {
        Assert(yieldInstr != nullptr && yieldInstr->m_opcode == Js::OpCode::Yield);
    }

public:
    IR::Instr* yieldInstr;  // the Yield this bail-in label belongs to; never null
    CapturedValues capturedValues;
    BVSparse<JitArenaAllocator> upwardExposedUses;

    static GeneratorBailInInstr* New(IR::Instr* yieldInstr, Func* func);
};
// Mixin template adding bailout information to any instr base (Instr,
// ProfiledInstr, BranchInstr — see the typedefs below).
template <typename InstrType>
class BailOutInstrTemplate : public InstrType
{
private:
    // Passes hasBailOutInfo = true to the base instr ctor.
    BailOutInstrTemplate() : InstrType(true) {}
public:
    static BailOutInstrTemplate * New(Js::OpCode opcode, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, IR::Opnd *src1, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, IR::Opnd *dst, IR::Opnd *src1, IR::Opnd *src2, BailOutKind kind, IR::Instr * bailOutTarget, Func * func);
    static BailOutInstrTemplate * New(Js::OpCode opcode, BailOutKind kind, BailOutInfo * bailOutInfo, Func * func);

    BailOutInstrTemplate * CloneBailOut() const;

    // NOTE(review): these fields are not initialized by the private ctor —
    // presumably the New() factories always set them; confirm in IR.cpp.
    BailOutInfo * bailOutInfo;
    BailOutKind bailOutKind;
    // Auxiliary bailout kind.
    // This is kind of a decoration on top of main bail out kind and is not used for runtime bail out logic (in globopt, etc).
    // It's added when we convert instr to bailout instr for which there is already bailout,
    // and is not used/just preserved until lowerer, in the beginning of lowerer we split it out.
    // Currently used for debugger bailout when it is shared with main bailout.
    BailOutKind auxBailOutKind;
};

typedef BailOutInstrTemplate<Instr> BailOutInstr;
typedef BailOutInstrTemplate<ProfiledInstr> ProfiledBailOutInstr;
typedef BailOutInstrTemplate<BranchInstr> BranchBailOutInstr;
//
// FOREACH_INSTR iterators
//

// Debug-only consistency checks for the non-EDITING iterators: remember the
// previously visited instr and assert its link still points at the current
// one, catching list mutation mid-iteration.
// NOTE(review): these use `#ifdef DBG` while the rest of this file uses
// `#if DBG` — equivalent only if DBG is never defined as 0; confirm.
#ifdef DBG
# define INIT_PREV IR::Instr * __prevInstrCheck = nullptr
# define CHECK_PREV(instr)\
    AssertMsg(__prevInstrCheck == nullptr || __prevInstrCheck->m_next == instr, \
    "Modifying instr list but not using EDITING iterator!"); \
    __prevInstrCheck = instr;
#else
# define INIT_PREV
# define CHECK_PREV(instr)
#endif

// Same check for backward iteration (validates m_prev links).
#ifdef DBG
# define INIT_NEXT IR::Instr * __nextInstrCheck = nullptr
# define CHECK_NEXT(instr)\
    AssertMsg(__nextInstrCheck == nullptr || __nextInstrCheck->m_prev == instr, \
    "Modifying instr list but not using EDITING iterator!"); \
    __nextInstrCheck = instr;
#else
# define INIT_NEXT
# define CHECK_NEXT(instr)
#endif
// Iterate [instrList, instrLast] inclusive, following m_next; instrLast may be
// nullptr to walk to the end of the list. The list must not be modified inside
// the loop body (CHECK_PREV asserts this in debug builds) — use the EDITING
// variants for that.
#define FOREACH_INSTR_IN_RANGE(instr, instrList, instrLast)\
    {\
    INIT_PREV;\
    IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_next : nullptr; \
    for ( IR::Instr *instr = instrList;\
        instr != instr##Stop;\
        instr = instr->m_next)\
    {\
        CHECK_PREV(instr);
#define NEXT_INSTR_IN_RANGE }}

// As above, but skips instrs for which IsRealInstr() is false.
#define FOREACH_REAL_INSTR_IN_RANGE(instr, instrList, instrLast)\
    FOREACH_INSTR_IN_RANGE(instr, instrList, instrLast)\
    {\
        if (!instr->IsRealInstr())\
        {\
            continue;\
        }
#define NEXT_REAL_INSTR_IN_RANGE NEXT_INSTR_IN_RANGE }

// Backward iteration: follow m_prev from instrList down to instrLast inclusive.
#define FOREACH_INSTR_BACKWARD_IN_RANGE(instr, instrList, instrLast)\
    {\
    INIT_NEXT;\
    IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_prev : nullptr; \
    for ( IR::Instr *instr = instrList;\
        instr != instr##Stop;\
        instr = instr->m_prev)\
    {\
        CHECK_NEXT(instr);
#define NEXT_INSTR_BACKWARD_IN_RANGE }}

// EDITING variant: caches m_next before the body runs, so the current instr
// may be unlinked or replaced without breaking the traversal.
#define FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
    {\
    IR::Instr * instrNext;\
    IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_next : nullptr; \
    for ( IR::Instr *instr = instrList;\
        instr != instr##Stop;\
        instr = instrNext)\
    {\
        instrNext = instr->m_next;
#define NEXT_INSTR_EDITING_IN_RANGE }}

// EDITING + real-instr filter.
#define FOREACH_REAL_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
    FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, instrLast)\
    {\
        if (!instr->IsRealInstr())\
        {\
            continue;\
        }
#define NEXT_REAL_INSTR_EDITING_IN_RANGE NEXT_INSTR_EDITING_IN_RANGE }

// Backward EDITING variant: caches m_prev instead.
#define FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrList, instrLast)\
    {\
    IR::Instr * instrPrev;\
    IR::Instr *instr##Stop = instrLast ? ((IR::Instr*)instrLast)->m_prev : nullptr; \
    for ( IR::Instr *instr = instrList;\
        instr != instr##Stop;\
        instr = instrPrev)\
    {\
        instrPrev = instr->m_prev;
#define NEXT_INSTR_BACKWARD_EDITING_IN_RANGE }}
  1100. #define FOREACH_INSTR_EDITING(instr, instrNext, instrList)\
  1101. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  1102. #define NEXT_INSTR_EDITING NEXT_INSTR_EDITING_IN_RANGE
  1103. #define FOREACH_INSTR(instr, instrList)\
  1104. FOREACH_INSTR_IN_RANGE(instr, instrList, nullptr)
  1105. #define NEXT_INSTR NEXT_INSTR_IN_RANGE
  1106. #define FOREACH_REAL_INSTR(instr, instrList)\
  1107. FOREACH_REAL_INSTR_IN_RANGE(instr, instrList, nullptr)
  1108. #define NEXT_REAL_INSTR NEXT_REAL_INSTR_IN_RANGE
  1109. #define FOREACH_INSTR_BACKWARD(instr, instrList)\
  1110. FOREACH_INSTR_BACKWARD_IN_RANGE(instr, instrList, nullptr)
  1111. #define NEXT_INSTR_BACKWARD NEXT_INSTR_BACKWARD_IN_RANGE
  1112. #define FOREACH_INSTR_EDITING(instr, instrNext, instrList)\
  1113. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  1114. #define NEXT_INSTR_EDITING NEXT_INSTR_EDITING_IN_RANGE
  1115. #define FOREACH_REAL_INSTR_EDITING(instr, instrNext, instrList)\
  1116. FOREACH_REAL_INSTR_EDITING_IN_RANGE(instr, instrNext, instrList, nullptr)
  1117. #define NEXT_REAL_INSTR_EDITING NEXT_REAL_INSTR_EDITING_IN_RANGE
  1118. #define FOREACH_INSTR_BACKWARD_EDITING(instr, instrPrev, instrList)\
  1119. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrList, nullptr)
  1120. #define NEXT_INSTR_BACKWARD_EDITING NEXT_INSTR_BACKWARD_EDITING_IN_RANGE
  1121. #define FOREACH_INSTR_IN_FUNC(instr, func)\
  1122. FOREACH_INSTR(instr, func->m_headInstr)
  1123. #define NEXT_INSTR_IN_FUNC NEXT_INSTR
  1124. #define FOREACH_REAL_INSTR_IN_FUNC(instr, func)\
  1125. FOREACH_REAL_INSTR(instr, func->m_headInstr)
  1126. #define NEXT_REAL_INSTR_IN_FUNC NEXT_REAL_INSTR
  1127. #define FOREACH_INSTR_IN_FUNC_BACKWARD(instr, func)\
  1128. FOREACH_INSTR_BACKWARD(instr, func->m_tailInstr)
  1129. #define NEXT_INSTR_IN_FUNC_BACKWARD NEXT_INSTR_BACKWARD
  1130. #define FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, func)\
  1131. FOREACH_INSTR_EDITING(instr, instrNext, func->m_headInstr)
  1132. #define NEXT_INSTR_IN_FUNC_EDITING NEXT_INSTR_EDITING
  1133. #define FOREACH_REAL_INSTR_IN_FUNC_EDITING(instr, instrNext, func)\
  1134. FOREACH_REAL_INSTR_EDITING(instr, instrNext, func->m_headInstr)
  1135. #define NEXT_REAL_INSTR_IN_FUNC_EDITING NEXT_REAL_INSTR_EDITING
  1136. #define FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)\
  1137. FOREACH_INSTR_BACKWARD_EDITING(instr, instrPrev, func->m_tailInstr)
  1138. #define NEXT_INSTR_IN_FUNC_BACKWARD_EDITING NEXT_INSTR_BACKWARD_EDITING
  1139. #define FOREACH_INSTR_IN_BLOCK(instr, block)\
  1140. FOREACH_INSTR_IN_RANGE(instr, block->GetFirstInstr(), block->GetLastInstr())
  1141. #define NEXT_INSTR_IN_BLOCK\
  1142. NEXT_INSTR_IN_RANGE
  1143. #define FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)\
  1144. FOREACH_INSTR_EDITING_IN_RANGE(instr, instrNext, block->GetFirstInstr(), block->GetLastInstr())
  1145. #define NEXT_INSTR_IN_BLOCK_EDITING \
  1146. NEXT_INSTR_EDITING_IN_RANGE
  1147. #define FOREACH_INSTR_BACKWARD_IN_BLOCK(instr, block)\
  1148. FOREACH_INSTR_BACKWARD_IN_RANGE(instr, block->GetLastInstr(), block->GetFirstInstr())
  1149. #define NEXT_INSTR_BACKWARD_IN_BLOCK\
  1150. NEXT_INSTR_BACKWARD_IN_RANGE
  1151. #define FOREACH_INSTR_BACKWARD_IN_BLOCK_EDITING(instr, instrPrev, block)\
  1152. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, block->GetLastInstr(), block->GetFirstInstr())
  1153. #define NEXT_INSTR_BACKWARD_IN_BLOCK_EDITING\
  1154. NEXT_INSTR_BACKWARD_EDITING_IN_RANGE
} // namespace IR

// Arena-allocated dictionary mapping one instr to another, declared at global
// scope. NOTE(review): the key/value relationship (e.g. original -> cloned
// instr) is determined by the use sites — confirm there.
typedef JsUtil::BaseDictionary<IR::Instr*, IR::Instr*, JitArenaAllocator, PrimeSizePolicy> InstrMap;