  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. ///---------------------------------------------------------------------------
  7. ///
  8. /// class IRBuilder
  9. ///
  10. /// To generate IR from the Jn bytecodes.
  11. ///
  12. ///---------------------------------------------------------------------------
  13. class BranchReloc
  14. {
  15. public:
  16. BranchReloc(IR::BranchInstr * instr, uint32 branchOffset, uint32 offs)
  17. : branchInstr(instr), branchOffset(branchOffset), offset(offs), isNotBackEdge(false)
  18. { }
  19. private:
  20. IR::BranchInstr * branchInstr;
  21. uint32 offset;
  22. bool isNotBackEdge;
  23. uint32 branchOffset;
  24. public:
  25. IR::BranchInstr * GetBranchInstr()
  26. {
  27. return this->branchInstr;
  28. }
  29. uint32 GetOffset() const
  30. {
  31. return this->offset;
  32. }
  33. uint32 GetBranchOffset() const
  34. {
  35. return this->branchOffset;
  36. }
  37. bool IsNotBackEdge() const
  38. {
  39. return this->isNotBackEdge;
  40. }
  41. void SetNotBackEdge()
  42. {
  43. this->isNotBackEdge = true;
  44. }
  45. };
class IRBuilder
{
    // Adapter that lets the shared SwitchIRBuilder call back into this
    // builder's private instruction/branch-reloc plumbing.
    friend struct IRBuilderSwitchAdapter;

public:
    // Sets up per-function build state. If the function body contains loops,
    // allocates (from the func's allocator) one saved implicit-call-flags
    // operand slot per loop; that array is released in the destructor.
    IRBuilder(Func * func)
        : m_func(func)
        , m_argsOnStack(0)
        , m_loopBodyRetIPSym(nullptr)
        , m_ldSlots(nullptr)
        , m_loopCounterSym(nullptr)
        , callTreeHasSomeProfileInfo(false)
        , m_saveLoopImplicitCallFlags(nullptr)
        , catchOffsetStack(nullptr)
        , m_switchAdapter(this)
        , m_switchBuilder(&m_switchAdapter)
        , m_stackFuncPtrSym(nullptr)
        , m_loopBodyForInEnumeratorArrayOpnd(nullptr)
#if DBG
        , m_callsOnStack(0)
        , m_usedAsTemp(nullptr)
#endif
#ifdef BAILOUT_INJECTION
        , seenLdStackArgPtr(false)
        , expectApplyArg(false)
        , seenProfiledBeginSwitch(false)
#endif
#ifdef BYTECODE_BRANCH_ISLAND
        , longBranchMap(nullptr)
#endif
    {
        auto loopCount = func->GetJITFunctionBody()->GetLoopCount();
        if (loopCount > 0) {
            m_saveLoopImplicitCallFlags = (IR::Opnd**)func->m_alloc->Alloc(sizeof(IR::Opnd*) * loopCount);
#if DBG
            // Debug builds zero the array so unfilled slots read as null
            // rather than uninitialized allocator memory.
            memset(m_saveLoopImplicitCallFlags, 0, sizeof(IR::Opnd*) * loopCount);
#endif
        }
        // Note: use original byte code without debugging probes, so that we don't jit BPs inserted by the user.
        func->m_workItem->InitializeReader(&m_jnReader, &m_statementReader, func->m_alloc);
    };

    // Frees the per-loop implicit-call-flags array allocated in the ctor.
    // The assert checks the ctor/dtor invariant: a function with loops must
    // have the array (allocation order mirrors construction).
    ~IRBuilder() {
        Assert(m_func->GetJITFunctionBody()->GetLoopCount() == 0 || m_saveLoopImplicitCallFlags);
        if (m_saveLoopImplicitCallFlags) {
            m_func->m_alloc->Free(m_saveLoopImplicitCallFlags, sizeof(IR::Opnd*) * m_func->GetJITFunctionBody()->GetLoopCount());
        }
    }

    // Main driver: walks the byte code via m_jnReader and emits IR into m_func.
    void Build();
    // Inserts label instructions at recorded branch-target offsets.
    void InsertLabels();
    // Creates (or finds) the label for branchInstr's target; may adjust
    // `offset` (passed by reference) to the actual label position.
    IR::LabelInstr * CreateLabel(IR::BranchInstr * branchInstr, uint& offset);

private:
    // --- Instruction emission ----------------------------------------------
    void InsertInstr(IR::Instr *instr, IR::Instr* insertBeforeInstr);
    // Appends instr to the instruction stream, tagging it with the byte-code
    // offset (used for bailout/debugging maps).
    void AddInstr(IR::Instr *instr, uint32 offset);
    // Appends a branch and records a BranchReloc so the target can be patched
    // once labels exist.
    BranchReloc * AddBranchInstr(IR::BranchInstr *instr, uint32 offset, uint32 targetOffset);
#ifdef BYTECODE_BRANCH_ISLAND
    // Long-branch (branch island) handling for byte code whose jump targets
    // exceed the short branch encoding range.
    void ConsumeBranchIsland();
    void EnsureConsumeBranchIsland();
    uint ResolveVirtualLongBranch(IR::BranchInstr * branchInstr, uint offset);
#endif
    BranchReloc * CreateRelocRecord(IR::BranchInstr * branchInstr, uint32 offset, uint32 targetOffset);
    void BuildGeneratorPreamble();
    void LoadNativeCodeData();
    void BuildConstantLoads();
    void BuildImplicitArgIns();

    // Declares one Build##layout handler per byte-code layout listed in
    // LayoutTypes.h. The WMS ("with multi-size") layouts are templatized on
    // the operand SizePolicy (small/medium/large operand encodings).
#define LAYOUT_TYPE(layout) \
    void Build##layout(Js::OpCode newOpcode, uint32 offset);
#define LAYOUT_TYPE_WMS(layout) \
    template <typename SizePolicy> void Build##layout(Js::OpCode newOpcode, uint32 offset);
#include "ByteCode/LayoutTypes.h"

    // --- Per-layout builders (register operand forms) ----------------------
    void BuildReg1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0);
    void BuildReg2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::RegSlot R1, uint32 nextOffset);
    void BuildProfiledReg2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot srcRegSlot, Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex = Js::Constants::NoInlineCacheIndex);
    void BuildProfiledReg2WithICIndex(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot srcRegSlot, Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex);
    void BuildReg3(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
        Js::RegSlot src2RegSlot, Js::ProfileId profileId);
    void BuildReg3C(Js::OpCode newOpCode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
        Js::RegSlot src2RegSlot, Js::CacheId inlineCacheIndex);
    void BuildReg4(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
        Js::RegSlot src2RegSlot, Js::RegSlot src3RegSlot);
    void BuildReg2B1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot srcRegSlot, byte index);
    void BuildReg3B1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
        Js::RegSlot src2RegSlot, uint8 index);
    void BuildReg5(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
        Js::RegSlot src2RegSlot, Js::RegSlot src3RegSlot, Js::RegSlot src4RegSlot);
    void BuildUnsigned1(Js::OpCode newOpcode, uint32 offset, uint32 C1);
    void BuildReg1Unsigned1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, int32 C1);
    void BuildProfiledReg1Unsigned1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, int32 C1, Js::ProfileId profileId);
    void BuildReg2Int1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot srcRegSlot, int32 value);

    // --- Per-layout builders (property/element/slot forms) -----------------
    void BuildElementC(Js::OpCode newOpcode, uint32 offset, Js::RegSlot fieldRegSlot, Js::RegSlot regSlot,
        Js::PropertyIdIndexType propertyIdIndex);
    void BuildElementScopedC(Js::OpCode newOpcode, uint32 offset, Js::RegSlot regSlot,
        Js::PropertyIdIndexType propertyIdIndex);
    void BuildElementSlot(Js::OpCode newOpcode, uint32 offset, Js::RegSlot fieldRegSlot, Js::RegSlot regSlot,
        int32 slotId, Js::ProfileId profileId);
    void BuildElementSlotI1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot regSlot,
        int32 slotId, Js::ProfileId profileId);
    void BuildElementSlotI2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot regSlot,
        int32 slotId1, int32 slotId2, Js::ProfileId profileId);
    void BuildArgIn0(uint32 offset, Js::RegSlot R0);
    void BuildArg(Js::OpCode newOpcode, uint32 offset, Js::ArgSlot argument, Js::RegSlot srcRegSlot);
    void BuildArgIn(uint32 offset, Js::RegSlot dstRegSlot, uint16 argument);
    void BuildArgInRest();
    void BuildElementP(Js::OpCode newOpcode, uint32 offset, Js::RegSlot regSlot, Js::CacheId inlineCacheIndex);
    void BuildElementCP(Js::OpCode newOpcode, uint32 offset, Js::RegSlot instance, Js::RegSlot regSlot, Js::CacheId inlineCacheIndex);
    void BuildElementC2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot instanceSlot, Js::RegSlot instance2Slot,
        Js::RegSlot regSlot, Js::PropertyIdIndexType propertyIdIndex);
    void BuildElementScopedC2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot instance2Slot,
        Js::RegSlot regSlot, Js::PropertyIdIndexType propertyIdIndex);
    void BuildElementU(Js::OpCode newOpcode, uint32 offset, Js::RegSlot instance, Js::PropertyIdIndexType propertyIdIndex);
    void BuildElementI(Js::OpCode newOpcode, uint32 offset, Js::RegSlot baseRegSlot, Js::RegSlot indexRegSlot,
        Js::RegSlot regSlot, Js::ProfileId profileId);
    void BuildElementUnsigned1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot baseRegSlot, uint32 index, Js::RegSlot regSlot);

    // --- Call builders (profiled and unprofiled variants) ------------------
    IR::Instr * BuildCallI_Helper(Js::OpCode newOpcode, uint32 offset, Js::RegSlot Return, Js::RegSlot Function, Js::ArgSlot ArgCount,
        Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex = Js::Constants::NoInlineCacheIndex);
    IR::Instr * BuildProfiledCallI(Js::OpCode opcode, uint32 offset, Js::RegSlot returnValue, Js::RegSlot function,
        Js::ArgSlot argCount, Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex = Js::Constants::NoInlineCacheIndex);
    IR::Instr * BuildProfiledCallIExtended(Js::OpCode opcode, uint32 offset, Js::RegSlot returnValue, Js::RegSlot function,
        Js::ArgSlot argCount, Js::ProfileId profileId, Js::CallIExtendedOptions options, uint32 spreadAuxOffset);
    IR::Instr * BuildProfiledCallIWithICIndex(Js::OpCode opcode, uint32 offset, Js::RegSlot returnValue, Js::RegSlot function,
        Js::ArgSlot argCount, Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex);
    void BuildProfiledCallIExtendedFlags(Js::OpCode opcode, uint32 offset, Js::RegSlot returnValue, Js::RegSlot function,
        Js::ArgSlot argCount, Js::ProfileId profileId, Js::CallIExtendedOptions options, uint32 spreadAuxOffset);
    void BuildProfiledCallIExtendedWithICIndex(Js::OpCode opcode, uint32 offset, Js::RegSlot returnValue, Js::RegSlot function,
        Js::ArgSlot argCount, Js::ProfileId profileId, Js::CallIExtendedOptions options, uint32 spreadAuxOffset);
    void BuildProfiledCallIExtendedFlagsWithICIndex(Js::OpCode opcode, uint32 offset, Js::RegSlot returnValue, Js::RegSlot function,
        Js::ArgSlot argCount, Js::ProfileId profileId, Js::CallIExtendedOptions options, uint32 spreadAuxOffset);
    void BuildProfiled2CallI(Js::OpCode opcode, uint32 offset, Js::RegSlot returnValue, Js::RegSlot function,
        Js::ArgSlot argCount, Js::ProfileId profileId, Js::ProfileId profileId2);
    void BuildProfiled2CallIExtended(Js::OpCode opcode, uint32 offset, Js::RegSlot returnValue, Js::RegSlot function,
        Js::ArgSlot argCount, Js::ProfileId profileId, Js::ProfileId profileId2, Js::CallIExtendedOptions options, uint32 spreadAuxOffset);
    void BuildLdSpreadIndices(uint32 offset, uint32 spreadAuxOffset);
    IR::Instr * BuildCallIExtended(Js::OpCode newOpcode, uint32 offset, Js::RegSlot returnValue, Js::RegSlot function,
        Js::ArgSlot argCount, Js::CallIExtendedOptions options, uint32 spreadAuxOffset);
    void BuildCallCommon(IR::Instr *instr, StackSym *symDst, Js::ArgSlot argCount);
    void BuildRegexFromPattern(Js::RegSlot dstRegSlot, uint32 patternIndex, uint32 offset);
    void BuildClass(Js::OpCode newOpcode, uint32 offset, Js::RegSlot constructor, Js::RegSlot extends);

    // --- Branch builders ---------------------------------------------------
    void BuildBrReg1(Js::OpCode newOpcode, uint32 offset, uint targetOffset, Js::RegSlot srcRegSlot);
    void BuildBrReg2(Js::OpCode newOpcode, uint32 offset, uint targetOffset, Js::RegSlot src1RegSlot, Js::RegSlot src2RegSlot);
    void BuildBrBReturn(Js::OpCode newOpcode, uint32 offset, Js::RegSlot DestRegSlot, uint32 forInLoopLevel, uint32 targetOffset);

    // --- Operand construction helpers --------------------------------------
    IR::IndirOpnd * BuildIndirOpnd(IR::RegOpnd *baseReg, IR::RegOpnd *indexReg);
    IR::IndirOpnd * BuildIndirOpnd(IR::RegOpnd *baseReg, uint32 offset);
#if DBG_DUMP || defined(ENABLE_IR_VIEWER)
    // Debug/IR-viewer overload carries a human-readable description.
    IR::IndirOpnd * BuildIndirOpnd(IR::RegOpnd *baseReg, uint32 offset, const char16 *desc);
#endif
    IR::SymOpnd * BuildFieldOpnd(Js::OpCode newOpCode, Js::RegSlot reg, Js::PropertyId propertyId, Js::PropertyIdIndexType propertyIdIndex, PropertyKind propertyKind, uint inlineCacheIndex = -1);
    PropertySym * BuildFieldSym(Js::RegSlot reg, Js::PropertyId propertyId, Js::PropertyIdIndexType propertyIdIndex, uint inlineCacheIndex, PropertyKind propertyKind);
    SymID BuildSrcStackSymID(Js::RegSlot regSlot);
    IR::RegOpnd * BuildDstOpnd(Js::RegSlot dstRegSlot, IRType type = TyVar, bool isCatchObjectSym = false);
    IR::RegOpnd * BuildSrcOpnd(Js::RegSlot srcRegSlot, IRType type = TyVar);
    IR::AddrOpnd * BuildAuxArrayOpnd(AuxArrayValue auxArrayType, uint32 auxArrayOffset);
    IR::Opnd * BuildAuxObjectLiteralTypeRefOpnd(int objectId);
    IR::Opnd * BuildForInEnumeratorOpnd(uint forInLoopLevel);
    IR::RegOpnd * EnsureLoopBodyForInEnumeratorArrayOpnd();

private:
    uint AddStatementBoundary(uint statementIndex, uint offset);
    void CheckBuiltIn(PropertySym * propertySym, Js::BuiltinFunction *puBuiltInIndex);
    bool IsFloatFunctionCallsite(Js::BuiltinFunction index, size_t argc);
    IR::Instr * BuildProfiledFieldLoad(Js::OpCode loadOp, IR::RegOpnd *dstOpnd, IR::SymOpnd *srcOpnd, Js::CacheId inlineCacheIndex, bool *pUnprofiled);
    IR::Instr * BuildProfiledSlotLoad(Js::OpCode loadOp, IR::RegOpnd *dstOpnd, IR::SymOpnd *srcOpnd, Js::ProfileId profileId, bool *pUnprofiled);

    // --- Byte-code temp-register bookkeeping -------------------------------
    // Temps are the register slots >= firstTemp. tempMap maps each temp slot
    // to the SymID currently standing in for it; fbvTempUsed tracks which
    // temps have been consumed.

    // Returns the SymID currently mapped to the given temp reg.
    SymID GetMappedTemp(Js::RegSlot reg)
    {
        AssertMsg(this->RegIsTemp(reg), "Processing non-temp reg as a temp?");
        AssertMsg(this->tempMap, "Processing non-temp reg without a temp map?");
        return this->tempMap[reg - this->firstTemp];
    }

    // Records tempId as the SymID standing in for the given temp reg.
    void SetMappedTemp(Js::RegSlot reg, SymID tempId)
    {
        AssertMsg(this->RegIsTemp(reg), "Processing non-temp reg as a temp?");
        AssertMsg(this->tempMap, "Processing non-temp reg without a temp map?");
        this->tempMap[reg - this->firstTemp] = tempId;
    }

    // Tests whether the given temp reg has been marked used.
    BOOL GetTempUsed(Js::RegSlot reg)
    {
        AssertMsg(this->RegIsTemp(reg), "Processing non-temp reg as a temp?");
        AssertMsg(this->fbvTempUsed, "Processing non-temp reg without a used BV?");
        return this->fbvTempUsed->Test(reg - this->firstTemp);
    }

    // Sets or clears the used bit for the given temp reg.
    void SetTempUsed(Js::RegSlot reg, BOOL used)
    {
        AssertMsg(this->RegIsTemp(reg), "Processing non-temp reg as a temp?");
        AssertMsg(this->fbvTempUsed, "Processing non-temp reg without a used BV?");
        if (used)
        {
            this->fbvTempUsed->Set(reg - this->firstTemp);
        }
        else
        {
            this->fbvTempUsed->Clear(reg - this->firstTemp);
        }
    }

    // A reg slot is a temp iff it is at or above the first temp slot.
    BOOL RegIsTemp(Js::RegSlot reg)
    {
        return reg >= this->firstTemp;
    }

    // Constant regs occupy slots (0, constCount); slot 0 is excluded.
    BOOL RegIsConstant(Js::RegSlot reg)
    {
        return reg > 0 && reg < m_func->GetJITFunctionBody()->GetConstCount();
    }

    // --- Scope / closure helpers -------------------------------------------
    Js::RegSlot InnerScopeIndexToRegSlot(uint32) const;
    Js::RegSlot GetEnvReg() const;
    Js::RegSlot GetEnvRegForEvalCode() const;
    Js::RegSlot GetEnvRegForInnerFrameDisplay() const;
    void AddEnvOpndForInnerFrameDisplay(IR::Instr *instr, uint offset);
    bool DoSlotArrayCheck(IR::SymOpnd *fieldOpnd, bool doDynamicCheck);
    void EmitClosureRangeChecks();
    void DoClosureRegCheck(Js::RegSlot reg);
    void BuildInitCachedScope(int auxOffset, int offset);

    // --- JIT loop-body support ---------------------------------------------
    // Used when jitting just a loop body: slot loads/stores between the
    // interpreter frame and the jitted body, return-IP handling, and the
    // loop iteration counter.
    void GenerateLoopBodySlotAccesses(uint offset);
    void GenerateLoopBodyStSlots(SymID loopParamSymId, uint offset);
    IR::Instr * GenerateLoopBodyStSlot(Js::RegSlot regSlot, uint offset = Js::Constants::NoByteCodeOffset);
    bool IsLoopBody() const;
    bool IsLoopBodyInTry() const;
    uint GetLoopBodyExitInstrOffset() const;
    IR::SymOpnd * BuildLoopBodySlotOpnd(SymID symId);
    void EnsureLoopBodyLoadSlot(SymID symId, bool isCatchObjectSym = false);
    void SetLoopBodyStSlot(SymID symID, bool isCatchObjectSym);
    bool IsLoopBodyOuterOffset(uint offset) const;
    bool IsLoopBodyReturnIPInstr(IR::Instr * instr) const;
    IR::Opnd * InsertLoopBodyReturnIPInstr(uint targetOffset, uint offset);
    IR::Instr * CreateLoopBodyReturnIPInstr(uint targetOffset, uint offset);
    StackSym * EnsureStackFuncPtrSym();

    // --- Bailout insertion --------------------------------------------------
    void InsertBailOutForDebugger(uint offset, IR::BailOutKind kind, IR::Instr* insertBeforeInstr = nullptr);
    void InsertBailOnNoProfile(uint offset);
    void InsertBailOnNoProfile(IR::Instr *const insertBeforeInstr);
    bool DoBailOnNoProfile();
    void InsertIncrLoopBodyLoopCounter(IR::LabelInstr *loopTopLabelInstr);
    void InsertInitLoopBodyLoopCounter(uint loopNum);
    void InsertDoneLoopBodyLoopCounter(uint32 lastOffset);
    IR::RegOpnd * InsertConvPrimStr(IR::RegOpnd * srcOpnd, uint offset, bool forcePreOpBailOutIfNeeded);

#ifdef BAILOUT_INJECTION
    // Test-only bailout injection (fault injection builds).
    void InjectBailOut(uint offset);
    void CheckBailOutInjection(Js::OpCode opcode);
    bool seenLdStackArgPtr;
    bool expectApplyArg;
    bool seenProfiledBeginSwitch;
#endif

    // --- Build state --------------------------------------------------------
    JitArenaAllocator * m_tempAlloc;
    JitArenaAllocator * m_funcAlloc;
    Func * m_func;                           // function being built (never null; set in ctor)
    IR::Instr * m_lastInstr;                 // tail of the emitted instruction stream
    IR::Instr ** m_offsetToInstruction;      // byte-code offset -> first IR instr at that offset
    uint32 m_functionStartOffset;
    Js::ByteCodeReader m_jnReader;           // reader over the (non-debug) byte code
    Js::StatementReader<Js::FunctionBody::ArenaStatementMapList> m_statementReader;
    SList<IR::Instr *> *m_argStack;          // pending ArgOut instrs for the call in progress
    SList<BranchReloc*> *branchRelocList;    // branches awaiting target-label fixup
    SList<uint> *catchOffsetStack;           // enclosing catch offsets (EH region tracking)
    SymID * tempMap;                         // per-temp-reg SymID map (see temp helpers above)
    BVFixed * fbvTempUsed;                   // per-temp-reg "used" bits
    Js::RegSlot firstTemp;                   // first byte-code reg slot that is a temp
    IRBuilderSwitchAdapter m_switchAdapter;  // must be declared before m_switchBuilder:
    SwitchIRBuilder m_switchBuilder;         // its ctor takes &m_switchAdapter
    BVFixed * m_ldSlots;                     // loop-body: slots loaded from the interpreter frame
    BVFixed * m_stSlots;                     // loop-body: slots stored back to the interpreter frame
#if DBG
    BVFixed * m_usedAsTemp;
#endif
    StackSym * m_loopBodyRetIPSym;           // loop-body: return-IP symbol
    StackSym* m_loopCounterSym;
    StackSym * m_stackFuncPtrSym;
    bool callTreeHasSomeProfileInfo;
#if DBG
    uint32 m_offsetToInstructionCount;
    uint32 m_callsOnStack;
#endif
    // Keep track of how many args we have on the stack whenever
    // we make a call so that the max stack used over all calls can be
    // used to estimate how much stack we should probe for at the
    // beginning of a JITted function.
    uint32 m_argsOnStack;
    Js::PropertyId m_loopBodyLocalsStartSlot;
    // One saved implicit-call-flags operand per loop; allocated/freed in
    // ctor/dtor when the function has loops, otherwise null.
    IR::Opnd** m_saveLoopImplicitCallFlags;
    IR::RegOpnd * m_loopBodyForInEnumeratorArrayOpnd;
#ifdef BYTECODE_BRANCH_ISLAND
    // Maps a virtual long-branch offset to its real target offset.
    typedef JsUtil::BaseDictionary<uint32, uint32, JitArenaAllocator> LongBranchMap;
    LongBranchMap * longBranchMap;
    // Sentinel instr pointer marking a virtual long branch (defined in the .cpp).
    static IR::Instr * const VirtualLongBranchInstr;
#endif
};