// Func.h
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. struct CodeGenWorkItem;
  7. class Lowerer;
  8. class Inline;
  9. class FlowGraph;
  10. #if defined(_M_ARM32_OR_ARM64)
  11. #include "UnwindInfoManager.h"
  12. #endif
  13. struct Cloner
  14. {
  15. Cloner(Lowerer *lowerer, JitArenaAllocator *alloc) :
  16. alloc(alloc),
  17. symMap(nullptr),
  18. labelMap(nullptr),
  19. lowerer(lowerer),
  20. instrFirst(nullptr),
  21. instrLast(nullptr),
  22. fRetargetClonedBranch(FALSE)
  23. {
  24. }
  25. ~Cloner()
  26. {
  27. if (symMap)
  28. {
  29. Adelete(alloc, symMap);
  30. }
  31. if (labelMap)
  32. {
  33. Adelete(alloc, labelMap);
  34. }
  35. }
  36. void AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone);
  37. void Finish();
  38. void RetargetClonedBranches();
  39. HashTable<StackSym*> *symMap;
  40. HashTable<IR::LabelInstr*> *labelMap;
  41. Lowerer * lowerer;
  42. IR::Instr * instrFirst;
  43. IR::Instr * instrLast;
  44. BOOL fRetargetClonedBranch;
  45. JitArenaAllocator *alloc;
  46. bool clonedInstrGetOrigArgSlotSym;
  47. };
// (byte-code offset, resume label) pair for a generator yield point, and a
// list of them kept per function (see Func::m_yieldOffsetResumeLabelList).
typedef JsUtil::Pair<uint32, IR::LabelInstr*> YieldOffsetResumeLabel;
typedef JsUtil::List<YieldOffsetResumeLabel, JitArenaAllocator> YieldOffsetResumeLabelList;
// Table of slot ids that need a runtime slot-array check (see Func::AddSlotArrayCheck).
typedef HashTable<uint32, JitArenaAllocator> SlotArrayCheckTable;
  51. struct FrameDisplayCheckRecord
  52. {
  53. SlotArrayCheckTable *table;
  54. uint32 slotId;
  55. FrameDisplayCheckRecord() : table(nullptr), slotId((uint32)-1) {}
  56. };
  57. typedef HashTable<FrameDisplayCheckRecord*, JitArenaAllocator> FrameDisplayCheckTable;
// Func represents one function (or loop body) being JIT-compiled. The
// top-level Func owns shared compilation state (allocators, clone map,
// instruction numbering, start/end labels); inlinees are child Funcs linked
// through parentFunc, and many accessors forward to GetTopFunc().
class Func
{
public:
    Func(JitArenaAllocator *alloc, CodeGenWorkItem* workItem, const Js::FunctionCodeGenRuntimeData *const runtimeData,
        Js::PolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, CodeGenAllocators *const codeGenAllocators,
        CodeGenNumberAllocator * numberAllocator, Js::ReadOnlyDynamicProfileInfo *const profileInfo,
        Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc = nullptr,
        uint postCallByteCodeOffset = Js::Constants::NoByteCodeOffset,
        Js::RegSlot returnValueRegSlot = Js::Constants::NoRegister, const bool isInlinedConstructor = false,
        Js::ProfileId callSiteIdInParentFunc = UINT16_MAX, bool isGetterSetter = false);

public:
    // --- Allocators (shared state lives on the top-level Func) ---
    ArenaAllocator *GetCodeGenAllocator() const { return &this->m_codeGenAllocators->allocator; }
    CodeGenAllocators * const GetCodeGenAllocators()
    {
        return this->GetTopFunc()->m_codeGenAllocators;
    }
    NativeCodeData::Allocator *GetNativeCodeDataAllocator()
    {
        return &this->GetTopFunc()->nativeCodeDataAllocator;
    }
    NativeCodeData::Allocator *GetTransferDataAllocator()
    {
        return &this->GetTopFunc()->transferDataAllocator;
    }
    CodeGenNumberAllocator * GetNumberAllocator()
    {
        return this->numberAllocator;
    }
    EmitBufferManager<CriticalSection> *GetEmitBufferManager() const
    {
        return &this->m_codeGenAllocators->emitBufferManager;
    }
    // Returns nullptr when profiling support is compiled out.
    Js::ScriptContextProfiler *GetCodeGenProfiler() const
    {
#ifdef PROFILE_EXEC
        return m_codeGenProfiler;
#else
        return nullptr;
#endif
    }

    void InitLocalClosureSyms();

    // --- Policy queries: which optimizations/layouts apply to this func ---
    bool HasAnyStackNestedFunc() const { return this->hasAnyStackNestedFunc; }
    bool DoStackNestedFunc() const { return this->stackNestedFunc; }
    // Both frame display and scope slots on the stack are gated by the same
    // stackClosure flag.
    bool DoStackFrameDisplay() const { return this->stackClosure; }
    bool DoStackScopeSlots() const { return this->stackClosure; }
    bool IsBackgroundJIT() const { return this->m_isBackgroundJIT; }
    bool HasArgumentSlot() const { return this->GetInParamsCount() != 0 && !this->IsLoopBody(); }
    bool IsLoopBody() const;
    bool IsLoopBodyInTry() const;
    bool CanAllocInPreReservedHeapPageSegment();
    void SetDoFastPaths();
    // Only valid after SetDoFastPaths() has run (asserted in DBG builds).
    bool DoFastPaths() const { Assert(this->hasCalledSetDoFastPaths); return this->m_doFastPaths; }
    bool DoLoopFastPaths() const
    {
        return
            (!IsSimpleJit() || Js::FunctionBody::IsNewSimpleJit()) &&
            !PHASE_OFF(Js::FastPathPhase, this) &&
            !PHASE_OFF(Js::LoopFastPathPhase, this);
    }
    // Global optimizer runs only for full JIT, and only if try regions (if any)
    // are optimizable.
    bool DoGlobOpt() const
    {
        return
            !PHASE_OFF(Js::GlobOptPhase, this->GetJnFunction()) && !IsSimpleJit() &&
            (!GetTopFunc()->HasTry() || GetTopFunc()->CanOptimizeTryCatch());
    }
    bool DoInline() const
    {
        return DoGlobOpt() && !GetTopFunc()->HasTry();
    }
    bool DoOptimizeTryCatch() const
    {
        Assert(IsTopFunc());
        return DoGlobOpt();
    }
    bool CanOptimizeTryCatch() const
    {
        return !this->HasFinally() && !this->IsLoopBody() && !PHASE_OFF(Js::OptimizeTryCatchPhase, this);
    }
    bool DoSimpleJitDynamicProfile() const { return IsSimpleJit() && GetTopFunc()->GetJnFunction()->DoSimpleJitDynamicProfile(); }
    bool IsSimpleJit() const { return m_workItem->GetJitMode() == ExecutionMode::SimpleJit; }

    // --- Main pipeline entry points ---
    void BuildIR();
    void Codegen();
    void ThrowIfScriptClosed();

    // --- Stack layout ---
    int32 StackAllocate(int size);
    int32 StackAllocate(StackSym *stackSym, int size);
    void SetArgOffset(StackSym *stackSym, int32 offset);
    int32 GetLocalVarSlotOffset(int32 slotId);
    int32 GetHasLocalVarChangedOffset();
    bool IsJitInDebugMode();
    bool IsNonTempLocalVar(uint32 slotIndex);
    int32 AdjustOffsetValue(int32 offset);
    void OnAddSym(Sym* sym);
#ifdef MD_GROW_LOCALS_AREA_UP
    // [sic] "Ajust" — name kept as spelled at the definition site.
    void AjustLocalVarSlotOffset();
#endif
    bool DoGlobOptsForGeneratorFunc();

    static inline uint32 GetDiagLocalSlotSize()
    {
        // For the debug purpose we will have fixed stack slot size
        // We will allocated the 8 bytes for each variable.
        return MachDouble;
    }

#ifdef DBG
    // The pattern used to pre-fill locals for CHK builds.
    // When we restore bailout values we check for this pattern, this is how we assert for non-initialized variables/garbage.
    static const uint32 c_debugFillPattern4 = 0xcececece;
    static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
#if defined(_M_IX86) || defined (_M_ARM)
    static const uint32 c_debugFillPattern = c_debugFillPattern4;
#elif defined(_M_X64) || defined(_M_ARM64)
    static const unsigned __int64 c_debugFillPattern = c_debugFillPattern8;
#else
#error unsupported platform
#endif
#endif

    uint32 GetInstrCount();
    inline Js::ScriptContext* GetScriptContext() const { return m_workItem->GetScriptContext(); }
    void NumberInstrs();
    // A Func with no parent is the root of the inlinee tree.
    bool IsTopFunc() const { return this->parentFunc == nullptr; }
    Func const * GetTopFunc() const;
    Func * GetTopFunc();

    void SetFirstArgOffset(IR::Instr* inlineeStart);

    uint GetFunctionNumber() const
    {
        Assert(this->IsTopFunc());
        return this->m_workItem->GetFunctionNumber();
    }
    uint GetLocalFunctionId() const
    {
        return this->m_workItem->GetFunctionBody()->GetLocalFunctionId();
    }
    uint GetSourceContextId() const
    {
        return this->m_workItem->GetFunctionBody()->GetSourceContextId();
    }

    // The following queries are only meaningful on the top-level Func.
    BOOL HasTry() const
    {
        Assert(this->IsTopFunc());
        Assert(this->m_jnFunction); // For now we always have a function body
        return this->m_jnFunction->GetHasTry();
    }
    bool HasFinally() const
    {
        Assert(this->IsTopFunc());
        Assert(this->m_jnFunction); // For now we always have a function body
        return this->m_jnFunction->GetHasFinally();
    }
    Js::ArgSlot GetInParamsCount() const
    {
        Assert(this->IsTopFunc());
        Assert(this->m_jnFunction); // For now we always have a function body
        return this->m_jnFunction->GetInParamsCount();
    }
    bool IsGlobalFunc() const
    {
        Assert(this->IsTopFunc());
        Assert(this->m_jnFunction); // For now we always have a function body
        return this->m_jnFunction->GetIsGlobalFunc();
    }
    RecyclerWeakReference<Js::FunctionBody> *GetWeakFuncRef() const;
    Js::FunctionBody * GetJnFunction() const { return m_jnFunction; }

    StackSym *EnsureLoopParamSym();

    // --- Well-known stack syms (lazily created elsewhere) ---
    StackSym *GetFuncObjSym() const { return m_funcObjSym; }
    void SetFuncObjSym(StackSym *sym) { m_funcObjSym = sym; }
    StackSym *GetJavascriptLibrarySym() const { return m_javascriptLibrarySym; }
    void SetJavascriptLibrarySym(StackSym *sym) { m_javascriptLibrarySym = sym; }
    StackSym *GetScriptContextSym() const { return m_scriptContextSym; }
    void SetScriptContextSym(StackSym *sym) { m_scriptContextSym = sym; }
    StackSym *GetFunctionBodySym() const { return m_functionBodySym; }
    void SetFunctionBodySym(StackSym *sym) { m_functionBodySym = sym; }
    StackSym *GetLocalClosureSym() const { return m_localClosureSym; }
    void SetLocalClosureSym(StackSym *sym) { m_localClosureSym = sym; }
    StackSym *GetLocalFrameDisplaySym() const { return m_localFrameDisplaySym; }
    void SetLocalFrameDisplaySym(StackSym *sym) { m_localFrameDisplaySym = sym; }

    uint8 *GetCallsCountAddress() const;

    // --- Type/property guards and constructor caches (lazily allocated) ---
    void EnsurePinnedTypeRefs();
    void PinTypeRef(void* typeRef);
    void EnsureSingleTypeGuards();
    Js::JitTypePropertyGuard* GetOrCreateSingleTypeGuard(Js::Type* type);
    void EnsureEquivalentTypeGuards();
    Js::JitEquivalentTypeGuard * CreateEquivalentTypeGuard(Js::Type* type, uint32 objTypeSpecFldId);
    void EnsurePropertyGuardsByPropertyId();
    void EnsureCtorCachesByPropertyId();
    void LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard);
    void LinkCtorCacheToPropertyId(Js::PropertyId propertyId, Js::JitTimeConstructorCache* cache);
    Js::JitTimeConstructorCache* GetConstructorCache(const Js::ProfileId profiledCallSiteId);
    void SetConstructorCache(const Js::ProfileId profiledCallSiteId, Js::JitTimeConstructorCache* constructorCache);
    void EnsurePropertiesWrittenTo();
    void EnsureCallSiteToArgumentsOffsetFixupMap();

    IR::LabelInstr * EnsureFuncStartLabel();
    IR::LabelInstr * GetFuncStartLabel();
    IR::LabelInstr * EnsureFuncEndLabel();
    IR::LabelInstr * GetFuncEndLabel();

#ifdef _M_X64
    // x64 prolog bookkeeping: spill area, outgoing args area, callee-saved regs.
    void SetSpillSize(int32 spillSize)
    {
        m_spillSize = spillSize;
    }
    int32 GetSpillSize()
    {
        return m_spillSize;
    }
    void SetArgsSize(int32 argsSize)
    {
        m_argsSize = argsSize;
    }
    int32 GetArgsSize()
    {
        return m_argsSize;
    }
    void SetSavedRegSize(int32 savedRegSize)
    {
        m_savedRegSize = savedRegSize;
    }
    int32 GetSavedRegSize()
    {
        return m_savedRegSize;
    }
#endif

    // --- Inlinee frame accessors ---
    // An inlinee is identified by having a frame-start sym assigned.
    bool IsInlinee() const
    {
        Assert(m_inlineeFrameStartSym ? (m_inlineeFrameStartSym->m_offset != -1) : true);
        return m_inlineeFrameStartSym != nullptr;
    }
    void SetInlineeFrameStartSym(StackSym *sym)
    {
        Assert(m_inlineeFrameStartSym == nullptr);
        m_inlineeFrameStartSym = sym;
    }
    IR::SymOpnd *GetInlineeArgCountSlotOpnd()
    {
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_Argc * MachPtr);
    }
    IR::SymOpnd *GetNextInlineeFrameArgCountSlotOpnd()
    {
        Assert(!this->m_hasInlineArgsOpt);
        return GetInlineeOpndAtOffset((Js::Constants::InlineeMetaArgCount + actualCount) * MachPtr);
    }
    IR::SymOpnd *GetInlineeFunctionObjectSlotOpnd()
    {
        Assert(!this->m_hasInlineArgsOpt);
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_FunctionObject * MachPtr);
    }
    IR::SymOpnd *GetInlineeArgumentsObjectSlotOpnd()
    {
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_ArgumentsObject * MachPtr);
    }
    IR::SymOpnd *GetInlineeArgvSlotOpnd()
    {
        Assert(!this->m_hasInlineArgsOpt);
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_Argv * MachPtr);
    }
    bool IsInlined() const
    {
        return this->parentFunc != nullptr;
    }
    bool IsInlinedConstructor() const
    {
        return this->isInlinedConstructor;
    }
    bool IsTJLoopBody()const {
        return this->isTJLoopBody;
    }

    Js::ObjTypeSpecFldInfo* GetObjTypeSpecFldInfo(const uint index) const;
    Js::ObjTypeSpecFldInfo* GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const;
    void SetGlobalObjTypeSpecFldInfo(uint propertyInfoId, Js::ObjTypeSpecFldInfo* info);
    // Gets an inline cache pointer to use in jitted code. Cached data may not be stable while jitting. Does not return null.
    Js::InlineCache *GetRuntimeInlineCache(const uint index) const;
    Js::PolymorphicInlineCache * GetRuntimePolymorphicInlineCache(const uint index) const;
    byte GetPolyCacheUtil(const uint index) const;
    byte GetPolyCacheUtilToInitialize(const uint index) const;

#if defined(_M_ARM32_OR_ARM64)
    RegNum GetLocalsPointer() const;
#endif

#if DBG_DUMP
    void Dump(IRDumpFlags flags);
    void Dump();
    void DumpHeader();
#endif

#if DBG_DUMP || defined(ENABLE_IR_VIEWER)
    LPCSTR GetVtableName(INT_PTR address);
#endif

// NOTE(review): bitwise '|' — presumably '||' was intended; works because the
// operands evaluate to 0/1 in the preprocessor, but confirm and normalize.
#if DBG_DUMP | defined(VTUNE_PROFILING)
    bool DoRecordNativeMap() const;
#endif

public:
    // --- Public compilation state ---
    JitArenaAllocator * m_alloc;
    CodeGenWorkItem* m_workItem;
    const Js::FunctionCodeGenJitTimeData *const m_jitTimeData;
    const Js::FunctionCodeGenRuntimeData *const m_runtimeData;
    Js::PolymorphicInlineCacheInfo *const m_polymorphicInlineCacheInfo;
    // This indicates how many constructor caches we inserted into the constructorCaches array, not the total size of the array.
    uint constructorCacheCount;
    // This array maps callsite ids to constructor caches. The size corresponds to the number of callsites in the function.
    Js::JitTimeConstructorCache** constructorCaches;

    typedef JsUtil::BaseHashSet<void*, JitArenaAllocator, PowerOf2SizePolicy> TypeRefSet;
    TypeRefSet* pinnedTypeRefs;
    typedef JsUtil::BaseDictionary<Js::Type*, Js::JitTypePropertyGuard*, JitArenaAllocator, PowerOf2SizePolicy> TypePropertyGuardDictionary;
    TypePropertyGuardDictionary* singleTypeGuards;
    typedef SListCounted<Js::JitEquivalentTypeGuard*> EquivalentTypeGuardList;
    EquivalentTypeGuardList* equivalentTypeGuards;
    typedef JsUtil::BaseHashSet<Js::JitIndexedPropertyGuard*, JitArenaAllocator, PowerOf2SizePolicy> IndexedPropertyGuardSet;
    typedef JsUtil::BaseDictionary<Js::PropertyId, IndexedPropertyGuardSet*, JitArenaAllocator, PowerOf2SizePolicy> PropertyGuardByPropertyIdMap;
    PropertyGuardByPropertyIdMap* propertyGuardsByPropertyId;
    typedef JsUtil::BaseHashSet<Js::ConstructorCache*, JitArenaAllocator, PowerOf2SizePolicy> CtorCacheSet;
    typedef JsUtil::BaseDictionary<Js::PropertyId, CtorCacheSet*, JitArenaAllocator, PowerOf2SizePolicy> CtorCachesByPropertyIdMap;
    CtorCachesByPropertyIdMap* ctorCachesByPropertyId;
    typedef JsUtil::BaseDictionary<Js::ProfileId, int32, JitArenaAllocator, PrimeSizePolicy> CallSiteToArgumentsOffsetFixupMap;
    CallSiteToArgumentsOffsetFixupMap* callSiteToArgumentsOffsetFixupMap;
    int indexedPropertyGuardCount;
    typedef JsUtil::BaseHashSet<Js::PropertyId, JitArenaAllocator> PropertyIdSet;
    PropertyIdSet* propertiesWrittenTo;
    PropertyIdSet lazyBailoutProperties;
    bool anyPropertyMayBeWrittenTo;

    SlotArrayCheckTable *slotArrayCheckTable;
    FrameDisplayCheckTable *frameDisplayCheckTable;

    // IR instruction list anchors.
    IR::Instr * m_headInstr;
    IR::Instr * m_exitInstr;
    IR::Instr * m_tailInstr;
#ifdef _M_X64
    int32 m_spillSize;
    int32 m_argsSize;
    int32 m_savedRegSize;
    PrologEncoder m_prologEncoder;
#endif

    SymTable * m_symTable;
    StackSym * m_loopParamSym;
    StackSym * m_funcObjSym;
    StackSym * m_javascriptLibrarySym;
    StackSym * m_scriptContextSym;
    StackSym * m_functionBodySym;
    StackSym * m_localClosureSym;
    StackSym * m_localFrameDisplaySym;
    StackSym * m_bailoutReturnValueSym;
    StackSym * m_hasBailedOutSym;
    int32 m_localStackHeight;
    uint frameSize;
    uint32 inlineDepth;
    uint32 postCallByteCodeOffset;
    Js::RegSlot returnValueRegSlot;
    Js::ArgSlot actualCount;
    int32 firstActualStackOffset;
    uint32 tryCatchNestingLevel;
    uint32 m_totalJumpTableSizeInBytesForSwitchStatements;
#if defined(_M_ARM32_OR_ARM64)
    //Offset to arguments from sp + m_localStackHeight;
    //For non leaf functions this is (callee saved register count + LR + R11) * MachRegInt
    //For leaf functions this is (saved registers) * MachRegInt
    int32 m_ArgumentsOffset;
    UnwindInfoManager m_unwindInfo;
    IR::LabelInstr * m_epilogLabel;
#endif
    IR::LabelInstr * m_funcStartLabel;
    IR::LabelInstr * m_funcEndLabel;

    // Keep track of the maximum number of args on the stack.
    uint32 m_argSlotsForFunctionsCalled;
#if DBG
    uint32 m_callSiteCount;
#endif
    FlowGraph * m_fg;
    unsigned int m_labelCount;
    BitVector m_regsUsed;
    StackSym * tempSymDouble;
    uint32 loopCount;
    Js::ProfileId callSiteIdInParentFunc;

    // --- Per-function state flags (packed bitfields; order affects layout) ---
    bool m_isLeaf: 1; // This is set in the IRBuilder and might be inaccurate after inlining
    bool m_hasCalls: 1; // This is more accurate compared to m_isLeaf
    bool m_hasInlineArgsOpt : 1;
    bool m_doFastPaths : 1;
    bool hasBailout: 1;
    bool hasBailoutInEHRegion : 1;
    bool hasStackArgs: 1;
    bool hasArgumentObject : 1;
    bool hasUnoptimizedArgumentsAcccess : 1; // True if there are any arguments access beyond the simple case of this.apply pattern
    bool m_canDoInlineArgsOpt : 1;
    bool hasApplyTargetInlining:1;
    bool isGetterSetter : 1;
    const bool isInlinedConstructor: 1;
    bool hasImplicitCalls: 1;
    bool hasTempObjectProducingInstr:1; // At least one instruction which can produce temp object
    bool isTJLoopBody : 1;
    bool isFlowGraphValid : 1;
#if DBG
    bool hasCalledSetDoFastPaths:1;
    bool isPostLower:1;
    bool isPostRegAlloc:1;
    bool isPostPeeps:1;
    bool isPostLayout:1;
    bool isPostFinalLower:1;

    typedef JsUtil::Stack<Js::Phase> CurrentPhasesStack;
    CurrentPhasesStack currentPhases;

    bool IsInPhase(Js::Phase tag);
#endif

    // --- Phase tracking (used by the BEGIN/END_CODEGEN_PHASE macros) ---
    void BeginPhase(Js::Phase tag);
    void EndPhase(Js::Phase tag, bool dump = true);
    void EndProfiler(Js::Phase tag);

    // --- Cloning (state owned by the top-level Func) ---
    void BeginClone(Lowerer *lowerer, JitArenaAllocator *alloc);
    void EndClone();
    Cloner * GetCloner() const { return GetTopFunc()->m_cloner; }
    InstrMap * GetCloneMap() const { return GetTopFunc()->m_cloneMap; }
    void ClearCloneMap() { Assert(this->IsTopFunc()); this->m_cloneMap = nullptr; }

    // Byte-code offsets are considered valid only while instruction numbers
    // have not been assigned (hasInstrNumber and byte-code offsets are
    // mutually exclusive here).
    bool HasByteCodeOffset() const { return !this->GetTopFunc()->hasInstrNumber; }
    bool DoMaintainByteCodeOffset() const { return this->HasByteCodeOffset() && this->GetTopFunc()->maintainByteCodeOffset; }
    void StopMaintainByteCodeOffset() { this->GetTopFunc()->maintainByteCodeOffset = false; }
    Func * GetParentFunc() const { return parentFunc; }
    uint GetMaxInlineeArgOutCount() const { return maxInlineeArgOutCount; }
    void UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount);
#if DBG_DUMP
    ptrdiff_t m_codeSize;
#endif
    bool GetHasCalls() const { return this->m_hasCalls; }
    void SetHasCalls() { this->m_hasCalls = true; }
    // Propagate m_hasCalls up the inliner chain.
    void SetHasCallsOnSelfAndParents()
    {
        Func *curFunc = this;
        while (curFunc)
        {
            curFunc->SetHasCalls();
            curFunc = curFunc->GetParentFunc();
        }
    }
    void SetHasInstrNumber(bool has) { this->GetTopFunc()->hasInstrNumber = has; }
    bool HasInstrNumber() const { return this->GetTopFunc()->hasInstrNumber; }
    bool HasInlinee() const { Assert(this->IsTopFunc()); return this->hasInlinee; }
    void SetHasInlinee() { Assert(this->IsTopFunc()); this->hasInlinee = true; }
    bool GetThisOrParentInlinerHasArguments() const { return thisOrParentInlinerHasArguments; }
    bool GetHasStackArgs() const { return this->hasStackArgs;}
    void SetHasStackArgs(bool has) { this->hasStackArgs = has;}
    bool GetHasArgumentObject() const { return this->hasArgumentObject;}
    void SetHasArgumentObject() { this->hasArgumentObject = true;}
    bool GetHasUnoptimizedArgumentsAcccess() const { return this->hasUnoptimizedArgumentsAcccess; }
    void SetHasUnoptimizedArgumentsAccess(bool args)
    {
        // Once set to 'true' make sure this does not become false
        if (!this->hasUnoptimizedArgumentsAcccess)
        {
            this->hasUnoptimizedArgumentsAcccess = args;
        }
        // Setting it true also taints every parent inliner.
        if (args)
        {
            Func *curFunc = this->GetParentFunc();
            while (curFunc)
            {
                curFunc->hasUnoptimizedArgumentsAcccess = args;
                curFunc = curFunc->GetParentFunc();
            }
        }
    }
    // Turn off inline-args optimization for this func and all parents.
    void DisableCanDoInlineArgOpt()
    {
        Func* curFunc = this;
        while (curFunc)
        {
            curFunc->m_canDoInlineArgsOpt = false;
            curFunc->m_hasInlineArgsOpt = false;
            curFunc = curFunc->GetParentFunc();
        }
    }
    bool GetHasApplyTargetInlining() const { return this->hasApplyTargetInlining;}
    void SetHasApplyTargetInlining() { this->hasApplyTargetInlining = true;}
    bool GetHasMarkTempObjects() const { return this->hasMarkTempObjects; }
    void SetHasMarkTempObjects() { this->hasMarkTempObjects = true; }
    bool GetHasImplicitCalls() const { return this->hasImplicitCalls;}
    void SetHasImplicitCalls(bool has) { this->hasImplicitCalls = has;}
    // Note: unlike SetHasCallsOnSelfAndParents, this stops BEFORE the top func.
    void SetHasImplicitCallsOnSelfAndParents()
    {
        this->SetHasImplicitCalls(true);
        Func *curFunc = this->GetParentFunc();
        while (curFunc && !curFunc->IsTopFunc())
        {
            curFunc->SetHasImplicitCalls(true);
            curFunc = curFunc->GetParentFunc();
        }
    }
    bool GetHasTempObjectProducingInstr() const { return this->hasTempObjectProducingInstr; }
    void SetHasTempObjectProducingInstr(bool has) { this->hasTempObjectProducingInstr = has; }

    Js::ReadOnlyDynamicProfileInfo * GetProfileInfo() const { return this->profileInfo; }
    bool HasProfileInfo() { return this->profileInfo->HasProfileInfo(); }
    bool HasArrayInfo()
    {
        const auto top = this->GetTopFunc();
        return this->HasProfileInfo() && this->GetWeakFuncRef() && !(top->HasTry() && !top->DoOptimizeTryCatch()) &&
            top->DoGlobOpt() && !PHASE_OFF(Js::LoopFastPathPhase, top);
    }

    // Maps an operand to the built-in function it refers to (None if neither a
    // reg opnd with a built-in sym nor a property sym with a built-in id).
    static Js::BuiltinFunction GetBuiltInIndex(IR::Opnd* opnd)
    {
        Assert(opnd);
        Js::BuiltinFunction index;
        if (opnd->IsRegOpnd())
        {
            index = opnd->AsRegOpnd()->m_sym->m_builtInIndex;
        }
        else if (opnd->IsSymOpnd())
        {
            PropertySym *propertySym = opnd->AsSymOpnd()->m_sym->AsPropertySym();
            index = Js::JavascriptLibrary::GetBuiltinFunctionForPropId(propertySym->m_propertyId);
        }
        else
        {
            index = Js::BuiltinFunction::None;
        }
        return index;
    }

    // Whitelist of built-ins that the Lowerer inlines directly.
    static bool IsBuiltInInlinedInLowerer(IR::Opnd* opnd)
    {
        Assert(opnd);
        Js::BuiltinFunction index = Func::GetBuiltInIndex(opnd);
        switch (index)
        {
        case Js::BuiltinFunction::String_CharAt:
        case Js::BuiltinFunction::String_CharCodeAt:
        case Js::BuiltinFunction::String_CodePointAt:
        case Js::BuiltinFunction::Math_Abs:
        case Js::BuiltinFunction::Array_Push:
        case Js::BuiltinFunction::String_Replace:
            return true;
        default:
            return false;
        }
    }

    // --- Generator yield-point resume labels ---
    void AddYieldOffsetResumeLabel(uint32 offset, IR::LabelInstr* label)
    {
        m_yieldOffsetResumeLabelList->Add(YieldOffsetResumeLabel(offset, label));
    }
    template <typename Fn>
    void MapYieldOffsetResumeLabels(Fn fn)
    {
        m_yieldOffsetResumeLabelList->Map(fn);
    }
    template <typename Fn>
    bool MapUntilYieldOffsetResumeLabels(Fn fn)
    {
        return m_yieldOffsetResumeLabelList->MapUntil(fn);
    }
    void RemoveYieldOffsetResumeLabel(const YieldOffsetResumeLabel& yorl)
    {
        m_yieldOffsetResumeLabelList->Remove(yorl);
    }
    // Replaces the entry for a dead label with (offset, nullptr) so the
    // offset itself remains recorded.
    void RemoveDeadYieldOffsetResumeLabel(IR::LabelInstr* label)
    {
        uint32 offset;
        bool found = m_yieldOffsetResumeLabelList->MapUntil([&offset, &label](int i, YieldOffsetResumeLabel& yorl)
        {
            if (yorl.Second() == label)
            {
                offset = yorl.First();
                return true;
            }
            return false;
        });
        Assert(found);
        RemoveYieldOffsetResumeLabel(YieldOffsetResumeLabel(offset, label));
        AddYieldOffsetResumeLabel(offset, nullptr);
    }

    IR::Instr * GetFunctionEntryInsertionPoint();
    IR::IndirOpnd * GetConstantAddressIndirOpnd(void * address, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode);
    void MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv);
    // [sic] "Constand" — name kept as spelled at the definition site.
    void DisableConstandAddressLoadHoist() { canHoistConstantAddressLoad = false; }

    void AddSlotArrayCheck(IR::SymOpnd *fieldOpnd);
    void AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId = (uint32)-1);

#if DBG
    bool allowRemoveBailOutArgInstr;
#endif

#if defined(_M_ARM32_OR_ARM64)
    int32 GetInlineeArgumentStackSize()
    {
        int32 count = this->GetMaxInlineeArgOutCount();
        if (count)
        {
            return ((count + 1) * MachPtr); // +1 for the dedicated zero out argc slot
        }
        return 0;
    }
#endif

public:
    BVSparse<JitArenaAllocator> * argObjSyms;
    BVSparse<JitArenaAllocator> * m_nonTempLocalVars; // Only populated in debug mode as part of IRBuilder. Used in GlobOpt and BackwardPass.
    InlineeFrameInfo* frameInfo;
    uint32 m_inlineeId;
    IR::LabelInstr * m_bailOutNoSaveLabel;

private:
#ifdef PROFILE_EXEC
    Js::ScriptContextProfiler *const m_codeGenProfiler;
#endif
    Js::FunctionBody* m_jnFunction;
    Func * const parentFunc;           // nullptr => this is the top func
    StackSym * m_inlineeFrameStartSym; // non-null => this func is an inlinee
    uint maxInlineeArgOutCount;
    const bool m_isBackgroundJIT;
    bool hasInstrNumber;
    bool maintainByteCodeOffset;
    bool hasInlinee;
    bool thisOrParentInlinerHasArguments;
    bool useRuntimeStats;
    bool stackNestedFunc;
    bool stackClosure;
    bool hasAnyStackNestedFunc;
    bool hasMarkTempObjects;
    Cloner * m_cloner;
    InstrMap * m_cloneMap;
    Js::ReadOnlyDynamicProfileInfo *const profileInfo;
    NativeCodeData::Allocator nativeCodeDataAllocator;
    NativeCodeData::Allocator transferDataAllocator;
    CodeGenNumberAllocator * numberAllocator;
    int32 m_localVarSlotsOffset;
    int32 m_hasLocalVarChangedOffset;    // Offset on stack of 1 byte which indicates if any local var has changed.
    CodeGenAllocators *const m_codeGenAllocators;
    YieldOffsetResumeLabelList * m_yieldOffsetResumeLabelList;

    StackSym *CreateInlineeStackSym();
    IR::SymOpnd *GetInlineeOpndAtOffset(int32 offset);
    bool HasLocalVarSlotCreated() const { return m_localVarSlotsOffset != Js::Constants::InvalidOffset; }
    void EnsureLocalVarSlots();
    SList<IR::RegOpnd *> constantAddressRegOpnd;
    IR::Instr * lastConstantAddressRegLoadInstr;
    bool canHoistConstantAddressLoad;
#if DBG
    VtableHashMap * vtableMap;
#endif
};
  677. class AutoCodeGenPhase
  678. {
  679. public:
  680. AutoCodeGenPhase(Func * func, Js::Phase phase) : func(func), phase(phase), dump(false), isPhaseComplete(false)
  681. {
  682. func->BeginPhase(phase);
  683. }
  684. ~AutoCodeGenPhase()
  685. {
  686. if(this->isPhaseComplete)
  687. {
  688. func->EndPhase(phase, dump);
  689. }
  690. else
  691. {
  692. //End the profiler tag
  693. func->EndProfiler(phase);
  694. }
  695. }
  696. void EndPhase(Func * func, Js::Phase phase, bool dump, bool isPhaseComplete)
  697. {
  698. Assert(this->func == func);
  699. Assert(this->phase == phase);
  700. this->dump = dump && (PHASE_DUMP(Js::SimpleJitPhase, func->GetJnFunction()) || !func->IsSimpleJit());
  701. this->isPhaseComplete = isPhaseComplete;
  702. }
  703. private:
  704. Func * func;
  705. Js::Phase phase;
  706. bool dump;
  707. bool isPhaseComplete;
  708. };
// Scoped phase markers: BEGIN_CODEGEN_PHASE opens a brace scope holding an
// AutoCodeGenPhase guard; END_CODEGEN_PHASE(_NO_DUMP) marks the phase complete
// (with/without dumping) and closes that scope. The macros own the braces, so
// BEGIN/END must always be paired lexically in the same function.
#define BEGIN_CODEGEN_PHASE(func, phase) { AutoCodeGenPhase __autoCodeGen(func, phase);
#define END_CODEGEN_PHASE(func, phase) __autoCodeGen.EndPhase(func, phase, true, true); }
#define END_CODEGEN_PHASE_NO_DUMP(func, phase) __autoCodeGen.EndPhase(func, phase, false, true); }