  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. struct CodeGenWorkItem;
  7. class Lowerer;
  8. class Inline;
  9. class FlowGraph;
  10. #if defined(_M_ARM32_OR_ARM64)
  11. #include "UnwindInfoManager.h"
  12. #endif
  13. struct Cloner
  14. {
  15. Cloner(Lowerer *lowerer, JitArenaAllocator *alloc) :
  16. alloc(alloc),
  17. symMap(nullptr),
  18. labelMap(nullptr),
  19. lowerer(lowerer),
  20. instrFirst(nullptr),
  21. instrLast(nullptr),
  22. fRetargetClonedBranch(FALSE)
  23. {
  24. }
  25. ~Cloner()
  26. {
  27. if (symMap)
  28. {
  29. Adelete(alloc, symMap);
  30. }
  31. if (labelMap)
  32. {
  33. Adelete(alloc, labelMap);
  34. }
  35. }
  36. void AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone);
  37. void Finish();
  38. void RetargetClonedBranches();
  39. HashTable<StackSym*> *symMap;
  40. HashTable<IR::LabelInstr*> *labelMap;
  41. Lowerer * lowerer;
  42. IR::Instr * instrFirst;
  43. IR::Instr * instrLast;
  44. BOOL fRetargetClonedBranch;
  45. JitArenaAllocator *alloc;
  46. bool clonedInstrGetOrigArgSlotSym;
  47. };
  48. typedef JsUtil::Pair<uint32, IR::LabelInstr*> YieldOffsetResumeLabel;
  49. typedef JsUtil::List<YieldOffsetResumeLabel, JitArenaAllocator> YieldOffsetResumeLabelList;
  50. typedef HashTable<uint32, JitArenaAllocator> SlotArrayCheckTable;
  51. struct FrameDisplayCheckRecord
  52. {
  53. SlotArrayCheckTable *table;
  54. uint32 slotId;
  55. FrameDisplayCheckRecord() : table(nullptr), slotId((uint32)-1) {}
  56. };
  57. typedef HashTable<FrameDisplayCheckRecord*, JitArenaAllocator> FrameDisplayCheckTable;
// Func is the per-function compilation context of the JIT back end. One Func
// exists for the top-level function being jitted and one per inlinee (chained
// through parentFunc); shared state (allocators, clone maps, instr numbering)
// always lives on the top Func. It owns the IR instruction stream, symbol
// table, stack-layout bookkeeping, guards/caches created during optimization,
// and the flags that steer individual compiler phases.
class Func
{
public:
    Func(JitArenaAllocator *alloc, CodeGenWorkItem* workItem, const Js::FunctionCodeGenRuntimeData *const runtimeData,
        Js::PolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, CodeGenAllocators *const codeGenAllocators,
        CodeGenNumberAllocator * numberAllocator, Js::ReadOnlyDynamicProfileInfo *const profileInfo,
        Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc = nullptr,
        uint postCallByteCodeOffset = Js::Constants::NoByteCodeOffset,
        Js::RegSlot returnValueRegSlot = Js::Constants::NoRegister, const bool isInlinedConstructor = false,
        Js::ProfileId callSiteIdInParentFunc = UINT16_MAX, bool isGetterSetter = false);
public:
    // --- Allocators. These are shared across the inline tree, so they are
    // --- always fetched from the top Func.
    CodeGenAllocators * const GetCodeGenAllocators()
    {
        return this->GetTopFunc()->m_codeGenAllocators;
    }
    NativeCodeData::Allocator *GetNativeCodeDataAllocator()
    {
        return &this->GetTopFunc()->nativeCodeDataAllocator;
    }
    NativeCodeData::Allocator *GetTransferDataAllocator()
    {
        return &this->GetTopFunc()->transferDataAllocator;
    }
    CodeGenNumberAllocator * GetNumberAllocator()
    {
        return this->numberAllocator;
    }
    EmitBufferManager<CriticalSection> *GetEmitBufferManager() const
    {
        return &this->m_codeGenAllocators->emitBufferManager;
    }
    Js::ScriptContextProfiler *GetCodeGenProfiler() const
    {
#ifdef PROFILE_EXEC
        return m_codeGenProfiler;
#else
        // Profiling compiled out: callers must tolerate a null profiler.
        return nullptr;
#endif
    }

    // --- Closure / stack-allocation queries.
    void InitLocalClosureSyms();
    bool HasAnyStackNestedFunc() const { return this->hasAnyStackNestedFunc; }
    bool DoStackNestedFunc() const { return this->stackNestedFunc; }
    // Both frame display and scope slots are gated by the same stackClosure flag.
    bool DoStackFrameDisplay() const { return this->stackClosure; }
    bool DoStackScopeSlots() const { return this->stackClosure; }
    bool IsBackgroundJIT() const { return this->m_isBackgroundJIT; }
    bool HasArgumentSlot() const { return this->GetInParamsCount() != 0 && !this->IsLoopBody(); }
    bool IsLoopBody() const;
    bool IsLoopBodyInTry() const;
    bool CanAllocInPreReservedHeapPageSegment();

    // --- Optimization policy. SetDoFastPaths must run before DoFastPaths is queried.
    void SetDoFastPaths();
    bool DoFastPaths() const { Assert(this->hasCalledSetDoFastPaths); return this->m_doFastPaths; }
    bool DoLoopFastPaths() const
    {
        return
            (!IsSimpleJit() || Js::FunctionBody::IsNewSimpleJit()) &&
            !PHASE_OFF(Js::FastPathPhase, this) &&
            !PHASE_OFF(Js::LoopFastPathPhase, this);
    }
    // Global optimizer runs only for full JIT, and only when any try region is optimizable.
    bool DoGlobOpt() const
    {
        return
            !PHASE_OFF(Js::GlobOptPhase, this->GetJnFunction()) && !IsSimpleJit() &&
            (!GetTopFunc()->HasTry() || GetTopFunc()->CanOptimizeTryCatch());
    }
    bool DoInline() const
    {
        return DoGlobOpt() && !GetTopFunc()->HasTry();
    }
    bool DoOptimizeTryCatch() const
    {
        Assert(IsTopFunc());
        return DoGlobOpt();
    }
    bool CanOptimizeTryCatch() const
    {
        return !this->HasFinally() && !this->IsLoopBody() && !PHASE_OFF(Js::OptimizeTryCatchPhase, this);
    }
    bool DoSimpleJitDynamicProfile() const { return IsSimpleJit() && GetTopFunc()->GetJnFunction()->DoSimpleJitDynamicProfile(); }
    bool IsSimpleJit() const { return m_workItem->GetJitMode() == ExecutionMode::SimpleJit; }

    // --- Main compilation entry points.
    void BuildIR();
    void Codegen();
    void ThrowIfScriptClosed();

    // --- Stack layout helpers. Offsets are relative to the function frame.
    int32 StackAllocate(int size);
    int32 StackAllocate(StackSym *stackSym, int size);
    void SetArgOffset(StackSym *stackSym, int32 offset);
    int32 GetLocalVarSlotOffset(int32 slotId);
    int32 GetHasLocalVarChangedOffset();
    bool IsJitInDebugMode();
    bool IsNonTempLocalVar(uint32 slotIndex);
    int32 AdjustOffsetValue(int32 offset);
    void OnAddSym(Sym* sym);
#ifdef MD_GROW_LOCALS_AREA_UP
    // NOTE(review): name has a typo ("Ajust") but is part of the existing interface.
    void AjustLocalVarSlotOffset();
#endif
    bool DoGlobOptsForGeneratorFunc();
    static inline uint32 GetDiagLocalSlotSize()
    {
        // For the debug purpose we will have fixed stack slot size
        // We will allocated the 8 bytes for each variable.
        return MachDouble;
    }
#ifdef DBG
    // The pattern used to pre-fill locals for CHK builds.
    // When we restore bailout values we check for this pattern, this is how we assert for non-initialized variables/garbage.
    static const uint32 c_debugFillPattern4 = 0xcececece;
    static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
#if defined(_M_IX86) || defined (_M_ARM)
    static const uint32 c_debugFillPattern = c_debugFillPattern4;
#elif defined(_M_X64) || defined(_M_ARM64)
    static const unsigned __int64 c_debugFillPattern = c_debugFillPattern8;
#else
#error unsupported platform
#endif
#endif
    uint32 GetInstrCount();
    inline Js::ScriptContext* GetScriptContext() const { return m_workItem->GetScriptContext(); }
    void NumberInstrs();

    // --- Inline-tree identity. The top Func has no parent.
    bool IsTopFunc() const { return this->parentFunc == nullptr; }
    Func const * GetTopFunc() const;
    Func * GetTopFunc();
    void SetFirstArgOffset(IR::Instr* inlineeStart);
    uint GetFunctionNumber() const
    {
        Assert(this->IsTopFunc());
        return this->m_workItem->GetFunctionNumber();
    }
    uint GetLocalFunctionId() const
    {
        return this->m_workItem->GetFunctionBody()->GetLocalFunctionId();
    }
    uint GetSourceContextId() const
    {
        return this->m_workItem->GetFunctionBody()->GetSourceContextId();
    }

    // --- Function-body queries. Valid only on the top Func.
    BOOL HasTry() const
    {
        Assert(this->IsTopFunc());
        Assert(this->m_jnFunction);     // For now we always have a function body
        return this->m_jnFunction->GetHasTry();
    }
    bool HasFinally() const
    {
        Assert(this->IsTopFunc());
        Assert(this->m_jnFunction);     // For now we always have a function body
        return this->m_jnFunction->GetHasFinally();
    }
    Js::ArgSlot GetInParamsCount() const
    {
        Assert(this->IsTopFunc());
        Assert(this->m_jnFunction);     // For now we always have a function body
        return this->m_jnFunction->GetInParamsCount();
    }
    bool IsGlobalFunc() const
    {
        Assert(this->IsTopFunc());
        Assert(this->m_jnFunction);     // For now we always have a function body
        return this->m_jnFunction->GetIsGlobalFunc();
    }
    RecyclerWeakReference<Js::FunctionBody> *GetWeakFuncRef() const;
    Js::FunctionBody * GetJnFunction() const { return m_jnFunction; }

    // --- Well-known stack syms cached on the Func.
    StackSym *EnsureLoopParamSym();
    StackSym *GetFuncObjSym() const { return m_funcObjSym; }
    void SetFuncObjSym(StackSym *sym) { m_funcObjSym = sym; }
    StackSym *GetJavascriptLibrarySym() const { return m_javascriptLibrarySym; }
    void SetJavascriptLibrarySym(StackSym *sym) { m_javascriptLibrarySym = sym; }
    StackSym *GetScriptContextSym() const { return m_scriptContextSym; }
    void SetScriptContextSym(StackSym *sym) { m_scriptContextSym = sym; }
    StackSym *GetFunctionBodySym() const { return m_functionBodySym; }
    void SetFunctionBodySym(StackSym *sym) { m_functionBodySym = sym; }
    StackSym *GetLocalClosureSym() const { return m_localClosureSym; }
    void SetLocalClosureSym(StackSym *sym) { m_localClosureSym = sym; }
    StackSym *GetLocalFrameDisplaySym() const { return m_localFrameDisplaySym; }
    void SetLocalFrameDisplaySym(StackSym *sym) { m_localFrameDisplaySym = sym; }
    uint8 *GetCallsCountAddress() const;

    // --- Type guards, pinned type refs, and constructor caches created during
    // --- object-type specialization. Ensure* lazily creates the container.
    void EnsurePinnedTypeRefs();
    void PinTypeRef(void* typeRef);
    void EnsureSingleTypeGuards();
    Js::JitTypePropertyGuard* GetOrCreateSingleTypeGuard(Js::Type* type);
    void EnsureEquivalentTypeGuards();
    Js::JitEquivalentTypeGuard * CreateEquivalentTypeGuard(Js::Type* type, uint32 objTypeSpecFldId);
    void EnsurePropertyGuardsByPropertyId();
    void EnsureCtorCachesByPropertyId();
    void LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard);
    void LinkCtorCacheToPropertyId(Js::PropertyId propertyId, Js::JitTimeConstructorCache* cache);
    Js::JitTimeConstructorCache* GetConstructorCache(const Js::ProfileId profiledCallSiteId);
    void SetConstructorCache(const Js::ProfileId profiledCallSiteId, Js::JitTimeConstructorCache* constructorCache);
    void EnsurePropertiesWrittenTo();
    void EnsureCallSiteToArgumentsOffsetFixupMap();
    IR::LabelInstr * EnsureFuncStartLabel();
    IR::LabelInstr * GetFuncStartLabel();
    IR::LabelInstr * EnsureFuncEndLabel();
    IR::LabelInstr * GetFuncEndLabel();

#ifdef _M_X64
    // --- x64 frame-size components, set during prolog/epilog generation.
    void SetSpillSize(int32 spillSize)
    {
        m_spillSize = spillSize;
    }
    int32 GetSpillSize()
    {
        return m_spillSize;
    }
    void SetArgsSize(int32 argsSize)
    {
        m_argsSize = argsSize;
    }
    int32 GetArgsSize()
    {
        return m_argsSize;
    }
    void SetSavedRegSize(int32 savedRegSize)
    {
        m_savedRegSize = savedRegSize;
    }
    int32 GetSavedRegSize()
    {
        return m_savedRegSize;
    }
#endif

    // --- Inlinee frame accessors. A Func is an inlinee iff its frame-start sym is set.
    bool IsInlinee() const
    {
        Assert(m_inlineeFrameStartSym ? (m_inlineeFrameStartSym->m_offset != -1) : true);
        return m_inlineeFrameStartSym != nullptr;
    }
    void SetInlineeFrameStartSym(StackSym *sym)
    {
        Assert(m_inlineeFrameStartSym == nullptr);
        m_inlineeFrameStartSym = sym;
    }
    IR::SymOpnd *GetInlineeArgCountSlotOpnd()
    {
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_Argc * MachPtr);
    }
    IR::SymOpnd *GetNextInlineeFrameArgCountSlotOpnd()
    {
        Assert(!this->m_hasInlineArgsOpt);
        return GetInlineeOpndAtOffset((Js::Constants::InlineeMetaArgCount + actualCount) * MachPtr);
    }
    IR::SymOpnd *GetInlineeFunctionObjectSlotOpnd()
    {
        Assert(!this->m_hasInlineArgsOpt);
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_FunctionObject * MachPtr);
    }
    IR::SymOpnd *GetInlineeArgumentsObjectSlotOpnd()
    {
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_ArgumentsObject * MachPtr);
    }
    IR::SymOpnd *GetInlineeArgvSlotOpnd()
    {
        Assert(!this->m_hasInlineArgsOpt);
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_Argv * MachPtr);
    }
    bool IsInlined() const
    {
        return this->parentFunc != nullptr;
    }
    bool IsInlinedConstructor() const
    {
        return this->isInlinedConstructor;
    }
    bool IsTJLoopBody()const {
        return this->isTJLoopBody;
    }

    // --- Inline cache / field-info lookups.
    Js::ObjTypeSpecFldInfo* GetObjTypeSpecFldInfo(const uint index) const;
    Js::ObjTypeSpecFldInfo* GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const;
    void SetGlobalObjTypeSpecFldInfo(uint propertyInfoId, Js::ObjTypeSpecFldInfo* info);
    // Gets an inline cache pointer to use in jitted code. Cached data may not be stable while jitting. Does not return null.
    Js::InlineCache *GetRuntimeInlineCache(const uint index) const;
    Js::PolymorphicInlineCache * GetRuntimePolymorphicInlineCache(const uint index) const;
    byte GetPolyCacheUtil(const uint index) const;
    byte GetPolyCacheUtilToInitialize(const uint index) const;
#if defined(_M_ARM32_OR_ARM64)
    RegNum GetLocalsPointer() const;
#endif
#if DBG_DUMP
    void Dump(IRDumpFlags flags);
    void Dump();
    void DumpHeader();
#endif
#if DBG_DUMP || defined(ENABLE_IR_VIEWER)
    LPCSTR GetVtableName(INT_PTR address);
#endif
#if DBG_DUMP | defined(VTUNE_PROFILING)
    bool DoRecordNativeMap() const;
#endif

public:
    // --- Public data members (the back end accesses most of these directly).
    JitArenaAllocator * m_alloc;
    CodeGenWorkItem* m_workItem;
    const Js::FunctionCodeGenJitTimeData *const m_jitTimeData;
    const Js::FunctionCodeGenRuntimeData *const m_runtimeData;
    Js::PolymorphicInlineCacheInfo *const m_polymorphicInlineCacheInfo;
    // This indicates how many constructor caches we inserted into the constructorCaches array, not the total size of the array.
    uint constructorCacheCount;
    // This array maps callsite ids to constructor caches. The size corresponds to the number of callsites in the function.
    Js::JitTimeConstructorCache** constructorCaches;
    typedef JsUtil::BaseHashSet<void*, JitArenaAllocator, PowerOf2SizePolicy> TypeRefSet;
    TypeRefSet* pinnedTypeRefs;
    typedef JsUtil::BaseDictionary<Js::Type*, Js::JitTypePropertyGuard*, JitArenaAllocator, PowerOf2SizePolicy> TypePropertyGuardDictionary;
    TypePropertyGuardDictionary* singleTypeGuards;
    typedef SListCounted<Js::JitEquivalentTypeGuard*> EquivalentTypeGuardList;
    EquivalentTypeGuardList* equivalentTypeGuards;
    typedef JsUtil::BaseHashSet<Js::JitIndexedPropertyGuard*, JitArenaAllocator, PowerOf2SizePolicy> IndexedPropertyGuardSet;
    typedef JsUtil::BaseDictionary<Js::PropertyId, IndexedPropertyGuardSet*, JitArenaAllocator, PowerOf2SizePolicy> PropertyGuardByPropertyIdMap;
    PropertyGuardByPropertyIdMap* propertyGuardsByPropertyId;
    typedef JsUtil::BaseHashSet<Js::ConstructorCache*, JitArenaAllocator, PowerOf2SizePolicy> CtorCacheSet;
    typedef JsUtil::BaseDictionary<Js::PropertyId, CtorCacheSet*, JitArenaAllocator, PowerOf2SizePolicy> CtorCachesByPropertyIdMap;
    CtorCachesByPropertyIdMap* ctorCachesByPropertyId;
    typedef JsUtil::BaseDictionary<Js::ProfileId, int32, JitArenaAllocator, PrimeSizePolicy> CallSiteToArgumentsOffsetFixupMap;
    CallSiteToArgumentsOffsetFixupMap* callSiteToArgumentsOffsetFixupMap;
    int indexedPropertyGuardCount;
    typedef JsUtil::BaseHashSet<Js::PropertyId, JitArenaAllocator> PropertyIdSet;
    PropertyIdSet* propertiesWrittenTo;
    PropertyIdSet lazyBailoutProperties;
    bool anyPropertyMayBeWrittenTo;
    SlotArrayCheckTable *slotArrayCheckTable;
    FrameDisplayCheckTable *frameDisplayCheckTable;

    // IR instruction stream boundaries.
    IR::Instr * m_headInstr;
    IR::Instr * m_exitInstr;
    IR::Instr * m_tailInstr;
#ifdef _M_X64
    int32 m_spillSize;
    int32 m_argsSize;
    int32 m_savedRegSize;
    PrologEncoder m_prologEncoder;
#endif
    SymTable * m_symTable;
    StackSym * m_loopParamSym;
    StackSym * m_funcObjSym;
    StackSym * m_javascriptLibrarySym;
    StackSym * m_scriptContextSym;
    StackSym * m_functionBodySym;
    StackSym * m_localClosureSym;
    StackSym * m_localFrameDisplaySym;
    StackSym * m_bailoutReturnValueSym;
    StackSym * m_hasBailedOutSym;
    int32 m_localStackHeight;
    uint frameSize;
    uint32 inlineDepth;
    uint32 postCallByteCodeOffset;
    Js::RegSlot returnValueRegSlot;
    Js::ArgSlot actualCount;
    int32 firstActualStackOffset;
    uint32 tryCatchNestingLevel;
    uint32 m_totalJumpTableSizeInBytesForSwitchStatements;
#if defined(_M_ARM32_OR_ARM64)
    //Offset to arguments from sp + m_localStackHeight;
    //For non leaf functions this is (callee saved register count + LR + R11) * MachRegInt
    //For leaf functions this is (saved registers) * MachRegInt
    int32 m_ArgumentsOffset;
    UnwindInfoManager m_unwindInfo;
    IR::LabelInstr * m_epilogLabel;
#endif
    IR::LabelInstr * m_funcStartLabel;
    IR::LabelInstr * m_funcEndLabel;
    // Keep track of the maximum number of args on the stack.
    uint32 m_argSlotsForFunctionsCalled;
#if DBG
    uint32 m_callSiteCount;
#endif
    FlowGraph * m_fg;
    unsigned int m_labelCount;
    BitVector m_regsUsed;
    StackSym * tempSymDouble;
    StackSym * tempSymBool;
    uint32 loopCount;
    Js::ProfileId callSiteIdInParentFunc;

    // --- Per-compilation flags (bitfields to keep the Func small).
    bool m_isLeaf: 1;       // This is set in the IRBuilder and might be inaccurate after inlining
    bool m_hasCalls: 1;     // This is more accurate compared to m_isLeaf
    bool m_hasInlineArgsOpt : 1;
    bool m_doFastPaths : 1;
    bool hasBailout: 1;
    bool hasBailoutInEHRegion : 1;
    bool hasStackArgs: 1;
    bool hasArgumentObject : 1;
    bool hasUnoptimizedArgumentsAcccess : 1; // True if there are any arguments access beyond the simple case of this.apply pattern
    bool m_canDoInlineArgsOpt : 1;
    bool hasApplyTargetInlining:1;
    bool isGetterSetter : 1;
    const bool isInlinedConstructor: 1;
    bool hasImplicitCalls: 1;
    bool hasTempObjectProducingInstr:1; // At least one instruction which can produce temp object
    bool isTJLoopBody : 1;
    bool isFlowGraphValid : 1;
#if DBG
    // Debug-only phase-ordering state used to assert on phase transitions.
    bool hasCalledSetDoFastPaths:1;
    bool isPostLower:1;
    bool isPostRegAlloc:1;
    bool isPostPeeps:1;
    bool isPostLayout:1;
    bool isPostFinalLower:1;
    typedef JsUtil::Stack<Js::Phase> CurrentPhasesStack;
    CurrentPhasesStack currentPhases;
    bool IsInPhase(Js::Phase tag);
#endif

    // --- Phase bracketing (see AutoCodeGenPhase).
    void BeginPhase(Js::Phase tag);
    void EndPhase(Js::Phase tag, bool dump = true);
    void EndProfiler(Js::Phase tag);

    // --- Cloning support. The cloner and clone map live on the top Func.
    void BeginClone(Lowerer *lowerer, JitArenaAllocator *alloc);
    void EndClone();
    Cloner * GetCloner() const { return GetTopFunc()->m_cloner; }
    InstrMap * GetCloneMap() const { return GetTopFunc()->m_cloneMap; }
    void ClearCloneMap() { Assert(this->IsTopFunc()); this->m_cloneMap = nullptr; }

    // Instruction numbers and byte-code offsets share storage; only one is valid at a time.
    bool HasByteCodeOffset() const { return !this->GetTopFunc()->hasInstrNumber; }
    bool DoMaintainByteCodeOffset() const { return this->HasByteCodeOffset() && this->GetTopFunc()->maintainByteCodeOffset; }
    void StopMaintainByteCodeOffset() { this->GetTopFunc()->maintainByteCodeOffset = false; }
    Func * GetParentFunc() const { return parentFunc; }
    uint GetMaxInlineeArgOutCount() const { return maxInlineeArgOutCount; }
    void UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount);
#if DBG_DUMP
    ptrdiff_t m_codeSize;
#endif
    bool GetHasCalls() const { return this->m_hasCalls; }
    void SetHasCalls() { this->m_hasCalls = true; }
    void SetHasCallsOnSelfAndParents()
    {
        // Propagate the flag up the whole inline chain, including this Func.
        Func *curFunc = this;
        while (curFunc)
        {
            curFunc->SetHasCalls();
            curFunc = curFunc->GetParentFunc();
        }
    }
    void SetHasInstrNumber(bool has) { this->GetTopFunc()->hasInstrNumber = has; }
    bool HasInstrNumber() const { return this->GetTopFunc()->hasInstrNumber; }
    bool HasInlinee() const { Assert(this->IsTopFunc()); return this->hasInlinee; }
    void SetHasInlinee() { Assert(this->IsTopFunc()); this->hasInlinee = true; }
    bool GetThisOrParentInlinerHasArguments() const { return thisOrParentInlinerHasArguments; }
    bool GetHasStackArgs() const { return this->hasStackArgs;}
    void SetHasStackArgs(bool has) { this->hasStackArgs = has;}
    bool GetHasArgumentObject() const { return this->hasArgumentObject;}
    void SetHasArgumentObject() { this->hasArgumentObject = true;}
    bool GetHasUnoptimizedArgumentsAcccess() const { return this->hasUnoptimizedArgumentsAcccess; }
    void SetHasUnoptimizedArgumentsAccess(bool args)
    {
        // Once set to 'true' make sure this does not become false
        if (!this->hasUnoptimizedArgumentsAcccess)
        {
            this->hasUnoptimizedArgumentsAcccess = args;
        }
        if (args)
        {
            // A 'true' value propagates to all parent inliners as well.
            Func *curFunc = this->GetParentFunc();
            while (curFunc)
            {
                curFunc->hasUnoptimizedArgumentsAcccess = args;
                curFunc = curFunc->GetParentFunc();
            }
        }
    }
    void DisableCanDoInlineArgOpt()
    {
        // Disable the optimization for this Func and every parent inliner.
        Func* curFunc = this;
        while (curFunc)
        {
            curFunc->m_canDoInlineArgsOpt = false;
            curFunc->m_hasInlineArgsOpt = false;
            curFunc = curFunc->GetParentFunc();
        }
    }
    bool GetHasApplyTargetInlining() const { return this->hasApplyTargetInlining;}
    void SetHasApplyTargetInlining() { this->hasApplyTargetInlining = true;}
    bool GetHasMarkTempObjects() const { return this->hasMarkTempObjects; }
    void SetHasMarkTempObjects() { this->hasMarkTempObjects = true; }
    bool GetHasImplicitCalls() const { return this->hasImplicitCalls;}
    void SetHasImplicitCalls(bool has) { this->hasImplicitCalls = has;}
    void SetHasImplicitCallsOnSelfAndParents()
    {
        // NOTE(review): unlike SetHasCallsOnSelfAndParents, this deliberately
        // stops before the top Func — confirm that asymmetry is intended.
        this->SetHasImplicitCalls(true);
        Func *curFunc = this->GetParentFunc();
        while (curFunc && !curFunc->IsTopFunc())
        {
            curFunc->SetHasImplicitCalls(true);
            curFunc = curFunc->GetParentFunc();
        }
    }
    bool GetHasTempObjectProducingInstr() const { return this->hasTempObjectProducingInstr; }
    void SetHasTempObjectProducingInstr(bool has) { this->hasTempObjectProducingInstr = has; }
    Js::ReadOnlyDynamicProfileInfo * GetProfileInfo() const { return this->profileInfo; }
    bool HasProfileInfo() { return this->profileInfo->HasProfileInfo(); }
    bool HasArrayInfo()
    {
        const auto top = this->GetTopFunc();
        return this->HasProfileInfo() && this->GetWeakFuncRef() && !(top->HasTry() && !top->DoOptimizeTryCatch()) &&
            top->DoGlobOpt() && !PHASE_OFF(Js::LoopFastPathPhase, top);
    }

    // Map an operand to the built-in function it refers to (None if not recognizable).
    static Js::BuiltinFunction GetBuiltInIndex(IR::Opnd* opnd)
    {
        Assert(opnd);
        Js::BuiltinFunction index;
        if (opnd->IsRegOpnd())
        {
            index = opnd->AsRegOpnd()->m_sym->m_builtInIndex;
        }
        else if (opnd->IsSymOpnd())
        {
            PropertySym *propertySym = opnd->AsSymOpnd()->m_sym->AsPropertySym();
            index = Js::JavascriptLibrary::GetBuiltinFunctionForPropId(propertySym->m_propertyId);
        }
        else
        {
            index = Js::BuiltinFunction::None;
        }
        return index;
    }
    // True for the small set of built-ins the Lowerer inlines directly.
    static bool IsBuiltInInlinedInLowerer(IR::Opnd* opnd)
    {
        Assert(opnd);
        Js::BuiltinFunction index = Func::GetBuiltInIndex(opnd);
        switch (index)
        {
        case Js::BuiltinFunction::String_CharAt:
        case Js::BuiltinFunction::String_CharCodeAt:
        case Js::BuiltinFunction::String_CodePointAt:
        case Js::BuiltinFunction::Math_Abs:
        case Js::BuiltinFunction::Array_Push:
        case Js::BuiltinFunction::String_Replace:
            return true;
        default:
            return false;
        }
    }

    // --- Generator yield-offset bookkeeping: offset -> resume label pairs.
    void AddYieldOffsetResumeLabel(uint32 offset, IR::LabelInstr* label)
    {
        m_yieldOffsetResumeLabelList->Add(YieldOffsetResumeLabel(offset, label));
    }
    template <typename Fn>
    void MapYieldOffsetResumeLabels(Fn fn)
    {
        m_yieldOffsetResumeLabelList->Map(fn);
    }
    template <typename Fn>
    bool MapUntilYieldOffsetResumeLabels(Fn fn)
    {
        return m_yieldOffsetResumeLabelList->MapUntil(fn);
    }
    void RemoveYieldOffsetResumeLabel(const YieldOffsetResumeLabel& yorl)
    {
        m_yieldOffsetResumeLabelList->Remove(yorl);
    }
    void RemoveDeadYieldOffsetResumeLabel(IR::LabelInstr* label)
    {
        // Replace the (offset, label) pair with (offset, nullptr) to keep the
        // offset recorded while dropping the dead label.
        uint32 offset;
        bool found = m_yieldOffsetResumeLabelList->MapUntil([&offset, &label](int i, YieldOffsetResumeLabel& yorl)
        {
            if (yorl.Second() == label)
            {
                offset = yorl.First();
                return true;
            }
            return false;
        });
        Assert(found);
        RemoveYieldOffsetResumeLabel(YieldOffsetResumeLabel(offset, label));
        AddYieldOffsetResumeLabel(offset, nullptr);
    }

    IR::Instr * GetFunctionEntryInsertionPoint();
    IR::IndirOpnd * GetConstantAddressIndirOpnd(void * address, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode);
    void MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv);
    void DisableConstandAddressLoadHoist() { canHoistConstantAddressLoad = false; }
    void AddSlotArrayCheck(IR::SymOpnd *fieldOpnd);
    void AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId = (uint32)-1);
#if DBG
    bool allowRemoveBailOutArgInstr;
#endif
#if defined(_M_ARM32_OR_ARM64)
    int32 GetInlineeArgumentStackSize()
    {
        int32 count = this->GetMaxInlineeArgOutCount();
        if (count)
        {
            return ((count + 1) * MachPtr); // +1 for the dedicated zero out argc slot
        }
        return 0;
    }
#endif
public:
    BVSparse<JitArenaAllocator> * argObjSyms;
    BVSparse<JitArenaAllocator> * m_nonTempLocalVars; // Only populated in debug mode as part of IRBuilder. Used in GlobOpt and BackwardPass.
    InlineeFrameInfo* frameInfo;
    uint32 m_inlineeId;
    IR::LabelInstr * m_bailOutNoSaveLabel;
private:
#ifdef PROFILE_EXEC
    Js::ScriptContextProfiler *const m_codeGenProfiler;
#endif
    Js::FunctionBody* m_jnFunction;
    Func * const parentFunc;            // null for the top Func
    StackSym * m_inlineeFrameStartSym;
    uint maxInlineeArgOutCount;
    const bool m_isBackgroundJIT;
    bool hasInstrNumber;
    bool maintainByteCodeOffset;
    bool hasInlinee;
    bool thisOrParentInlinerHasArguments;
    bool useRuntimeStats;
    bool stackNestedFunc;
    bool stackClosure;
    bool hasAnyStackNestedFunc;
    bool hasMarkTempObjects;
    Cloner * m_cloner;
    InstrMap * m_cloneMap;
    Js::ReadOnlyDynamicProfileInfo *const profileInfo;
    NativeCodeData::Allocator nativeCodeDataAllocator;
    NativeCodeData::Allocator transferDataAllocator;
    CodeGenNumberAllocator * numberAllocator;
    int32 m_localVarSlotsOffset;
    int32 m_hasLocalVarChangedOffset;   // Offset on stack of 1 byte which indicates if any local var has changed.
    CodeGenAllocators *const m_codeGenAllocators;
    YieldOffsetResumeLabelList * m_yieldOffsetResumeLabelList;
    StackSym *CreateInlineeStackSym();
    IR::SymOpnd *GetInlineeOpndAtOffset(int32 offset);
    bool HasLocalVarSlotCreated() const { return m_localVarSlotsOffset != Js::Constants::InvalidOffset; }
    void EnsureLocalVarSlots();
    SList<IR::RegOpnd *> constantAddressRegOpnd;
    IR::Instr * lastConstantAddressRegLoadInstr;
    bool canHoistConstantAddressLoad;
#if DBG
    VtableHashMap * vtableMap;
#endif
};
  677. class AutoCodeGenPhase
  678. {
  679. public:
  680. AutoCodeGenPhase(Func * func, Js::Phase phase) : func(func), phase(phase), dump(false), isPhaseComplete(false)
  681. {
  682. func->BeginPhase(phase);
  683. }
  684. ~AutoCodeGenPhase()
  685. {
  686. if(this->isPhaseComplete)
  687. {
  688. func->EndPhase(phase, dump);
  689. }
  690. else
  691. {
  692. //End the profiler tag
  693. func->EndProfiler(phase);
  694. }
  695. }
  696. void EndPhase(Func * func, Js::Phase phase, bool dump, bool isPhaseComplete)
  697. {
  698. Assert(this->func == func);
  699. Assert(this->phase == phase);
  700. this->dump = dump && (PHASE_DUMP(Js::SimpleJitPhase, func->GetJnFunction()) || !func->IsSimpleJit());
  701. this->isPhaseComplete = isPhaseComplete;
  702. }
  703. private:
  704. Func * func;
  705. Js::Phase phase;
  706. bool dump;
  707. bool isPhaseComplete;
  708. };
  709. #define BEGIN_CODEGEN_PHASE(func, phase) { AutoCodeGenPhase __autoCodeGen(func, phase);
  710. #define END_CODEGEN_PHASE(func, phase) __autoCodeGen.EndPhase(func, phase, true, true); }
  711. #define END_CODEGEN_PHASE_NO_DUMP(func, phase) __autoCodeGen.EndPhase(func, phase, false, true); }