Func.h 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. struct CodeGenWorkItem;
  7. class Lowerer;
  8. class Inline;
  9. class FlowGraph;
  10. #if defined(_M_ARM32_OR_ARM64)
  11. #include "UnwindInfoManager.h"
  12. #endif
  13. struct Cloner
  14. {
  15. Cloner(Lowerer *lowerer, JitArenaAllocator *alloc) :
  16. alloc(alloc),
  17. symMap(nullptr),
  18. labelMap(nullptr),
  19. lowerer(lowerer),
  20. instrFirst(nullptr),
  21. instrLast(nullptr),
  22. fRetargetClonedBranch(FALSE)
  23. {
  24. }
  25. ~Cloner()
  26. {
  27. if (symMap)
  28. {
  29. Adelete(alloc, symMap);
  30. }
  31. if (labelMap)
  32. {
  33. Adelete(alloc, labelMap);
  34. }
  35. }
  36. void AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone);
  37. void Finish();
  38. void RetargetClonedBranches();
  39. HashTable<StackSym*> *symMap;
  40. HashTable<IR::LabelInstr*> *labelMap;
  41. Lowerer * lowerer;
  42. IR::Instr * instrFirst;
  43. IR::Instr * instrLast;
  44. BOOL fRetargetClonedBranch;
  45. JitArenaAllocator *alloc;
  46. bool clonedInstrGetOrigArgSlotSym;
  47. };
/*
* This class keeps track of various information required for Stack Arguments optimization with formals.
*/
class StackArgWithFormalsTracker
{
private:
    BVSparse<JitArenaAllocator> * formalsArraySyms;  //Tracks Formal parameter Array - Is this Bv required explicitly?
    StackSym** formalsIndexToStackSymMap;            //Tracks the stack sym for each formal
    StackSym* m_scopeObjSym;                         // Tracks the stack sym for the scope object that is created.
    JitArenaAllocator* alloc;                        // Arena backing the tracking structures above.
public:
    // All tracking structures start out null and are created on demand.
    StackArgWithFormalsTracker(JitArenaAllocator *alloc):
        formalsArraySyms(nullptr),
        formalsIndexToStackSymMap(nullptr),
        m_scopeObjSym(nullptr),
        alloc(alloc)
    {
    }
    BVSparse<JitArenaAllocator> * GetFormalsArraySyms();
    void SetFormalsArraySyms(SymID symId);
    StackSym ** GetFormalsIndexToStackSymMap();
    void SetStackSymInFormalsIndexMap(StackSym * sym, Js::ArgSlot formalsIndex, Js::ArgSlot formalsCount);
    void SetScopeObjSym(StackSym * sym);
    StackSym * GetScopeObjSym();
};
// Pairs what appears to be a yield's bytecode offset with its resume label -- TODO confirm against generator lowering.
typedef JsUtil::Pair<uint32, IR::LabelInstr*> YieldOffsetResumeLabel;
typedef JsUtil::List<YieldOffsetResumeLabel, JitArenaAllocator> YieldOffsetResumeLabelList;
typedef HashTable<uint32, JitArenaAllocator> SlotArrayCheckTable;
// Records slot-array checks needed for a frame display entry.
struct FrameDisplayCheckRecord
{
    SlotArrayCheckTable *table;  // per-slot check table; nullptr until populated
    uint32 slotId;               // (uint32)-1 acts as the "unset" sentinel
    FrameDisplayCheckRecord() : table(nullptr), slotId((uint32)-1) {}
};
typedef HashTable<FrameDisplayCheckRecord*, JitArenaAllocator> FrameDisplayCheckTable;
  83. class Func
  84. {
public:
    // Construct a Func for one JIT compilation unit. parentFunc is non-null for
    // inlinees; postCallByteCodeOffset / returnValueRegSlot / callSiteIdInParentFunc
    // describe the call site in the parent when this func is inlined.
    Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
        ThreadContextInfo * threadContextInfo,
        ScriptContextInfo * scriptContextInfo,
        JITOutputIDL * outputData,
        Js::EntryPointInfo* epInfo,
        const FunctionJITRuntimeInfo *const runtimeInfo,
        JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
#if !FLOATVAR
        CodeGenNumberAllocator * numberAllocator,
#endif
        Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc = nullptr,
        uint postCallByteCodeOffset = Js::Constants::NoByteCodeOffset,
        Js::RegSlot returnValueRegSlot = Js::Constants::NoRegister, const bool isInlinedConstructor = false,
        Js::ProfileId callSiteIdInParentFunc = UINT16_MAX, bool isGetterSetter = false);
public:
    // Allocator state lives on the top func; the accessors below forward there.
    void * const GetCodeGenAllocators()
    {
        return this->GetTopFunc()->m_codeGenAllocators;
    }
    InProcCodeGenAllocators * const GetInProcCodeGenAllocators()
    {
        // Valid only when jitting in-process.
        Assert(!JITManager::GetJITManager()->IsJITServer());
        return reinterpret_cast<InProcCodeGenAllocators*>(this->GetTopFunc()->m_codeGenAllocators);
    }
#if ENABLE_OOP_NATIVE_CODEGEN
    OOPCodeGenAllocators * const GetOOPCodeGenAllocators()
    {
        // Valid only when running in the JIT server process.
        Assert(JITManager::GetJITManager()->IsJITServer());
        return reinterpret_cast<OOPCodeGenAllocators*>(this->GetTopFunc()->m_codeGenAllocators);
    }
#endif
    NativeCodeData::Allocator *GetNativeCodeDataAllocator()
    {
        return &this->GetTopFunc()->nativeCodeDataAllocator;
    }
    NativeCodeData::Allocator *GetTransferDataAllocator()
    {
        return &this->GetTopFunc()->transferDataAllocator;
    }
#if !FLOATVAR
    CodeGenNumberAllocator * GetNumberAllocator()
    {
        return this->numberAllocator;
    }
#endif
#if !FLOATVAR
    XProcNumberPageSegmentImpl* GetXProcNumberAllocator()
    {
        // Lazily create the cross-process number page segment on first use;
        // throws OOM (rather than returning null) if the allocation fails.
        if (this->GetJITOutput()->GetOutputData()->numberPageSegments == nullptr)
        {
            XProcNumberPageSegmentImpl* seg = (XProcNumberPageSegmentImpl*)midl_user_allocate(sizeof(XProcNumberPageSegment));
            if (seg == nullptr)
            {
                Js::Throw::OutOfMemory();
            }
            this->GetJITOutput()->GetOutputData()->numberPageSegments = new (seg) XProcNumberPageSegmentImpl();
        }
        return (XProcNumberPageSegmentImpl*)this->GetJITOutput()->GetOutputData()->numberPageSegments;
    }
#endif
    Js::ScriptContextProfiler *GetCodeGenProfiler() const
    {
#ifdef PROFILE_EXEC
        return m_codeGenProfiler;
#else
        return nullptr;
#endif
    }
    bool IsOOPJIT() const { return JITManager::GetJITManager()->IsOOPJITEnabled(); }
    void InitLocalClosureSyms();
    // Stack-allocated nested function / stack closure queries.
    bool HasAnyStackNestedFunc() const { return this->hasAnyStackNestedFunc; }
    bool DoStackNestedFunc() const { return this->stackNestedFunc; }
    bool DoStackFrameDisplay() const { return this->stackClosure; }
    bool DoStackScopeSlots() const { return this->stackClosure; }
    bool IsBackgroundJIT() const { return this->m_isBackgroundJIT; }
    bool HasArgumentSlot() const { return this->GetInParamsCount() != 0 && !this->IsLoopBody(); }
    bool IsLoopBody() const { return m_workItem->IsLoopBody(); }
    bool IsLoopBodyInTry() const;
    bool CanAllocInPreReservedHeapPageSegment();
    void SetDoFastPaths();
    // Must only be called after SetDoFastPaths() has run (asserted in DBG builds).
    bool DoFastPaths() const { Assert(this->hasCalledSetDoFastPaths); return this->m_doFastPaths; }
    bool DoLoopFastPaths() const
    {
        return
            (!IsSimpleJit() || CONFIG_FLAG(NewSimpleJit)) &&
            !PHASE_OFF(Js::FastPathPhase, this) &&
            !PHASE_OFF(Js::LoopFastPathPhase, this);
    }
    // The global optimizer runs only for full JIT, and only when any try/catch
    // or try/finally regions in the top func are optimizable.
    bool DoGlobOpt() const
    {
        return
            !PHASE_OFF(Js::GlobOptPhase, this) && !IsSimpleJit() &&
            (!GetTopFunc()->HasTry() || GetTopFunc()->CanOptimizeTryCatch()) &&
            (!GetTopFunc()->HasFinally() || GetTopFunc()->CanOptimizeTryFinally());
    }
    bool DoInline() const
    {
        return DoGlobOpt() && !GetTopFunc()->HasTry();
    }
    bool DoOptimizeTry() const
    {
        Assert(IsTopFunc());
        return DoGlobOpt();
    }
    bool CanOptimizeTryFinally() const
    {
        return !this->m_workItem->IsLoopBody() && !PHASE_OFF(Js::OptimizeTryFinallyPhase, this) &&
            (!this->HasProfileInfo() || !this->GetReadOnlyProfileInfo()->IsOptimizeTryFinallyDisabled());
    }
    bool CanOptimizeTryCatch() const
    {
        return !this->m_workItem->IsLoopBody() && !PHASE_OFF(Js::OptimizeTryCatchPhase, this);
    }
    bool DoSimpleJitDynamicProfile() const;
    bool IsSimpleJit() const { return m_workItem->GetJitMode() == ExecutionMode::SimpleJit; }
    JITTimeWorkItem * GetWorkItem() const
    {
        return m_workItem;
    }
    // In-proc vs OOP thread-context accessors; the asserts guard the raw casts.
    ThreadContext * GetInProcThreadContext() const
    {
        Assert(!IsOOPJIT());
        return (ThreadContext*)m_threadContextInfo;
    }
    ServerThreadContext* GetOOPThreadContext() const
    {
        Assert(IsOOPJIT());
        return (ServerThreadContext*)m_threadContextInfo;
    }
    ThreadContextInfo * GetThreadContextInfo() const
    {
        return m_threadContextInfo;
    }
    ScriptContextInfo * GetScriptContextInfo() const
    {
        return m_scriptContextInfo;
    }
    JITOutput* GetJITOutput()
    {
        return &m_output;
    }
    const JITOutput* GetJITOutput() const
    {
        return &m_output;
    }
    const JITTimeFunctionBody * const GetJITFunctionBody() const
    {
        return m_workItem->GetJITFunctionBody();
    }
    Js::EntryPointInfo* GetInProcJITEntryPointInfo() const
    {
        Assert(!IsOOPJIT());
        return m_entryPointInfo;
    }
    char16* GetDebugNumberSet(wchar(&bufferToWriteTo)[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]) const
    {
        return m_workItem->GetJITTimeInfo()->GetDebugNumberSet(bufferToWriteTo);
    }
    void TryCodegen();
    // Static entry point: constructs a Func for workItem and runs code generation.
    static void Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
        ThreadContextInfo * threadContextInfo,
        ScriptContextInfo * scriptContextInfo,
        JITOutputIDL * outputData,
        Js::EntryPointInfo* epInfo, // for in-proc jit only
        const FunctionJITRuntimeInfo *const runtimeInfo,
        JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
#if !FLOATVAR
        CodeGenNumberAllocator * numberAllocator,
#endif
        Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT);
    // Stack-slot allocation and frame-offset helpers.
    int32 StackAllocate(int size);
    int32 StackAllocate(StackSym *stackSym, int size);
    void SetArgOffset(StackSym *stackSym, int32 offset);
    int32 GetLocalVarSlotOffset(int32 slotId);
    int32 GetHasLocalVarChangedOffset();
    bool IsJitInDebugMode() const;
    bool IsNonTempLocalVar(uint32 slotIndex);
    void OnAddSym(Sym* sym);
    uint GetLocalFunctionId() const
    {
        return m_workItem->GetJITTimeInfo()->GetLocalFunctionId();
    }
    uint GetSourceContextId() const
    {
        return m_workItem->GetJITFunctionBody()->GetSourceContextId();
    }
#ifdef MD_GROW_LOCALS_AREA_UP
    void AjustLocalVarSlotOffset(); // (sic: "Ajust" -- name must match the definition elsewhere)
#endif
    bool DoGlobOptsForGeneratorFunc() const;
    static int32 AdjustOffsetValue(int32 offset);
    static inline uint32 GetDiagLocalSlotSize()
    {
        // For the debug purpose we will have fixed stack slot size
        // We will allocated the 8 bytes for each variable.
        return MachDouble;
    }
#ifdef DBG
    // The pattern used to pre-fill locals for CHK builds.
    // When we restore bailout values we check for this pattern, this is how we assert for non-initialized variables/garbage.
    static const uint32 c_debugFillPattern4 = 0xcececece;
    static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
#if defined(_M_IX86) || defined (_M_ARM)
    static const uint32 c_debugFillPattern = c_debugFillPattern4;
#elif defined(_M_X64) || defined(_M_ARM64)
    static const unsigned __int64 c_debugFillPattern = c_debugFillPattern8;
#else
#error unsupported platform
#endif
#endif
    bool IsSIMDEnabled() const
    {
        return GetScriptContextInfo()->IsSIMDEnabled();
    }
    uint32 GetInstrCount();
    inline Js::ScriptContext* GetScriptContext() const
    {
        // The downcast is only legal in-process.
        Assert(!IsOOPJIT());
        return static_cast<Js::ScriptContext*>(this->GetScriptContextInfo());
    }
    void NumberInstrs();
    // The top func is the root of the parentFunc chain (the outermost function
    // of this compilation).
    bool IsTopFunc() const { return this->parentFunc == nullptr; }
    Func const * GetTopFunc() const;
    Func * GetTopFunc();
    void SetFirstArgOffset(IR::Instr* inlineeStart);
    uint GetFunctionNumber() const
    {
        return m_workItem->GetJITFunctionBody()->GetFunctionNumber();
    }
    // The body queries below are only meaningful on the top func (asserted).
    BOOL HasTry() const
    {
        Assert(this->IsTopFunc());
        return this->GetJITFunctionBody()->HasTry();
    }
    bool HasFinally() const
    {
        Assert(this->IsTopFunc());
        return this->GetJITFunctionBody()->HasFinally();
    }
    bool HasThis() const
    {
        Assert(this->IsTopFunc());
        Assert(this->GetJITFunctionBody()); // For now we always have a function body
        return this->GetJITFunctionBody()->HasThis();
    }
    Js::ArgSlot GetInParamsCount() const
    {
        Assert(this->IsTopFunc());
        return this->GetJITFunctionBody()->GetInParamsCount();
    }
    bool IsGlobalFunc() const
    {
        Assert(this->IsTopFunc());
        return this->GetJITFunctionBody()->IsGlobalFunc();
    }
    uint16 GetArgUsedForBranch() const;
    intptr_t GetWeakFuncRef() const;
    const FunctionJITRuntimeInfo * GetRuntimeInfo() const { return m_runtimeInfo; }
    bool IsLambda() const
    {
        Assert(this->IsTopFunc());
        Assert(this->GetJITFunctionBody()); // For now we always have a function body
        return this->GetJITFunctionBody()->IsLambda();
    }
    // A true leaf makes no calls at all, not even implicit ones.
    bool IsTrueLeaf() const
    {
        return !GetHasCalls() && !GetHasImplicitCalls();
    }
    StackSym *EnsureLoopParamSym();
    void UpdateForInLoopMaxDepth(uint forInLoopMaxDepth);
    int GetForInEnumeratorArrayOffset() const;
    // Cached stack syms for frequently loaded runtime values.
    StackSym *GetFuncObjSym() const { return m_funcObjSym; }
    void SetFuncObjSym(StackSym *sym) { m_funcObjSym = sym; }
    StackSym *GetJavascriptLibrarySym() const { return m_javascriptLibrarySym; }
    void SetJavascriptLibrarySym(StackSym *sym) { m_javascriptLibrarySym = sym; }
    StackSym *GetScriptContextSym() const { return m_scriptContextSym; }
    void SetScriptContextSym(StackSym *sym) { m_scriptContextSym = sym; }
    StackSym *GetFunctionBodySym() const { return m_functionBodySym; }
    void SetFunctionBodySym(StackSym *sym) { m_functionBodySym = sym; }
    StackSym *GetLocalClosureSym() const { return m_localClosureSym; }
    void SetLocalClosureSym(StackSym *sym) { m_localClosureSym = sym; }
    StackSym *GetParamClosureSym() const { return m_paramClosureSym; }
    void SetParamClosureSym(StackSym *sym) { m_paramClosureSym = sym; }
    StackSym *GetLocalFrameDisplaySym() const { return m_localFrameDisplaySym; }
    void SetLocalFrameDisplaySym(StackSym *sym) { m_localFrameDisplaySym = sym; }
    intptr_t GetJittedLoopIterationsSinceLastBailoutAddress() const;
    // Type-guard / property-guard / ctor-cache bookkeeping ("Ensure*" lazily
    // create the corresponding tables).
    void EnsurePinnedTypeRefs();
    void PinTypeRef(void* typeRef);
    void EnsureSingleTypeGuards();
    Js::JitTypePropertyGuard* GetOrCreateSingleTypeGuard(intptr_t typeAddr);
    void EnsureEquivalentTypeGuards();
    Js::JitEquivalentTypeGuard * CreateEquivalentTypeGuard(JITTypeHolder type, uint32 objTypeSpecFldId);
    void ThrowIfScriptClosed();
    void EnsurePropertyGuardsByPropertyId();
    void EnsureCtorCachesByPropertyId();
    void LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard);
    void LinkCtorCacheToPropertyId(Js::PropertyId propertyId, JITTimeConstructorCache* cache);
    JITTimeConstructorCache * GetConstructorCache(const Js::ProfileId profiledCallSiteId);
    void SetConstructorCache(const Js::ProfileId profiledCallSiteId, JITTimeConstructorCache* constructorCache);
    void EnsurePropertiesWrittenTo();
    void EnsureCallSiteToArgumentsOffsetFixupMap();
    IR::LabelInstr * EnsureFuncStartLabel();
    IR::LabelInstr * GetFuncStartLabel();
    IR::LabelInstr * EnsureFuncEndLabel();
    IR::LabelInstr * GetFuncEndLabel();
#ifdef _M_X64
    // x64 frame-size components (spill area, outgoing args, saved registers).
    void SetSpillSize(int32 spillSize)
    {
        m_spillSize = spillSize;
    }
    int32 GetSpillSize()
    {
        return m_spillSize;
    }
    void SetArgsSize(int32 argsSize)
    {
        m_argsSize = argsSize;
    }
    int32 GetArgsSize()
    {
        return m_argsSize;
    }
    void SetSavedRegSize(int32 savedRegSize)
    {
        m_savedRegSize = savedRegSize;
    }
    int32 GetSavedRegSize()
    {
        return m_savedRegSize;
    }
#endif
    bool IsInlinee() const
    {
        // If the inlinee frame start sym exists, its offset must already be assigned.
        Assert(m_inlineeFrameStartSym ? (m_inlineeFrameStartSym->m_offset != -1) : true);
        return m_inlineeFrameStartSym != nullptr;
    }
    void SetInlineeFrameStartSym(StackSym *sym)
    {
        Assert(m_inlineeFrameStartSym == nullptr);
        m_inlineeFrameStartSym = sym;
    }
    // Operands addressing the inlinee frame's meta-arg slots (argc, function
    // object, arguments object, argv).
    IR::SymOpnd *GetInlineeArgCountSlotOpnd()
    {
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_Argc * MachPtr);
    }
    IR::SymOpnd *GetNextInlineeFrameArgCountSlotOpnd()
    {
        Assert(!this->m_hasInlineArgsOpt);
        return GetInlineeOpndAtOffset((Js::Constants::InlineeMetaArgCount + actualCount) * MachPtr);
    }
    IR::SymOpnd *GetInlineeFunctionObjectSlotOpnd()
    {
        Assert(!this->m_hasInlineArgsOpt);
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_FunctionObject * MachPtr);
    }
    IR::SymOpnd *GetInlineeArgumentsObjectSlotOpnd()
    {
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_ArgumentsObject * MachPtr);
    }
    IR::SymOpnd *GetInlineeArgvSlotOpnd()
    {
        Assert(!this->m_hasInlineArgsOpt);
        return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_Argv * MachPtr);
    }
    bool IsInlined() const
    {
        return this->parentFunc != nullptr;
    }
    bool IsInlinedConstructor() const
    {
        return this->isInlinedConstructor;
    }
    bool IsTJLoopBody()const {
        return this->isTJLoopBody;
    }
    Js::Var AllocateNumber(double value);
    ObjTypeSpecFldInfo* GetObjTypeSpecFldInfo(const uint index) const;
    ObjTypeSpecFldInfo* GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const;
    // Gets an inline cache pointer to use in jitted code. Cached data may not be stable while jitting. Does not return null.
    intptr_t GetRuntimeInlineCache(const uint index) const;
    JITTimePolymorphicInlineCache * GetRuntimePolymorphicInlineCache(const uint index) const;
    byte GetPolyCacheUtil(const uint index) const;
    byte GetPolyCacheUtilToInitialize(const uint index) const;
#if defined(_M_ARM32_OR_ARM64)
    RegNum GetLocalsPointer() const;
#endif
#if DBG_DUMP
    void Dump(IRDumpFlags flags);
    void Dump();
    void DumpHeader();
#endif
#if DBG_DUMP || defined(ENABLE_IR_VIEWER)
    LPCSTR GetVtableName(INT_PTR address);
#endif
#if DBG_DUMP | defined(VTUNE_PROFILING)
    // NOTE(review): bitwise '|' above looks like it was meant to be '||';
    // result is the same for 0/1 macro values -- confirm before changing.
    bool DoRecordNativeMap() const;
#endif
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    void DumpFullFunctionName();
#endif
public:
    JitArenaAllocator * m_alloc;
    const FunctionJITRuntimeInfo *const m_runtimeInfo;
    ThreadContextInfo * m_threadContextInfo;
    ScriptContextInfo * m_scriptContextInfo;
    JITTimeWorkItem * m_workItem;
    JITTimePolymorphicInlineCacheInfo *const m_polymorphicInlineCacheInfo;
    // This indicates how many constructor caches we inserted into the constructorCaches array, not the total size of the array.
    uint constructorCacheCount;
    // This array maps callsite ids to constructor caches. The size corresponds to the number of callsites in the function.
    JITTimeConstructorCache** constructorCaches;
    typedef JsUtil::BaseHashSet<void*, JitArenaAllocator, PowerOf2SizePolicy> TypeRefSet;
    TypeRefSet* pinnedTypeRefs;
    typedef JsUtil::BaseDictionary<intptr_t, Js::JitTypePropertyGuard*, JitArenaAllocator, PowerOf2SizePolicy> TypePropertyGuardDictionary;
    TypePropertyGuardDictionary* singleTypeGuards;
    typedef SListCounted<Js::JitEquivalentTypeGuard*> EquivalentTypeGuardList;
    EquivalentTypeGuardList* equivalentTypeGuards;
    typedef JsUtil::BaseHashSet<Js::JitIndexedPropertyGuard*, JitArenaAllocator, PowerOf2SizePolicy> IndexedPropertyGuardSet;
    typedef JsUtil::BaseDictionary<Js::PropertyId, IndexedPropertyGuardSet*, JitArenaAllocator, PowerOf2SizePolicy> PropertyGuardByPropertyIdMap;
    PropertyGuardByPropertyIdMap* propertyGuardsByPropertyId;
    typedef JsUtil::BaseHashSet<intptr_t, JitArenaAllocator, PowerOf2SizePolicy> CtorCacheSet;
    typedef JsUtil::BaseDictionary<Js::PropertyId, CtorCacheSet*, JitArenaAllocator, PowerOf2SizePolicy> CtorCachesByPropertyIdMap;
    CtorCachesByPropertyIdMap* ctorCachesByPropertyId;
    typedef JsUtil::BaseDictionary<Js::ProfileId, int32, JitArenaAllocator, PrimeSizePolicy> CallSiteToArgumentsOffsetFixupMap;
    CallSiteToArgumentsOffsetFixupMap* callSiteToArgumentsOffsetFixupMap;
    int indexedPropertyGuardCount;
    typedef JsUtil::BaseHashSet<Js::PropertyId, JitArenaAllocator> PropertyIdSet;
    PropertyIdSet* propertiesWrittenTo;
    PropertyIdSet lazyBailoutProperties;
    bool anyPropertyMayBeWrittenTo;
    SlotArrayCheckTable *slotArrayCheckTable;
    FrameDisplayCheckTable *frameDisplayCheckTable;
    // Anchors of this func's IR instruction list.
    IR::Instr * m_headInstr;
    IR::Instr * m_exitInstr;
    IR::Instr * m_tailInstr;
#ifdef _M_X64
    int32 m_spillSize;
    int32 m_argsSize;
    int32 m_savedRegSize;
    PrologEncoder m_prologEncoder;
#endif
    SymTable * m_symTable;
    StackSym * m_loopParamSym;
    StackSym * m_funcObjSym;
    StackSym * m_javascriptLibrarySym;
    StackSym * m_scriptContextSym;
    StackSym * m_functionBodySym;
    StackSym * m_localClosureSym;
    StackSym * m_paramClosureSym;
    StackSym * m_localFrameDisplaySym;
    StackSym * m_bailoutReturnValueSym;
    StackSym * m_hasBailedOutSym;
    uint m_forInLoopMaxDepth;
    uint m_forInLoopBaseDepth;
    int32 m_forInEnumeratorArrayOffset;
    int32 m_localStackHeight;
    uint frameSize;
    uint32 inlineDepth;
    uint32 postCallByteCodeOffset;
    Js::RegSlot returnValueRegSlot;
    Js::ArgSlot actualCount;
    int32 firstActualStackOffset;
    uint32 tryCatchNestingLevel;
    uint32 m_totalJumpTableSizeInBytesForSwitchStatements;
#if defined(_M_ARM32_OR_ARM64)
    //Offset to arguments from sp + m_localStackHeight;
    //For non leaf functions this is (callee saved register count + LR + R11) * MachRegInt
    //For leaf functions this is (saved registers) * MachRegInt
    int32 m_ArgumentsOffset;
    UnwindInfoManager m_unwindInfo;
    IR::LabelInstr * m_epilogLabel;
#endif
    IR::LabelInstr * m_funcStartLabel;
    IR::LabelInstr * m_funcEndLabel;
    // Keep track of the maximum number of args on the stack.
    uint32 m_argSlotsForFunctionsCalled;
#if DBG
    uint32 m_callSiteCount;
#endif
    FlowGraph * m_fg;
    unsigned int m_labelCount;
    BitVector m_regsUsed;
    StackSym * tempSymDouble;
    StackSym * tempSymBool;
    uint32 loopCount;
    Js::ProfileId callSiteIdInParentFunc;
    // Packed boolean state flags for this func.
    bool m_isLeaf: 1; // This is set in the IRBuilder and might be inaccurate after inlining
    bool m_hasCalls: 1; // This is more accurate compared to m_isLeaf
    bool m_hasInlineArgsOpt : 1;
    bool m_doFastPaths : 1;
    bool hasBailout: 1;
    bool hasBailoutInEHRegion : 1;
    bool hasStackArgs: 1;
    bool hasImplicitParamLoad : 1; // True if there is a load of CallInfo, FunctionObject
    bool hasThrow : 1;
    bool hasUnoptimizedArgumentsAccess : 1; // True if there are any arguments access beyond the simple case of this.apply pattern
    bool m_canDoInlineArgsOpt : 1;
    bool hasApplyTargetInlining:1;
    bool isGetterSetter : 1;
    const bool isInlinedConstructor: 1;
    bool hasImplicitCalls: 1;
    bool hasTempObjectProducingInstr:1; // At least one instruction which can produce temp object
    bool isTJLoopBody : 1;
    bool isFlowGraphValid : 1;
#if DBG
    // DBG-only markers tracking which backend phases have completed.
    bool hasCalledSetDoFastPaths:1;
    bool isPostLower:1;
    bool isPostRegAlloc:1;
    bool isPostPeeps:1;
    bool isPostLayout:1;
    bool isPostFinalLower:1;
    typedef JsUtil::Stack<Js::Phase> CurrentPhasesStack;
    CurrentPhasesStack currentPhases;
    bool IsInPhase(Js::Phase tag);
#endif
    // Phase bracketing for tracing/profiling.
    void BeginPhase(Js::Phase tag);
    void EndPhase(Js::Phase tag, bool dump = true);
    void EndProfiler(Js::Phase tag);
    // IR cloning support; cloner state is stored on the top func.
    void BeginClone(Lowerer *lowerer, JitArenaAllocator *alloc);
    void EndClone();
    Cloner * GetCloner() const { return GetTopFunc()->m_cloner; }
    InstrMap * GetCloneMap() const { return GetTopFunc()->m_cloneMap; }
    void ClearCloneMap() { Assert(this->IsTopFunc()); this->m_cloneMap = nullptr; }
    // Instr numbering and byte-code offsets share storage, so offsets are only
    // available while instr numbering is off.
    bool HasByteCodeOffset() const { return !this->GetTopFunc()->hasInstrNumber; }
    bool DoMaintainByteCodeOffset() const { return this->HasByteCodeOffset() && this->GetTopFunc()->maintainByteCodeOffset; }
    void StopMaintainByteCodeOffset() { this->GetTopFunc()->maintainByteCodeOffset = false; }
    Func * GetParentFunc() const { return parentFunc; }
    uint GetMaxInlineeArgOutCount() const { return maxInlineeArgOutCount; }
    void UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount);
#if DBG_DUMP
    ptrdiff_t m_codeSize;
#endif
    bool GetHasCalls() const { return this->m_hasCalls; }
    void SetHasCalls() { this->m_hasCalls = true; }
  620. void SetHasCallsOnSelfAndParents()
  621. {
  622. Func *curFunc = this;
  623. while (curFunc)
  624. {
  625. curFunc->SetHasCalls();
  626. curFunc = curFunc->GetParentFunc();
  627. }
  628. }
    void SetHasInstrNumber(bool has) { this->GetTopFunc()->hasInstrNumber = has; }
    bool HasInstrNumber() const { return this->GetTopFunc()->hasInstrNumber; }
    bool HasInlinee() const { Assert(this->IsTopFunc()); return this->hasInlinee; }
    void SetHasInlinee() { Assert(this->IsTopFunc()); this->hasInlinee = true; }
    bool GetThisOrParentInlinerHasArguments() const { return thisOrParentInlinerHasArguments; }
    // Stack args are usable only when the raw flag is set AND the opt isn't
    // disabled for this func or globally by phase flag.
    bool GetHasStackArgs() const
    {
        return this->hasStackArgs && !IsStackArgOptDisabled() && !PHASE_OFF1(Js::StackArgOptPhase);
    }
    void SetHasStackArgs(bool has) { this->hasStackArgs = has;}
    // Stack args opt is on only if this func uses the arguments object, this
    // func allows stack args, and the top func allows stack args too.
    bool IsStackArgsEnabled()
    {
        Func* curFunc = this;
        bool isStackArgsEnabled = GetJITFunctionBody()->UsesArgumentsObject() && curFunc->GetHasStackArgs();
        Func * topFunc = curFunc->GetTopFunc();
        // NOTE(review): the null check below looks always-true if GetTopFunc()
        // returns 'this' for a top func -- confirm against its definition.
        if (topFunc != nullptr)
        {
            isStackArgsEnabled = isStackArgsEnabled && topFunc->GetHasStackArgs();
        }
        return isStackArgsEnabled;
    }
    bool GetHasImplicitParamLoad() const { return this->hasImplicitParamLoad; }
    void SetHasImplicitParamLoad() { this->hasImplicitParamLoad = true; }
    bool GetHasThrow() const { return this->hasThrow; }
    void SetHasThrow() { this->hasThrow = true; }
    bool GetHasUnoptimizedArgumentsAccess() const { return this->hasUnoptimizedArgumentsAccess; }
  655. void SetHasUnoptimizedArgumentsAccess(bool args)
  656. {
  657. // Once set to 'true' make sure this does not become false
  658. if (!this->hasUnoptimizedArgumentsAccess)
  659. {
  660. this->hasUnoptimizedArgumentsAccess = args;
  661. }
  662. if (args)
  663. {
  664. Func *curFunc = this->GetParentFunc();
  665. while (curFunc)
  666. {
  667. curFunc->hasUnoptimizedArgumentsAccess = args;
  668. curFunc = curFunc->GetParentFunc();
  669. }
  670. }
  671. }
  672. void DisableCanDoInlineArgOpt()
  673. {
  674. Func* curFunc = this;
  675. while (curFunc)
  676. {
  677. curFunc->m_canDoInlineArgsOpt = false;
  678. curFunc->m_hasInlineArgsOpt = false;
  679. curFunc = curFunc->GetParentFunc();
  680. }
  681. }
    bool GetHasApplyTargetInlining() const { return this->hasApplyTargetInlining;}
    void SetHasApplyTargetInlining() { this->hasApplyTargetInlining = true;}
    bool GetHasMarkTempObjects() const { return this->hasMarkTempObjects; }
    void SetHasMarkTempObjects() { this->hasMarkTempObjects = true; }
    bool GetHasNonSimpleParams() const { return this->hasNonSimpleParams; }
    void SetHasNonSimpleParams() { this->hasNonSimpleParams = true; }
    bool GetHasImplicitCalls() const { return this->hasImplicitCalls;}
    void SetHasImplicitCalls(bool has) { this->hasImplicitCalls = has;}
  690. void SetHasImplicitCallsOnSelfAndParents()
  691. {
  692. this->SetHasImplicitCalls(true);
  693. Func *curFunc = this->GetParentFunc();
  694. while (curFunc && !curFunc->IsTopFunc())
  695. {
  696. curFunc->SetHasImplicitCalls(true);
  697. curFunc = curFunc->GetParentFunc();
  698. }
  699. }
    // Whether an instruction producing a temp object was seen.
    bool GetHasTempObjectProducingInstr() const { return this->hasTempObjectProducingInstr; }
    void SetHasTempObjectProducingInstr(bool has) { this->hasTempObjectProducingInstr = has; }

    // Profile info is owned by the JIT function body; these are thin forwards.
    const JITTimeProfileInfo * GetReadOnlyProfileInfo() const { return GetJITFunctionBody()->GetReadOnlyProfileInfo(); }
    bool HasProfileInfo() const { return GetJITFunctionBody()->HasProfileInfo(); }
  704. bool HasArrayInfo()
  705. {
  706. const auto top = this->GetTopFunc();
  707. return this->HasProfileInfo() && this->GetWeakFuncRef() && !(top->HasTry() && !top->DoOptimizeTry()) &&
  708. top->DoGlobOpt() && !PHASE_OFF(Js::LoopFastPathPhase, top);
  709. }
  710. static Js::BuiltinFunction GetBuiltInIndex(IR::Opnd* opnd)
  711. {
  712. Assert(opnd);
  713. Js::BuiltinFunction index;
  714. if (opnd->IsRegOpnd())
  715. {
  716. index = opnd->AsRegOpnd()->m_sym->m_builtInIndex;
  717. }
  718. else if (opnd->IsSymOpnd())
  719. {
  720. PropertySym *propertySym = opnd->AsSymOpnd()->m_sym->AsPropertySym();
  721. index = Js::JavascriptLibrary::GetBuiltinFunctionForPropId(propertySym->m_propertyId);
  722. }
  723. else
  724. {
  725. index = Js::BuiltinFunction::None;
  726. }
  727. return index;
  728. }
  729. static bool IsBuiltInInlinedInLowerer(IR::Opnd* opnd)
  730. {
  731. Assert(opnd);
  732. Js::BuiltinFunction index = Func::GetBuiltInIndex(opnd);
  733. switch (index)
  734. {
  735. case Js::BuiltinFunction::JavascriptString_CharAt:
  736. case Js::BuiltinFunction::JavascriptString_CharCodeAt:
  737. case Js::BuiltinFunction::JavascriptString_CodePointAt:
  738. case Js::BuiltinFunction::Math_Abs:
  739. case Js::BuiltinFunction::JavascriptArray_Push:
  740. case Js::BuiltinFunction::JavascriptString_Replace:
  741. case Js::BuiltinFunction::JavascriptObject_HasOwnProperty:
  742. case Js::BuiltinFunction::JavascriptArray_IsArray:
  743. return true;
  744. default:
  745. return false;
  746. }
  747. }
    // Records the resume label for a generator yield at the given bytecode offset.
    void AddYieldOffsetResumeLabel(uint32 offset, IR::LabelInstr* label)
    {
        m_yieldOffsetResumeLabelList->Add(YieldOffsetResumeLabel(offset, label));
    }

    // Applies fn to every (offset, label) pair in the resume-label list.
    template <typename Fn>
    void MapYieldOffsetResumeLabels(Fn fn)
    {
        m_yieldOffsetResumeLabelList->Map(fn);
    }

    // Applies fn to each pair until fn returns true; returns whether it did.
    template <typename Fn>
    bool MapUntilYieldOffsetResumeLabels(Fn fn)
    {
        return m_yieldOffsetResumeLabelList->MapUntil(fn);
    }

    // Removes an exact (offset, label) pair from the resume-label list.
    void RemoveYieldOffsetResumeLabel(const YieldOffsetResumeLabel& yorl)
    {
        m_yieldOffsetResumeLabelList->Remove(yorl);
    }
  766. void RemoveDeadYieldOffsetResumeLabel(IR::LabelInstr* label)
  767. {
  768. uint32 offset;
  769. bool found = m_yieldOffsetResumeLabelList->MapUntil([&offset, &label](int i, YieldOffsetResumeLabel& yorl)
  770. {
  771. if (yorl.Second() == label)
  772. {
  773. offset = yorl.First();
  774. return true;
  775. }
  776. return false;
  777. });
  778. Assert(found);
  779. RemoveYieldOffsetResumeLabel(YieldOffsetResumeLabel(offset, label));
  780. AddYieldOffsetResumeLabel(offset, nullptr);
  781. }
    // Returns the instruction after which function-entry code is inserted.
    IR::Instr * GetFunctionEntryInsertionPoint();

    // Constant-address load tracking (see constantAddressRegOpnd et al.):
    IR::IndirOpnd * GetConstantAddressIndirOpnd(intptr_t address, IR::Opnd *largeConstOpnd, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode);
    void MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv);
    // NOTE: "Constand" is a historical typo kept to preserve the public name.
    void DisableConstandAddressLoadHoist() { canHoistConstantAddressLoad = false; }

    void AddSlotArrayCheck(IR::SymOpnd *fieldOpnd);
    void AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId = (uint32)-1);

    // Stack-args-with-formals tracking, backed by stackArgWithFormalsTracker:
    void EnsureStackArgWithFormalsTracker();
    BOOL IsFormalsArraySym(SymID symId);
    void TrackFormalsArraySym(SymID symId);
    void TrackStackSymForFormalIndex(Js::ArgSlot formalsIndex, StackSym * sym);
    StackSym* GetStackSymForFormal(Js::ArgSlot formalsIndex);
    bool HasStackSymForFormal(Js::ArgSlot formalsIndex);
    void SetScopeObjSym(StackSym * sym);
    StackSym * GetScopeObjSym();

    // Queries for per-optimization disable flags (defined out of line):
    bool IsTrackCompoundedIntOverflowDisabled() const;
    bool IsArrayCheckHoistDisabled() const;
    bool IsStackArgOptDisabled() const;
    bool IsSwitchOptDisabled() const;
    bool IsAggressiveIntTypeSpecDisabled() const;
#if DBG
    // Debug-only: permits removal of bailout arg instructions (checked-build aid).
    bool allowRemoveBailOutArgInstr;
#endif
  804. #if defined(_M_ARM32_OR_ARM64)
  805. int32 GetInlineeArgumentStackSize()
  806. {
  807. int32 count = this->GetMaxInlineeArgOutCount();
  808. if (count)
  809. {
  810. return ((count + 1) * MachPtr); // +1 for the dedicated zero out argc slot
  811. }
  812. return 0;
  813. }
  814. #endif
public:
    // Syms known to refer to the arguments object.
    BVSparse<JitArenaAllocator> * argObjSyms;
    BVSparse<JitArenaAllocator> * m_nonTempLocalVars;  // Only populated in debug mode as part of IRBuilder. Used in GlobOpt and BackwardPass.
    InlineeFrameInfo* frameInfo;
    Js::ArgSlot argInsCount;        // This count doesn't include the ArgIn instr for "this".
    uint32 m_inlineeId;
    IR::LabelInstr * m_bailOutNoSaveLabel;

    StackSym * GetNativeCodeDataSym() const;
    void SetNativeCodeDataSym(StackSym * sym);

private:
    Js::EntryPointInfo* m_entryPointInfo; // for in-proc JIT only
    JITOutput m_output;
#ifdef PROFILE_EXEC
    Js::ScriptContextProfiler *const m_codeGenProfiler;
#endif
    // Inliner parent; parent-chain walks above iterate this until null.
    Func * const parentFunc;
    StackSym * m_inlineeFrameStartSym;
    uint maxInlineeArgOutCount;
    const bool m_isBackgroundJIT;
    bool hasInstrNumber;
    bool maintainByteCodeOffset;
    bool hasInlinee;
    bool thisOrParentInlinerHasArguments;
    bool useRuntimeStats;
    bool stackNestedFunc;
    bool stackClosure;
    bool hasAnyStackNestedFunc;
    bool hasMarkTempObjects;
    bool hasNonSimpleParams;
    Cloner * m_cloner;
    InstrMap * m_cloneMap;
    NativeCodeData::Allocator nativeCodeDataAllocator;
    NativeCodeData::Allocator transferDataAllocator;
#if !FLOATVAR
    CodeGenNumberAllocator * numberAllocator;
#endif
    int32 m_localVarSlotsOffset;
    int32 m_hasLocalVarChangedOffset;    // Offset on stack of 1 byte which indicates if any local var has changed.
    void * const m_codeGenAllocators;
    // Generator support: (bytecode offset -> resume label) pairs.
    YieldOffsetResumeLabelList * m_yieldOffsetResumeLabelList;
    StackArgWithFormalsTracker * stackArgWithFormalsTracker;
    ObjTypeSpecFldInfo ** m_globalObjTypeSpecFldInfoArray;

    StackSym *CreateInlineeStackSym();
    IR::SymOpnd *GetInlineeOpndAtOffset(int32 offset);
    bool HasLocalVarSlotCreated() const { return m_localVarSlotsOffset != Js::Constants::InvalidOffset; }
    void EnsureLocalVarSlots();
    StackSym * m_nativeCodeDataSym;

    // Constant-address load hoisting state (see DisableConstandAddressLoadHoist).
    SList<IR::RegOpnd *> constantAddressRegOpnd;
    IR::Instr * lastConstantAddressRegLoadInstr;
    bool canHoistConstantAddressLoad;
#if DBG
    VtableHashMap * vtableMap;
#endif
#ifdef RECYCLER_WRITE_BARRIER_JIT
public:
    Lowerer* m_lowerer;
#endif
  872. };
  873. class AutoCodeGenPhase
  874. {
  875. public:
  876. AutoCodeGenPhase(Func * func, Js::Phase phase) : func(func), phase(phase), dump(false), isPhaseComplete(false)
  877. {
  878. func->BeginPhase(phase);
  879. }
  880. ~AutoCodeGenPhase()
  881. {
  882. if(this->isPhaseComplete)
  883. {
  884. func->EndPhase(phase, dump);
  885. }
  886. else
  887. {
  888. //End the profiler tag
  889. func->EndProfiler(phase);
  890. }
  891. }
  892. void EndPhase(Func * func, Js::Phase phase, bool dump, bool isPhaseComplete)
  893. {
  894. Assert(this->func == func);
  895. Assert(this->phase == phase);
  896. this->dump = dump && (PHASE_DUMP(Js::SimpleJitPhase, func) || !func->IsSimpleJit());
  897. this->isPhaseComplete = isPhaseComplete;
  898. }
  899. private:
  900. Func * func;
  901. Js::Phase phase;
  902. bool dump;
  903. bool isPhaseComplete;
  904. };
// Opens a scope with an AutoCodeGenPhase guard; must be closed by one of the
// END_CODEGEN_PHASE* macros, which mark the phase complete (with or without
// requesting an IR dump) and close the scope.
#define BEGIN_CODEGEN_PHASE(func, phase) { AutoCodeGenPhase __autoCodeGen(func, phase);
#define END_CODEGEN_PHASE(func, phase) __autoCodeGen.EndPhase(func, phase, true, true); }
#define END_CODEGEN_PHASE_NO_DUMP(func, phase) __autoCodeGen.EndPhase(func, phase, false, true); }

#ifdef PERF_HINT
// Emits a performance hint for the given function at a bytecode offset.
void WritePerfHint(PerfHints hint, Func* func, uint byteCodeOffset = Js::Constants::NoByteCodeOffset);
#endif