2
0

Func.h 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. struct CodeGenWorkItem;
  7. class Lowerer;
  8. class Inline;
  9. class FlowGraph;
  10. #if defined(_M_ARM32_OR_ARM64)
  11. #include "UnwindInfoManager.h"
  12. #endif
  13. struct Cloner
  14. {
  15. Cloner(Lowerer *lowerer, JitArenaAllocator *alloc) :
  16. alloc(alloc),
  17. symMap(nullptr),
  18. labelMap(nullptr),
  19. lowerer(lowerer),
  20. instrFirst(nullptr),
  21. instrLast(nullptr),
  22. fRetargetClonedBranch(FALSE)
  23. {
  24. }
  25. ~Cloner()
  26. {
  27. if (symMap)
  28. {
  29. Adelete(alloc, symMap);
  30. }
  31. if (labelMap)
  32. {
  33. Adelete(alloc, labelMap);
  34. }
  35. }
  36. void AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone);
  37. void Finish();
  38. void RetargetClonedBranches();
  39. HashTable<StackSym*> *symMap;
  40. HashTable<IR::LabelInstr*> *labelMap;
  41. Lowerer * lowerer;
  42. IR::Instr * instrFirst;
  43. IR::Instr * instrLast;
  44. BOOL fRetargetClonedBranch;
  45. JitArenaAllocator *alloc;
  46. bool clonedInstrGetOrigArgSlotSym;
  47. };
  48. /*
  49. * This class keeps track of various information required for Stack Arguments optimization with formals.
  50. */
  51. class StackArgWithFormalsTracker
  52. {
  53. private:
  54. BVSparse<JitArenaAllocator> * formalsArraySyms; //Tracks Formal parameter Array - Is this Bv required explicitly?
  55. StackSym** formalsIndexToStackSymMap; //Tracks the stack sym for each formal
  56. StackSym* m_scopeObjSym; // Tracks the stack sym for the scope object that is created.
  57. JitArenaAllocator* alloc;
  58. public:
  59. StackArgWithFormalsTracker(JitArenaAllocator *alloc):
  60. formalsArraySyms(nullptr),
  61. formalsIndexToStackSymMap(nullptr),
  62. m_scopeObjSym(nullptr),
  63. alloc(alloc)
  64. {
  65. }
  66. BVSparse<JitArenaAllocator> * GetFormalsArraySyms();
  67. void SetFormalsArraySyms(SymID symId);
  68. StackSym ** GetFormalsIndexToStackSymMap();
  69. void SetStackSymInFormalsIndexMap(StackSym * sym, Js::ArgSlot formalsIndex, Js::ArgSlot formalsCount);
  70. void SetScopeObjSym(StackSym * sym);
  71. StackSym * GetScopeObjSym();
  72. };
  73. typedef JsUtil::Pair<uint32, IR::LabelInstr*> YieldOffsetResumeLabel;
  74. typedef JsUtil::List<YieldOffsetResumeLabel, JitArenaAllocator> YieldOffsetResumeLabelList;
  75. typedef HashTable<uint32, JitArenaAllocator> SlotArrayCheckTable;
  76. struct FrameDisplayCheckRecord
  77. {
  78. SlotArrayCheckTable *table;
  79. uint32 slotId;
  80. FrameDisplayCheckRecord() : table(nullptr), slotId((uint32)-1) {}
  81. };
  82. typedef HashTable<FrameDisplayCheckRecord*, JitArenaAllocator> FrameDisplayCheckTable;
  83. class Func
  84. {
  85. public:
  86. Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  87. ThreadContextInfo * threadContextInfo,
  88. ScriptContextInfo * scriptContextInfo,
  89. JITOutputIDL * outputData,
  90. Js::EntryPointInfo* epInfo,
  91. const FunctionJITRuntimeInfo *const runtimeInfo,
  92. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
  93. #if !FLOATVAR
  94. CodeGenNumberAllocator * numberAllocator,
  95. #endif
  96. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc = nullptr,
  97. uint postCallByteCodeOffset = Js::Constants::NoByteCodeOffset,
  98. Js::RegSlot returnValueRegSlot = Js::Constants::NoRegister, const bool isInlinedConstructor = false,
  99. Js::ProfileId callSiteIdInParentFunc = UINT16_MAX, bool isGetterSetter = false);
  100. public:
  101. void * const GetCodeGenAllocators()
  102. {
  103. return this->GetTopFunc()->m_codeGenAllocators;
  104. }
  105. InProcCodeGenAllocators * const GetInProcCodeGenAllocators()
  106. {
  107. Assert(!JITManager::GetJITManager()->IsJITServer());
  108. return reinterpret_cast<InProcCodeGenAllocators*>(this->GetTopFunc()->m_codeGenAllocators);
  109. }
  110. #if ENABLE_OOP_NATIVE_CODEGEN
  111. OOPCodeGenAllocators * const GetOOPCodeGenAllocators()
  112. {
  113. Assert(JITManager::GetJITManager()->IsJITServer());
  114. return reinterpret_cast<OOPCodeGenAllocators*>(this->GetTopFunc()->m_codeGenAllocators);
  115. }
  116. #endif
  117. NativeCodeData::Allocator *GetNativeCodeDataAllocator()
  118. {
  119. return &this->GetTopFunc()->nativeCodeDataAllocator;
  120. }
  121. NativeCodeData::Allocator *GetTransferDataAllocator()
  122. {
  123. return &this->GetTopFunc()->transferDataAllocator;
  124. }
  125. #if !FLOATVAR
  126. CodeGenNumberAllocator * GetNumberAllocator()
  127. {
  128. return this->numberAllocator;
  129. }
  130. #endif
  131. #if !FLOATVAR
  132. XProcNumberPageSegmentImpl* GetXProcNumberAllocator()
  133. {
  134. if (this->GetJITOutput()->GetOutputData()->numberPageSegments == nullptr)
  135. {
  136. XProcNumberPageSegmentImpl* seg = (XProcNumberPageSegmentImpl*)midl_user_allocate(sizeof(XProcNumberPageSegment));
  137. if (seg == nullptr)
  138. {
  139. Js::Throw::OutOfMemory();
  140. }
  141. this->GetJITOutput()->GetOutputData()->numberPageSegments = new (seg) XProcNumberPageSegmentImpl();
  142. }
  143. return (XProcNumberPageSegmentImpl*)this->GetJITOutput()->GetOutputData()->numberPageSegments;
  144. }
  145. #endif
  146. Js::ScriptContextProfiler *GetCodeGenProfiler() const
  147. {
  148. #ifdef PROFILE_EXEC
  149. return m_codeGenProfiler;
  150. #else
  151. return nullptr;
  152. #endif
  153. }
  154. bool IsOOPJIT() const { return JITManager::GetJITManager()->IsOOPJITEnabled(); }
  155. void InitLocalClosureSyms();
  156. bool HasAnyStackNestedFunc() const { return this->hasAnyStackNestedFunc; }
  157. bool DoStackNestedFunc() const { return this->stackNestedFunc; }
  158. bool DoStackFrameDisplay() const { return this->stackClosure; }
  159. bool DoStackScopeSlots() const { return this->stackClosure; }
  160. bool IsBackgroundJIT() const { return this->m_isBackgroundJIT; }
  161. bool HasArgumentSlot() const { return this->GetInParamsCount() != 0 && !this->IsLoopBody(); }
  162. bool IsLoopBody() const { return m_workItem->IsLoopBody(); }
  163. bool IsLoopBodyInTry() const;
  164. bool CanAllocInPreReservedHeapPageSegment();
  165. void SetDoFastPaths();
  166. bool DoFastPaths() const { Assert(this->hasCalledSetDoFastPaths); return this->m_doFastPaths; }
  167. bool DoLoopFastPaths() const
  168. {
  169. return
  170. (!IsSimpleJit() || CONFIG_FLAG(NewSimpleJit)) &&
  171. !PHASE_OFF(Js::FastPathPhase, this) &&
  172. !PHASE_OFF(Js::LoopFastPathPhase, this);
  173. }
  174. bool DoGlobOpt() const
  175. {
  176. return
  177. !PHASE_OFF(Js::GlobOptPhase, this) && !IsSimpleJit() &&
  178. (!GetTopFunc()->HasTry() || GetTopFunc()->CanOptimizeTryCatch());
  179. }
  180. bool DoInline() const
  181. {
  182. return DoGlobOpt() && !GetTopFunc()->HasTry();
  183. }
  184. bool DoOptimizeTryCatch() const
  185. {
  186. Assert(IsTopFunc());
  187. return DoGlobOpt();
  188. }
  189. bool CanOptimizeTryCatch() const
  190. {
  191. return !this->HasFinally() && !this->m_workItem->IsLoopBody() && !PHASE_OFF(Js::OptimizeTryCatchPhase, this);
  192. }
  193. bool DoSimpleJitDynamicProfile() const;
  194. bool IsSimpleJit() const { return m_workItem->GetJitMode() == ExecutionMode::SimpleJit; }
  195. JITTimeWorkItem * GetWorkItem() const
  196. {
  197. return m_workItem;
  198. }
  199. ThreadContext * GetInProcThreadContext() const
  200. {
  201. Assert(!IsOOPJIT());
  202. return (ThreadContext*)m_threadContextInfo;
  203. }
  204. ServerThreadContext* GetOOPThreadContext() const
  205. {
  206. Assert(IsOOPJIT());
  207. return (ServerThreadContext*)m_threadContextInfo;
  208. }
  209. ThreadContextInfo * GetThreadContextInfo() const
  210. {
  211. return m_threadContextInfo;
  212. }
  213. ScriptContextInfo * GetScriptContextInfo() const
  214. {
  215. return m_scriptContextInfo;
  216. }
  217. JITOutput* GetJITOutput()
  218. {
  219. return &m_output;
  220. }
  221. const JITOutput* GetJITOutput() const
  222. {
  223. return &m_output;
  224. }
  225. const JITTimeFunctionBody * const GetJITFunctionBody() const
  226. {
  227. return m_workItem->GetJITFunctionBody();
  228. }
  229. Js::EntryPointInfo* GetInProcJITEntryPointInfo() const
  230. {
  231. Assert(!IsOOPJIT());
  232. return m_entryPointInfo;
  233. }
  234. char16* GetDebugNumberSet(wchar(&bufferToWriteTo)[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]) const
  235. {
  236. return m_workItem->GetJITTimeInfo()->GetDebugNumberSet(bufferToWriteTo);
  237. }
  238. void TryCodegen();
  239. static void Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  240. ThreadContextInfo * threadContextInfo,
  241. ScriptContextInfo * scriptContextInfo,
  242. JITOutputIDL * outputData,
  243. Js::EntryPointInfo* epInfo, // for in-proc jit only
  244. const FunctionJITRuntimeInfo *const runtimeInfo,
  245. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
  246. #if !FLOATVAR
  247. CodeGenNumberAllocator * numberAllocator,
  248. #endif
  249. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT);
  250. int32 StackAllocate(int size);
  251. int32 StackAllocate(StackSym *stackSym, int size);
  252. void SetArgOffset(StackSym *stackSym, int32 offset);
  253. int32 GetLocalVarSlotOffset(int32 slotId);
  254. int32 GetHasLocalVarChangedOffset();
  255. bool IsJitInDebugMode();
  256. bool IsNonTempLocalVar(uint32 slotIndex);
  257. void OnAddSym(Sym* sym);
  258. uint GetLocalFunctionId() const
  259. {
  260. return m_workItem->GetJITTimeInfo()->GetLocalFunctionId();
  261. }
  262. uint GetSourceContextId() const
  263. {
  264. return m_workItem->GetJITFunctionBody()->GetSourceContextId();
  265. }
  266. #ifdef MD_GROW_LOCALS_AREA_UP
  267. void AjustLocalVarSlotOffset();
  268. #endif
  269. bool DoGlobOptsForGeneratorFunc() const;
  270. static int32 AdjustOffsetValue(int32 offset);
  271. static inline uint32 GetDiagLocalSlotSize()
  272. {
  273. // For the debug purpose we will have fixed stack slot size
  274. // We will allocated the 8 bytes for each variable.
  275. return MachDouble;
  276. }
  277. #ifdef DBG
  278. // The pattern used to pre-fill locals for CHK builds.
  279. // When we restore bailout values we check for this pattern, this is how we assert for non-initialized variables/garbage.
  280. static const uint32 c_debugFillPattern4 = 0xcececece;
  281. static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
  282. #if defined(_M_IX86) || defined (_M_ARM)
  283. static const uint32 c_debugFillPattern = c_debugFillPattern4;
  284. #elif defined(_M_X64) || defined(_M_ARM64)
  285. static const unsigned __int64 c_debugFillPattern = c_debugFillPattern8;
  286. #else
  287. #error unsupported platform
  288. #endif
  289. #endif
  290. bool IsSIMDEnabled() const
  291. {
  292. return GetScriptContextInfo()->IsSIMDEnabled();
  293. }
  294. uint32 GetInstrCount();
  295. inline Js::ScriptContext* GetScriptContext() const
  296. {
  297. Assert(!IsOOPJIT());
  298. return static_cast<Js::ScriptContext*>(this->GetScriptContextInfo());
  299. }
  300. void NumberInstrs();
  301. bool IsTopFunc() const { return this->parentFunc == nullptr; }
  302. Func const * GetTopFunc() const;
  303. Func * GetTopFunc();
  304. void SetFirstArgOffset(IR::Instr* inlineeStart);
  305. uint GetFunctionNumber() const
  306. {
  307. return m_workItem->GetJITFunctionBody()->GetFunctionNumber();
  308. }
  309. BOOL HasTry() const
  310. {
  311. Assert(this->IsTopFunc());
  312. return this->GetJITFunctionBody()->HasTry();
  313. }
  314. bool HasFinally() const
  315. {
  316. Assert(this->IsTopFunc());
  317. return this->GetJITFunctionBody()->HasFinally();
  318. }
  319. bool HasThis() const
  320. {
  321. Assert(this->IsTopFunc());
  322. Assert(this->GetJITFunctionBody()); // For now we always have a function body
  323. return this->GetJITFunctionBody()->HasThis();
  324. }
  325. Js::ArgSlot GetInParamsCount() const
  326. {
  327. Assert(this->IsTopFunc());
  328. return this->GetJITFunctionBody()->GetInParamsCount();
  329. }
  330. bool IsGlobalFunc() const
  331. {
  332. Assert(this->IsTopFunc());
  333. return this->GetJITFunctionBody()->IsGlobalFunc();
  334. }
  335. uint16 GetArgUsedForBranch() const;
  336. intptr_t GetWeakFuncRef() const;
  337. const FunctionJITRuntimeInfo * GetRuntimeInfo() const { return m_runtimeInfo; }
  338. bool IsLambda() const
  339. {
  340. Assert(this->IsTopFunc());
  341. Assert(this->GetJITFunctionBody()); // For now we always have a function body
  342. return this->GetJITFunctionBody()->IsLambda();
  343. }
  344. bool IsTrueLeaf() const
  345. {
  346. return !GetHasCalls() && !GetHasImplicitCalls();
  347. }
  348. StackSym *EnsureLoopParamSym();
  349. void UpdateForInLoopMaxDepth(uint forInLoopMaxDepth);
  350. int GetForInEnumeratorArrayOffset() const;
  351. StackSym *GetFuncObjSym() const { return m_funcObjSym; }
  352. void SetFuncObjSym(StackSym *sym) { m_funcObjSym = sym; }
  353. StackSym *GetJavascriptLibrarySym() const { return m_javascriptLibrarySym; }
  354. void SetJavascriptLibrarySym(StackSym *sym) { m_javascriptLibrarySym = sym; }
  355. StackSym *GetScriptContextSym() const { return m_scriptContextSym; }
  356. void SetScriptContextSym(StackSym *sym) { m_scriptContextSym = sym; }
  357. StackSym *GetFunctionBodySym() const { return m_functionBodySym; }
  358. void SetFunctionBodySym(StackSym *sym) { m_functionBodySym = sym; }
  359. StackSym *GetLocalClosureSym() const { return m_localClosureSym; }
  360. void SetLocalClosureSym(StackSym *sym) { m_localClosureSym = sym; }
  361. StackSym *GetParamClosureSym() const { return m_paramClosureSym; }
  362. void SetParamClosureSym(StackSym *sym) { m_paramClosureSym = sym; }
  363. StackSym *GetLocalFrameDisplaySym() const { return m_localFrameDisplaySym; }
  364. void SetLocalFrameDisplaySym(StackSym *sym) { m_localFrameDisplaySym = sym; }
  365. intptr_t GetJittedLoopIterationsSinceLastBailoutAddress() const;
  366. void EnsurePinnedTypeRefs();
  367. void PinTypeRef(void* typeRef);
  368. void EnsureSingleTypeGuards();
  369. Js::JitTypePropertyGuard* GetOrCreateSingleTypeGuard(intptr_t typeAddr);
  370. void EnsureEquivalentTypeGuards();
  371. Js::JitEquivalentTypeGuard * CreateEquivalentTypeGuard(JITTypeHolder type, uint32 objTypeSpecFldId);
  372. void ThrowIfScriptClosed();
  373. void EnsurePropertyGuardsByPropertyId();
  374. void EnsureCtorCachesByPropertyId();
  375. void LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard);
  376. void LinkCtorCacheToPropertyId(Js::PropertyId propertyId, JITTimeConstructorCache* cache);
  377. JITTimeConstructorCache * GetConstructorCache(const Js::ProfileId profiledCallSiteId);
  378. void SetConstructorCache(const Js::ProfileId profiledCallSiteId, JITTimeConstructorCache* constructorCache);
  379. void EnsurePropertiesWrittenTo();
  380. void EnsureCallSiteToArgumentsOffsetFixupMap();
  381. IR::LabelInstr * EnsureFuncStartLabel();
  382. IR::LabelInstr * GetFuncStartLabel();
  383. IR::LabelInstr * EnsureFuncEndLabel();
  384. IR::LabelInstr * GetFuncEndLabel();
  385. #ifdef _M_X64
  386. void SetSpillSize(int32 spillSize)
  387. {
  388. m_spillSize = spillSize;
  389. }
  390. int32 GetSpillSize()
  391. {
  392. return m_spillSize;
  393. }
  394. void SetArgsSize(int32 argsSize)
  395. {
  396. m_argsSize = argsSize;
  397. }
  398. int32 GetArgsSize()
  399. {
  400. return m_argsSize;
  401. }
  402. void SetSavedRegSize(int32 savedRegSize)
  403. {
  404. m_savedRegSize = savedRegSize;
  405. }
  406. int32 GetSavedRegSize()
  407. {
  408. return m_savedRegSize;
  409. }
  410. #endif
  411. bool IsInlinee() const
  412. {
  413. Assert(m_inlineeFrameStartSym ? (m_inlineeFrameStartSym->m_offset != -1) : true);
  414. return m_inlineeFrameStartSym != nullptr;
  415. }
  416. void SetInlineeFrameStartSym(StackSym *sym)
  417. {
  418. Assert(m_inlineeFrameStartSym == nullptr);
  419. m_inlineeFrameStartSym = sym;
  420. }
  421. IR::SymOpnd *GetInlineeArgCountSlotOpnd()
  422. {
  423. return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_Argc * MachPtr);
  424. }
  425. IR::SymOpnd *GetNextInlineeFrameArgCountSlotOpnd()
  426. {
  427. Assert(!this->m_hasInlineArgsOpt);
  428. if (this->m_hasInlineArgsOpt)
  429. {
  430. // If the function has inlineArgsOpt turned on, jitted code will not write to stack slots for inlinee's function object
  431. // and arguments, until needed. If we attempt to read from those slots, we may be reading uninitialized memory.
  432. throw Js::OperationAbortedException();
  433. }
  434. return GetInlineeOpndAtOffset((Js::Constants::InlineeMetaArgCount + actualCount) * MachPtr);
  435. }
  436. IR::SymOpnd *GetInlineeFunctionObjectSlotOpnd()
  437. {
  438. Assert(!this->m_hasInlineArgsOpt);
  439. if (this->m_hasInlineArgsOpt)
  440. {
  441. // If the function has inlineArgsOpt turned on, jitted code will not write to stack slots for inlinee's function object
  442. // and arguments, until needed. If we attempt to read from those slots, we may be reading uninitialized memory.
  443. throw Js::OperationAbortedException();
  444. }
  445. return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_FunctionObject * MachPtr);
  446. }
  447. IR::SymOpnd *GetInlineeArgumentsObjectSlotOpnd()
  448. {
  449. return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_ArgumentsObject * MachPtr);
  450. }
  451. IR::SymOpnd *GetInlineeArgvSlotOpnd()
  452. {
  453. Assert(!this->m_hasInlineArgsOpt);
  454. if (this->m_hasInlineArgsOpt)
  455. {
  456. // If the function has inlineArgsOpt turned on, jitted code will not write to stack slots for inlinee's function object
  457. // and arguments, until needed. If we attempt to read from those slots, we may be reading uninitialized memory.
  458. throw Js::OperationAbortedException();
  459. }
  460. return GetInlineeOpndAtOffset(Js::Constants::InlineeMetaArgIndex_Argv * MachPtr);
  461. }
  462. bool IsInlined() const
  463. {
  464. return this->parentFunc != nullptr;
  465. }
  466. bool IsInlinedConstructor() const
  467. {
  468. return this->isInlinedConstructor;
  469. }
  470. bool IsTJLoopBody()const {
  471. return this->isTJLoopBody;
  472. }
  473. Js::Var AllocateNumber(double value);
  474. JITObjTypeSpecFldInfo* GetObjTypeSpecFldInfo(const uint index) const;
  475. JITObjTypeSpecFldInfo* GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const;
  476. // Gets an inline cache pointer to use in jitted code. Cached data may not be stable while jitting. Does not return null.
  477. intptr_t GetRuntimeInlineCache(const uint index) const;
  478. JITTimePolymorphicInlineCache * GetRuntimePolymorphicInlineCache(const uint index) const;
  479. byte GetPolyCacheUtil(const uint index) const;
  480. byte GetPolyCacheUtilToInitialize(const uint index) const;
  481. #if defined(_M_ARM32_OR_ARM64)
  482. RegNum GetLocalsPointer() const;
  483. #endif
  484. #if DBG_DUMP
  485. void Dump(IRDumpFlags flags);
  486. void Dump();
  487. void DumpHeader();
  488. #endif
  489. #if DBG_DUMP || defined(ENABLE_IR_VIEWER)
  490. LPCSTR GetVtableName(INT_PTR address);
  491. #endif
  492. #if DBG_DUMP | defined(VTUNE_PROFILING)
  493. bool DoRecordNativeMap() const;
  494. #endif
  495. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  496. void DumpFullFunctionName();
  497. #endif
  498. public:
  499. JitArenaAllocator * m_alloc;
  500. const FunctionJITRuntimeInfo *const m_runtimeInfo;
  501. ThreadContextInfo * m_threadContextInfo;
  502. ScriptContextInfo * m_scriptContextInfo;
  503. JITTimeWorkItem * m_workItem;
  504. JITTimePolymorphicInlineCacheInfo *const m_polymorphicInlineCacheInfo;
  505. // This indicates how many constructor caches we inserted into the constructorCaches array, not the total size of the array.
  506. uint constructorCacheCount;
  507. // This array maps callsite ids to constructor caches. The size corresponds to the number of callsites in the function.
  508. JITTimeConstructorCache** constructorCaches;
  509. typedef JsUtil::BaseHashSet<void*, JitArenaAllocator, PowerOf2SizePolicy> TypeRefSet;
  510. TypeRefSet* pinnedTypeRefs;
  511. typedef JsUtil::BaseDictionary<intptr_t, Js::JitTypePropertyGuard*, JitArenaAllocator, PowerOf2SizePolicy> TypePropertyGuardDictionary;
  512. TypePropertyGuardDictionary* singleTypeGuards;
  513. typedef SListCounted<Js::JitEquivalentTypeGuard*> EquivalentTypeGuardList;
  514. EquivalentTypeGuardList* equivalentTypeGuards;
  515. typedef JsUtil::BaseHashSet<Js::JitIndexedPropertyGuard*, JitArenaAllocator, PowerOf2SizePolicy> IndexedPropertyGuardSet;
  516. typedef JsUtil::BaseDictionary<Js::PropertyId, IndexedPropertyGuardSet*, JitArenaAllocator, PowerOf2SizePolicy> PropertyGuardByPropertyIdMap;
  517. PropertyGuardByPropertyIdMap* propertyGuardsByPropertyId;
  518. typedef JsUtil::BaseHashSet<intptr_t, JitArenaAllocator, PowerOf2SizePolicy> CtorCacheSet;
  519. typedef JsUtil::BaseDictionary<Js::PropertyId, CtorCacheSet*, JitArenaAllocator, PowerOf2SizePolicy> CtorCachesByPropertyIdMap;
  520. CtorCachesByPropertyIdMap* ctorCachesByPropertyId;
  521. typedef JsUtil::BaseDictionary<Js::ProfileId, int32, JitArenaAllocator, PrimeSizePolicy> CallSiteToArgumentsOffsetFixupMap;
  522. CallSiteToArgumentsOffsetFixupMap* callSiteToArgumentsOffsetFixupMap;
  523. int indexedPropertyGuardCount;
  524. typedef JsUtil::BaseHashSet<Js::PropertyId, JitArenaAllocator> PropertyIdSet;
  525. PropertyIdSet* propertiesWrittenTo;
  526. PropertyIdSet lazyBailoutProperties;
  527. bool anyPropertyMayBeWrittenTo;
  528. SlotArrayCheckTable *slotArrayCheckTable;
  529. FrameDisplayCheckTable *frameDisplayCheckTable;
  530. IR::Instr * m_headInstr;
  531. IR::Instr * m_exitInstr;
  532. IR::Instr * m_tailInstr;
  533. #ifdef _M_X64
  534. int32 m_spillSize;
  535. int32 m_argsSize;
  536. int32 m_savedRegSize;
  537. PrologEncoder m_prologEncoder;
  538. #endif
  539. SymTable * m_symTable;
  540. StackSym * m_loopParamSym;
  541. StackSym * m_funcObjSym;
  542. StackSym * m_javascriptLibrarySym;
  543. StackSym * m_scriptContextSym;
  544. StackSym * m_functionBodySym;
  545. StackSym * m_localClosureSym;
  546. StackSym * m_paramClosureSym;
  547. StackSym * m_localFrameDisplaySym;
  548. StackSym * m_bailoutReturnValueSym;
  549. StackSym * m_hasBailedOutSym;
  550. uint m_forInLoopMaxDepth;
  551. uint m_forInLoopBaseDepth;
  552. int32 m_forInEnumeratorArrayOffset;
  553. int32 m_localStackHeight;
  554. uint frameSize;
  555. uint32 inlineDepth;
  556. uint32 postCallByteCodeOffset;
  557. Js::RegSlot returnValueRegSlot;
  558. Js::ArgSlot actualCount;
  559. int32 firstActualStackOffset;
  560. uint32 tryCatchNestingLevel;
  561. uint32 m_totalJumpTableSizeInBytesForSwitchStatements;
  562. #if defined(_M_ARM32_OR_ARM64)
  563. //Offset to arguments from sp + m_localStackHeight;
  564. //For non leaf functions this is (callee saved register count + LR + R11) * MachRegInt
  565. //For leaf functions this is (saved registers) * MachRegInt
  566. int32 m_ArgumentsOffset;
  567. UnwindInfoManager m_unwindInfo;
  568. IR::LabelInstr * m_epilogLabel;
  569. #endif
  570. IR::LabelInstr * m_funcStartLabel;
  571. IR::LabelInstr * m_funcEndLabel;
  572. // Keep track of the maximum number of args on the stack.
  573. uint32 m_argSlotsForFunctionsCalled;
  574. #if DBG
  575. uint32 m_callSiteCount;
  576. #endif
  577. FlowGraph * m_fg;
  578. unsigned int m_labelCount;
  579. BitVector m_regsUsed;
  580. StackSym * tempSymDouble;
  581. StackSym * tempSymBool;
  582. uint32 loopCount;
  583. Js::ProfileId callSiteIdInParentFunc;
  584. bool m_isLeaf: 1; // This is set in the IRBuilder and might be inaccurate after inlining
  585. bool m_hasCalls: 1; // This is more accurate compared to m_isLeaf
  586. bool m_hasInlineArgsOpt : 1;
  587. bool m_doFastPaths : 1;
  588. bool hasBailout: 1;
  589. bool hasBailoutInEHRegion : 1;
  590. bool hasStackArgs: 1;
  591. bool hasImplicitParamLoad : 1; // True if there is a load of CallInfo, FunctionObject
  592. bool hasThrow : 1;
  593. bool hasUnoptimizedArgumentsAcccess : 1; // True if there are any arguments access beyond the simple case of this.apply pattern
  594. bool m_canDoInlineArgsOpt : 1;
  595. bool applyTargetInliningRemovedArgumentsAccess :1;
  596. bool isGetterSetter : 1;
  597. const bool isInlinedConstructor: 1;
  598. bool hasImplicitCalls: 1;
  599. bool hasTempObjectProducingInstr:1; // At least one instruction which can produce temp object
  600. bool isTJLoopBody : 1;
  601. bool isFlowGraphValid : 1;
  602. #if DBG
  603. bool hasCalledSetDoFastPaths:1;
  604. bool isPostLower:1;
  605. bool isPostRegAlloc:1;
  606. bool isPostPeeps:1;
  607. bool isPostLayout:1;
  608. bool isPostFinalLower:1;
  609. typedef JsUtil::Stack<Js::Phase> CurrentPhasesStack;
  610. CurrentPhasesStack currentPhases;
  611. bool IsInPhase(Js::Phase tag);
  612. #endif
  613. void BeginPhase(Js::Phase tag);
  614. void EndPhase(Js::Phase tag, bool dump = true);
  615. void EndProfiler(Js::Phase tag);
  616. void BeginClone(Lowerer *lowerer, JitArenaAllocator *alloc);
  617. void EndClone();
  618. Cloner * GetCloner() const { return GetTopFunc()->m_cloner; }
  619. InstrMap * GetCloneMap() const { return GetTopFunc()->m_cloneMap; }
  620. void ClearCloneMap() { Assert(this->IsTopFunc()); this->m_cloneMap = nullptr; }
  621. bool HasByteCodeOffset() const { return !this->GetTopFunc()->hasInstrNumber; }
  622. bool DoMaintainByteCodeOffset() const { return this->HasByteCodeOffset() && this->GetTopFunc()->maintainByteCodeOffset; }
  623. void StopMaintainByteCodeOffset() { this->GetTopFunc()->maintainByteCodeOffset = false; }
  624. Func * GetParentFunc() const { return parentFunc; }
  625. uint GetMaxInlineeArgOutCount() const { return maxInlineeArgOutCount; }
  626. void UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount);
  627. #if DBG_DUMP
  628. ptrdiff_t m_codeSize;
  629. #endif
  630. bool GetHasCalls() const { return this->m_hasCalls; }
  631. void SetHasCalls() { this->m_hasCalls = true; }
  632. void SetHasCallsOnSelfAndParents()
  633. {
  634. Func *curFunc = this;
  635. while (curFunc)
  636. {
  637. curFunc->SetHasCalls();
  638. curFunc = curFunc->GetParentFunc();
  639. }
  640. }
  641. void SetHasInstrNumber(bool has) { this->GetTopFunc()->hasInstrNumber = has; }
  642. bool HasInstrNumber() const { return this->GetTopFunc()->hasInstrNumber; }
  643. bool HasInlinee() const { Assert(this->IsTopFunc()); return this->hasInlinee; }
  644. void SetHasInlinee() { Assert(this->IsTopFunc()); this->hasInlinee = true; }
  645. bool GetThisOrParentInlinerHasArguments() const { return thisOrParentInlinerHasArguments; }
  646. bool GetHasStackArgs()
  647. {
  648. bool isStackArgOptDisabled = false;
  649. if (HasProfileInfo())
  650. {
  651. isStackArgOptDisabled = GetReadOnlyProfileInfo()->IsStackArgOptDisabled();
  652. }
  653. return this->hasStackArgs && !isStackArgOptDisabled && !PHASE_OFF1(Js::StackArgOptPhase);
  654. }
  655. void SetHasStackArgs(bool has) { this->hasStackArgs = has;}
  656. bool IsStackArgsEnabled()
  657. {
  658. Func* curFunc = this;
  659. bool isStackArgsEnabled = GetJITFunctionBody()->UsesArgumentsObject() && curFunc->GetHasStackArgs();
  660. Func * topFunc = curFunc->GetTopFunc();
  661. if (topFunc != nullptr)
  662. {
  663. isStackArgsEnabled = isStackArgsEnabled && topFunc->GetHasStackArgs();
  664. }
  665. return isStackArgsEnabled;
  666. }
  667. bool GetHasImplicitParamLoad() const { return this->hasImplicitParamLoad; }
  668. void SetHasImplicitParamLoad() { this->hasImplicitParamLoad = true; }
  669. bool GetHasThrow() const { return this->hasThrow; }
  670. void SetHasThrow() { this->hasThrow = true; }
  671. bool GetHasUnoptimizedArgumentsAcccess() const { return this->hasUnoptimizedArgumentsAcccess; }
  672. void SetHasUnoptimizedArgumentsAccess(bool args)
  673. {
  674. // Once set to 'true' make sure this does not become false
  675. if (!this->hasUnoptimizedArgumentsAcccess)
  676. {
  677. this->hasUnoptimizedArgumentsAcccess = args;
  678. }
  679. if (args)
  680. {
  681. Func *curFunc = this->GetParentFunc();
  682. while (curFunc)
  683. {
  684. curFunc->hasUnoptimizedArgumentsAcccess = args;
  685. curFunc = curFunc->GetParentFunc();
  686. }
  687. }
  688. }
  689. void DisableCanDoInlineArgOpt()
  690. {
  691. Func* curFunc = this;
  692. while (curFunc)
  693. {
  694. curFunc->m_canDoInlineArgsOpt = false;
  695. curFunc->m_hasInlineArgsOpt = false;
  696. curFunc = curFunc->GetParentFunc();
  697. }
  698. }
// Set when inlining of an apply target eliminated the arguments access.
bool GetApplyTargetInliningRemovedArgumentsAccess() const { return this->applyTargetInliningRemovedArgumentsAccess;}
void SetApplyTargetInliningRemovedArgumentsAccess() { this->applyTargetInliningRemovedArgumentsAccess = true;}
bool GetHasMarkTempObjects() const { return this->hasMarkTempObjects; }
void SetHasMarkTempObjects() { this->hasMarkTempObjects = true; }
bool GetHasNonSimpleParams() const { return this->hasNonSimpleParams; }
void SetHasNonSimpleParams() { this->hasNonSimpleParams = true; }
bool GetHasImplicitCalls() const { return this->hasImplicitCalls;}
void SetHasImplicitCalls(bool has) { this->hasImplicitCalls = has;}
  707. void SetHasImplicitCallsOnSelfAndParents()
  708. {
  709. this->SetHasImplicitCalls(true);
  710. Func *curFunc = this->GetParentFunc();
  711. while (curFunc && !curFunc->IsTopFunc())
  712. {
  713. curFunc->SetHasImplicitCalls(true);
  714. curFunc = curFunc->GetParentFunc();
  715. }
  716. }
bool GetHasTempObjectProducingInstr() const { return this->hasTempObjectProducingInstr; }
void SetHasTempObjectProducingInstr(bool has) { this->hasTempObjectProducingInstr = has; }
// Profile info is owned by the JIT function body; these just forward to it.
const JITTimeProfileInfo * GetReadOnlyProfileInfo() const { return GetJITFunctionBody()->GetReadOnlyProfileInfo(); }
bool HasProfileInfo() const { return GetJITFunctionBody()->HasProfileInfo(); }
  721. bool HasArrayInfo()
  722. {
  723. const auto top = this->GetTopFunc();
  724. return this->HasProfileInfo() && this->GetWeakFuncRef() && !(top->HasTry() && !top->DoOptimizeTryCatch()) &&
  725. top->DoGlobOpt() && !PHASE_OFF(Js::LoopFastPathPhase, top);
  726. }
  727. static Js::BuiltinFunction GetBuiltInIndex(IR::Opnd* opnd)
  728. {
  729. Assert(opnd);
  730. Js::BuiltinFunction index;
  731. if (opnd->IsRegOpnd())
  732. {
  733. index = opnd->AsRegOpnd()->m_sym->m_builtInIndex;
  734. }
  735. else if (opnd->IsSymOpnd())
  736. {
  737. PropertySym *propertySym = opnd->AsSymOpnd()->m_sym->AsPropertySym();
  738. index = Js::JavascriptLibrary::GetBuiltinFunctionForPropId(propertySym->m_propertyId);
  739. }
  740. else
  741. {
  742. index = Js::BuiltinFunction::None;
  743. }
  744. return index;
  745. }
  746. static bool IsBuiltInInlinedInLowerer(IR::Opnd* opnd)
  747. {
  748. Assert(opnd);
  749. Js::BuiltinFunction index = Func::GetBuiltInIndex(opnd);
  750. switch (index)
  751. {
  752. case Js::BuiltinFunction::JavascriptString_CharAt:
  753. case Js::BuiltinFunction::JavascriptString_CharCodeAt:
  754. case Js::BuiltinFunction::JavascriptString_CodePointAt:
  755. case Js::BuiltinFunction::Math_Abs:
  756. case Js::BuiltinFunction::JavascriptArray_Push:
  757. case Js::BuiltinFunction::JavascriptString_Replace:
  758. return true;
  759. default:
  760. return false;
  761. }
  762. }
// Records the resume label for a generator yield at the given bytecode offset.
void AddYieldOffsetResumeLabel(uint32 offset, IR::LabelInstr* label)
{
    m_yieldOffsetResumeLabelList->Add(YieldOffsetResumeLabel(offset, label));
}
// Visits every (offset, label) pair.
template <typename Fn>
void MapYieldOffsetResumeLabels(Fn fn)
{
    m_yieldOffsetResumeLabelList->Map(fn);
}
// Visits pairs until fn returns true; returns whether any call did.
template <typename Fn>
bool MapUntilYieldOffsetResumeLabels(Fn fn)
{
    return m_yieldOffsetResumeLabelList->MapUntil(fn);
}
// Removes an exact (offset, label) pair from the list.
void RemoveYieldOffsetResumeLabel(const YieldOffsetResumeLabel& yorl)
{
    m_yieldOffsetResumeLabelList->Remove(yorl);
}
  781. void RemoveDeadYieldOffsetResumeLabel(IR::LabelInstr* label)
  782. {
  783. uint32 offset;
  784. bool found = m_yieldOffsetResumeLabelList->MapUntil([&offset, &label](int i, YieldOffsetResumeLabel& yorl)
  785. {
  786. if (yorl.Second() == label)
  787. {
  788. offset = yorl.First();
  789. return true;
  790. }
  791. return false;
  792. });
  793. Assert(found);
  794. RemoveYieldOffsetResumeLabel(YieldOffsetResumeLabel(offset, label));
  795. AddYieldOffsetResumeLabel(offset, nullptr);
  796. }
IR::Instr * GetFunctionEntryInsertionPoint();
IR::IndirOpnd * GetConstantAddressIndirOpnd(intptr_t address, IR::Opnd *largeConstOpnd, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode);
void MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv);
// NOTE(review): "Constand" looks like a typo for "Constant", but renaming would break callers.
void DisableConstandAddressLoadHoist() { canHoistConstantAddressLoad = false; }
void AddSlotArrayCheck(IR::SymOpnd *fieldOpnd);
void AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId = (uint32)-1);
// Tracking of stack syms that back formal parameters when arguments are on the stack.
void EnsureStackArgWithFormalsTracker();
BOOL IsFormalsArraySym(SymID symId);
void TrackFormalsArraySym(SymID symId);
void TrackStackSymForFormalIndex(Js::ArgSlot formalsIndex, StackSym * sym);
StackSym* GetStackSymForFormal(Js::ArgSlot formalsIndex);
bool HasStackSymForFormal(Js::ArgSlot formalsIndex);
void SetScopeObjSym(StackSym * sym);
StackSym * GetScopeObjSym();
#if DBG
bool allowRemoveBailOutArgInstr;
#endif
  814. #if defined(_M_ARM32_OR_ARM64)
  815. int32 GetInlineeArgumentStackSize()
  816. {
  817. int32 count = this->GetMaxInlineeArgOutCount();
  818. if (count)
  819. {
  820. return ((count + 1) * MachPtr); // +1 for the dedicated zero out argc slot
  821. }
  822. return 0;
  823. }
  824. #endif
public:
// Syms that may alias the arguments object.
BVSparse<JitArenaAllocator> * argObjSyms;
BVSparse<JitArenaAllocator> * m_nonTempLocalVars; // Only populated in debug mode as part of IRBuilder. Used in GlobOpt and BackwardPass.
InlineeFrameInfo* frameInfo;
Js::ArgSlot argInsCount; // This count doesn't include the ArgIn instr for "this".
uint32 m_inlineeId;
IR::LabelInstr * m_bailOutNoSaveLabel;
StackSym * GetNativeCodeDataSym() const;
void SetNativeCodeDataSym(StackSym * sym);
private:
Js::EntryPointInfo* m_entryPointInfo; // for in-proc JIT only
JITOutput m_output;
#ifdef PROFILE_EXEC
Js::ScriptContextProfiler *const m_codeGenProfiler;
#endif
// Inliner chain: null for the top-level function.
Func * const parentFunc;
StackSym * m_inlineeFrameStartSym;
uint maxInlineeArgOutCount;
const bool m_isBackgroundJIT;
bool hasInstrNumber;
bool maintainByteCodeOffset;
bool hasInlinee;
bool thisOrParentInlinerHasArguments;
bool useRuntimeStats;
bool stackNestedFunc;
bool stackClosure;
bool hasAnyStackNestedFunc;
bool hasMarkTempObjects;
bool hasNonSimpleParams;
Cloner * m_cloner;
InstrMap * m_cloneMap;
NativeCodeData::Allocator nativeCodeDataAllocator;
NativeCodeData::Allocator transferDataAllocator;
#if !FLOATVAR
CodeGenNumberAllocator * numberAllocator;
#endif
int32 m_localVarSlotsOffset;
int32 m_hasLocalVarChangedOffset; // Offset on stack of 1 byte which indicates if any local var has changed.
void * const m_codeGenAllocators;
YieldOffsetResumeLabelList * m_yieldOffsetResumeLabelList;
StackArgWithFormalsTracker * stackArgWithFormalsTracker;
JITObjTypeSpecFldInfo ** m_globalObjTypeSpecFldInfoArray;
StackSym *CreateInlineeStackSym();
IR::SymOpnd *GetInlineeOpndAtOffset(int32 offset);
bool HasLocalVarSlotCreated() const { return m_localVarSlotsOffset != Js::Constants::InvalidOffset; }
void EnsureLocalVarSlots();
StackSym * m_nativeCodeDataSym;
// Hoisting state for constant-address loads.
SList<IR::RegOpnd *> constantAddressRegOpnd;
IR::Instr * lastConstantAddressRegLoadInstr;
bool canHoistConstantAddressLoad;
#if DBG
VtableHashMap * vtableMap;
#endif
#ifdef RECYCLER_WRITE_BARRIER_JIT
public:
Lowerer* m_lowerer;
#endif
  882. };
  883. class AutoCodeGenPhase
  884. {
  885. public:
  886. AutoCodeGenPhase(Func * func, Js::Phase phase) : func(func), phase(phase), dump(false), isPhaseComplete(false)
  887. {
  888. func->BeginPhase(phase);
  889. }
  890. ~AutoCodeGenPhase()
  891. {
  892. if(this->isPhaseComplete)
  893. {
  894. func->EndPhase(phase, dump);
  895. }
  896. else
  897. {
  898. //End the profiler tag
  899. func->EndProfiler(phase);
  900. }
  901. }
  902. void EndPhase(Func * func, Js::Phase phase, bool dump, bool isPhaseComplete)
  903. {
  904. Assert(this->func == func);
  905. Assert(this->phase == phase);
  906. this->dump = dump && (PHASE_DUMP(Js::SimpleJitPhase, func) || !func->IsSimpleJit());
  907. this->isPhaseComplete = isPhaseComplete;
  908. }
  909. private:
  910. Func * func;
  911. Js::Phase phase;
  912. bool dump;
  913. bool isPhaseComplete;
  914. };
// Open an AutoCodeGenPhase scope; the matching END_* macro marks the phase
// complete (with or without dumping) and closes the scope's block.
#define BEGIN_CODEGEN_PHASE(func, phase) { AutoCodeGenPhase __autoCodeGen(func, phase);
#define END_CODEGEN_PHASE(func, phase) __autoCodeGen.EndPhase(func, phase, true, true); }
#define END_CODEGEN_PHASE_NO_DUMP(func, phase) __autoCodeGen.EndPhase(func, phase, false, true); }
#ifdef PERF_HINT
// Emits a performance-hint diagnostic for the given function / bytecode offset.
void WritePerfHint(PerfHints hint, Func* func, uint byteCodeOffset = Js::Constants::NoByteCodeOffset);
#endif