// Func.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Base/EtwTrace.h"
  7. #include "Base/ScriptContextProfiler.h"
// Func constructor.
// Captures the JIT work item and all per-function codegen state, then derives
// the initial flags (stack closures, stack args, nested funcs, 'arguments'
// usage) from the function body and from the parent (inliner) Func, if any.
// A null parentFunc means this is the top-level function being jitted;
// otherwise this Func represents an inlinee at inlineDepth = parent + 1.
Func::Func(JitArenaAllocator *alloc, CodeGenWorkItem* workItem, const Js::FunctionCodeGenRuntimeData *const runtimeData,
    Js::PolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, CodeGenAllocators *const codeGenAllocators,
    CodeGenNumberAllocator * numberAllocator, Js::ReadOnlyDynamicProfileInfo *const profileInfo,
    Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc,
    uint postCallByteCodeOffset, Js::RegSlot returnValueRegSlot, const bool isInlinedConstructor,
    Js::ProfileId callSiteIdInParentFunc, bool isGetterSetter) :
    m_alloc(alloc),
    m_workItem(workItem),
    m_jitTimeData(workItem->RecyclableData()->JitTimeData()),
    m_runtimeData(runtimeData),
    m_polymorphicInlineCacheInfo(polymorphicInlineCacheInfo),
    m_codeGenAllocators(codeGenAllocators),
    m_inlineeId(0),
    pinnedTypeRefs(nullptr),
    singleTypeGuards(nullptr),
    equivalentTypeGuards(nullptr),
    propertyGuardsByPropertyId(nullptr),
    ctorCachesByPropertyId(nullptr),
    callSiteToArgumentsOffsetFixupMap(nullptr),
    indexedPropertyGuardCount(0),
    propertiesWrittenTo(nullptr),
    lazyBailoutProperties(alloc),
    anyPropertyMayBeWrittenTo(false),
#ifdef PROFILE_EXEC
    m_codeGenProfiler(codeGenProfiler),
#endif
    m_isBackgroundJIT(isBackgroundJIT),
    m_cloner(nullptr),
    m_cloneMap(nullptr),
    m_loopParamSym(nullptr),
    m_funcObjSym(nullptr),
    m_localClosureSym(nullptr),
    m_localFrameDisplaySym(nullptr),
    m_bailoutReturnValueSym(nullptr),
    m_hasBailedOutSym(nullptr),
    m_inlineeFrameStartSym(nullptr),
    m_regsUsed(0),
    m_fg(nullptr),
    m_labelCount(0),
    m_argSlotsForFunctionsCalled(0),
    m_isLeaf(false),
    m_hasCalls(false),
    m_hasInlineArgsOpt(false),
    m_canDoInlineArgsOpt(true),
    m_doFastPaths(false),
    hasBailout(false),
    hasBailoutInEHRegion(false),
    hasInstrNumber(false),
    maintainByteCodeOffset(true),
    frameSize(0),
    parentFunc(parentFunc),
    argObjSyms(nullptr),
    m_nonTempLocalVars(nullptr),
    hasAnyStackNestedFunc(false),
    hasMarkTempObjects(false),
    postCallByteCodeOffset(postCallByteCodeOffset),
    maxInlineeArgOutCount(0),
    returnValueRegSlot(returnValueRegSlot),
    firstActualStackOffset(-1),
    m_localVarSlotsOffset(Js::Constants::InvalidOffset),
    m_hasLocalVarChangedOffset(Js::Constants::InvalidOffset),
    // (Js::ArgSlot)-1 is the "not yet set" sentinel for the actual arg count.
    actualCount((Js::ArgSlot) - 1),
    tryCatchNestingLevel(0),
    m_localStackHeight(0),
    tempSymDouble(nullptr),
    tempSymBool(nullptr),
    hasInlinee(false),
    thisOrParentInlinerHasArguments(false),
    hasStackArgs(false),
    hasArgumentObject(false),
    hasUnoptimizedArgumentsAcccess(false),
    hasApplyTargetInlining(false),
    hasImplicitCalls(false),
    hasTempObjectProducingInstr(false),
    isInlinedConstructor(isInlinedConstructor),
    numberAllocator(numberAllocator),
    profileInfo(profileInfo),
    loopCount(0),
    callSiteIdInParentFunc(callSiteIdInParentFunc),
    isGetterSetter(isGetterSetter),
    frameInfo(nullptr),
    isTJLoopBody(false),
    isFlowGraphValid(false),
#if DBG
    m_callSiteCount(0),
#endif
    stackNestedFunc(false),
    stackClosure(false)
#if defined(_M_ARM32_OR_ARM64)
    , m_ArgumentsOffset(0)
    , m_epilogLabel(nullptr)
#endif
    , m_funcStartLabel(nullptr)
    , m_funcEndLabel(nullptr)
#ifdef _M_X64
    , m_prologEncoder(alloc)
#endif
#if DBG
    , hasCalledSetDoFastPaths(false)
    , allowRemoveBailOutArgInstr(false)
    , currentPhases(alloc)
    , isPostLower(false)
    , isPostRegAlloc(false)
    , isPostPeeps(false)
    , isPostLayout(false)
    , isPostFinalLower(false)
    , vtableMap(nullptr)
#endif
    , m_yieldOffsetResumeLabelList(nullptr)
    , m_bailOutNoSaveLabel(nullptr)
    , constantAddressRegOpnd(alloc)
    , lastConstantAddressRegLoadInstr(nullptr)
    , m_totalJumpTableSizeInBytesForSwitchStatements(0)
    , slotArrayCheckTable(nullptr)
    , frameDisplayCheckTable(nullptr)
{
    // An inlinee always carries runtime data from its inliner; a top-level
    // function never does.
    Assert(this->IsInlined() == !!runtimeData);
    if (this->IsInlined())
    {
        // Inlinee ids are assigned sequentially across the whole inline tree,
        // counted on the top-level Func.
        m_inlineeId = ++(GetTopFunc()->m_inlineeId);
    }
    m_jnFunction = m_workItem->GetFunctionBody();
    bool doStackNestedFunc = m_jnFunction->DoStackNestedFunc();
    bool doStackClosure = m_jnFunction->DoStackClosure() && !PHASE_OFF(Js::FrameDisplayFastPathPhase, this);
    Assert(!doStackClosure || doStackNestedFunc);
    // Stack closures only apply to the top-level (non-inlined) function.
    this->stackClosure = doStackClosure && this->IsTopFunc();
    if (this->stackClosure)
    {
        m_workItem->GetEntryPoint()->SetHasJittedStackClosure();
    }
    if (m_workItem->Type() == JsFunctionType)
    {
        if (m_jnFunction->GetDoBackendArgumentsOptimization() && !m_jnFunction->GetHasTry())
        {
            // doBackendArgumentsOptimization bit is set when there is no eval inside a function
            // as determined by the bytecode generator.
            SetHasStackArgs(true);
        }
        if (doStackNestedFunc && m_jnFunction->GetNestedCount() != 0)
        {
            // Stack-allocated nested functions are not expected under the
            // debugger (except for library code).
            Assert(!(this->IsJitInDebugMode() && !m_jnFunction->GetUtf8SourceInfo()->GetIsLibraryCode()));
            stackNestedFunc = true;
            this->GetTopFunc()->hasAnyStackNestedFunc = true;
        }
    }
    else
    {
        Assert(m_workItem->Type() == JsLoopBodyWorkItemType);
    }
    // Propagate 'arguments' usage down the inline chain so inlinees know an
    // enclosing inliner touches the arguments object.
    if (m_jnFunction->GetHasOrParentHasArguments() || parentFunc && parentFunc->thisOrParentInlinerHasArguments)
    {
        thisOrParentInlinerHasArguments = true;
    }
    if (parentFunc == nullptr)
    {
        // Top-level function: owns the symbol table shared by all inlinees.
        inlineDepth = 0;
        m_symTable = JitAnew(alloc, SymTable);
        m_symTable->Init(this);
        Assert(Js::Constants::NoByteCodeOffset == postCallByteCodeOffset);
        Assert(Js::Constants::NoRegister == returnValueRegSlot);
#if defined(_M_IX86) || defined(_M_X64)
        if (HasArgumentSlot())
        {
            // Pre-allocate the single argument slot we'll reserve for the arguments object.
            // For ARM, the argument slot is not part of the local but part of the register saves
            m_localStackHeight = MachArgsSlotOffset;
        }
#endif
    }
    else
    {
        inlineDepth = parentFunc->inlineDepth + 1;
        Assert(Js::Constants::NoByteCodeOffset != postCallByteCodeOffset);
    }
    this->constructorCacheCount = 0;
    // One (initially null) ctor-cache slot per profiled call site.
    this->constructorCaches = AnewArrayZ(this->m_alloc, Js::JitTimeConstructorCache*, this->m_jnFunction->GetProfiledCallSiteCount());
#if DBG_DUMP
    m_codeSize = -1;
#endif
#if defined(_M_X64)
    m_spillSize = -1;
    m_argsSize = -1;
    m_savedRegSize = -1;
#endif
    if (this->IsJitInDebugMode())
    {
        // Track non-temp locals so locals inspection can write them through
        // to memory (see OnAddSym / EnsureLocalVarSlots).
        m_nonTempLocalVars = Anew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
    }
    if (this->m_jnFunction->IsGenerator())
    {
        m_yieldOffsetResumeLabelList = YieldOffsetResumeLabelList::New(this->m_alloc);
    }
    canHoistConstantAddressLoad = !PHASE_OFF(Js::HoistConstAddrPhase, this);
}
  202. bool
  203. Func::IsLoopBody() const
  204. {
  205. return this->m_workItem->Type() == JsLoopBodyWorkItemType;
  206. }
  207. bool
  208. Func::IsLoopBodyInTry() const
  209. {
  210. return IsLoopBody() && ((JsLoopBodyCodeGen*)this->m_workItem)->loopHeader->isInTry;
  211. }
  212. ///----------------------------------------------------------------------------
  213. ///
  214. /// Func::Codegen
  215. ///
  216. /// Codegen this function.
  217. ///
  218. ///----------------------------------------------------------------------------
  219. void
  220. Func::Codegen()
  221. {
  222. Assert(!IsJitInDebugMode() || !m_jnFunction->GetHasTry());
  223. Js::ScriptContext* scriptContext = this->GetScriptContext();
  224. {
  225. if(IS_JS_ETW(EventEnabledJSCRIPT_FUNCTION_JIT_START()))
  226. {
  227. WCHAR displayNameBuffer[256];
  228. WCHAR* displayName = displayNameBuffer;
  229. size_t sizeInChars = this->m_workItem->GetDisplayName(displayName, 256);
  230. if(sizeInChars > 256)
  231. {
  232. displayName = new WCHAR[sizeInChars];
  233. this->m_workItem->GetDisplayName(displayName, 256);
  234. }
  235. JS_ETW(EventWriteJSCRIPT_FUNCTION_JIT_START(
  236. this->GetFunctionNumber(),
  237. displayName,
  238. this->GetScriptContext(),
  239. this->m_workItem->GetInterpretedCount(),
  240. (const unsigned int)this->m_jnFunction->LengthInBytes(),
  241. this->m_jnFunction->GetByteCodeCount(),
  242. this->m_jnFunction->GetByteCodeInLoopCount(),
  243. (int)this->m_workItem->GetJitMode()));
  244. if(displayName != displayNameBuffer)
  245. {
  246. delete[] displayName;
  247. }
  248. }
  249. }
  250. #if DBG_DUMP
  251. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::BackEndPhase))
  252. {
  253. if (this->IsLoopBody())
  254. {
  255. Output::Print(L"---BeginBackEnd: function: %s, loop:%d---\r\n", this->GetJnFunction()->GetDisplayName(),
  256. static_cast<JsLoopBodyCodeGen *>(this->m_workItem)->GetLoopNumber());
  257. }
  258. else
  259. {
  260. Output::Print(L"---BeginBackEnd: function: %s---\r\n", this->GetJnFunction()->GetDisplayName());
  261. }
  262. Output::Flush();
  263. }
  264. #endif
  265. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  266. LARGE_INTEGER start_time = { 0 };
  267. if(PHASE_TRACE(Js::BackEndPhase, GetJnFunction()))
  268. {
  269. QueryPerformanceCounter(&start_time);
  270. if (this->IsLoopBody())
  271. {
  272. Output::Print(
  273. L"BeginBackEnd - function: %s (%s, line %u), loop: %u, mode: %S",
  274. GetJnFunction()->GetDisplayName(),
  275. GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  276. GetJnFunction()->GetLineNumber(),
  277. static_cast<JsLoopBodyCodeGen *>(this->m_workItem)->GetLoopNumber(),
  278. ExecutionModeName(m_workItem->GetJitMode()));
  279. if (this->m_jnFunction->GetIsAsmjsMode())
  280. {
  281. Output::Print(L" (Asmjs)\n");
  282. }
  283. else
  284. {
  285. Output::Print(L"\n");
  286. }
  287. }
  288. else
  289. {
  290. Output::Print(
  291. L"BeginBackEnd - function: %s (%s, line %u), mode: %S",
  292. GetJnFunction()->GetDisplayName(),
  293. GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  294. GetJnFunction()->GetLineNumber(),
  295. ExecutionModeName(m_workItem->GetJitMode()));
  296. if (this->m_jnFunction->GetIsAsmjsMode())
  297. {
  298. Output::Print(L" (Asmjs)\n");
  299. }
  300. else
  301. {
  302. Output::Print(L"\n");
  303. }
  304. }
  305. Output::Flush();
  306. }
  307. #ifdef FIELD_ACCESS_STATS
  308. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->GetJnFunction()) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->GetJnFunction()))
  309. {
  310. if (this->m_jitTimeData->inlineCacheStats)
  311. {
  312. auto stats = this->m_jitTimeData->inlineCacheStats;
  313. Output::Print(L"ObjTypeSpec: jitting function %s (#%s): inline cache stats:\n", this->GetJnFunction()->GetDisplayName(), this->GetJnFunction()->GetDebugNumberSet(debugStringBuffer));
  314. Output::Print(L" overall: total %u, no profile info %u\n", stats->totalInlineCacheCount, stats->noInfoInlineCacheCount);
  315. Output::Print(L" mono: total %u, empty %u, cloned %u\n",
  316. stats->monoInlineCacheCount, stats->emptyMonoInlineCacheCount, stats->clonedMonoInlineCacheCount);
  317. Output::Print(L" poly: total %u (high %u, low %u), null %u, empty %u, ignored %u, disabled %u, equivalent %u, non-equivalent %u, cloned %u\n",
  318. stats->polyInlineCacheCount, stats->highUtilPolyInlineCacheCount, stats->lowUtilPolyInlineCacheCount,
  319. stats->nullPolyInlineCacheCount, stats->emptyPolyInlineCacheCount, stats->ignoredPolyInlineCacheCount, stats->disabledPolyInlineCacheCount,
  320. stats->equivPolyInlineCacheCount, stats->nonEquivPolyInlineCacheCount, stats->clonedPolyInlineCacheCount);
  321. }
  322. else
  323. {
  324. Output::Print(L"EquivObjTypeSpec: function %s (%s): inline cache stats unavailable\n", this->GetJnFunction()->GetDisplayName(), this->GetJnFunction()->GetDebugNumberSet(debugStringBuffer));
  325. }
  326. Output::Flush();
  327. }
  328. #endif
  329. BEGIN_CODEGEN_PHASE(this, Js::BackEndPhase);
  330. {
  331. // IRBuilder
  332. BEGIN_CODEGEN_PHASE(this, Js::IRBuilderPhase);
  333. if (m_jnFunction->GetIsAsmjsMode())
  334. {
  335. IRBuilderAsmJs asmIrBuilder(this);
  336. asmIrBuilder.Build();
  337. }
  338. else
  339. {
  340. IRBuilder irBuilder(this);
  341. irBuilder.Build();
  342. }
  343. END_CODEGEN_PHASE(this, Js::IRBuilderPhase);
  344. #ifdef IR_VIEWER
  345. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::IRBuilderPhase);
  346. #endif /* IR_VIEWER */
  347. BEGIN_CODEGEN_PHASE(this, Js::InlinePhase);
  348. InliningHeuristics heuristics(this->GetJnFunction());
  349. Inline inliner(this, heuristics);
  350. inliner.Optimize();
  351. END_CODEGEN_PHASE(this, Js::InlinePhase);
  352. if (scriptContext->IsClosed())
  353. {
  354. // Should not be jitting something in the foreground when the script context is actually closed
  355. Assert(IsBackgroundJIT() || !scriptContext->IsActuallyClosed());
  356. throw Js::OperationAbortedException();
  357. }
  358. // FlowGraph
  359. {
  360. // Scope for FlowGraph arena
  361. NoRecoverMemoryJitArenaAllocator fgAlloc(L"BE-FlowGraph", m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
  362. BEGIN_CODEGEN_PHASE(this, Js::FGBuildPhase);
  363. this->m_fg = FlowGraph::New(this, &fgAlloc);
  364. this->m_fg->Build();
  365. END_CODEGEN_PHASE(this, Js::FGBuildPhase);
  366. // Global Optimization and Type Specialization
  367. BEGIN_CODEGEN_PHASE(this, Js::GlobOptPhase);
  368. GlobOpt globOpt(this);
  369. globOpt.Optimize();
  370. END_CODEGEN_PHASE(this, Js::GlobOptPhase);
  371. // Delete flowGraph now
  372. this->m_fg->Destroy();
  373. this->m_fg = nullptr;
  374. }
  375. #ifdef IR_VIEWER
  376. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::GlobOptPhase);
  377. #endif /* IR_VIEWER */
  378. ThrowIfScriptClosed();
  379. // Lowering
  380. Lowerer lowerer(this);
  381. BEGIN_CODEGEN_PHASE(this, Js::LowererPhase);
  382. lowerer.Lower();
  383. END_CODEGEN_PHASE(this, Js::LowererPhase);
  384. #ifdef IR_VIEWER
  385. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::LowererPhase);
  386. #endif /* IR_VIEWER */
  387. // Encode constants
  388. Security security(this);
  389. BEGIN_CODEGEN_PHASE(this, Js::EncodeConstantsPhase)
  390. security.EncodeLargeConstants();
  391. END_CODEGEN_PHASE(this, Js::EncodeConstantsPhase);
  392. if (this->GetScriptContext()->GetThreadContext()->DoInterruptProbe(this->GetJnFunction()))
  393. {
  394. BEGIN_CODEGEN_PHASE(this, Js::InterruptProbePhase)
  395. lowerer.DoInterruptProbes();
  396. END_CODEGEN_PHASE(this, Js::InterruptProbePhase)
  397. }
  398. // Register Allocation
  399. BEGIN_CODEGEN_PHASE(this, Js::RegAllocPhase);
  400. LinearScan linearScan(this);
  401. linearScan.RegAlloc();
  402. END_CODEGEN_PHASE(this, Js::RegAllocPhase);
  403. #ifdef IR_VIEWER
  404. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::RegAllocPhase);
  405. #endif /* IR_VIEWER */
  406. ThrowIfScriptClosed();
  407. // Peephole optimizations
  408. BEGIN_CODEGEN_PHASE(this, Js::PeepsPhase);
  409. Peeps peeps(this);
  410. peeps.PeepFunc();
  411. END_CODEGEN_PHASE(this, Js::PeepsPhase);
  412. // Layout
  413. BEGIN_CODEGEN_PHASE(this, Js::LayoutPhase);
  414. SimpleLayout layout(this);
  415. layout.Layout();
  416. END_CODEGEN_PHASE(this, Js::LayoutPhase);
  417. if (this->HasTry() && this->hasBailoutInEHRegion)
  418. {
  419. BEGIN_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
  420. lowerer.EHBailoutPatchUp();
  421. END_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
  422. }
  423. // Insert NOPs (moving this before prolog/epilog for AMD64 and possibly ARM).
  424. BEGIN_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
  425. security.InsertNOPs();
  426. END_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
  427. // Prolog/Epilog
  428. BEGIN_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
  429. if (m_jnFunction->GetIsAsmjsMode())
  430. {
  431. lowerer.LowerPrologEpilogAsmJs();
  432. }
  433. else
  434. {
  435. lowerer.LowerPrologEpilog();
  436. }
  437. END_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
  438. BEGIN_CODEGEN_PHASE(this, Js::FinalLowerPhase);
  439. lowerer.FinalLower();
  440. END_CODEGEN_PHASE(this, Js::FinalLowerPhase);
  441. // Encoder
  442. BEGIN_CODEGEN_PHASE(this, Js::EncoderPhase);
  443. Encoder encoder(this);
  444. encoder.Encode();
  445. END_CODEGEN_PHASE_NO_DUMP(this, Js::EncoderPhase);
  446. #ifdef IR_VIEWER
  447. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::EncoderPhase);
  448. #endif /* IR_VIEWER */
  449. }
  450. END_CODEGEN_PHASE(this, Js::BackEndPhase);
  451. #if DBG_DUMP
  452. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::BackEndPhase))
  453. {
  454. Output::Print(L"---EndBackEnd---\r\n");
  455. Output::Flush();
  456. }
  457. #endif
  458. #ifdef PROFILE_BAILOUT_RECORD_MEMORY
  459. if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
  460. {
  461. scriptContext->codeSize += this->m_codeSize;
  462. }
  463. #endif
  464. if (PHASE_TRACE(Js::BackEndPhase, GetJnFunction()))
  465. {
  466. LARGE_INTEGER freq;
  467. LARGE_INTEGER end_time;
  468. QueryPerformanceCounter(&end_time);
  469. QueryPerformanceFrequency(&freq);
  470. if (this->IsLoopBody())
  471. {
  472. Output::Print(
  473. L"EndBackEnd - function: %s (%s, line %u), loop: %u, mode: %S, time:%8.6f mSec",
  474. GetJnFunction()->GetDisplayName(),
  475. GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  476. GetJnFunction()->GetLineNumber(),
  477. static_cast<JsLoopBodyCodeGen *>(this->m_workItem)->GetLoopNumber(),
  478. ExecutionModeName(m_workItem->GetJitMode()),
  479. (((double)((end_time.QuadPart - start_time.QuadPart)* (double)1000.0 / (double)freq.QuadPart))) / (1));
  480. if (this->m_jnFunction->GetIsAsmjsMode())
  481. {
  482. Output::Print(L" (Asmjs)\n");
  483. }
  484. else
  485. {
  486. Output::Print(L"\n");
  487. }
  488. }
  489. else
  490. {
  491. Output::Print(
  492. L"EndBackEnd - function: %s (%s, line %u), mode: %S time:%8.6f mSec",
  493. GetJnFunction()->GetDisplayName(),
  494. GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  495. GetJnFunction()->GetLineNumber(),
  496. ExecutionModeName(m_workItem->GetJitMode()),
  497. (((double)((end_time.QuadPart - start_time.QuadPart)* (double)1000.0 / (double)freq.QuadPart))) / (1));
  498. if (this->m_jnFunction->GetIsAsmjsMode())
  499. {
  500. Output::Print(L" (Asmjs)\n");
  501. }
  502. else
  503. {
  504. Output::Print(L"\n");
  505. }
  506. }
  507. Output::Flush();
  508. }
  509. {
  510. if(IS_JS_ETW(EventEnabledJSCRIPT_FUNCTION_JIT_STOP()))
  511. {
  512. WCHAR displayNameBuffer[256];
  513. WCHAR* displayName = displayNameBuffer;
  514. size_t sizeInChars = this->m_workItem->GetDisplayName(displayName, 256);
  515. if(sizeInChars > 256)
  516. {
  517. displayName = new WCHAR[sizeInChars];
  518. this->m_workItem->GetDisplayName(displayName, 256);
  519. }
  520. void* entryPoint;
  521. ptrdiff_t codeSize;
  522. this->m_workItem->GetEntryPointAddress(&entryPoint, &codeSize);
  523. JS_ETW(EventWriteJSCRIPT_FUNCTION_JIT_STOP(
  524. this->GetFunctionNumber(),
  525. displayName,
  526. scriptContext,
  527. this->m_workItem->GetInterpretedCount(),
  528. entryPoint,
  529. codeSize));
  530. if(displayName != displayNameBuffer)
  531. {
  532. delete[] displayName;
  533. }
  534. }
  535. }
  536. #if DBG_DUMP
  537. if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag))
  538. {
  539. FILE * oldFile = 0;
  540. FILE * asmFile = scriptContext->GetNativeCodeGenerator()->asmFile;
  541. if (asmFile)
  542. {
  543. oldFile = Output::SetFile(asmFile);
  544. }
  545. this->Dump(IRDumpFlags_AsmDumpMode);
  546. Output::Flush();
  547. if (asmFile)
  548. {
  549. FILE *openedFile = Output::SetFile(oldFile);
  550. Assert(openedFile == asmFile);
  551. }
  552. }
  553. #endif
  554. }
///----------------------------------------------------------------------------
/// Func::StackAllocate
/// Allocate stack space of given size.
/// Returns the frame-relative offset of the new slot; only legal on the
/// top-level Func, which owns the frame layout.
///----------------------------------------------------------------------------
int32
Func::StackAllocate(int size)
{
    Assert(this->IsTopFunc());
    int32 offset;
#ifdef MD_GROW_LOCALS_AREA_UP
    // Locals have positive offsets and are allocated from bottom to top.
    // Align to min(size, MachStackAlignment) so small slots pack tightly
    // while larger ones stay machine-aligned.
    m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
    offset = m_localStackHeight;
    m_localStackHeight += size;
#else
    // Locals have negative offsets and are allocated from top to bottom.
    m_localStackHeight += size;
    m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
    offset = -m_localStackHeight;
#endif
    return offset;
}
  577. ///----------------------------------------------------------------------------
  578. ///
  579. /// Func::StackAllocate
  580. ///
  581. /// Allocate stack space for this symbol.
  582. ///
  583. ///----------------------------------------------------------------------------
  584. int32
  585. Func::StackAllocate(StackSym *stackSym, int size)
  586. {
  587. Assert(size > 0);
  588. if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym() || stackSym->IsAllocated())
  589. {
  590. return stackSym->m_offset;
  591. }
  592. Assert(stackSym->m_offset == 0);
  593. stackSym->m_allocated = true;
  594. stackSym->m_offset = StackAllocate(size);
  595. return stackSym->m_offset;
  596. }
  597. void
  598. Func::SetArgOffset(StackSym *stackSym, int32 offset)
  599. {
  600. AssertMsg(offset >= 0, "Why is the offset, negative?");
  601. stackSym->m_offset = offset;
  602. stackSym->m_allocated = true;
  603. }
///
/// Ensures that local var slots are created, if the function has locals.
/// Allocate stack space for locals used for debugging
/// (for local non-temp vars we write-through memory so that locals inspection can make use of that.).
// On stack, after local slots we allocate space for metadata (in particular, whether any the locals was changed in debugger).
/// Only meaningful when jitting in debug mode; idempotent — later calls are no-ops.
///
void
Func::EnsureLocalVarSlots()
{
    Assert(IsJitInDebugMode());
    if (!this->HasLocalVarSlotCreated())
    {
        Assert(this->m_jnFunction != nullptr);
        uint32 localSlotCount = this->m_jnFunction->GetNonTempLocalVarCount();
        if (localSlotCount && m_localVarSlotsOffset == Js::Constants::InvalidOffset)
        {
            // Allocate the slots.
            int32 size = localSlotCount * GetDiagLocalSlotSize();
            m_localVarSlotsOffset = StackAllocate(size);
            // One flag ("did any local change?") follows the slots.
            m_hasLocalVarChangedOffset = StackAllocate(max(1, MachStackAlignment)); // Can't alloc less than StackAlignment bytes.
            // Loop bodies never reach here; only whole-function work items do.
            Assert(this->m_workItem->Type() == JsFunctionType);
            // Store in the entry point info, so that it will later be used when we do the variable inspection.
            Js::FunctionEntryPointInfo * entryPointInfo = static_cast<Js::FunctionEntryPointInfo*>(this->m_workItem->GetEntryPoint());
            Assert(entryPointInfo != nullptr);
            entryPointInfo->localVarSlotsOffset = AdjustOffsetValue(m_localVarSlotsOffset);
            entryPointInfo->localVarChangedOffset = AdjustOffsetValue(m_hasLocalVarChangedOffset);
        }
    }
}
// Compute and record the stack offset of this inlinee's first actual argument,
// derived from the highest-offset ArgOut produced at the InlineeStart instr.
// Only valid on inlinee Funcs (never the top-level function).
void Func::SetFirstArgOffset(IR::Instr* inlineeStart)
{
    Assert(inlineeStart->m_func == this);
    Assert(!IsTopFunc());
    int32 lastOffset;
    // The arg chain hangs off InlineeStart; the first link's dst sym is the
    // last (highest) ArgOut slot.
    IR::Instr* arg = inlineeStart->GetNextArg();
    const auto lastArgOutStackSym = arg->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
    lastOffset = lastArgOutStackSym->m_offset;
    Assert(lastArgOutStackSym->m_isSingleDef);
    // src2 of the defining instr links to the previous ArgOut; if it has a
    // larger offset, that one is actually the topmost slot.
    const auto secondLastArgOutOpnd = lastArgOutStackSym->m_instrDef->GetSrc2();
    if (secondLastArgOutOpnd->IsSymOpnd())
    {
        const auto secondLastOffset = secondLastArgOutOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_offset;
        if (secondLastOffset > lastOffset)
        {
            lastOffset = secondLastOffset;
        }
    }
    // Step past the topmost slot, then walk back over all actuals plus the
    // inlinee meta args to find where the first actual lives.
    lastOffset += MachPtr;
    int32 firstActualStackOffset = lastOffset - ((this->actualCount + Js::Constants::InlineeMetaArgCount) * MachPtr);
    // Either unset (-1) or recomputed to the same value.
    Assert((this->firstActualStackOffset == -1) || (this->firstActualStackOffset == firstActualStackOffset));
    this->firstActualStackOffset = firstActualStackOffset;
}
  656. int32
  657. Func::GetLocalVarSlotOffset(int32 slotId)
  658. {
  659. this->EnsureLocalVarSlots();
  660. Assert(m_localVarSlotsOffset != Js::Constants::InvalidOffset);
  661. int32 slotOffset = slotId * GetDiagLocalSlotSize();
  662. return m_localVarSlotsOffset + slotOffset;
  663. }
  664. void Func::OnAddSym(Sym* sym)
  665. {
  666. Assert(sym);
  667. if (this->IsJitInDebugMode() && this->IsNonTempLocalVar(sym->m_id))
  668. {
  669. Assert(m_nonTempLocalVars);
  670. m_nonTempLocalVars->Set(sym->m_id);
  671. }
  672. }
  673. ///
  674. /// Returns offset of the flag (1 byte) whether any local was changed (in debugger).
  675. /// If the function does not have any locals, returns -1.
  676. ///
  677. int32
  678. Func::GetHasLocalVarChangedOffset()
  679. {
  680. this->EnsureLocalVarSlots();
  681. return m_hasLocalVarChangedOffset;
  682. }
  683. bool
  684. Func::IsJitInDebugMode()
  685. {
  686. return
  687. Js::Configuration::Global.EnableJitInDebugMode() &&
  688. this->m_workItem->IsJitInDebugMode();
  689. }
  690. bool
  691. Func::IsNonTempLocalVar(uint32 slotIndex)
  692. {
  693. Assert(this->m_jnFunction != nullptr);
  694. return this->m_jnFunction->IsNonTempLocalVar(slotIndex);
  695. }
// Converts a raw local-slot offset into the frame-relative form stored in the
// entry point info; the transform depends on the direction the locals area grows.
int32
Func::AdjustOffsetValue(int32 offset)
{
#ifdef MD_GROW_LOCALS_AREA_UP
    // Locals grow upward: negate and bias (see BailOutInfo::StackSymBias).
    return -(offset + BailOutInfo::StackSymBias);
#else
    // Stack offset are negative, includes the PUSH EBP and return address
    return offset - (2 * MachPtr);
#endif
}
#ifdef MD_GROW_LOCALS_AREA_UP
// Note: this is called during jit-compile when we finalize bail out record.
// (The method name is a long-standing typo for "AdjustLocalVarSlotOffset";
// renaming would break callers, so it is kept.)
void
Func::AjustLocalVarSlotOffset()
{
    if (m_jnFunction->GetNonTempLocalVarCount())
    {
        // Turn positive SP-relative base locals offset into negative frame-pointer-relative offset
        // This is changing value for restoring the locals when read due to locals inspection.
        int localsOffset = m_localVarSlotsOffset - (m_localStackHeight + m_ArgumentsOffset);
        int valueChangeOffset = m_hasLocalVarChangedOffset - (m_localStackHeight + m_ArgumentsOffset);
        // Publish the adjusted offsets on the entry point so the runtime/debugger can find them.
        Js::FunctionEntryPointInfo * entryPointInfo = static_cast<Js::FunctionEntryPointInfo*>(this->m_workItem->GetEntryPoint());
        Assert(entryPointInfo != nullptr);
        entryPointInfo->localVarSlotsOffset = localsOffset;
        entryPointInfo->localVarChangedOffset = valueChangeOffset;
    }
}
#endif
  724. bool
  725. Func::DoGlobOptsForGeneratorFunc()
  726. {
  727. // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
  728. return !m_jnFunction->IsGenerator();
  729. }
  730. void
  731. Func::SetDoFastPaths()
  732. {
  733. // Make sure we only call this once!
  734. Assert(!this->hasCalledSetDoFastPaths);
  735. bool isLeaf = this->m_isLeaf && !PHASE_OFF(Js::LeafFastPathPhase, this);
  736. bool doFastPaths = false;
  737. if(!PHASE_OFF(Js::FastPathPhase, this) && (!IsSimpleJit() || Js::FunctionBody::IsNewSimpleJit()))
  738. {
  739. if (isLeaf || this->GetScriptContext()->GetThreadContext()->GetSourceSize() < (size_t)CONFIG_FLAG(FastPathCap) || CONFIG_FLAG(ForceFastPath))
  740. {
  741. doFastPaths = true;
  742. }
  743. }
  744. this->m_doFastPaths = doFastPaths;
  745. #ifdef DBG
  746. this->hasCalledSetDoFastPaths = true;
  747. #endif
  748. }
#ifdef _M_ARM
// Returns the register used as the base for locals access on ARM.
// An alternate locals pointer is used when a try is present (SP may be
// restored during exception handling), or when forced via a debug flag.
RegNum
Func::GetLocalsPointer() const
{
#ifdef DBG
    if (Js::Configuration::Global.flags.IsEnabled(Js::ForceLocalsPtrFlag))
    {
        return ALT_LOCALS_PTR;
    }
#endif

    if (this->m_jnFunction->GetHasTry())
    {
        return ALT_LOCALS_PTR;
    }

    return RegSP;
}
#endif
// Records (on the top func) that the slot array backing this field access must be
// range-checked for at least propertyId+1 slots; keeps the maximum requested slot
// per stack sym. No-op when the closure range check phase is off.
void Func::AddSlotArrayCheck(IR::SymOpnd *fieldOpnd)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->slotArrayCheckTable == nullptr)
    {
        this->slotArrayCheckTable = SlotArrayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    // The property id of a closure field access is its index into the slot array.
    uint32 slot = propertySym->m_propertyId;
    uint32 *pSlotId = this->slotArrayCheckTable->FindOrInsert(slot, propertySym->m_stackSym->m_id);

    // Grow an existing entry if this access requires a larger slot index
    // ((uint32)-1 acts as the "unset" sentinel).
    if (pSlotId && (*pSlotId == (uint32)-1 || *pSlotId < slot))
    {
        *pSlotId = propertySym->m_propertyId;
    }
}
// Records (on the top func) that the frame display used by this field access must
// be range-checked, and optionally that the slot array found at that frame-display
// slot must be checked as well (when slotId != (uint32)-1).
// No-op when the closure range check phase is off.
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->frameDisplayCheckTable == nullptr)
    {
        this->frameDisplayCheckTable = FrameDisplayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    // One check record per frame-display stack sym.
    FrameDisplayCheckRecord **record = this->frameDisplayCheckTable->FindOrInsertNew(propertySym->m_stackSym->m_id);
    if (*record == nullptr)
    {
        *record = JitAnew(m_alloc, FrameDisplayCheckRecord);
    }

    // The property id is this access's index into the frame display; keep the max.
    uint32 frameDisplaySlot = propertySym->m_propertyId;
    if ((*record)->table == nullptr || (*record)->slotId < frameDisplaySlot)
    {
        (*record)->slotId = frameDisplaySlot;
    }

    if (slotId != (uint32)-1)
    {
        // Also track the largest slot-array index requested for this frame-display slot.
        if ((*record)->table == nullptr)
        {
            (*record)->table = SlotArrayCheckTable::New(m_alloc, 4);
        }
        uint32 *pSlotId = (*record)->table->FindOrInsert(slotId, frameDisplaySlot);
        if (pSlotId && *pSlotId < slotId)
        {
            *pSlotId = slotId;
        }
    }
}
// Creates the stack syms for the local closure and local frame display registers,
// when the function body declares them.
void Func::InitLocalClosureSyms()
{
    Assert(this->m_localClosureSym == nullptr);

    // Allocate stack space for closure pointers. Do this only if we're jitting for stack closures, and
    // tell bailout that these are not byte code symbols so that we don't try to encode them in the bailout record,
    // as they don't have normal lifetimes.
    Js::RegSlot regSlot = this->GetJnFunction()->GetLocalClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // (Js::RegSlot)-1 marks the sym as a non-byte-code slot for bailout purposes.
        this->m_localClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }

    regSlot = this->GetJnFunction()->GetLocalFrameDisplayReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localFrameDisplaySym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }
}
// Whether JIT code for this func may be allocated from the pre-reserved heap page
// segment. Always false unless built with CFG support.
// NOTE(review): on x64 the tail condition is simply "true" without checking
// canCreatePreReservedSegment as x86 does -- confirm that is intentional.
bool Func::CanAllocInPreReservedHeapPageSegment ()
{
#ifdef _CONTROL_FLOW_GUARD
    return PHASE_FORCE1(Js::PreReservedHeapAllocPhase) || (!PHASE_OFF1(Js::PreReservedHeapAllocPhase) &&
        !IsJitInDebugMode() && !this->m_workItem->GetFunctionBody()->IsInDebugMode() && GetScriptContext()->GetThreadContext()->IsCFGEnabled()
#if _M_IX86
        && m_workItem->GetJitMode() == ExecutionMode::FullJit && GetCodeGenAllocators()->canCreatePreReservedSegment);
#elif _M_X64
        && true);
#else
        && false); //Not yet implemented for architectures other than x86 and amd64.
#endif //_M_ARCH
#else
    return false;
#endif//_CONTROL_FLOW_GUARD
}
  859. ///----------------------------------------------------------------------------
  860. ///
  861. /// Func::GetInstrCount
  862. ///
  863. /// Returns the number of instrs.
  864. /// Note: It counts all instrs for now, including labels, etc.
  865. ///
  866. ///----------------------------------------------------------------------------
  867. uint32
  868. Func::GetInstrCount()
  869. {
  870. uint instrCount = 0;
  871. FOREACH_INSTR_IN_FUNC(instr, this)
  872. {
  873. instrCount++;
  874. }NEXT_INSTR_IN_FUNC;
  875. return instrCount;
  876. }
  877. ///----------------------------------------------------------------------------
  878. ///
  879. /// Func::NumberInstrs
  880. ///
  881. /// Number each instruction in order of appearance in the function.
  882. ///
  883. ///----------------------------------------------------------------------------
  884. void
  885. Func::NumberInstrs()
  886. {
  887. #if DBG_DUMP
  888. Assert(this->IsTopFunc());
  889. Assert(!this->hasInstrNumber);
  890. this->hasInstrNumber = true;
  891. #endif
  892. uint instrCount = 1;
  893. FOREACH_INSTR_IN_FUNC(instr, this)
  894. {
  895. instr->SetNumber(instrCount++);
  896. }
  897. NEXT_INSTR_IN_FUNC;
  898. }
  899. ///----------------------------------------------------------------------------
  900. ///
  901. /// Func::IsInPhase
  902. ///
  903. /// Determines whether the function is currently in the provided phase
  904. ///
  905. ///----------------------------------------------------------------------------
  906. #if DBG
  907. bool
  908. Func::IsInPhase(Js::Phase tag)
  909. {
  910. return this->GetTopFunc()->currentPhases.Contains(tag);
  911. }
  912. #endif
  913. ///----------------------------------------------------------------------------
  914. ///
  915. /// Func::BeginPhase
  916. ///
  917. /// Takes care of the profiler
  918. ///
  919. ///----------------------------------------------------------------------------
  920. void
  921. Func::BeginPhase(Js::Phase tag)
  922. {
  923. #ifdef DBG
  924. this->GetTopFunc()->currentPhases.Push(tag);
  925. #endif
  926. #ifdef PROFILE_EXEC
  927. AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
  928. "Profiler tag is supplied but the profiler pointer is NULL");
  929. if (this->m_codeGenProfiler)
  930. {
  931. this->m_codeGenProfiler->ProfileBegin(tag);
  932. }
  933. #endif
  934. }
  935. ///----------------------------------------------------------------------------
  936. ///
  937. /// Func::EndPhase
  938. ///
  939. /// Takes care of the profiler and dumper
  940. ///
  941. ///----------------------------------------------------------------------------
  942. void
  943. Func::EndProfiler(Js::Phase tag)
  944. {
  945. #ifdef DBG
  946. Assert(this->GetTopFunc()->currentPhases.Count() > 0);
  947. Js::Phase popped = this->GetTopFunc()->currentPhases.Pop();
  948. Assert(tag == popped);
  949. #endif
  950. #ifdef PROFILE_EXEC
  951. AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
  952. "Profiler tag is supplied but the profiler pointer is NULL");
  953. if (this->m_codeGenProfiler)
  954. {
  955. this->m_codeGenProfiler->ProfileEnd(tag);
  956. }
  957. #endif
  958. }
// Ends a compilation phase: stops profiling, optionally dumps the IR, and (in
// debug builds) records phase-completion state and runs post-lower IR checks.
void
Func::EndPhase(Js::Phase tag, bool dump)
{
    this->EndProfiler(tag);
#if DBG_DUMP
    // Dump IR when this phase (or the whole backend) is under -dump.
    if(dump && (PHASE_DUMP(tag, this)
        || PHASE_DUMP(Js::BackEndPhase, this)))
    {
        Output::Print(L"-----------------------------------------------------------------------------\n");
        if (m_workItem->Type() == JsLoopBodyWorkItemType)
        {
            Output::Print(L"************ IR after %s (%S) Loop %d ************\n", Js::PhaseNames[tag], ExecutionModeName(m_workItem->GetJitMode()), ((JsLoopBodyCodeGen*)m_workItem)->GetLoopNumber());
        }
        else
        {
            Output::Print(L"************ IR after %s (%S) ************\n", Js::PhaseNames[tag], ExecutionModeName(m_workItem->GetJitMode()));
        }
        this->Dump(Js::Configuration::Global.flags.AsmDiff? IRDumpFlags_AsmDumpMode : IRDumpFlags_None);
    }
#endif
#if DBG
    // Track phase ordering invariants: Lower -> Peeps -> Layout -> FinalLower.
    if (tag == Js::LowererPhase)
    {
        Assert(!this->isPostLower);
        this->isPostLower = true;
    }
    else if (tag == Js::RegAllocPhase)
    {
        Assert(!this->isPostRegAlloc);
        this->isPostRegAlloc = true;
    }
    else if (tag == Js::PeepsPhase)
    {
        Assert(this->isPostLower && !this->isPostLayout);
        this->isPostPeeps = true;
    }
    else if (tag == Js::LayoutPhase)
    {
        Assert(this->isPostPeeps && !this->isPostLayout);
        this->isPostLayout = true;
    }
    else if (tag == Js::FinalLowerPhase)
    {
        Assert(this->isPostLayout && !this->isPostFinalLower);
        this->isPostFinalLower = true;
    }
    // Validate the lowered IR after every post-lower phase.
    if (this->isPostLower)
    {
#ifndef _M_ARM    // Need to verify ARM is clean.
        DbCheckPostLower dbCheck(this);
        dbCheck.Check();
#endif
    }
#endif
}
  1014. Func const *
  1015. Func::GetTopFunc() const
  1016. {
  1017. Func const * func = this;
  1018. while (!func->IsTopFunc())
  1019. {
  1020. func = func->parentFunc;
  1021. }
  1022. return func;
  1023. }
  1024. Func *
  1025. Func::GetTopFunc()
  1026. {
  1027. Func * func = this;
  1028. while (!func->IsTopFunc())
  1029. {
  1030. func = func->parentFunc;
  1031. }
  1032. return func;
  1033. }
  1034. StackSym *
  1035. Func::EnsureLoopParamSym()
  1036. {
  1037. if (this->m_loopParamSym == nullptr)
  1038. {
  1039. this->m_loopParamSym = StackSym::New(TyMachPtr, this);
  1040. }
  1041. return this->m_loopParamSym;
  1042. }
  1043. void
  1044. Func::UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount)
  1045. {
  1046. if (maxInlineeArgOutCount < inlineeArgOutCount)
  1047. {
  1048. maxInlineeArgOutCount = inlineeArgOutCount;
  1049. }
  1050. }
  1051. void
  1052. Func::BeginClone(Lowerer * lowerer, JitArenaAllocator *alloc)
  1053. {
  1054. Assert(this->IsTopFunc());
  1055. AssertMsg(m_cloner == nullptr, "Starting new clone while one is in progress");
  1056. m_cloner = JitAnew(alloc, Cloner, lowerer, alloc);
  1057. if (m_cloneMap == nullptr)
  1058. {
  1059. m_cloneMap = JitAnew(alloc, InstrMap, alloc, 7);
  1060. }
  1061. }
  1062. void
  1063. Func::EndClone()
  1064. {
  1065. Assert(this->IsTopFunc());
  1066. if (m_cloner)
  1067. {
  1068. m_cloner->Finish();
  1069. JitAdelete(m_cloner->alloc, m_cloner);
  1070. m_cloner = nullptr;
  1071. }
  1072. }
  1073. IR::SymOpnd *
  1074. Func::GetInlineeOpndAtOffset(int32 offset)
  1075. {
  1076. Assert(IsInlinee());
  1077. StackSym *stackSym = CreateInlineeStackSym();
  1078. this->SetArgOffset(stackSym, stackSym->m_offset + offset);
  1079. Assert(stackSym->m_offset >= 0);
  1080. return IR::SymOpnd::New(stackSym, 0, TyMachReg, this);
  1081. }
// Creates an arg-slot stack sym positioned at the start of this inlinee's frame.
StackSym *
Func::CreateInlineeStackSym()
{
    // Make sure this is an inlinee and that GlobOpt has initialized the offset
    // in the inlinee's frame.
    Assert(IsInlinee());
    Assert(m_inlineeFrameStartSym->m_offset != -1);

    // (Js::ArgSlot)-1 requests an anonymous arg slot sym from the sym table.
    StackSym *stackSym = m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
    stackSym->m_isInlinedArgSlot = true;
    stackSym->m_offset = m_inlineeFrameStartSym->m_offset;
    stackSym->m_allocated = true;
    return stackSym;
}
  1095. uint8 *
  1096. Func::GetCallsCountAddress() const
  1097. {
  1098. Assert(this->m_workItem->Type() == JsFunctionType);
  1099. JsFunctionCodeGen * functionCodeGen = static_cast<JsFunctionCodeGen *>(this->m_workItem);
  1100. return functionCodeGen->GetFunctionBody()->GetCallsCountAddress(functionCodeGen->GetEntryPoint());
  1101. }
  1102. RecyclerWeakReference<Js::FunctionBody> *
  1103. Func::GetWeakFuncRef() const
  1104. {
  1105. if (this->m_jitTimeData == nullptr)
  1106. {
  1107. return nullptr;
  1108. }
  1109. return this->m_jitTimeData->GetWeakFuncRef();
  1110. }
  1111. Js::InlineCache *
  1112. Func::GetRuntimeInlineCache(const uint index) const
  1113. {
  1114. if(this->m_runtimeData)
  1115. {
  1116. const auto inlineCache = this->m_runtimeData->ClonedInlineCaches()->GetInlineCache(this->m_jnFunction, index);
  1117. if(inlineCache)
  1118. {
  1119. return inlineCache;
  1120. }
  1121. }
  1122. return this->m_jnFunction->GetInlineCache(index);
  1123. }
  1124. Js::PolymorphicInlineCache *
  1125. Func::GetRuntimePolymorphicInlineCache(const uint index) const
  1126. {
  1127. if (this->m_polymorphicInlineCacheInfo)
  1128. {
  1129. return this->m_polymorphicInlineCacheInfo->GetPolymorphicInlineCaches()->GetInlineCache(this->m_jnFunction, index);
  1130. }
  1131. return nullptr;
  1132. }
  1133. byte
  1134. Func::GetPolyCacheUtilToInitialize(const uint index) const
  1135. {
  1136. return this->GetRuntimePolymorphicInlineCache(index) ? this->GetPolyCacheUtil(index) : PolymorphicInlineCacheUtilizationMinValue;
  1137. }
  1138. byte
  1139. Func::GetPolyCacheUtil(const uint index) const
  1140. {
  1141. return this->m_polymorphicInlineCacheInfo->GetUtilArray()->GetUtil(this->m_jnFunction, index);
  1142. }
  1143. Js::ObjTypeSpecFldInfo*
  1144. Func::GetObjTypeSpecFldInfo(const uint index) const
  1145. {
  1146. if (this->m_jitTimeData == nullptr)
  1147. {
  1148. return nullptr;
  1149. }
  1150. Assert(this->m_jitTimeData->GetObjTypeSpecFldInfoArray());
  1151. return this->m_jitTimeData->GetObjTypeSpecFldInfoArray()->GetInfo(this->m_jnFunction, index);
  1152. }
  1153. Js::ObjTypeSpecFldInfo*
  1154. Func::GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const
  1155. {
  1156. Assert(this->m_jitTimeData != nullptr);
  1157. return this->m_jitTimeData->GetGlobalObjTypeSpecFldInfo(propertyInfoId);
  1158. }
  1159. void
  1160. Func::SetGlobalObjTypeSpecFldInfo(uint propertyInfoId, Js::ObjTypeSpecFldInfo* info)
  1161. {
  1162. Assert(this->m_jitTimeData != nullptr);
  1163. this->m_jitTimeData->SetGlobalObjTypeSpecFldInfo(propertyInfoId, info);
  1164. }
  1165. void
  1166. Func::EnsurePinnedTypeRefs()
  1167. {
  1168. if (this->pinnedTypeRefs == nullptr)
  1169. {
  1170. this->pinnedTypeRefs = JitAnew(this->m_alloc, TypeRefSet, this->m_alloc);
  1171. }
  1172. }
  1173. void
  1174. Func::PinTypeRef(void* typeRef)
  1175. {
  1176. EnsurePinnedTypeRefs();
  1177. this->pinnedTypeRefs->AddNew(typeRef);
  1178. }
  1179. void
  1180. Func::EnsureSingleTypeGuards()
  1181. {
  1182. if (this->singleTypeGuards == nullptr)
  1183. {
  1184. this->singleTypeGuards = JitAnew(this->m_alloc, TypePropertyGuardDictionary, this->m_alloc);
  1185. }
  1186. }
  1187. Js::JitTypePropertyGuard*
  1188. Func::GetOrCreateSingleTypeGuard(Js::Type* type)
  1189. {
  1190. EnsureSingleTypeGuards();
  1191. Js::JitTypePropertyGuard* guard;
  1192. if (!this->singleTypeGuards->TryGetValue(type, &guard))
  1193. {
  1194. // Property guards are allocated by NativeCodeData::Allocator so that their lifetime extends as long as the EntryPointInfo is alive.
  1195. guard = NativeCodeDataNew(GetNativeCodeDataAllocator(), Js::JitTypePropertyGuard, type, this->indexedPropertyGuardCount++);
  1196. this->singleTypeGuards->Add(type, guard);
  1197. }
  1198. else
  1199. {
  1200. Assert(guard->GetType() == type);
  1201. }
  1202. return guard;
  1203. }
  1204. void
  1205. Func::EnsureEquivalentTypeGuards()
  1206. {
  1207. if (this->equivalentTypeGuards == nullptr)
  1208. {
  1209. this->equivalentTypeGuards = JitAnew(this->m_alloc, EquivalentTypeGuardList, this->m_alloc);
  1210. }
  1211. }
// Creates a new equivalent-type guard wired to a fresh equivalent-type cache,
// and registers it on this func's guard list.
Js::JitEquivalentTypeGuard*
Func::CreateEquivalentTypeGuard(Js::Type* type, uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();

    Js::JitEquivalentTypeGuard* guard = NativeCodeDataNew(GetNativeCodeDataAllocator(), Js::JitEquivalentTypeGuard, type, this->indexedPropertyGuardCount++, objTypeSpecFldId);

    // If we want to hard code the address of the cache, we will need to go back to allocating it from the native code data allocator.
    // We would then need to maintain consistency (double write) to both the recycler allocated cache and the one on the heap.
    Js::EquivalentTypeCache* cache = NativeCodeDataNewZ(GetTransferDataAllocator(), Js::EquivalentTypeCache);
    guard->SetCache(cache);

    // Give the cache a back-pointer to the guard so that the guard can be cleared at runtime if necessary.
    cache->SetGuard(guard);
    this->equivalentTypeGuards->Prepend(guard);

    return guard;
}
  1226. void
  1227. Func::EnsurePropertyGuardsByPropertyId()
  1228. {
  1229. if (this->propertyGuardsByPropertyId == nullptr)
  1230. {
  1231. this->propertyGuardsByPropertyId = JitAnew(this->m_alloc, PropertyGuardByPropertyIdMap, this->m_alloc);
  1232. }
  1233. }
  1234. void
  1235. Func::EnsureCtorCachesByPropertyId()
  1236. {
  1237. if (this->ctorCachesByPropertyId == nullptr)
  1238. {
  1239. this->ctorCachesByPropertyId = JitAnew(this->m_alloc, CtorCachesByPropertyIdMap, this->m_alloc);
  1240. }
  1241. }
  1242. void
  1243. Func::LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard)
  1244. {
  1245. Assert(guard != nullptr);
  1246. Assert(guard->GetValue() != NULL);
  1247. Assert(this->propertyGuardsByPropertyId != nullptr);
  1248. IndexedPropertyGuardSet* set;
  1249. if (!this->propertyGuardsByPropertyId->TryGetValue(propertyId, &set))
  1250. {
  1251. set = JitAnew(this->m_alloc, IndexedPropertyGuardSet, this->m_alloc);
  1252. this->propertyGuardsByPropertyId->Add(propertyId, set);
  1253. }
  1254. set->Item(guard);
  1255. }
  1256. void
  1257. Func::LinkCtorCacheToPropertyId(Js::PropertyId propertyId, Js::JitTimeConstructorCache* cache)
  1258. {
  1259. Assert(cache != nullptr);
  1260. Assert(this->ctorCachesByPropertyId != nullptr);
  1261. CtorCacheSet* set;
  1262. if (!this->ctorCachesByPropertyId->TryGetValue(propertyId, &set))
  1263. {
  1264. set = JitAnew(this->m_alloc, CtorCacheSet, this->m_alloc);
  1265. this->ctorCachesByPropertyId->Add(propertyId, set);
  1266. }
  1267. set->Item(cache->runtimeCache);
  1268. }
  1269. Js::JitTimeConstructorCache* Func::GetConstructorCache(const Js::ProfileId profiledCallSiteId)
  1270. {
  1271. Assert(GetJnFunction() != nullptr);
  1272. Assert(profiledCallSiteId < GetJnFunction()->GetProfiledCallSiteCount());
  1273. Assert(this->constructorCaches != nullptr);
  1274. return this->constructorCaches[profiledCallSiteId];
  1275. }
  1276. void Func::SetConstructorCache(const Js::ProfileId profiledCallSiteId, Js::JitTimeConstructorCache* constructorCache)
  1277. {
  1278. const auto functionBody = this->GetJnFunction();
  1279. Assert(functionBody != nullptr);
  1280. Assert(profiledCallSiteId < functionBody->GetProfiledCallSiteCount());
  1281. Assert(constructorCache != nullptr);
  1282. Assert(this->constructorCaches != nullptr);
  1283. Assert(this->constructorCaches[profiledCallSiteId] == nullptr);
  1284. this->constructorCacheCount++;
  1285. this->constructorCaches[profiledCallSiteId] = constructorCache;
  1286. }
  1287. void Func::EnsurePropertiesWrittenTo()
  1288. {
  1289. if (this->propertiesWrittenTo == nullptr)
  1290. {
  1291. this->propertiesWrittenTo = JitAnew(this->m_alloc, PropertyIdSet, this->m_alloc);
  1292. }
  1293. }
  1294. void Func::EnsureCallSiteToArgumentsOffsetFixupMap()
  1295. {
  1296. if (this->callSiteToArgumentsOffsetFixupMap == nullptr)
  1297. {
  1298. this->callSiteToArgumentsOffsetFixupMap = JitAnew(this->m_alloc, CallSiteToArgumentsOffsetFixupMap, this->m_alloc);
  1299. }
  1300. }
  1301. IR::LabelInstr *
  1302. Func::GetFuncStartLabel()
  1303. {
  1304. return m_funcStartLabel;
  1305. }
  1306. IR::LabelInstr *
  1307. Func::EnsureFuncStartLabel()
  1308. {
  1309. if(m_funcStartLabel == nullptr)
  1310. {
  1311. m_funcStartLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1312. }
  1313. return m_funcStartLabel;
  1314. }
  1315. IR::LabelInstr *
  1316. Func::GetFuncEndLabel()
  1317. {
  1318. return m_funcEndLabel;
  1319. }
  1320. IR::LabelInstr *
  1321. Func::EnsureFuncEndLabel()
  1322. {
  1323. if(m_funcEndLabel == nullptr)
  1324. {
  1325. m_funcEndLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1326. }
  1327. return m_funcEndLabel;
  1328. }
  1329. void
  1330. Cloner::AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone)
  1331. {
  1332. if (!this->instrFirst)
  1333. {
  1334. this->instrFirst = instrClone;
  1335. }
  1336. this->instrLast = instrClone;
  1337. }
  1338. void
  1339. Cloner::Finish()
  1340. {
  1341. this->RetargetClonedBranches();
  1342. if (this->lowerer)
  1343. {
  1344. lowerer->LowerRange(this->instrFirst, this->instrLast, false, false);
  1345. }
  1346. }
  1347. void
  1348. Cloner::RetargetClonedBranches()
  1349. {
  1350. if (!this->fRetargetClonedBranch)
  1351. {
  1352. return;
  1353. }
  1354. FOREACH_INSTR_IN_RANGE(instr, this->instrFirst, this->instrLast)
  1355. {
  1356. if (instr->IsBranchInstr())
  1357. {
  1358. instr->AsBranchInstr()->RetargetClonedBranch();
  1359. }
  1360. }
  1361. NEXT_INSTR_IN_RANGE;
  1362. }
  1363. void Func::ThrowIfScriptClosed()
  1364. {
  1365. Js::ScriptContext* scriptContext = this->GetScriptContext();
  1366. if(scriptContext->IsClosed())
  1367. {
  1368. // Should not be jitting something in the foreground when the script context is actually closed
  1369. Assert(IsBackgroundJIT() || !scriptContext->IsActuallyClosed());
  1370. throw Js::OperationAbortedException();
  1371. }
  1372. }
// Builds an IndirOpnd for a constant address, reusing a previously hoisted
// address-base register when the new address is within a 32-bit displacement of
// it. Returns nullptr when hoisting is no longer allowed (post-lower).
IR::IndirOpnd * Func::GetConstantAddressIndirOpnd(void * address, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode)
{
    Assert(this->GetTopFunc() == this);
    if (!canHoistConstantAddressLoad)
    {
        // We can't hoist constant address load after lower, as we can't mark the sym as
        // live on back edge
        return nullptr;
    }
    int offset = 0;
    // Look for an existing hoisted base whose address is within a DWORD displacement.
    IR::RegOpnd ** foundRegOpnd = this->constantAddressRegOpnd.Find([address, &offset](IR::RegOpnd * regOpnd)
    {
        Assert(regOpnd->m_sym->IsSingleDef());
        void * curr = regOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address;
        ptrdiff_t diff = (intptr_t)address - (intptr_t)curr;
        if (!Math::FitsInDWord(diff))
        {
            return false;
        }
        // Reuse this base; the remaining distance becomes the indir's offset.
        offset = (int)diff;
        return true;
    });

    IR::RegOpnd * addressRegOpnd;
    if (foundRegOpnd != nullptr)
    {
        addressRegOpnd = *foundRegOpnd;
    }
    else
    {
        // No reusable base: emit a new address load at the function entry
        // (after any previously emitted constant-address loads).
        Assert(offset == 0);
        addressRegOpnd = IR::RegOpnd::New(TyMachPtr, this);
        IR::Instr *const newInstr =
            IR::Instr::New(
                loadOpCode,
                addressRegOpnd,
                IR::AddrOpnd::New(address, kind, this, true),
                this);
        this->constantAddressRegOpnd.Prepend(addressRegOpnd);
        IR::Instr * insertBeforeInstr = this->lastConstantAddressRegLoadInstr;
        if (insertBeforeInstr == nullptr)
        {
            insertBeforeInstr = this->GetFunctionEntryInsertionPoint();
            this->lastConstantAddressRegLoadInstr = newInstr;
        }
        insertBeforeInstr->InsertBefore(newInstr);
    }
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(addressRegOpnd, offset, type, this, true);
#if DBG_DUMP
    indirOpnd->SetAddrKind(kind, address);
#endif
    return indirOpnd;
}
  1425. void Func::MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv)
  1426. {
  1427. Assert(this->GetTopFunc() == this);
  1428. this->constantAddressRegOpnd.Iterate([bv](IR::RegOpnd * regOpnd)
  1429. {
  1430. bv->Set(regOpnd->m_sym->m_id);
  1431. });
  1432. }
  1433. IR::Instr *
  1434. Func::GetFunctionEntryInsertionPoint()
  1435. {
  1436. Assert(this->GetTopFunc() == this);
  1437. IR::Instr * insertInsert = this->lastConstantAddressRegLoadInstr;
  1438. if (insertInsert != nullptr)
  1439. {
  1440. return insertInsert->m_next;
  1441. }
  1442. insertInsert = this->m_headInstr;
  1443. if (this->HasTry())
  1444. {
  1445. // Insert it inside the root region
  1446. insertInsert = insertInsert->m_next;
  1447. Assert(insertInsert->IsLabelInstr() && insertInsert->AsLabelInstr()->GetRegion()->GetType() == RegionTypeRoot);
  1448. }
  1449. return insertInsert->m_next;
  1450. }
  1451. #if DBG_DUMP
  1452. ///----------------------------------------------------------------------------
  1453. ///
  1454. /// Func::DumpHeader
  1455. ///
  1456. ///----------------------------------------------------------------------------
  1457. void
  1458. Func::DumpHeader()
  1459. {
  1460. Output::Print(L"-----------------------------------------------------------------------------\n");
  1461. this->m_jnFunction->DumpFullFunctionName();
  1462. Output::SkipToColumn(50);
  1463. Output::Print(L"Instr Count:%d", GetInstrCount());
  1464. if(m_codeSize > 0)
  1465. {
  1466. Output::Print(L"\t\tSize:%d\n\n", m_codeSize);
  1467. }
  1468. else
  1469. {
  1470. Output::Print(L"\n\n");
  1471. }
  1472. }
  1473. ///----------------------------------------------------------------------------
  1474. ///
  1475. /// Func::Dump
  1476. ///
  1477. ///----------------------------------------------------------------------------
  1478. void
  1479. Func::Dump(IRDumpFlags flags)
  1480. {
  1481. this->DumpHeader();
  1482. FOREACH_INSTR_IN_FUNC(instr, this)
  1483. {
  1484. instr->DumpGlobOptInstrString();
  1485. instr->Dump(flags);
  1486. }NEXT_INSTR_IN_FUNC;
  1487. Output::Flush();
  1488. }
  1489. void
  1490. Func::Dump()
  1491. {
  1492. this->Dump(IRDumpFlags_None);
  1493. }
  1494. #endif
  1495. #if DBG_DUMP || defined(ENABLE_IR_VIEWER)
  1496. LPCSTR
  1497. Func::GetVtableName(INT_PTR address)
  1498. {
  1499. #if DBG
  1500. if (vtableMap == nullptr)
  1501. {
  1502. vtableMap = VirtualTableRegistry::CreateVtableHashMap(this->m_alloc);
  1503. };
  1504. LPCSTR name = vtableMap->Lookup(address, nullptr);
  1505. if (name)
  1506. {
  1507. if (strncmp(name, "class ", _countof("class ") - 1) == 0)
  1508. {
  1509. name += _countof("class ") - 1;
  1510. }
  1511. }
  1512. return name;
  1513. #else
  1514. return "";
  1515. #endif
  1516. }
  1517. #endif
  1518. #if DBG_DUMP | defined(VTUNE_PROFILING)
  1519. bool Func::DoRecordNativeMap() const
  1520. {
  1521. #if defined(VTUNE_PROFILING)
  1522. if (EtwTrace::isJitProfilingActive)
  1523. {
  1524. return true;
  1525. }
  1526. #endif
  1527. #if DBG_DUMP
  1528. return PHASE_DUMP(Js::EncoderPhase, this) && Js::Configuration::Global.flags.Verbose;
  1529. #else
  1530. return false;
  1531. #endif
  1532. }
  1533. #endif