//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Base/EtwTrace.h"
  7. #include "Base/ScriptContextProfiler.h"
  8. #ifdef VTUNE_PROFILING
  9. #include "Base/VTuneChakraProfile.h"
  10. #endif
  11. #include "Library/ForInObjectEnumerator.h"
  12. Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  13. ThreadContextInfo * threadContextInfo,
  14. ScriptContextInfo * scriptContextInfo,
  15. JITOutputIDL * outputData,
  16. Js::EntryPointInfo* epInfo,
  17. const FunctionJITRuntimeInfo *const runtimeInfo,
  18. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
  19. #if !FLOATVAR
  20. CodeGenNumberAllocator * numberAllocator,
  21. #endif
  22. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc,
  23. uint postCallByteCodeOffset, Js::RegSlot returnValueRegSlot, const bool isInlinedConstructor,
  24. Js::ProfileId callSiteIdInParentFunc, bool isGetterSetter) :
  25. m_alloc(alloc),
  26. m_workItem(workItem),
  27. m_output(outputData),
  28. m_entryPointInfo(epInfo),
  29. m_threadContextInfo(threadContextInfo),
  30. m_scriptContextInfo(scriptContextInfo),
  31. m_runtimeInfo(runtimeInfo),
  32. m_polymorphicInlineCacheInfo(polymorphicInlineCacheInfo),
  33. m_codeGenAllocators(codeGenAllocators),
  34. m_inlineeId(0),
  35. pinnedTypeRefs(nullptr),
  36. singleTypeGuards(nullptr),
  37. equivalentTypeGuards(nullptr),
  38. propertyGuardsByPropertyId(nullptr),
  39. ctorCachesByPropertyId(nullptr),
  40. callSiteToArgumentsOffsetFixupMap(nullptr),
  41. indexedPropertyGuardCount(0),
  42. propertiesWrittenTo(nullptr),
  43. lazyBailoutProperties(alloc),
  44. anyPropertyMayBeWrittenTo(false),
  45. #ifdef PROFILE_EXEC
  46. m_codeGenProfiler(codeGenProfiler),
  47. #endif
  48. m_isBackgroundJIT(isBackgroundJIT),
  49. m_cloner(nullptr),
  50. m_cloneMap(nullptr),
  51. m_loopParamSym(nullptr),
  52. m_funcObjSym(nullptr),
  53. m_localClosureSym(nullptr),
  54. m_paramClosureSym(nullptr),
  55. m_localFrameDisplaySym(nullptr),
  56. m_bailoutReturnValueSym(nullptr),
  57. m_hasBailedOutSym(nullptr),
  58. m_inlineeFrameStartSym(nullptr),
  59. m_regsUsed(0),
  60. m_fg(nullptr),
  61. m_labelCount(0),
  62. m_argSlotsForFunctionsCalled(0),
  63. m_isLeaf(false),
  64. m_hasCalls(false),
  65. m_hasInlineArgsOpt(false),
  66. m_canDoInlineArgsOpt(true),
  67. m_doFastPaths(false),
  68. hasBailout(false),
  69. hasBailoutInEHRegion(false),
  70. hasInstrNumber(false),
  71. maintainByteCodeOffset(true),
  72. frameSize(0),
  73. parentFunc(parentFunc),
  74. argObjSyms(nullptr),
  75. m_nonTempLocalVars(nullptr),
  76. hasAnyStackNestedFunc(false),
  77. hasMarkTempObjects(false),
  78. postCallByteCodeOffset(postCallByteCodeOffset),
  79. maxInlineeArgOutCount(0),
  80. returnValueRegSlot(returnValueRegSlot),
  81. firstActualStackOffset(-1),
  82. m_localVarSlotsOffset(Js::Constants::InvalidOffset),
  83. m_hasLocalVarChangedOffset(Js::Constants::InvalidOffset),
  84. actualCount((Js::ArgSlot) - 1),
  85. tryCatchNestingLevel(0),
  86. m_localStackHeight(0),
  87. tempSymDouble(nullptr),
  88. tempSymBool(nullptr),
  89. hasInlinee(false),
  90. thisOrParentInlinerHasArguments(false),
  91. hasStackArgs(false),
  92. hasImplicitParamLoad(false),
  93. hasThrow(false),
  94. hasNonSimpleParams(false),
  95. hasUnoptimizedArgumentsAcccess(false),
  96. applyTargetInliningRemovedArgumentsAccess(false),
  97. hasImplicitCalls(false),
  98. hasTempObjectProducingInstr(false),
  99. isInlinedConstructor(isInlinedConstructor),
  100. #if !FLOATVAR
  101. numberAllocator(numberAllocator),
  102. #endif
  103. loopCount(0),
  104. callSiteIdInParentFunc(callSiteIdInParentFunc),
  105. isGetterSetter(isGetterSetter),
  106. frameInfo(nullptr),
  107. isTJLoopBody(false),
  108. m_nativeCodeDataSym(nullptr),
  109. isFlowGraphValid(false),
  110. #if DBG
  111. m_callSiteCount(0),
  112. #endif
  113. stackNestedFunc(false),
  114. stackClosure(false)
  115. #if defined(_M_ARM32_OR_ARM64)
  116. , m_ArgumentsOffset(0)
  117. , m_epilogLabel(nullptr)
  118. #endif
  119. , m_funcStartLabel(nullptr)
  120. , m_funcEndLabel(nullptr)
  121. #if DBG
  122. , hasCalledSetDoFastPaths(false)
  123. , allowRemoveBailOutArgInstr(false)
  124. , currentPhases(alloc)
  125. , isPostLower(false)
  126. , isPostRegAlloc(false)
  127. , isPostPeeps(false)
  128. , isPostLayout(false)
  129. , isPostFinalLower(false)
  130. , vtableMap(nullptr)
  131. #endif
  132. , m_yieldOffsetResumeLabelList(nullptr)
  133. , m_bailOutNoSaveLabel(nullptr)
  134. , constantAddressRegOpnd(alloc)
  135. , lastConstantAddressRegLoadInstr(nullptr)
  136. , m_totalJumpTableSizeInBytesForSwitchStatements(0)
  137. , slotArrayCheckTable(nullptr)
  138. , frameDisplayCheckTable(nullptr)
  139. , stackArgWithFormalsTracker(nullptr)
  140. , m_forInLoopBaseDepth(0)
  141. , m_forInEnumeratorArrayOffset(-1)
  142. , argInsCount(0)
  143. , m_globalObjTypeSpecFldInfoArray(nullptr)
  144. #ifdef RECYCLER_WRITE_BARRIER_JIT
  145. , m_lowerer(nullptr)
  146. #endif
  147. {
  148. Assert(this->IsInlined() == !!runtimeInfo);
  149. if (this->IsTopFunc())
  150. {
  151. outputData->hasJittedStackClosure = false;
  152. outputData->localVarSlotsOffset = m_localVarSlotsOffset;
  153. outputData->localVarChangedOffset = m_hasLocalVarChangedOffset;
  154. }
  155. if (this->IsInlined())
  156. {
  157. m_inlineeId = ++(GetTopFunc()->m_inlineeId);
  158. }
  159. bool doStackNestedFunc = GetJITFunctionBody()->DoStackNestedFunc();
  160. bool doStackClosure = GetJITFunctionBody()->DoStackClosure() && !PHASE_OFF(Js::FrameDisplayFastPathPhase, this) && !PHASE_OFF(Js::StackClosurePhase, this);
  161. Assert(!doStackClosure || doStackNestedFunc);
  162. this->stackClosure = doStackClosure && this->IsTopFunc();
  163. if (this->stackClosure)
  164. {
  165. // TODO: calculate on runtime side?
  166. m_output.SetHasJITStackClosure();
  167. }
  168. if (m_workItem->Type() == JsFunctionType &&
  169. GetJITFunctionBody()->DoBackendArgumentsOptimization() &&
  170. !GetJITFunctionBody()->HasTry())
  171. {
  172. // doBackendArgumentsOptimization bit is set when there is no eval inside a function
  173. // as determined by the bytecode generator.
  174. SetHasStackArgs(true);
  175. }
  176. if (doStackNestedFunc && GetJITFunctionBody()->GetNestedCount() != 0 &&
  177. (this->IsTopFunc() || this->GetTopFunc()->m_workItem->Type() != JsLoopBodyWorkItemType)) // make sure none of the functions inlined in a jitted loop body allocate nested functions on the stack
  178. {
  179. Assert(!(this->IsJitInDebugMode() && !GetJITFunctionBody()->IsLibraryCode()));
  180. stackNestedFunc = true;
  181. this->GetTopFunc()->hasAnyStackNestedFunc = true;
  182. }
  183. if (GetJITFunctionBody()->HasOrParentHasArguments() || (parentFunc && parentFunc->thisOrParentInlinerHasArguments))
  184. {
  185. thisOrParentInlinerHasArguments = true;
  186. }
  187. if (parentFunc == nullptr)
  188. {
  189. inlineDepth = 0;
  190. m_symTable = JitAnew(alloc, SymTable);
  191. m_symTable->Init(this);
  192. m_symTable->SetStartingID(static_cast<SymID>(workItem->GetJITFunctionBody()->GetLocalsCount() + 1));
  193. Assert(Js::Constants::NoByteCodeOffset == postCallByteCodeOffset);
  194. Assert(Js::Constants::NoRegister == returnValueRegSlot);
  195. #if defined(_M_IX86) || defined(_M_X64)
  196. if (HasArgumentSlot())
  197. {
  198. // Pre-allocate the single argument slot we'll reserve for the arguments object.
  199. // For ARM, the argument slot is not part of the local but part of the register saves
  200. m_localStackHeight = MachArgsSlotOffset;
  201. }
  202. #endif
  203. }
  204. else
  205. {
  206. inlineDepth = parentFunc->inlineDepth + 1;
  207. Assert(Js::Constants::NoByteCodeOffset != postCallByteCodeOffset);
  208. }
  209. this->constructorCacheCount = 0;
  210. this->constructorCaches = AnewArrayZ(this->m_alloc, JITTimeConstructorCache*, GetJITFunctionBody()->GetProfiledCallSiteCount());
  211. #if DBG_DUMP
  212. m_codeSize = -1;
  213. #endif
  214. #if defined(_M_X64)
  215. m_spillSize = -1;
  216. m_argsSize = -1;
  217. m_savedRegSize = -1;
  218. #endif
  219. if (this->IsJitInDebugMode())
  220. {
  221. m_nonTempLocalVars = Anew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
  222. }
  223. if (GetJITFunctionBody()->IsCoroutine())
  224. {
  225. m_yieldOffsetResumeLabelList = YieldOffsetResumeLabelList::New(this->m_alloc);
  226. }
  227. if (this->IsTopFunc())
  228. {
  229. m_globalObjTypeSpecFldInfoArray = JitAnewArrayZ(this->m_alloc, JITObjTypeSpecFldInfo*, GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
  230. }
  231. for (uint i = 0; i < GetJITFunctionBody()->GetInlineCacheCount(); ++i)
  232. {
  233. JITObjTypeSpecFldInfo * info = GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(i);
  234. if (info != nullptr)
  235. {
  236. Assert(info->GetObjTypeSpecFldId() < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
  237. GetTopFunc()->m_globalObjTypeSpecFldInfoArray[info->GetObjTypeSpecFldId()] = info;
  238. }
  239. }
  240. canHoistConstantAddressLoad = !PHASE_OFF(Js::HoistConstAddrPhase, this);
  241. m_forInLoopMaxDepth = this->GetJITFunctionBody()->GetForInLoopDepth();
  242. }
  243. bool
  244. Func::IsLoopBodyInTry() const
  245. {
  246. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTry;
  247. }
  248. /* static */
  249. void
  250. Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  251. ThreadContextInfo * threadContextInfo,
  252. ScriptContextInfo * scriptContextInfo,
  253. JITOutputIDL * outputData,
  254. Js::EntryPointInfo* epInfo, // for in-proc jit only
  255. const FunctionJITRuntimeInfo *const runtimeInfo,
  256. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
  257. #if !FLOATVAR
  258. CodeGenNumberAllocator * numberAllocator,
  259. #endif
  260. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT)
  261. {
  262. bool rejit;
  263. do
  264. {
  265. Func func(alloc, workItem, threadContextInfo,
  266. scriptContextInfo, outputData, epInfo, runtimeInfo,
  267. polymorphicInlineCacheInfo, codeGenAllocators,
  268. #if !FLOATVAR
  269. numberAllocator,
  270. #endif
  271. codeGenProfiler, isBackgroundJIT);
  272. try
  273. {
  274. func.TryCodegen();
  275. rejit = false;
  276. }
  277. catch (Js::RejitException ex)
  278. {
  279. // The work item needs to be rejitted, likely due to some optimization that was too aggressive
  280. if (ex.Reason() == RejitReason::AggressiveIntTypeSpecDisabled)
  281. {
  282. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableAggressiveIntTypeSpec(func.IsLoopBody());
  283. outputData->disableAggressiveIntTypeSpec = TRUE;
  284. }
  285. else if (ex.Reason() == RejitReason::InlineApplyDisabled)
  286. {
  287. workItem->GetJITFunctionBody()->DisableInlineApply();
  288. outputData->disableInlineApply = TRUE;
  289. }
  290. else if (ex.Reason() == RejitReason::InlineSpreadDisabled)
  291. {
  292. workItem->GetJITFunctionBody()->DisableInlineSpread();
  293. outputData->disableInlineSpread = TRUE;
  294. }
  295. else if (ex.Reason() == RejitReason::DisableStackArgOpt)
  296. {
  297. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableStackArgOpt();
  298. outputData->disableStackArgOpt = TRUE;
  299. }
  300. else if (ex.Reason() == RejitReason::DisableSwitchOptExpectingInteger ||
  301. ex.Reason() == RejitReason::DisableSwitchOptExpectingString)
  302. {
  303. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableSwitchOpt();
  304. outputData->disableSwitchOpt = TRUE;
  305. }
  306. else if (ex.Reason() == RejitReason::ArrayCheckHoistDisabled || ex.Reason() == RejitReason::ArrayAccessHelperCallEliminationDisabled)
  307. {
  308. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableArrayCheckHoist(func.IsLoopBody());
  309. outputData->disableArrayCheckHoist = TRUE;
  310. }
  311. else
  312. {
  313. Assert(ex.Reason() == RejitReason::TrackIntOverflowDisabled);
  314. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableTrackCompoundedIntOverflow();
  315. outputData->disableTrackCompoundedIntOverflow = TRUE;
  316. }
  317. if (PHASE_TRACE(Js::ReJITPhase, &func))
  318. {
  319. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  320. Output::Print(
  321. _u("Rejit (compile-time): function: %s (%s) reason: %S\n"),
  322. workItem->GetJITFunctionBody()->GetDisplayName(),
  323. workItem->GetJITTimeInfo()->GetDebugNumberSet(debugStringBuffer),
  324. ex.ReasonName());
  325. }
  326. rejit = true;
  327. }
  328. // Either the entry point has a reference to the number now, or we failed to code gen and we
  329. // don't need to numbers, we can flush the completed page now.
  330. //
  331. // If the number allocator is NULL then we are shutting down the thread context and so too the
  332. // code generator. The number allocator must be freed before the recycler (and thus before the
  333. // code generator) so we can't and don't need to flush it.
  334. // TODO: OOP JIT, allocator cleanup
  335. } while (rejit);
  336. }
  337. ///----------------------------------------------------------------------------
  338. ///
  339. /// Func::TryCodegen
  340. ///
  341. /// Attempt to Codegen this function.
  342. ///
  343. ///----------------------------------------------------------------------------
  344. void
  345. Func::TryCodegen()
  346. {
  347. Assert(!IsJitInDebugMode() || !GetJITFunctionBody()->HasTry());
  348. BEGIN_CODEGEN_PHASE(this, Js::BackEndPhase);
  349. {
  350. // IRBuilder
  351. BEGIN_CODEGEN_PHASE(this, Js::IRBuilderPhase);
  352. #ifdef ASMJS_PLAT
  353. if (GetJITFunctionBody()->IsAsmJsMode())
  354. {
  355. IRBuilderAsmJs asmIrBuilder(this);
  356. asmIrBuilder.Build();
  357. }
  358. else
  359. #endif
  360. {
  361. IRBuilder irBuilder(this);
  362. irBuilder.Build();
  363. }
  364. END_CODEGEN_PHASE(this, Js::IRBuilderPhase);
  365. #ifdef IR_VIEWER
  366. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::IRBuilderPhase);
  367. #endif /* IR_VIEWER */
  368. BEGIN_CODEGEN_PHASE(this, Js::InlinePhase);
  369. InliningHeuristics heuristics(GetWorkItem()->GetJITTimeInfo(), this->IsLoopBody());
  370. Inline inliner(this, heuristics);
  371. inliner.Optimize();
  372. END_CODEGEN_PHASE(this, Js::InlinePhase);
  373. ThrowIfScriptClosed();
  374. // FlowGraph
  375. {
  376. // Scope for FlowGraph arena
  377. NoRecoverMemoryJitArenaAllocator fgAlloc(_u("BE-FlowGraph"), m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
  378. BEGIN_CODEGEN_PHASE(this, Js::FGBuildPhase);
  379. this->m_fg = FlowGraph::New(this, &fgAlloc);
  380. this->m_fg->Build();
  381. END_CODEGEN_PHASE(this, Js::FGBuildPhase);
  382. // Global Optimization and Type Specialization
  383. BEGIN_CODEGEN_PHASE(this, Js::GlobOptPhase);
  384. GlobOpt globOpt(this);
  385. globOpt.Optimize();
  386. END_CODEGEN_PHASE(this, Js::GlobOptPhase);
  387. // Delete flowGraph now
  388. this->m_fg->Destroy();
  389. this->m_fg = nullptr;
  390. }
  391. #ifdef IR_VIEWER
  392. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::GlobOptPhase);
  393. #endif /* IR_VIEWER */
  394. ThrowIfScriptClosed();
  395. // Lowering
  396. Lowerer lowerer(this);
  397. BEGIN_CODEGEN_PHASE(this, Js::LowererPhase);
  398. lowerer.Lower();
  399. END_CODEGEN_PHASE(this, Js::LowererPhase);
  400. #ifdef IR_VIEWER
  401. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::LowererPhase);
  402. #endif /* IR_VIEWER */
  403. // Encode constants
  404. Security security(this);
  405. BEGIN_CODEGEN_PHASE(this, Js::EncodeConstantsPhase)
  406. security.EncodeLargeConstants();
  407. END_CODEGEN_PHASE(this, Js::EncodeConstantsPhase);
  408. if (GetJITFunctionBody()->DoInterruptProbe())
  409. {
  410. BEGIN_CODEGEN_PHASE(this, Js::InterruptProbePhase)
  411. lowerer.DoInterruptProbes();
  412. END_CODEGEN_PHASE(this, Js::InterruptProbePhase)
  413. }
  414. // Register Allocation
  415. BEGIN_CODEGEN_PHASE(this, Js::RegAllocPhase);
  416. LinearScan linearScan(this);
  417. linearScan.RegAlloc();
  418. END_CODEGEN_PHASE(this, Js::RegAllocPhase);
  419. #ifdef IR_VIEWER
  420. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::RegAllocPhase);
  421. #endif /* IR_VIEWER */
  422. ThrowIfScriptClosed();
  423. // Peephole optimizations
  424. BEGIN_CODEGEN_PHASE(this, Js::PeepsPhase);
  425. Peeps peeps(this);
  426. peeps.PeepFunc();
  427. END_CODEGEN_PHASE(this, Js::PeepsPhase);
  428. // Layout
  429. BEGIN_CODEGEN_PHASE(this, Js::LayoutPhase);
  430. SimpleLayout layout(this);
  431. layout.Layout();
  432. END_CODEGEN_PHASE(this, Js::LayoutPhase);
  433. if (this->HasTry() && this->hasBailoutInEHRegion)
  434. {
  435. BEGIN_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
  436. lowerer.EHBailoutPatchUp();
  437. END_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
  438. }
  439. // Insert NOPs (moving this before prolog/epilog for AMD64 and possibly ARM).
  440. BEGIN_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
  441. security.InsertNOPs();
  442. END_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
  443. // Prolog/Epilog
  444. BEGIN_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
  445. if (GetJITFunctionBody()->IsAsmJsMode())
  446. {
  447. lowerer.LowerPrologEpilogAsmJs();
  448. }
  449. else
  450. {
  451. lowerer.LowerPrologEpilog();
  452. }
  453. END_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
  454. BEGIN_CODEGEN_PHASE(this, Js::FinalLowerPhase);
  455. lowerer.FinalLower();
  456. END_CODEGEN_PHASE(this, Js::FinalLowerPhase);
  457. // Encoder
  458. BEGIN_CODEGEN_PHASE(this, Js::EncoderPhase);
  459. Encoder encoder(this);
  460. encoder.Encode();
  461. END_CODEGEN_PHASE_NO_DUMP(this, Js::EncoderPhase);
  462. #ifdef IR_VIEWER
  463. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::EncoderPhase);
  464. #endif /* IR_VIEWER */
  465. }
  466. #if DBG_DUMP
  467. if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag))
  468. {
  469. FILE * oldFile = 0;
  470. FILE * asmFile = GetScriptContext()->GetNativeCodeGenerator()->asmFile;
  471. if (asmFile)
  472. {
  473. oldFile = Output::SetFile(asmFile);
  474. }
  475. this->Dump(IRDumpFlags_AsmDumpMode);
  476. Output::Flush();
  477. if (asmFile)
  478. {
  479. FILE *openedFile = Output::SetFile(oldFile);
  480. Assert(openedFile == asmFile);
  481. }
  482. }
  483. #endif
  484. if (this->IsOOPJIT())
  485. {
  486. BEGIN_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);
  487. auto dataAllocator = this->GetNativeCodeDataAllocator();
  488. if (dataAllocator->allocCount > 0)
  489. {
  490. NativeCodeData::DataChunk *chunk = (NativeCodeData::DataChunk*)dataAllocator->chunkList;
  491. NativeCodeData::DataChunk *next1 = chunk;
  492. while (next1)
  493. {
  494. if (next1->fixupFunc)
  495. {
  496. next1->fixupFunc(next1->data, chunk);
  497. }
  498. #if DBG
  499. // Scan memory to see if there's missing pointer needs to be fixed up
  500. // This can hit false positive if some data field happens to have value
  501. // falls into the NativeCodeData memory range.
  502. NativeCodeData::DataChunk *next2 = chunk;
  503. while (next2)
  504. {
  505. for (unsigned int i = 0; i < next1->len / sizeof(void*); i++)
  506. {
  507. if (((void**)next1->data)[i] == (void*)next2->data)
  508. {
  509. NativeCodeData::VerifyExistFixupEntry((void*)next2->data, &((void**)next1->data)[i], next1->data);
  510. }
  511. }
  512. next2 = next2->next;
  513. }
  514. #endif
  515. next1 = next1->next;
  516. }
  517. JITOutputIDL* jitOutputData = m_output.GetOutputData();
  518. size_t allocSize = offsetof(NativeDataFixupTable, fixupRecords) + sizeof(NativeDataFixupRecord)* (dataAllocator->allocCount);
  519. jitOutputData->nativeDataFixupTable = (NativeDataFixupTable*)midl_user_allocate(allocSize);
  520. if (!jitOutputData->nativeDataFixupTable)
  521. {
  522. Js::Throw::OutOfMemory();
  523. }
  524. __analysis_assume(jitOutputData->nativeDataFixupTable);
  525. jitOutputData->nativeDataFixupTable->count = dataAllocator->allocCount;
  526. jitOutputData->buffer = (NativeDataBuffer*)midl_user_allocate(offsetof(NativeDataBuffer, data) + dataAllocator->totalSize);
  527. if (!jitOutputData->buffer)
  528. {
  529. Js::Throw::OutOfMemory();
  530. }
  531. __analysis_assume(jitOutputData->buffer);
  532. jitOutputData->buffer->len = dataAllocator->totalSize;
  533. unsigned int len = 0;
  534. unsigned int count = 0;
  535. next1 = chunk;
  536. while (next1)
  537. {
  538. memcpy(jitOutputData->buffer->data + len, next1->data, next1->len);
  539. len += next1->len;
  540. jitOutputData->nativeDataFixupTable->fixupRecords[count].index = next1->allocIndex;
  541. jitOutputData->nativeDataFixupTable->fixupRecords[count].length = next1->len;
  542. jitOutputData->nativeDataFixupTable->fixupRecords[count].startOffset = next1->offset;
  543. jitOutputData->nativeDataFixupTable->fixupRecords[count].updateList = next1->fixupList;
  544. count++;
  545. next1 = next1->next;
  546. }
  547. #if DBG
  548. if (PHASE_TRACE1(Js::NativeCodeDataPhase))
  549. {
  550. Output::Print(_u("NativeCodeData Server Buffer: %p, len: %x, chunk head: %p\n"), jitOutputData->buffer->data, jitOutputData->buffer->len, chunk);
  551. }
  552. #endif
  553. }
  554. END_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);
  555. }
  556. END_CODEGEN_PHASE(this, Js::BackEndPhase);
  557. }
  558. ///----------------------------------------------------------------------------
  559. /// Func::StackAllocate
  560. /// Allocate stack space of given size.
  561. ///----------------------------------------------------------------------------
  562. int32
  563. Func::StackAllocate(int size)
  564. {
  565. Assert(this->IsTopFunc());
  566. int32 offset;
  567. #ifdef MD_GROW_LOCALS_AREA_UP
  568. // Locals have positive offsets and are allocated from bottom to top.
  569. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  570. offset = m_localStackHeight;
  571. m_localStackHeight += size;
  572. #else
  573. // Locals have negative offsets and are allocated from top to bottom.
  574. m_localStackHeight += size;
  575. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  576. offset = -m_localStackHeight;
  577. #endif
  578. return offset;
  579. }
  580. ///----------------------------------------------------------------------------
  581. ///
  582. /// Func::StackAllocate
  583. ///
  584. /// Allocate stack space for this symbol.
  585. ///
  586. ///----------------------------------------------------------------------------
  587. int32
  588. Func::StackAllocate(StackSym *stackSym, int size)
  589. {
  590. Assert(size > 0);
  591. if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym() || stackSym->IsAllocated())
  592. {
  593. return stackSym->m_offset;
  594. }
  595. Assert(stackSym->m_offset == 0);
  596. stackSym->m_allocated = true;
  597. stackSym->m_offset = StackAllocate(size);
  598. return stackSym->m_offset;
  599. }
  600. void
  601. Func::SetArgOffset(StackSym *stackSym, int32 offset)
  602. {
  603. AssertMsg(offset >= 0, "Why is the offset, negative?");
  604. stackSym->m_offset = offset;
  605. stackSym->m_allocated = true;
  606. }
  607. ///
  608. /// Ensures that local var slots are created, if the function has locals.
  609. /// Allocate stack space for locals used for debugging
  610. /// (for local non-temp vars we write-through memory so that locals inspection can make use of that.).
  611. // On stack, after local slots we allocate space for metadata (in particular, whether any the locals was changed in debugger).
  612. ///
  613. void
  614. Func::EnsureLocalVarSlots()
  615. {
  616. Assert(IsJitInDebugMode());
  617. if (!this->HasLocalVarSlotCreated())
  618. {
  619. uint32 localSlotCount = GetJITFunctionBody()->GetNonTempLocalVarCount();
  620. if (localSlotCount && m_localVarSlotsOffset == Js::Constants::InvalidOffset)
  621. {
  622. // Allocate the slots.
  623. int32 size = localSlotCount * GetDiagLocalSlotSize();
  624. m_localVarSlotsOffset = StackAllocate(size);
  625. m_hasLocalVarChangedOffset = StackAllocate(max(1, MachStackAlignment)); // Can't alloc less than StackAlignment bytes.
  626. Assert(m_workItem->Type() == JsFunctionType);
  627. m_output.SetVarSlotsOffset(AdjustOffsetValue(m_localVarSlotsOffset));
  628. m_output.SetVarChangedOffset(AdjustOffsetValue(m_hasLocalVarChangedOffset));
  629. }
  630. }
  631. }
void Func::SetFirstArgOffset(IR::Instr* inlineeStart)
{
    // Compute the stack offset of the first actual argument of this inlinee,
    // working backwards from the highest argout slot reachable from InlineeStart.
    Assert(inlineeStart->m_func == this);
    Assert(!IsTopFunc());
    int32 lastOffset;

    IR::Instr* arg = inlineeStart->GetNextArg();
    const auto lastArgOutStackSym = arg->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
    lastOffset = lastArgOutStackSym->m_offset;
    Assert(lastArgOutStackSym->m_isSingleDef);
    // The chained src2 of the single def may point at another argout sym whose
    // offset is higher; take the max of the two candidates.
    const auto secondLastArgOutOpnd = lastArgOutStackSym->m_instrDef->GetSrc2();
    if (secondLastArgOutOpnd->IsSymOpnd())
    {
        const auto secondLastOffset = secondLastArgOutOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_offset;
        if (secondLastOffset > lastOffset)
        {
            lastOffset = secondLastOffset;
        }
    }
    // Step past the last slot, then subtract the full actual + metadata arg area.
    lastOffset += MachPtr;
    int32 firstActualStackOffset = lastOffset - ((this->actualCount + Js::Constants::InlineeMetaArgCount) * MachPtr);
    // Either this is the first computation, or it must agree with the previous one.
    Assert((this->firstActualStackOffset == -1) || (this->firstActualStackOffset == firstActualStackOffset));
    this->firstActualStackOffset = firstActualStackOffset;
}
int32
Func::GetLocalVarSlotOffset(int32 slotId)
{
    // Returns the stack offset of the debugger local slot for slotId,
    // creating the local-var slot area first if necessary.
    this->EnsureLocalVarSlots();
    Assert(m_localVarSlotsOffset != Js::Constants::InvalidOffset);

    int32 slotOffset = slotId * GetDiagLocalSlotSize();

    return m_localVarSlotsOffset + slotOffset;
}
void Func::OnAddSym(Sym* sym)
{
    Assert(sym);
    // When jitting in debug mode, record every non-temp local so locals
    // inspection can find it later.
    if (this->IsJitInDebugMode() && this->IsNonTempLocalVar(sym->m_id))
    {
        Assert(m_nonTempLocalVars);
        m_nonTempLocalVars->Set(sym->m_id);
    }
}
  672. ///
  673. /// Returns offset of the flag (1 byte) whether any local was changed (in debugger).
  674. /// If the function does not have any locals, returns -1.
  675. ///
int32
Func::GetHasLocalVarChangedOffset()
{
    // Ensure the slot area (and the changed-flag byte after it) exists first.
    this->EnsureLocalVarSlots();
    return m_hasLocalVarChangedOffset;
}
bool
Func::IsJitInDebugMode()
{
    // Delegates to the work item's debug-mode flag.
    return m_workItem->IsJitInDebugMode();
}
bool
Func::IsNonTempLocalVar(uint32 slotIndex)
{
    // Delegates to the function body's classification of the slot.
    return GetJITFunctionBody()->IsNonTempLocalVar(slotIndex);
}
int32
Func::AdjustOffsetValue(int32 offset)
{
    // Convert a raw allocated offset into the frame-relative form the runtime expects;
    // the transformation depends on which direction the locals area grows.
#ifdef MD_GROW_LOCALS_AREA_UP
    return -(offset + BailOutInfo::StackSymBias);
#else
    // Stack offset are negative, includes the PUSH EBP and return address
    return offset - (2 * MachPtr);
#endif
}
#ifdef MD_GROW_LOCALS_AREA_UP
// Note: this is called during jit-compile when we finalize bail out record.
void
Func::AjustLocalVarSlotOffset()
{
    if (GetJITFunctionBody()->GetNonTempLocalVarCount())
    {
        // Turn positive SP-relative base locals offset into negative frame-pointer-relative offset
        // This is changing value for restoring the locals when read due to locals inspection.

        int localsOffset = m_localVarSlotsOffset - (m_localStackHeight + m_ArgumentsOffset);
        int valueChangeOffset = m_hasLocalVarChangedOffset - (m_localStackHeight + m_ArgumentsOffset);

        // Overwrite the offsets published earlier by EnsureLocalVarSlots.
        m_output.SetVarSlotsOffset(localsOffset);
        m_output.SetVarChangedOffset(valueChangeOffset);
    }
}
#endif
bool
Func::DoGlobOptsForGeneratorFunc() const
{
    // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
    return !GetJITFunctionBody()->IsCoroutine();
}
bool
Func::DoSimpleJitDynamicProfile() const
{
    // Dynamic profiling in simple jit applies only to the old simple-jit mode
    // (NewSimpleJit off) and can be disabled by phase flag.
    return IsSimpleJit() && !PHASE_OFF(Js::SimpleJitDynamicProfilePhase, GetTopFunc()) && !CONFIG_FLAG(NewSimpleJit);
}
  729. void
  730. Func::SetDoFastPaths()
  731. {
  732. // Make sure we only call this once!
  733. Assert(!this->hasCalledSetDoFastPaths);
  734. bool doFastPaths = false;
  735. if(!PHASE_OFF(Js::FastPathPhase, this) && (!IsSimpleJit() || CONFIG_FLAG(NewSimpleJit)))
  736. {
  737. doFastPaths = true;
  738. }
  739. this->m_doFastPaths = doFastPaths;
  740. #ifdef DBG
  741. this->hasCalledSetDoFastPaths = true;
  742. #endif
  743. }
#ifdef _M_ARM
RegNum
Func::GetLocalsPointer() const
{
    // Returns the register used to address the locals area on ARM.
#ifdef DBG
    // Test hook: force the alternate locals pointer regardless of function shape.
    if (Js::Configuration::Global.flags.IsEnabled(Js::ForceLocalsPtrFlag))
    {
        return ALT_LOCALS_PTR;
    }
#endif

    // Functions with try regions can't address locals off SP directly.
    if (GetJITFunctionBody()->HasTry())
    {
        return ALT_LOCALS_PTR;
    }

    return RegSP;
}
#endif
void Func::AddSlotArrayCheck(IR::SymOpnd *fieldOpnd)
{
    // Record the maximum slot index accessed per slot-array stack sym so a
    // single range check can cover all accesses. No-op if the phase is off.
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->slotArrayCheckTable == nullptr)
    {
        this->slotArrayCheckTable = SlotArrayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    uint32 slot = propertySym->m_propertyId;
    uint32 *pSlotId = this->slotArrayCheckTable->FindOrInsert(slot, propertySym->m_stackSym->m_id);

    // Widen an existing entry if this access reaches a higher slot
    // ((uint32)-1 acts as the "no slot recorded yet" sentinel).
    if (pSlotId && (*pSlotId == (uint32)-1 || *pSlotId < slot))
    {
        *pSlotId = propertySym->m_propertyId;
    }
}
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
    // Record, per frame-display stack sym, the deepest frame-display slot touched
    // and (optionally) the deepest slot within each referenced slot array, so range
    // checks can be emitted once. No-op if the phase is off.
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->frameDisplayCheckTable == nullptr)
    {
        this->frameDisplayCheckTable = FrameDisplayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    FrameDisplayCheckRecord **record = this->frameDisplayCheckTable->FindOrInsertNew(propertySym->m_stackSym->m_id);
    if (*record == nullptr)
    {
        *record = JitAnew(m_alloc, FrameDisplayCheckRecord);
    }

    // Track the maximum frame-display slot accessed through this sym.
    uint32 frameDisplaySlot = propertySym->m_propertyId;
    if ((*record)->table == nullptr || (*record)->slotId < frameDisplaySlot)
    {
        (*record)->slotId = frameDisplaySlot;
    }

    // slotId == (uint32)-1 means "no slot-array access"; otherwise track the
    // maximum slot index per frame-display entry as well.
    if (slotId != (uint32)-1)
    {
        if ((*record)->table == nullptr)
        {
            (*record)->table = SlotArrayCheckTable::New(m_alloc, 4);
        }
        uint32 *pSlotId = (*record)->table->FindOrInsert(slotId, frameDisplaySlot);
        if (pSlotId && *pSlotId < slotId)
        {
            *pSlotId = slotId;
        }
    }
}
void Func::InitLocalClosureSyms()
{
    Assert(this->m_localClosureSym == nullptr);

    // Allocate stack space for closure pointers. Do this only if we're jitting for stack closures, and
    // tell bailout that these are not byte code symbols so that we don't try to encode them in the bailout record,
    // as they don't have normal lifetimes.
    Js::RegSlot regSlot = GetJITFunctionBody()->GetLocalClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // For stack frame display, pass (Js::RegSlot)-1 so the sym is not treated
        // as a byte-code register (see comment above).
        this->m_localClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }

    regSlot = this->GetJITFunctionBody()->GetParamClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // A separate param scope closure only exists when param and body scopes are unmerged.
        Assert(this->GetParamClosureSym() == nullptr && !this->GetJITFunctionBody()->IsParamAndBodyScopeMerged());
        this->m_paramClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot) - 1 : regSlot,
                                   this);
    }

    regSlot = GetJITFunctionBody()->GetLocalFrameDisplayReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localFrameDisplaySym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }
}
bool Func::CanAllocInPreReservedHeapPageSegment ()
{
    // Pre-reserved heap page segments are only used when CFG is enabled and we're
    // not jitting in debug mode; architecture-specific clauses below extend the
    // condition (x86 additionally requires full jit and allocator support).
    // NOTE: the #if/#elif chain splices into the middle of the return expression —
    // exactly one of the branches closes the expression.
#ifdef _CONTROL_FLOW_GUARD
    return PHASE_FORCE1(Js::PreReservedHeapAllocPhase) || (!PHASE_OFF1(Js::PreReservedHeapAllocPhase) &&
        !IsJitInDebugMode() && GetThreadContextInfo()->IsCFGEnabled()
        //&& !GetScriptContext()->IsScriptContextInDebugMode()
#if _M_IX86
        && m_workItem->GetJitMode() == ExecutionMode::FullJit
#if ENABLE_OOP_NATIVE_CODEGEN
        // Pick the allocator set matching in-proc vs OOP JIT.
        && (JITManager::GetJITManager()->IsJITServer()
            ? GetOOPCodeGenAllocators()->canCreatePreReservedSegment
            : GetInProcCodeGenAllocators()->canCreatePreReservedSegment)
#else
        && GetInProcCodeGenAllocators()->canCreatePreReservedSegment
#endif
        );
#elif _M_X64
        && true);
#else
        && false); //Not yet implemented for architectures other than x86 and amd64.
#endif //_M_ARCH
#else
    return false;
#endif//_CONTROL_FLOW_GUARD
}
  872. ///----------------------------------------------------------------------------
  873. ///
  874. /// Func::GetInstrCount
  875. ///
  876. /// Returns the number of instrs.
  877. /// Note: It counts all instrs for now, including labels, etc.
  878. ///
  879. ///----------------------------------------------------------------------------
uint32
Func::GetInstrCount()
{
    // Walk the whole instruction list and count every entry (labels included).
    uint instrCount = 0;

    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instrCount++;
    }NEXT_INSTR_IN_FUNC;

    return instrCount;
}
  890. ///----------------------------------------------------------------------------
  891. ///
  892. /// Func::NumberInstrs
  893. ///
  894. /// Number each instruction in order of appearance in the function.
  895. ///
  896. ///----------------------------------------------------------------------------
void
Func::NumberInstrs()
{
#if DBG_DUMP
    // Numbering is done once, from the top function only.
    Assert(this->IsTopFunc());
    Assert(!this->hasInstrNumber);
    this->hasInstrNumber = true;
#endif
    // Assign sequential numbers starting at 1, in list order.
    uint instrCount = 1;

    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instr->SetNumber(instrCount++);
    }
    NEXT_INSTR_IN_FUNC;
}
  912. ///----------------------------------------------------------------------------
  913. ///
  914. /// Func::IsInPhase
  915. ///
  916. /// Determines whether the function is currently in the provided phase
  917. ///
  918. ///----------------------------------------------------------------------------
#if DBG
bool
Func::IsInPhase(Js::Phase tag)
{
    // The phase stack lives on the top function; check membership there.
    return this->GetTopFunc()->currentPhases.Contains(tag);
}
#endif
  926. ///----------------------------------------------------------------------------
  927. ///
  928. /// Func::BeginPhase
  929. ///
  930. /// Takes care of the profiler
  931. ///
  932. ///----------------------------------------------------------------------------
void
Func::BeginPhase(Js::Phase tag)
{
#ifdef DBG
    // Push onto the top function's phase stack; EndProfiler pops and validates.
    this->GetTopFunc()->currentPhases.Push(tag);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileBegin(tag);
    }
#endif
}
///----------------------------------------------------------------------------
///
/// Func::EndProfiler / Func::EndPhase
///
/// Takes care of the profiler (EndProfiler) and the dumper (EndPhase)
///
///----------------------------------------------------------------------------
void
Func::EndProfiler(Js::Phase tag)
{
#ifdef DBG
    // The tag must match the phase pushed by BeginPhase (balanced begin/end).
    Assert(this->GetTopFunc()->currentPhases.Count() > 0);
    Js::Phase popped = this->GetTopFunc()->currentPhases.Pop();
    Assert(tag == popped);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileEnd(tag);
    }
#endif
}
void
Func::EndPhase(Js::Phase tag, bool dump)
{
    // Balance the profiler/phase-stack state first.
    this->EndProfiler(tag);
#if DBG_DUMP
    // Dump the IR after this phase when requested by phase flags.
    if(dump && (PHASE_DUMP(tag, this)
        || PHASE_DUMP(Js::BackEndPhase, this)))
    {
        Output::Print(_u("-----------------------------------------------------------------------------\n"));

        if (IsLoopBody())
        {
            Output::Print(_u("************   IR after %s (%S) Loop %d   ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()),
                m_workItem->GetLoopNumber());
        }
        else
        {
            Output::Print(_u("************   IR after %s (%S)  ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()));
        }
        this->Dump(Js::Configuration::Global.flags.AsmDiff? IRDumpFlags_AsmDumpMode : IRDumpFlags_None);
    }
#endif
#if DBG
    // Maintain the post-phase state flags, asserting that the backend phases
    // ran in their expected order (Lower -> ... -> Peeps -> Layout -> FinalLower).
    if (tag == Js::LowererPhase)
    {
        Assert(!this->isPostLower);
        this->isPostLower = true;
    }
    else if (tag == Js::RegAllocPhase)
    {
        Assert(!this->isPostRegAlloc);
        this->isPostRegAlloc = true;
    }
    else if (tag == Js::PeepsPhase)
    {
        Assert(this->isPostLower && !this->isPostLayout);
        this->isPostPeeps = true;
    }
    else if (tag == Js::LayoutPhase)
    {
        Assert(this->isPostPeeps && !this->isPostLayout);
        this->isPostLayout = true;
    }
    else if (tag == Js::FinalLowerPhase)
    {
        Assert(this->isPostLayout && !this->isPostFinalLower);
        this->isPostFinalLower = true;
    }
    // After lowering, verify the IR is machine-level-consistent.
    if (this->isPostLower)
    {
#ifndef _M_ARM    // Need to verify ARM is clean.
        DbCheckPostLower dbCheck(this);

        dbCheck.Check();
#endif
    }
    this->m_alloc->MergeDelayFreeList();
#endif
}
  1033. Func const *
  1034. Func::GetTopFunc() const
  1035. {
  1036. Func const * func = this;
  1037. while (!func->IsTopFunc())
  1038. {
  1039. func = func->parentFunc;
  1040. }
  1041. return func;
  1042. }
  1043. Func *
  1044. Func::GetTopFunc()
  1045. {
  1046. Func * func = this;
  1047. while (!func->IsTopFunc())
  1048. {
  1049. func = func->parentFunc;
  1050. }
  1051. return func;
  1052. }
  1053. StackSym *
  1054. Func::EnsureLoopParamSym()
  1055. {
  1056. if (this->m_loopParamSym == nullptr)
  1057. {
  1058. this->m_loopParamSym = StackSym::New(TyMachPtr, this);
  1059. }
  1060. return this->m_loopParamSym;
  1061. }
  1062. void
  1063. Func::UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount)
  1064. {
  1065. if (maxInlineeArgOutCount < inlineeArgOutCount)
  1066. {
  1067. maxInlineeArgOutCount = inlineeArgOutCount;
  1068. }
  1069. }
void
Func::BeginClone(Lowerer * lowerer, JitArenaAllocator *alloc)
{
    Assert(this->IsTopFunc());
    AssertMsg(m_cloner == nullptr, "Starting new clone while one is in progress");
    // Fresh cloner per clone session; the orig->clone instr map persists across sessions.
    m_cloner = JitAnew(alloc, Cloner, lowerer, alloc);

    if (m_cloneMap == nullptr)
    {
        m_cloneMap = JitAnew(alloc, InstrMap, alloc, 7);
    }
}
void
Func::EndClone()
{
    Assert(this->IsTopFunc());
    // Finish the clone session (retarget branches, lower the cloned range) and
    // free the cloner; harmless if no clone is in progress.
    if (m_cloner)
    {
        m_cloner->Finish();
        JitAdelete(m_cloner->alloc, m_cloner);
        m_cloner = nullptr;
    }
}
IR::SymOpnd *
Func::GetInlineeOpndAtOffset(int32 offset)
{
    // Build a sym operand addressing the inlinee frame at (frame start + offset).
    Assert(IsInlinee());
    StackSym *stackSym = CreateInlineeStackSym();
    this->SetArgOffset(stackSym, stackSym->m_offset + offset);
    Assert(stackSym->m_offset >= 0);

    return IR::SymOpnd::New(stackSym, 0, TyMachReg, this);
}
StackSym *
Func::CreateInlineeStackSym()
{
    // Make sure this is an inlinee and that GlobOpt has initialized the offset
    // in the inlinee's frame.
    Assert(IsInlinee());
    Assert(m_inlineeFrameStartSym->m_offset != -1);

    // (Js::ArgSlot)-1 marks this as a synthetic slot; the sym is pinned to the
    // inlinee frame's start offset and flagged as an inlined arg slot.
    StackSym *stackSym = m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
    stackSym->m_isInlinedArgSlot = true;
    stackSym->m_offset = m_inlineeFrameStartSym->m_offset;
    stackSym->m_allocated = true;

    return stackSym;
}
uint16
Func::GetArgUsedForBranch() const
{
    // this value can change while JITing, so or these together
    return GetJITFunctionBody()->GetArgUsedForBranch() | GetJITOutput()->GetArgUsedForBranch();
}
intptr_t
Func::GetJittedLoopIterationsSinceLastBailoutAddress() const
{
    // Only loop-body work items track this counter address.
    Assert(this->m_workItem->Type() == JsLoopBodyWorkItemType);

    return m_workItem->GetJittedLoopIterationsSinceLastBailoutAddr();
}
intptr_t
Func::GetWeakFuncRef() const
{
    // TODO: OOP JIT, figure out if this can be null
    return m_workItem->GetJITTimeInfo()->GetWeakFuncRef();
}
intptr_t
Func::GetRuntimeInlineCache(const uint index) const
{
    // Prefer a cloned inline cache (when runtime info provides one); fall back
    // to the function body's cache for this index.
    if(m_runtimeInfo != nullptr && m_runtimeInfo->HasClonedInlineCaches())
    {
        intptr_t inlineCache = m_runtimeInfo->GetClonedInlineCache(index);
        if(inlineCache)
        {
            return inlineCache;
        }
    }

    return GetJITFunctionBody()->GetInlineCache(index);
}
  1145. JITTimePolymorphicInlineCache *
  1146. Func::GetRuntimePolymorphicInlineCache(const uint index) const
  1147. {
  1148. if (this->m_polymorphicInlineCacheInfo && this->m_polymorphicInlineCacheInfo->HasInlineCaches())
  1149. {
  1150. return this->m_polymorphicInlineCacheInfo->GetInlineCache(index);
  1151. }
  1152. return nullptr;
  1153. }
  1154. byte
  1155. Func::GetPolyCacheUtilToInitialize(const uint index) const
  1156. {
  1157. return this->GetRuntimePolymorphicInlineCache(index) ? this->GetPolyCacheUtil(index) : PolymorphicInlineCacheUtilizationMinValue;
  1158. }
byte
Func::GetPolyCacheUtil(const uint index) const
{
    // Caller must ensure m_polymorphicInlineCacheInfo is non-null.
    return this->m_polymorphicInlineCacheInfo->GetUtil(index);
}
JITObjTypeSpecFldInfo*
Func::GetObjTypeSpecFldInfo(const uint index) const
{
    // With no inline caches there can be no obj-type-spec field info;
    // reaching here in that state is a bug.
    if (GetJITFunctionBody()->GetInlineCacheCount() == 0)
    {
        Assert(UNREACHED);
        return nullptr;
    }

    return GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(index);
}
JITObjTypeSpecFldInfo*
Func::GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const
{
    // The global array lives on the top function; index must be in range.
    Assert(propertyInfoId < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    return GetTopFunc()->m_globalObjTypeSpecFldInfoArray[propertyInfoId];
}
  1180. void
  1181. Func::EnsurePinnedTypeRefs()
  1182. {
  1183. if (this->pinnedTypeRefs == nullptr)
  1184. {
  1185. this->pinnedTypeRefs = JitAnew(this->m_alloc, TypeRefSet, this->m_alloc);
  1186. }
  1187. }
void
Func::PinTypeRef(void* typeRef)
{
    // Keep the type alive for the duration of the JIT by adding it to the pinned set.
    EnsurePinnedTypeRefs();
    this->pinnedTypeRefs->AddNew(typeRef);
}
  1194. void
  1195. Func::EnsureSingleTypeGuards()
  1196. {
  1197. if (this->singleTypeGuards == nullptr)
  1198. {
  1199. this->singleTypeGuards = JitAnew(this->m_alloc, TypePropertyGuardDictionary, this->m_alloc);
  1200. }
  1201. }
Js::JitTypePropertyGuard*
Func::GetOrCreateSingleTypeGuard(intptr_t typeAddr)
{
    EnsureSingleTypeGuards();

    // Return the existing guard for this type address, or create and cache one.
    Js::JitTypePropertyGuard* guard;
    if (!this->singleTypeGuards->TryGetValue(typeAddr, &guard))
    {
        // Property guards are allocated by NativeCodeData::Allocator so that their lifetime extends as long as the EntryPointInfo is alive.
        guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitTypePropertyGuard, typeAddr, this->indexedPropertyGuardCount++);
        this->singleTypeGuards->Add(typeAddr, guard);
    }
    else
    {
        Assert(guard->GetTypeAddr() == typeAddr);
    }

    return guard;
}
  1219. void
  1220. Func::EnsureEquivalentTypeGuards()
  1221. {
  1222. if (this->equivalentTypeGuards == nullptr)
  1223. {
  1224. this->equivalentTypeGuards = JitAnew(this->m_alloc, EquivalentTypeGuardList, this->m_alloc);
  1225. }
  1226. }
Js::JitEquivalentTypeGuard*
Func::CreateEquivalentTypeGuard(JITTypeHolder type, uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();

    // Guard lives in native code data so it survives as long as the entry point.
    Js::JitEquivalentTypeGuard* guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitEquivalentTypeGuard, type->GetAddr(), this->indexedPropertyGuardCount++, objTypeSpecFldId);

    // If we want to hard code the address of the cache, we will need to go back to allocating it from the native code data allocator.
    // We would then need to maintain consistency (double write) to both the recycler allocated cache and the one on the heap.
    Js::EquivalentTypeCache* cache = nullptr;
    if (this->IsOOPJIT())
    {
        // OOP JIT: allocate from the JIT arena (transferred separately).
        cache = JitAnewZ(this->m_alloc, Js::EquivalentTypeCache);
    }
    else
    {
        cache = NativeCodeDataNewZNoFixup(GetTransferDataAllocator(), Js::EquivalentTypeCache);
    }
    guard->SetCache(cache);

    // Give the cache a back-pointer to the guard so that the guard can be cleared at runtime if necessary.
    cache->SetGuard(guard);
    this->equivalentTypeGuards->Prepend(guard);

    return guard;
}
  1249. void
  1250. Func::EnsurePropertyGuardsByPropertyId()
  1251. {
  1252. if (this->propertyGuardsByPropertyId == nullptr)
  1253. {
  1254. this->propertyGuardsByPropertyId = JitAnew(this->m_alloc, PropertyGuardByPropertyIdMap, this->m_alloc);
  1255. }
  1256. }
  1257. void
  1258. Func::EnsureCtorCachesByPropertyId()
  1259. {
  1260. if (this->ctorCachesByPropertyId == nullptr)
  1261. {
  1262. this->ctorCachesByPropertyId = JitAnew(this->m_alloc, CtorCachesByPropertyIdMap, this->m_alloc);
  1263. }
  1264. }
void
Func::LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard)
{
    Assert(guard != nullptr);
    Assert(guard->GetValue() != NULL);

    // Caller must have called EnsurePropertyGuardsByPropertyId first.
    Assert(this->propertyGuardsByPropertyId != nullptr);

    // Find (or create) the guard set for this property id and add the guard.
    IndexedPropertyGuardSet* set;
    if (!this->propertyGuardsByPropertyId->TryGetValue(propertyId, &set))
    {
        set = JitAnew(this->m_alloc, IndexedPropertyGuardSet, this->m_alloc);
        this->propertyGuardsByPropertyId->Add(propertyId, set);
    }

    set->Item(guard);
}
void
Func::LinkCtorCacheToPropertyId(Js::PropertyId propertyId, JITTimeConstructorCache* cache)
{
    Assert(cache != nullptr);
    // Caller must have called EnsureCtorCachesByPropertyId first.
    Assert(this->ctorCachesByPropertyId != nullptr);

    // Find (or create) the cache set for this property id and record the
    // runtime address of the constructor cache.
    CtorCacheSet* set;
    if (!this->ctorCachesByPropertyId->TryGetValue(propertyId, &set))
    {
        set = JitAnew(this->m_alloc, CtorCacheSet, this->m_alloc);
        this->ctorCachesByPropertyId->Add(propertyId, set);
    }

    set->Item(cache->GetRuntimeCacheAddr());
}
JITTimeConstructorCache* Func::GetConstructorCache(const Js::ProfileId profiledCallSiteId)
{
    // May return nullptr if no cache was recorded for this call site.
    Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
    Assert(this->constructorCaches != nullptr);
    return this->constructorCaches[profiledCallSiteId];
}
void Func::SetConstructorCache(const Js::ProfileId profiledCallSiteId, JITTimeConstructorCache* constructorCache)
{
    // Record a constructor cache for a call site; each site may be set at most once.
    Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
    Assert(constructorCache != nullptr);
    Assert(this->constructorCaches != nullptr);
    Assert(this->constructorCaches[profiledCallSiteId] == nullptr);
    this->constructorCacheCount++;
    this->constructorCaches[profiledCallSiteId] = constructorCache;
}
  1307. void Func::EnsurePropertiesWrittenTo()
  1308. {
  1309. if (this->propertiesWrittenTo == nullptr)
  1310. {
  1311. this->propertiesWrittenTo = JitAnew(this->m_alloc, PropertyIdSet, this->m_alloc);
  1312. }
  1313. }
  1314. void Func::EnsureCallSiteToArgumentsOffsetFixupMap()
  1315. {
  1316. if (this->callSiteToArgumentsOffsetFixupMap == nullptr)
  1317. {
  1318. this->callSiteToArgumentsOffsetFixupMap = JitAnew(this->m_alloc, CallSiteToArgumentsOffsetFixupMap, this->m_alloc);
  1319. }
  1320. }
IR::LabelInstr *
Func::GetFuncStartLabel()
{
    // May be nullptr until EnsureFuncStartLabel is called.
    return m_funcStartLabel;
}
  1326. IR::LabelInstr *
  1327. Func::EnsureFuncStartLabel()
  1328. {
  1329. if(m_funcStartLabel == nullptr)
  1330. {
  1331. m_funcStartLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1332. }
  1333. return m_funcStartLabel;
  1334. }
IR::LabelInstr *
Func::GetFuncEndLabel()
{
    // May be nullptr until EnsureFuncEndLabel is called.
    return m_funcEndLabel;
}
  1340. IR::LabelInstr *
  1341. Func::EnsureFuncEndLabel()
  1342. {
  1343. if(m_funcEndLabel == nullptr)
  1344. {
  1345. m_funcEndLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1346. }
  1347. return m_funcEndLabel;
  1348. }
  1349. void
  1350. Func::EnsureStackArgWithFormalsTracker()
  1351. {
  1352. if (stackArgWithFormalsTracker == nullptr)
  1353. {
  1354. stackArgWithFormalsTracker = JitAnew(m_alloc, StackArgWithFormalsTracker, m_alloc);
  1355. }
  1356. }
  1357. BOOL
  1358. Func::IsFormalsArraySym(SymID symId)
  1359. {
  1360. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsArraySyms() == nullptr)
  1361. {
  1362. return false;
  1363. }
  1364. return stackArgWithFormalsTracker->GetFormalsArraySyms()->Test(symId);
  1365. }
void
Func::TrackFormalsArraySym(SymID symId)
{
    // Record symId as a formals-array sym, creating the tracker if needed.
    EnsureStackArgWithFormalsTracker();
    stackArgWithFormalsTracker->SetFormalsArraySyms(symId);
}
void
Func::TrackStackSymForFormalIndex(Js::ArgSlot formalsIndex, StackSym * sym)
{
    // Map a formal's index to its stack sym; the count excludes "this" (hence -1).
    EnsureStackArgWithFormalsTracker();
    Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
    stackArgWithFormalsTracker->SetStackSymInFormalsIndexMap(sym, formalsIndex, formalsCount);
}
StackSym *
Func::GetStackSymForFormal(Js::ArgSlot formalsIndex)
{
    // nullptr when no tracker/map exists; otherwise the sym recorded for the index
    // (which may itself be nullptr if never tracked).
    if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
    {
        return nullptr;
    }

    // In-params count includes "this", so the formals count is one less.
    Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
    StackSym ** formalsIndexToStackSymMap = stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap();
    AssertMsg(formalsIndex < formalsCount, "OutOfRange ? ");
    return formalsIndexToStackSymMap[formalsIndex];
}
  1391. bool
  1392. Func::HasStackSymForFormal(Js::ArgSlot formalsIndex)
  1393. {
  1394. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1395. {
  1396. return false;
  1397. }
  1398. return GetStackSymForFormal(formalsIndex) != nullptr;
  1399. }
void
Func::SetScopeObjSym(StackSym * sym)
{
    // Record the scope object sym on the tracker, creating the tracker if needed.
    EnsureStackArgWithFormalsTracker();
    stackArgWithFormalsTracker->SetScopeObjSym(sym);
}
StackSym *
Func::GetNativeCodeDataSym() const
{
    // Only meaningful for out-of-process JIT.
    Assert(IsOOPJIT());
    return m_nativeCodeDataSym;
}
void
Func::SetNativeCodeDataSym(StackSym * opnd)
{
    // Only meaningful for out-of-process JIT.
    Assert(IsOOPJIT());
    m_nativeCodeDataSym = opnd;
}
  1418. StackSym*
  1419. Func::GetScopeObjSym()
  1420. {
  1421. if (stackArgWithFormalsTracker == nullptr)
  1422. {
  1423. return nullptr;
  1424. }
  1425. return stackArgWithFormalsTracker->GetScopeObjSym();
  1426. }
BVSparse<JitArenaAllocator> *
StackArgWithFormalsTracker::GetFormalsArraySyms()
{
    // May be nullptr if no formals-array sym was ever recorded.
    return formalsArraySyms;
}
void
StackArgWithFormalsTracker::SetFormalsArraySyms(SymID symId)
{
    // Lazily create the bit vector, then mark symId as a formals-array sym.
    if (formalsArraySyms == nullptr)
    {
        formalsArraySyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    }
    formalsArraySyms->Set(symId);
}
StackSym **
StackArgWithFormalsTracker::GetFormalsIndexToStackSymMap()
{
    // May be nullptr if no formal was ever mapped.
    return formalsIndexToStackSymMap;
}
void
StackArgWithFormalsTracker::SetStackSymInFormalsIndexMap(StackSym * sym, Js::ArgSlot formalsIndex, Js::ArgSlot formalsCount)
{
    // Lazily create the (zero-initialized) index->sym array, then record the sym.
    if(formalsIndexToStackSymMap == nullptr)
    {
        formalsIndexToStackSymMap = JitAnewArrayZ(alloc, StackSym*, formalsCount);
    }
    AssertMsg(formalsIndex < formalsCount, "Out of range ?");
    formalsIndexToStackSymMap[formalsIndex] = sym;
}
void
StackArgWithFormalsTracker::SetScopeObjSym(StackSym * sym)
{
    // Simple setter for the scope object sym.
    m_scopeObjSym = sym;
}
StackSym *
StackArgWithFormalsTracker::GetScopeObjSym()
{
    // May be nullptr if never set.
    return m_scopeObjSym;
}
  1466. void
  1467. Cloner::AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone)
  1468. {
  1469. if (!this->instrFirst)
  1470. {
  1471. this->instrFirst = instrClone;
  1472. }
  1473. this->instrLast = instrClone;
  1474. }
void
Cloner::Finish()
{
    // Fix up branch targets in the cloned range, then lower it if a lowerer was supplied.
    this->RetargetClonedBranches();
    if (this->lowerer)
    {
        lowerer->LowerRange(this->instrFirst, this->instrLast, false, false);
    }
}
void
Cloner::RetargetClonedBranches()
{
    // Only needed when the clone session asked for branch retargeting.
    if (!this->fRetargetClonedBranch)
    {
        return;
    }

    // Point each cloned branch at its cloned target label.
    FOREACH_INSTR_IN_RANGE(instr, this->instrFirst, this->instrLast)
    {
        if (instr->IsBranchInstr())
        {
            instr->AsBranchInstr()->RetargetClonedBranch();
        }
    }
    NEXT_INSTR_IN_RANGE;
}
void Func::ThrowIfScriptClosed()
{
    // Abort the compilation if the script context has been closed.
    if (GetScriptContextInfo()->IsClosed())
    {
        // Should not be jitting something in the foreground when the script context is actually closed
        Assert(IsBackgroundJIT() || !GetScriptContext()->IsActuallyClosed());

        throw Js::OperationAbortedException();
    }
}
IR::IndirOpnd * Func::GetConstantAddressIndirOpnd(intptr_t address, IR::Opnd * largeConstOpnd, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode)
{
    // Returns an indir operand [reg + offset] addressing a constant address,
    // reusing a previously hoisted constant-address register when the delta
    // fits in a 32-bit displacement. Returns nullptr once hoisting is disabled.
    Assert(this->GetTopFunc() == this);
    if (!canHoistConstantAddressLoad)
    {
        // We can't hoist constant address load after lower, as we can't mark the sym as
        // live on back edge
        return nullptr;
    }
    int offset = 0;
    // Look for an existing hoisted base whose address is within DWORD range;
    // on success 'offset' holds the displacement from that base.
    IR::RegOpnd ** foundRegOpnd = this->constantAddressRegOpnd.Find([address, &offset](IR::RegOpnd * regOpnd)
    {
        Assert(regOpnd->m_sym->IsSingleDef());
        Assert(regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() || regOpnd->m_sym->m_instrDef->GetSrc1()->IsIntConstOpnd());
        void * curr = regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() ?
                      regOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address :
                      (void *)regOpnd->m_sym->m_instrDef->GetSrc1()->AsIntConstOpnd()->GetValue();
        ptrdiff_t diff = (uintptr_t)address - (uintptr_t)curr;
        if (!Math::FitsInDWord(diff))
        {
            return false;
        }

        offset = (int)diff;
        return true;
    });

    IR::RegOpnd * addressRegOpnd;
    if (foundRegOpnd != nullptr)
    {
        addressRegOpnd = *foundRegOpnd;
    }
    else
    {
        // No reusable base: create a new reg, load the constant into it, and
        // insert the load at the function entry (after prior constant loads).
        Assert(offset == 0);
        addressRegOpnd = IR::RegOpnd::New(TyMachPtr, this);
        IR::Instr *const newInstr =
            IR::Instr::New(
                loadOpCode,
                addressRegOpnd,
                largeConstOpnd,
                this);
        this->constantAddressRegOpnd.Prepend(addressRegOpnd);
        IR::Instr * insertBeforeInstr = this->lastConstantAddressRegLoadInstr;
        if (insertBeforeInstr == nullptr)
        {
            insertBeforeInstr = this->GetFunctionEntryInsertionPoint();
            this->lastConstantAddressRegLoadInstr = newInstr;
        }
        insertBeforeInstr->InsertBefore(newInstr);
    }
    IR::IndirOpnd * indirOpnd =  IR::IndirOpnd::New(addressRegOpnd, offset, type, this, true);
#if DBG_DUMP
    // TODO: michhol make intptr_t
    indirOpnd->SetAddrKind(kind, (void*)address);
#endif
    return indirOpnd;
}
  1565. void Func::MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv)
  1566. {
  1567. Assert(this->GetTopFunc() == this);
  1568. this->constantAddressRegOpnd.Iterate([bv](IR::RegOpnd * regOpnd)
  1569. {
  1570. bv->Set(regOpnd->m_sym->m_id);
  1571. });
  1572. }
  1573. IR::Instr *
  1574. Func::GetFunctionEntryInsertionPoint()
  1575. {
  1576. Assert(this->GetTopFunc() == this);
  1577. IR::Instr * insertInsert = this->lastConstantAddressRegLoadInstr;
  1578. if (insertInsert != nullptr)
  1579. {
  1580. return insertInsert->m_next;
  1581. }
  1582. insertInsert = this->m_headInstr;
  1583. if (this->HasTry())
  1584. {
  1585. // Insert it inside the root region
  1586. insertInsert = insertInsert->m_next;
  1587. Assert(insertInsert->IsLabelInstr() && insertInsert->AsLabelInstr()->GetRegion()->GetType() == RegionTypeRoot);
  1588. }
  1589. return insertInsert->m_next;
  1590. }
// Allocates a boxed JavascriptNumber holding 'value' for use by generated code.
Js::Var
Func::AllocateNumber(double value)
{
    Js::Var number = nullptr;
#if FLOATVAR
    // NaN-boxed floats need no per-context allocator.
    number = Js::JavascriptNumber::NewCodeGenInstance((double)value, nullptr);
#else
    if (!IsOOPJIT()) // in-proc jit
    {
        number = Js::JavascriptNumber::NewCodeGenInstance(GetNumberAllocator(), (double)value, GetScriptContext());
    }
    else // OOP JIT
    {
        // Allocate in the script process via the cross-process number allocator.
        number = GetXProcNumberAllocator()->AllocateNumber(this, value);
    }
#endif
    return number;
}
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// Prints "Function <display name> (<debug number set>)" with no trailing newline.
void
Func::DumpFullFunctionName()
{
    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
    Output::Print(_u("Function %s (%s)"), GetJITFunctionBody()->GetDisplayName(), GetDebugNumberSet(debugStringBuffer));
}
#endif
  1617. void
  1618. Func::UpdateForInLoopMaxDepth(uint forInLoopMaxDepth)
  1619. {
  1620. Assert(this->IsTopFunc());
  1621. this->m_forInLoopMaxDepth = max(this->m_forInLoopMaxDepth, forInLoopMaxDepth);
  1622. }
  1623. int
  1624. Func::GetForInEnumeratorArrayOffset() const
  1625. {
  1626. Func const* topFunc = this->GetTopFunc();
  1627. Assert(this->m_forInLoopBaseDepth + this->GetJITFunctionBody()->GetForInLoopDepth() <= topFunc->m_forInLoopMaxDepth);
  1628. return topFunc->m_forInEnumeratorArrayOffset
  1629. + this->m_forInLoopBaseDepth * sizeof(Js::ForInObjectEnumerator);
  1630. }
  1631. #if DBG_DUMP
  1632. ///----------------------------------------------------------------------------
  1633. ///
  1634. /// Func::DumpHeader
  1635. ///
  1636. ///----------------------------------------------------------------------------
  1637. void
  1638. Func::DumpHeader()
  1639. {
  1640. Output::Print(_u("-----------------------------------------------------------------------------\n"));
  1641. DumpFullFunctionName();
  1642. Output::SkipToColumn(50);
  1643. Output::Print(_u("Instr Count:%d"), GetInstrCount());
  1644. if(m_codeSize > 0)
  1645. {
  1646. Output::Print(_u("\t\tSize:%d\n\n"), m_codeSize);
  1647. }
  1648. else
  1649. {
  1650. Output::Print(_u("\n\n"));
  1651. }
  1652. }
///----------------------------------------------------------------------------
///
/// Func::Dump
///
///     Dumps the function header followed by every instruction in the func.
///
///----------------------------------------------------------------------------
void
Func::Dump(IRDumpFlags flags)
{
    this->DumpHeader();
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        // Show the glob-opt annotation (if any) before the instruction itself.
        instr->DumpGlobOptInstrString();
        instr->Dump(flags);
    }NEXT_INSTR_IN_FUNC;
    Output::Flush();
}
// Convenience overload: dump with default flags.
void
Func::Dump()
{
    this->Dump(IRDumpFlags_None);
}
#endif
  1675. #if DBG_DUMP || defined(ENABLE_IR_VIEWER)
  1676. LPCSTR
  1677. Func::GetVtableName(INT_PTR address)
  1678. {
  1679. #if DBG
  1680. if (vtableMap == nullptr)
  1681. {
  1682. vtableMap = VirtualTableRegistry::CreateVtableHashMap(this->m_alloc);
  1683. };
  1684. LPCSTR name = vtableMap->Lookup(address, nullptr);
  1685. if (name)
  1686. {
  1687. if (strncmp(name, "class ", _countof("class ") - 1) == 0)
  1688. {
  1689. name += _countof("class ") - 1;
  1690. }
  1691. }
  1692. return name;
  1693. #else
  1694. return "";
  1695. #endif
  1696. }
  1697. #endif
  1698. #if DBG_DUMP | defined(VTUNE_PROFILING)
  1699. bool Func::DoRecordNativeMap() const
  1700. {
  1701. #if defined(VTUNE_PROFILING)
  1702. if (VTuneChakraProfile::isJitProfilingActive)
  1703. {
  1704. return true;
  1705. }
  1706. #endif
  1707. #if DBG_DUMP
  1708. return PHASE_DUMP(Js::EncoderPhase, this) && Js::Configuration::Global.flags.Verbose;
  1709. #else
  1710. return false;
  1711. #endif
  1712. }
  1713. #endif
  1714. #ifdef PERF_HINT
  1715. void WritePerfHint(PerfHints hint, Func* func, uint byteCodeOffset /*= Js::Constants::NoByteCodeOffset*/)
  1716. {
  1717. if (!func->IsOOPJIT())
  1718. {
  1719. WritePerfHint(hint, (Js::FunctionBody*)func->GetJITFunctionBody()->GetAddr(), byteCodeOffset);
  1720. }
  1721. }
  1722. #endif