Func.cpp 61 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Base/EtwTrace.h"
  7. #include "Base/ScriptContextProfiler.h"
  8. #ifdef VTUNE_PROFILING
  9. #include "Base/VTuneChakraProfile.h"
  10. #endif
  11. #include "Library/ForInObjectEnumerator.h"
// Func constructor.
//
// A Func is the backend's unit of compilation: the top-level function or loop
// body being jitted, or (when parentFunc != nullptr) an inlinee nested inside
// that top-level Func. The constructor wires up the per-compilation state and
// performs the one-time setup that must precede IR building: symbol table
// creation (top func only), stack-closure / stack-nested-func decisions,
// constructor-cache and obj-type-spec-fld-info arrays, and debug-mode
// bookkeeping. Heavy lifting happens later in TryCodegen().
//
// Parameters of note:
//   alloc                  - arena all backend allocations for this compilation come from
//   workItem               - describes what to compile (function vs. loop body)
//   outputData             - IDL record handed back to the runtime side with the jit results
//   epInfo                 - entry point info; per Codegen()'s comment, in-proc JIT only
//   parentFunc             - nullptr for the top-level Func; the inliner for inlinees
//   postCallByteCodeOffset - bytecode offset after the inlined call site
//                            (NoByteCodeOffset for the top func)
//   returnValueRegSlot     - register slot receiving the inlinee's return value
Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
    ThreadContextInfo * threadContextInfo,
    ScriptContextInfo * scriptContextInfo,
    JITOutputIDL * outputData,
    Js::EntryPointInfo* epInfo,
    const FunctionJITRuntimeInfo *const runtimeInfo,
    JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
#if !FLOATVAR
    CodeGenNumberAllocator * numberAllocator,
#endif
    Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc,
    uint postCallByteCodeOffset, Js::RegSlot returnValueRegSlot, const bool isInlinedConstructor,
    Js::ProfileId callSiteIdInParentFunc, bool isGetterSetter) :
    m_alloc(alloc),
    m_workItem(workItem),
    m_output(outputData),
    m_entryPointInfo(epInfo),
    m_threadContextInfo(threadContextInfo),
    m_scriptContextInfo(scriptContextInfo),
    m_runtimeInfo(runtimeInfo),
    m_polymorphicInlineCacheInfo(polymorphicInlineCacheInfo),
    m_codeGenAllocators(codeGenAllocators),
    m_inlineeId(0),
    pinnedTypeRefs(nullptr),
    singleTypeGuards(nullptr),
    equivalentTypeGuards(nullptr),
    propertyGuardsByPropertyId(nullptr),
    ctorCachesByPropertyId(nullptr),
    callSiteToArgumentsOffsetFixupMap(nullptr),
    indexedPropertyGuardCount(0),
    propertiesWrittenTo(nullptr),
    lazyBailoutProperties(alloc),
    anyPropertyMayBeWrittenTo(false),
#ifdef PROFILE_EXEC
    m_codeGenProfiler(codeGenProfiler),
#endif
    m_isBackgroundJIT(isBackgroundJIT),
    m_cloner(nullptr),
    m_cloneMap(nullptr),
    m_loopParamSym(nullptr),
    m_funcObjSym(nullptr),
    m_localClosureSym(nullptr),
    m_paramClosureSym(nullptr),
    m_localFrameDisplaySym(nullptr),
    m_bailoutReturnValueSym(nullptr),
    m_hasBailedOutSym(nullptr),
    m_inlineeFrameStartSym(nullptr),
    m_regsUsed(0),
    m_fg(nullptr),
    m_labelCount(0),
    m_argSlotsForFunctionsCalled(0),
    m_isLeaf(false),
    m_hasCalls(false),
    m_hasInlineArgsOpt(false),
    m_canDoInlineArgsOpt(true),
    m_doFastPaths(false),
    hasBailout(false),
    hasBailoutInEHRegion(false),
    hasInstrNumber(false),
    maintainByteCodeOffset(true),
    frameSize(0),
    parentFunc(parentFunc),
    argObjSyms(nullptr),
    m_nonTempLocalVars(nullptr),
    hasAnyStackNestedFunc(false),
    hasMarkTempObjects(false),
    postCallByteCodeOffset(postCallByteCodeOffset),
    maxInlineeArgOutCount(0),
    returnValueRegSlot(returnValueRegSlot),
    firstActualStackOffset(-1),
    m_localVarSlotsOffset(Js::Constants::InvalidOffset),
    m_hasLocalVarChangedOffset(Js::Constants::InvalidOffset),
    // Sentinel "not yet known"; SetFirstArgOffset asserts against it later.
    actualCount((Js::ArgSlot) - 1),
    tryCatchNestingLevel(0),
    m_localStackHeight(0),
    tempSymDouble(nullptr),
    tempSymBool(nullptr),
    hasInlinee(false),
    thisOrParentInlinerHasArguments(false),
    hasStackArgs(false),
    hasImplicitParamLoad(false),
    hasThrow(false),
    hasNonSimpleParams(false),
    hasUnoptimizedArgumentsAccess(false),
    applyTargetInliningRemovedArgumentsAccess(false),
    hasImplicitCalls(false),
    hasTempObjectProducingInstr(false),
    isInlinedConstructor(isInlinedConstructor),
#if !FLOATVAR
    numberAllocator(numberAllocator),
#endif
    loopCount(0),
    callSiteIdInParentFunc(callSiteIdInParentFunc),
    isGetterSetter(isGetterSetter),
    frameInfo(nullptr),
    isTJLoopBody(false),
    m_nativeCodeDataSym(nullptr),
    isFlowGraphValid(false),
#if DBG
    m_callSiteCount(0),
#endif
    stackNestedFunc(false),
    stackClosure(false)
#if defined(_M_ARM32_OR_ARM64)
    , m_ArgumentsOffset(0)
    , m_epilogLabel(nullptr)
#endif
    , m_funcStartLabel(nullptr)
    , m_funcEndLabel(nullptr)
#if DBG
    , hasCalledSetDoFastPaths(false)
    , allowRemoveBailOutArgInstr(false)
    , currentPhases(alloc)
    , isPostLower(false)
    , isPostRegAlloc(false)
    , isPostPeeps(false)
    , isPostLayout(false)
    , isPostFinalLower(false)
    , vtableMap(nullptr)
#endif
    , m_yieldOffsetResumeLabelList(nullptr)
    , m_bailOutNoSaveLabel(nullptr)
    , constantAddressRegOpnd(alloc)
    , lastConstantAddressRegLoadInstr(nullptr)
    , m_totalJumpTableSizeInBytesForSwitchStatements(0)
    , slotArrayCheckTable(nullptr)
    , frameDisplayCheckTable(nullptr)
    , stackArgWithFormalsTracker(nullptr)
    , m_forInLoopBaseDepth(0)
    , m_forInEnumeratorArrayOffset(-1)
    , argInsCount(0)
    , m_globalObjTypeSpecFldInfoArray(nullptr)
#if LOWER_SPLIT_INT64
    , m_int64SymPairMap(nullptr)
#endif
#ifdef RECYCLER_WRITE_BARRIER_JIT
    , m_lowerer(nullptr)
#endif
{
    // runtimeInfo is only supplied for inlinees; the two must agree.
    Assert(this->IsInlined() == !!runtimeInfo);

    // Only the top-level Func initializes the shared output record; inlinees
    // share their top func's JITOutputIDL.
    if (this->IsTopFunc())
    {
        outputData->hasJittedStackClosure = false;
        outputData->localVarSlotsOffset = m_localVarSlotsOffset;
        outputData->localVarChangedOffset = m_hasLocalVarChangedOffset;
    }

    // Inlinee ids are allocated by the top func; 0 is reserved for the top func itself.
    if (this->IsInlined())
    {
        m_inlineeId = ++(GetTopFunc()->m_inlineeId);
    }
    bool doStackNestedFunc = GetJITFunctionBody()->DoStackNestedFunc();
    // Stack closures require both the body's opt-in and two phases to be enabled.
    bool doStackClosure = GetJITFunctionBody()->DoStackClosure() && !PHASE_OFF(Js::FrameDisplayFastPathPhase, this) && !PHASE_OFF(Js::StackClosurePhase, this);
    Assert(!doStackClosure || doStackNestedFunc);
    this->stackClosure = doStackClosure && this->IsTopFunc();
    if (this->stackClosure)
    {
        // TODO: calculate on runtime side?
        m_output.SetHasJITStackClosure();
    }

    if (m_workItem->Type() == JsFunctionType &&
        GetJITFunctionBody()->DoBackendArgumentsOptimization() &&
        !GetJITFunctionBody()->HasTry())
    {
        // doBackendArgumentsOptimization bit is set when there is no eval inside a function
        // as determined by the bytecode generator.
        SetHasStackArgs(true);
    }

    if (doStackNestedFunc && GetJITFunctionBody()->GetNestedCount() != 0 &&
        (this->IsTopFunc() || this->GetTopFunc()->m_workItem->Type() != JsLoopBodyWorkItemType)) // make sure none of the functions inlined in a jitted loop body allocate nested functions on the stack
    {
        Assert(!(this->IsJitInDebugMode() && !GetJITFunctionBody()->IsLibraryCode()));
        stackNestedFunc = true;
        this->GetTopFunc()->hasAnyStackNestedFunc = true;
    }

    // Propagate "uses arguments object" down the inline chain.
    if (GetJITFunctionBody()->HasOrParentHasArguments() || (parentFunc && parentFunc->thisOrParentInlinerHasArguments))
    {
        thisOrParentInlinerHasArguments = true;
    }

    if (parentFunc == nullptr)
    {
        // Top-level Func: owns the symbol table shared by all inlinees.
        inlineDepth = 0;
        m_symTable = JitAnew(alloc, SymTable);
        m_symTable->Init(this);
        m_symTable->SetStartingID(static_cast<SymID>(workItem->GetJITFunctionBody()->GetLocalsCount() + 1));

        Assert(Js::Constants::NoByteCodeOffset == postCallByteCodeOffset);
        Assert(Js::Constants::NoRegister == returnValueRegSlot);

#if defined(_M_IX86) || defined(_M_X64)
        if (HasArgumentSlot())
        {
            // Pre-allocate the single argument slot we'll reserve for the arguments object.
            // For ARM, the argument slot is not part of the local but part of the register saves
            m_localStackHeight = MachArgsSlotOffset;
        }
#endif
    }
    else
    {
        inlineDepth = parentFunc->inlineDepth + 1;
        Assert(Js::Constants::NoByteCodeOffset != postCallByteCodeOffset);
    }

    this->constructorCacheCount = 0;
    this->constructorCaches = AnewArrayZ(this->m_alloc, JITTimeConstructorCache*, GetJITFunctionBody()->GetProfiledCallSiteCount());

#if DBG_DUMP
    m_codeSize = -1;
#endif

#if defined(_M_X64)
    m_spillSize = -1;
    m_argsSize = -1;
    m_savedRegSize = -1;
#endif

    if (this->IsJitInDebugMode())
    {
        // Tracks non-temp local vars so debugger writes go through memory (see OnAddSym).
        m_nonTempLocalVars = Anew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
    }

    if (GetJITFunctionBody()->IsCoroutine())
    {
        m_yieldOffsetResumeLabelList = YieldOffsetResumeLabelList::New(this->m_alloc);
    }

    if (this->IsTopFunc())
    {
        m_globalObjTypeSpecFldInfoArray = JitAnewArrayZ(this->m_alloc, ObjTypeSpecFldInfo*, GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    }

    // Register this func's obj-type-spec field infos in the top func's global array,
    // indexed by their global id.
    for (uint i = 0; i < GetJITFunctionBody()->GetInlineCacheCount(); ++i)
    {
        ObjTypeSpecFldInfo * info = GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(i);
        if (info != nullptr)
        {
            Assert(info->GetObjTypeSpecFldId() < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
            GetTopFunc()->m_globalObjTypeSpecFldInfoArray[info->GetObjTypeSpecFldId()] = info;
        }
    }

    canHoistConstantAddressLoad = !PHASE_OFF(Js::HoistConstAddrPhase, this);

    m_forInLoopMaxDepth = this->GetJITFunctionBody()->GetForInLoopDepth();
}
  246. bool
  247. Func::IsLoopBodyInTry() const
  248. {
  249. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTry;
  250. }
  251. /* static */
  252. void
  253. Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  254. ThreadContextInfo * threadContextInfo,
  255. ScriptContextInfo * scriptContextInfo,
  256. JITOutputIDL * outputData,
  257. Js::EntryPointInfo* epInfo, // for in-proc jit only
  258. const FunctionJITRuntimeInfo *const runtimeInfo,
  259. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
  260. #if !FLOATVAR
  261. CodeGenNumberAllocator * numberAllocator,
  262. #endif
  263. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT)
  264. {
  265. bool rejit;
  266. do
  267. {
  268. Func func(alloc, workItem, threadContextInfo,
  269. scriptContextInfo, outputData, epInfo, runtimeInfo,
  270. polymorphicInlineCacheInfo, codeGenAllocators,
  271. #if !FLOATVAR
  272. numberAllocator,
  273. #endif
  274. codeGenProfiler, isBackgroundJIT);
  275. try
  276. {
  277. func.TryCodegen();
  278. rejit = false;
  279. }
  280. catch (Js::RejitException ex)
  281. {
  282. // The work item needs to be rejitted, likely due to some optimization that was too aggressive
  283. switch (ex.Reason())
  284. {
  285. case RejitReason::AggressiveIntTypeSpecDisabled:
  286. outputData->disableAggressiveIntTypeSpec = TRUE;
  287. break;
  288. case RejitReason::InlineApplyDisabled:
  289. workItem->GetJITFunctionBody()->DisableInlineApply();
  290. outputData->disableInlineApply = TRUE;
  291. break;
  292. case RejitReason::InlineSpreadDisabled:
  293. workItem->GetJITFunctionBody()->DisableInlineSpread();
  294. outputData->disableInlineSpread = TRUE;
  295. break;
  296. case RejitReason::DisableStackArgOpt:
  297. outputData->disableStackArgOpt = TRUE;
  298. break;
  299. case RejitReason::DisableSwitchOptExpectingInteger:
  300. case RejitReason::DisableSwitchOptExpectingString:
  301. outputData->disableSwitchOpt = TRUE;
  302. break;
  303. case RejitReason::ArrayCheckHoistDisabled:
  304. case RejitReason::ArrayAccessHelperCallEliminationDisabled:
  305. outputData->disableArrayCheckHoist = TRUE;
  306. break;
  307. case RejitReason::TrackIntOverflowDisabled:
  308. outputData->disableTrackCompoundedIntOverflow = TRUE;
  309. break;
  310. default:
  311. Assume(UNREACHED);
  312. }
  313. if (PHASE_TRACE(Js::ReJITPhase, &func))
  314. {
  315. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  316. Output::Print(
  317. _u("Rejit (compile-time): function: %s (%s) reason: %S\n"),
  318. workItem->GetJITFunctionBody()->GetDisplayName(),
  319. workItem->GetJITTimeInfo()->GetDebugNumberSet(debugStringBuffer),
  320. ex.ReasonName());
  321. }
  322. rejit = true;
  323. }
  324. // Either the entry point has a reference to the number now, or we failed to code gen and we
  325. // don't need to numbers, we can flush the completed page now.
  326. //
  327. // If the number allocator is NULL then we are shutting down the thread context and so too the
  328. // code generator. The number allocator must be freed before the recycler (and thus before the
  329. // code generator) so we can't and don't need to flush it.
  330. // TODO: OOP JIT, allocator cleanup
  331. } while (rejit);
  332. }
///----------------------------------------------------------------------------
///
/// Func::TryCodegen
///
/// Attempt to Codegen this function.
///
/// Runs the full backend pipeline in order: IR build, inlining, flow graph +
/// global optimization, lowering, constant encoding, register allocation,
/// peeps, layout, EH bailout patch-up, NOP insertion, prolog/epilog, final
/// lowering, and machine-code encoding. For OOP JIT, it then serializes the
/// NativeCodeData chunks and their fixup table into the output buffer.
/// Throws (e.g. Js::RejitException, OOM, script-closed) rather than returning
/// an error; Func::Codegen handles the rejit case.
///
///----------------------------------------------------------------------------
void
Func::TryCodegen()
{
    // Debug-mode JIT and try regions are mutually exclusive here.
    Assert(!IsJitInDebugMode() || !GetJITFunctionBody()->HasTry());

    BEGIN_CODEGEN_PHASE(this, Js::BackEndPhase);
    {
        // IRBuilder: translate bytecode into backend IR.
        BEGIN_CODEGEN_PHASE(this, Js::IRBuilderPhase);

#ifdef ASMJS_PLAT
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            IRBuilderAsmJs asmIrBuilder(this);
            asmIrBuilder.Build();
        }
        else
#endif
        {
            IRBuilder irBuilder(this);
            irBuilder.Build();
        }

        END_CODEGEN_PHASE(this, Js::IRBuilderPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::IRBuilderPhase);
#endif /* IR_VIEWER */

        // Inlining must run before flow-graph construction so inlinee IR is present.
        BEGIN_CODEGEN_PHASE(this, Js::InlinePhase);
        InliningHeuristics heuristics(GetWorkItem()->GetJITTimeInfo(), this->IsLoopBody());
        Inline inliner(this, heuristics);
        inliner.Optimize();
        END_CODEGEN_PHASE(this, Js::InlinePhase);

        // Abandon compilation early if the script context has been closed.
        ThrowIfScriptClosed();

        // FlowGraph
        {
            // Scope for FlowGraph arena — freed as soon as globopt is done.
            NoRecoverMemoryJitArenaAllocator fgAlloc(_u("BE-FlowGraph"), m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);

            BEGIN_CODEGEN_PHASE(this, Js::FGBuildPhase);
            this->m_fg = FlowGraph::New(this, &fgAlloc);
            this->m_fg->Build();
            END_CODEGEN_PHASE(this, Js::FGBuildPhase);

            // Global Optimization and Type Specialization
            BEGIN_CODEGEN_PHASE(this, Js::GlobOptPhase);
            GlobOpt globOpt(this);
            globOpt.Optimize();
            END_CODEGEN_PHASE(this, Js::GlobOptPhase);

            // Delete flowGraph now
            this->m_fg->Destroy();
            this->m_fg = nullptr;
        }
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::GlobOptPhase);
#endif /* IR_VIEWER */

        ThrowIfScriptClosed();

        // Lowering: machine-independent IR -> machine-dependent IR.
        // NOTE: 'lowerer' is reused below for interrupt probes, EH patch-up,
        // prolog/epilog and final lowering.
        Lowerer lowerer(this);
        BEGIN_CODEGEN_PHASE(this, Js::LowererPhase);
        lowerer.Lower();
        END_CODEGEN_PHASE(this, Js::LowererPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::LowererPhase);
#endif /* IR_VIEWER */

        // Encode constants (security hardening of large embedded constants).
        Security security(this);

        BEGIN_CODEGEN_PHASE(this, Js::EncodeConstantsPhase)
        security.EncodeLargeConstants();
        END_CODEGEN_PHASE(this, Js::EncodeConstantsPhase);

        if (GetJITFunctionBody()->DoInterruptProbe())
        {
            BEGIN_CODEGEN_PHASE(this, Js::InterruptProbePhase)
            lowerer.DoInterruptProbes();
            END_CODEGEN_PHASE(this, Js::InterruptProbePhase)
        }

        // Register Allocation
        BEGIN_CODEGEN_PHASE(this, Js::RegAllocPhase);
        LinearScan linearScan(this);
        linearScan.RegAlloc();
        END_CODEGEN_PHASE(this, Js::RegAllocPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::RegAllocPhase);
#endif /* IR_VIEWER */

        ThrowIfScriptClosed();

        // Peephole optimizations
        BEGIN_CODEGEN_PHASE(this, Js::PeepsPhase);
        Peeps peeps(this);
        peeps.PeepFunc();
        END_CODEGEN_PHASE(this, Js::PeepsPhase);

        // Layout
        BEGIN_CODEGEN_PHASE(this, Js::LayoutPhase);
        SimpleLayout layout(this);
        layout.Layout();
        END_CODEGEN_PHASE(this, Js::LayoutPhase);

        // Bailouts inside try/catch regions need extra patch-up after layout.
        if (this->HasTry() && this->hasBailoutInEHRegion)
        {
            BEGIN_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
            lowerer.EHBailoutPatchUp();
            END_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
        }

        // Insert NOPs (moving this before prolog/epilog for AMD64 and possibly ARM).
        BEGIN_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
        security.InsertNOPs();
        END_CODEGEN_PHASE(this, Js::InsertNOPsPhase);

        // Prolog/Epilog
        BEGIN_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            lowerer.LowerPrologEpilogAsmJs();
        }
        else
        {
            lowerer.LowerPrologEpilog();
        }
        END_CODEGEN_PHASE(this, Js::PrologEpilogPhase);

        BEGIN_CODEGEN_PHASE(this, Js::FinalLowerPhase);
        lowerer.FinalLower();
        END_CODEGEN_PHASE(this, Js::FinalLowerPhase);

        // Encoder: emit the final machine code.
        BEGIN_CODEGEN_PHASE(this, Js::EncoderPhase);
        Encoder encoder(this);
        encoder.Encode();
        END_CODEGEN_PHASE_NO_DUMP(this, Js::EncoderPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::EncoderPhase);
#endif /* IR_VIEWER */
    }

#if DBG_DUMP
    if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag))
    {
        // Redirect the dump to the asm file if one is configured, then restore.
        FILE * oldFile = 0;
        FILE * asmFile = GetScriptContext()->GetNativeCodeGenerator()->asmFile;
        if (asmFile)
        {
            oldFile = Output::SetFile(asmFile);
        }

        this->Dump(IRDumpFlags_AsmDumpMode);

        Output::Flush();

        if (asmFile)
        {
            FILE *openedFile = Output::SetFile(oldFile);
            Assert(openedFile == asmFile);
        }
    }
#endif
    if (this->IsOOPJIT())
    {
        // OOP JIT: run the native-data fixups, then serialize all data chunks
        // plus a fixup record table into MIDL-allocated buffers for transfer
        // back to the client process.
        BEGIN_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);

        auto dataAllocator = this->GetNativeCodeDataAllocator();
        if (dataAllocator->allocCount > 0)
        {
            NativeCodeData::DataChunk *chunk = (NativeCodeData::DataChunk*)dataAllocator->chunkList;
            NativeCodeData::DataChunk *next1 = chunk;
            while (next1)
            {
                if (next1->fixupFunc)
                {
                    next1->fixupFunc(next1->data, chunk);
                }
#if DBG
                if (CONFIG_FLAG(OOPJITFixupValidate))
                {
                    // Scan memory to see if there's missing pointer needs to be fixed up
                    // This can hit false positive if some data field happens to have value
                    // falls into the NativeCodeData memory range.
                    NativeCodeData::DataChunk *next2 = chunk;
                    while (next2)
                    {
                        for (unsigned int i = 0; i < next1->len / sizeof(void*); i++)
                        {
                            if (((void**)next1->data)[i] == (void*)next2->data)
                            {
                                NativeCodeData::VerifyExistFixupEntry((void*)next2->data, &((void**)next1->data)[i], next1->data);
                            }
                        }
                        next2 = next2->next;
                    }
                }
#endif
                next1 = next1->next;
            }

            JITOutputIDL* jitOutputData = m_output.GetOutputData();
            // One fixup record per allocation; table is a flexible-array struct.
            size_t allocSize = offsetof(NativeDataFixupTable, fixupRecords) + sizeof(NativeDataFixupRecord)* (dataAllocator->allocCount);
            jitOutputData->nativeDataFixupTable = (NativeDataFixupTable*)midl_user_allocate(allocSize);
            if (!jitOutputData->nativeDataFixupTable)
            {
                Js::Throw::OutOfMemory();
            }
            __analysis_assume(jitOutputData->nativeDataFixupTable);
            jitOutputData->nativeDataFixupTable->count = dataAllocator->allocCount;

            jitOutputData->buffer = (NativeDataBuffer*)midl_user_allocate(offsetof(NativeDataBuffer, data) + dataAllocator->totalSize);
            if (!jitOutputData->buffer)
            {
                Js::Throw::OutOfMemory();
            }
            __analysis_assume(jitOutputData->buffer);

            jitOutputData->buffer->len = dataAllocator->totalSize;
            // Copy every chunk's payload into the contiguous transfer buffer and
            // record its fixup metadata.
            unsigned int len = 0;
            unsigned int count = 0;
            next1 = chunk;
            while (next1)
            {
                memcpy(jitOutputData->buffer->data + len, next1->data, next1->len);
                len += next1->len;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].index = next1->allocIndex;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].length = next1->len;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].startOffset = next1->offset;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].updateList = next1->fixupList;
                count++;
                next1 = next1->next;
            }

#if DBG
            if (PHASE_TRACE1(Js::NativeCodeDataPhase))
            {
                Output::Print(_u("NativeCodeData Server Buffer: %p, len: %x, chunk head: %p\n"), jitOutputData->buffer->data, jitOutputData->buffer->len, chunk);
            }
#endif
        }
        END_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);
    }

    END_CODEGEN_PHASE(this, Js::BackEndPhase);
}
  557. ///----------------------------------------------------------------------------
  558. /// Func::StackAllocate
  559. /// Allocate stack space of given size.
  560. ///----------------------------------------------------------------------------
  561. int32
  562. Func::StackAllocate(int size)
  563. {
  564. Assert(this->IsTopFunc());
  565. int32 offset;
  566. #ifdef MD_GROW_LOCALS_AREA_UP
  567. // Locals have positive offsets and are allocated from bottom to top.
  568. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  569. offset = m_localStackHeight;
  570. m_localStackHeight += size;
  571. #else
  572. // Locals have negative offsets and are allocated from top to bottom.
  573. m_localStackHeight += size;
  574. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  575. offset = -m_localStackHeight;
  576. #endif
  577. return offset;
  578. }
  579. ///----------------------------------------------------------------------------
  580. ///
  581. /// Func::StackAllocate
  582. ///
  583. /// Allocate stack space for this symbol.
  584. ///
  585. ///----------------------------------------------------------------------------
  586. int32
  587. Func::StackAllocate(StackSym *stackSym, int size)
  588. {
  589. Assert(size > 0);
  590. if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym() || stackSym->IsAllocated())
  591. {
  592. return stackSym->m_offset;
  593. }
  594. Assert(stackSym->m_offset == 0);
  595. stackSym->m_allocated = true;
  596. stackSym->m_offset = StackAllocate(size);
  597. return stackSym->m_offset;
  598. }
  599. void
  600. Func::SetArgOffset(StackSym *stackSym, int32 offset)
  601. {
  602. AssertMsg(offset >= 0, "Why is the offset, negative?");
  603. stackSym->m_offset = offset;
  604. stackSym->m_allocated = true;
  605. }
  606. ///
  607. /// Ensures that local var slots are created, if the function has locals.
  608. /// Allocate stack space for locals used for debugging
  609. /// (for local non-temp vars we write-through memory so that locals inspection can make use of that.).
  610. // On stack, after local slots we allocate space for metadata (in particular, whether any the locals was changed in debugger).
  611. ///
void
Func::EnsureLocalVarSlots()
{
    // Only meaningful when jitting for the debugger.
    Assert(IsJitInDebugMode());
    if (!this->HasLocalVarSlotCreated())
    {
        uint32 localSlotCount = GetJITFunctionBody()->GetNonTempLocalVarCount();
        if (localSlotCount && m_localVarSlotsOffset == Js::Constants::InvalidOffset)
        {
            // Allocate the slots: one diag-sized slot per non-temp local,
            // followed by a flag cell recording whether any local was changed
            // in the debugger.
            int32 size = localSlotCount * GetDiagLocalSlotSize();
            m_localVarSlotsOffset = StackAllocate(size);
            m_hasLocalVarChangedOffset = StackAllocate(max(1, MachStackAlignment)); // Can't alloc less than StackAlignment bytes.
            Assert(m_workItem->Type() == JsFunctionType);
            // Publish the (frame-adjusted) offsets so the runtime/debugger can
            // locate the slots.
            m_output.SetVarSlotsOffset(AdjustOffsetValue(m_localVarSlotsOffset));
            m_output.SetVarChangedOffset(AdjustOffsetValue(m_hasLocalVarChangedOffset));
        }
    }
}
void Func::SetFirstArgOffset(IR::Instr* inlineeStart)
{
    Assert(inlineeStart->m_func == this);
    Assert(!IsTopFunc());
    int32 lastOffset;

    // Find the highest-offset ArgOut feeding the InlineeStart.
    IR::Instr* arg = inlineeStart->GetNextArg();
    const auto lastArgOutStackSym = arg->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
    lastOffset = lastArgOutStackSym->m_offset;
    Assert(lastArgOutStackSym->m_isSingleDef);
    // The ArgOut chain is not necessarily ordered by offset; also check the
    // previous link (src2) and take the larger of the two offsets.
    const auto secondLastArgOutOpnd = lastArgOutStackSym->m_instrDef->GetSrc2();
    if (secondLastArgOutOpnd->IsSymOpnd())
    {
        const auto secondLastOffset = secondLastArgOutOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_offset;
        if (secondLastOffset > lastOffset)
        {
            lastOffset = secondLastOffset;
        }
    }
    // One past the last arg slot, then walk back over the actuals plus the
    // inlinee metadata slots to get the offset of the first actual.
    lastOffset += MachPtr;
    int32 firstActualStackOffset = lastOffset - ((this->actualCount + Js::Constants::InlineeMetaArgCount) * MachPtr);
    // Must agree with any previously computed value for this inlinee.
    Assert((this->firstActualStackOffset == -1) || (this->firstActualStackOffset == firstActualStackOffset));
    this->firstActualStackOffset = firstActualStackOffset;
}
  654. int32
  655. Func::GetLocalVarSlotOffset(int32 slotId)
  656. {
  657. this->EnsureLocalVarSlots();
  658. Assert(m_localVarSlotsOffset != Js::Constants::InvalidOffset);
  659. int32 slotOffset = slotId * GetDiagLocalSlotSize();
  660. return m_localVarSlotsOffset + slotOffset;
  661. }
  662. void Func::OnAddSym(Sym* sym)
  663. {
  664. Assert(sym);
  665. if (this->IsJitInDebugMode() && this->IsNonTempLocalVar(sym->m_id))
  666. {
  667. Assert(m_nonTempLocalVars);
  668. m_nonTempLocalVars->Set(sym->m_id);
  669. }
  670. }
  671. ///
  672. /// Returns offset of the flag (1 byte) whether any local was changed (in debugger).
  673. /// If the function does not have any locals, returns -1.
  674. ///
  675. int32
  676. Func::GetHasLocalVarChangedOffset()
  677. {
  678. this->EnsureLocalVarSlots();
  679. return m_hasLocalVarChangedOffset;
  680. }
  681. bool
  682. Func::IsJitInDebugMode() const
  683. {
  684. return m_workItem->IsJitInDebugMode();
  685. }
  686. bool
  687. Func::IsNonTempLocalVar(uint32 slotIndex)
  688. {
  689. return GetJITFunctionBody()->IsNonTempLocalVar(slotIndex);
  690. }
// Translate an internal stack offset into the form the runtime expects for
// locals inspection; direction depends on whether the target grows the locals
// area up or down.
int32
Func::AdjustOffsetValue(int32 offset)
{
#ifdef MD_GROW_LOCALS_AREA_UP
    // Bias and negate so the consumer can distinguish these from ordinary offsets.
    return -(offset + BailOutInfo::StackSymBias);
#else
    // Stack offset are negative, includes the PUSH EBP and return address
    return offset - (2 * MachPtr);
#endif
}
#ifdef MD_GROW_LOCALS_AREA_UP
// Note: this is called during jit-compile when we finalize bail out record.
// NOTE(review): "Ajust" is a long-standing typo for "Adjust"; the name is part
// of the class interface, so it is left unchanged here.
void
Func::AjustLocalVarSlotOffset()
{
    if (GetJITFunctionBody()->GetNonTempLocalVarCount())
    {
        // Turn positive SP-relative base locals offset into negative frame-pointer-relative offset
        // This is changing value for restoring the locals when read due to locals inspection.
        int localsOffset = m_localVarSlotsOffset - (m_localStackHeight + m_ArgumentsOffset);
        int valueChangeOffset = m_hasLocalVarChangedOffset - (m_localStackHeight + m_ArgumentsOffset);

        m_output.SetVarSlotsOffset(localsOffset);
        m_output.SetVarChangedOffset(valueChangeOffset);
    }
}
#endif
  717. bool
  718. Func::DoGlobOptsForGeneratorFunc() const
  719. {
  720. // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
  721. return !GetJITFunctionBody()->IsCoroutine();
  722. }
  723. bool
  724. Func::DoSimpleJitDynamicProfile() const
  725. {
  726. return IsSimpleJit() && !PHASE_OFF(Js::SimpleJitDynamicProfilePhase, GetTopFunc()) && !CONFIG_FLAG(NewSimpleJit);
  727. }
  728. void
  729. Func::SetDoFastPaths()
  730. {
  731. // Make sure we only call this once!
  732. Assert(!this->hasCalledSetDoFastPaths);
  733. bool doFastPaths = false;
  734. if(!PHASE_OFF(Js::FastPathPhase, this) && (!IsSimpleJit() || CONFIG_FLAG(NewSimpleJit)))
  735. {
  736. doFastPaths = true;
  737. }
  738. this->m_doFastPaths = doFastPaths;
  739. #ifdef DBG
  740. this->hasCalledSetDoFastPaths = true;
  741. #endif
  742. }
  743. #if LOWER_SPLIT_INT64
// Split an int64 operand into a low/high 32-bit operand pair for 32-bit
// targets. Syms are memoized in m_int64SymPairMap so the same source sym
// always maps to the same pair. Lowering-time only.
Int64RegPair Func::FindOrCreateInt64Pair(IR::Opnd* opnd)
{
    AssertMsg(this->GetTopFunc()->currentPhases.Top() == Js::LowererPhase, "New Int64 sym map is only allowed during lower");

    Int64RegPair pair;
    IRType pairType = opnd->GetType();
    if (opnd->IsInt64())
    {
        // Each half keeps the signedness of the original 64-bit type.
        pairType = IRType_IsSignedInt(pairType) ? TyInt32 : TyUint32;
    }

    // Indir: reuse the operand as the low half and copy it at +4 bytes for the high half.
    if (opnd->IsIndirOpnd())
    {
        IR::IndirOpnd* indir = opnd->AsIndirOpnd();
        indir->SetType(pairType);
        pair.low = indir;
        pair.high = indir->Copy(this)->AsIndirOpnd();
        pair.high->AsIndirOpnd()->SetOffset(indir->GetOffset() + 4);
        return pair;
    }

    // Only indir opnd can have a type other than int64
    Assert(opnd->IsInt64());

    // Immediates: split the constant into its two 32-bit halves.
    if (opnd->IsImmediateOpnd())
    {
        int64 value = opnd->GetImmediateValue(this);
        pair.low = IR::IntConstOpnd::New((int32)value, pairType, this);
        pair.high = IR::IntConstOpnd::New((int32)(value >> 32), pairType, this);
        return pair;
    }

    Int64SymPair symPair;

    if (!m_int64SymPairMap)
    {
        m_int64SymPairMap = Anew(m_alloc, Int64SymPairMap, m_alloc);
    }
    StackSym* stackSym = opnd->GetStackSym();
    AssertOrFailFastMsg(stackSym, "Invalid int64 operand type");
    SymID symId = stackSym->m_id;
    if (!m_int64SymPairMap->TryGetValue(symId, &symPair))
    {
        if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym())
        {
            // Arg/param slots: the pair occupies two consecutive slots and
            // inherits the original sym's already-fixed stack position.
            const bool isArg = stackSym->IsArgSlotSym();
            if (isArg)
            {
                Js::ArgSlot slotNumber = stackSym->GetArgSlotNum();
                symPair.low = StackSym::NewArgSlotSym(slotNumber, this, pairType);
                symPair.high = StackSym::NewArgSlotSym(slotNumber + 1, this, pairType);
            }
            else
            {
                Js::ArgSlot slotNumber = stackSym->GetParamSlotNum();
                symPair.low = StackSym::NewParamSlotSym(slotNumber, this, pairType);
                symPair.high = StackSym::NewParamSlotSym(slotNumber + 1, this, pairType);
            }
            symPair.low->m_allocated = true;
            symPair.low->m_offset = stackSym->m_offset;
            symPair.high->m_allocated = true;
            symPair.high->m_offset = stackSym->m_offset + 4;
        }
        else
        {
            symPair.low = StackSym::New(pairType, this);
            symPair.high = StackSym::New(pairType, this);
        }
        m_int64SymPairMap->Add(symId, symPair);
    }

    // Wrap the memoized syms in the same operand kind as the input.
    if (opnd->IsSymOpnd())
    {
        pair.low = IR::SymOpnd::New(symPair.low, opnd->AsSymOpnd()->m_offset, pairType, this);
        pair.high = IR::SymOpnd::New(symPair.high, opnd->AsSymOpnd()->m_offset, pairType, this);
    }
    else
    {
        pair.low = IR::RegOpnd::New(symPair.low, pairType, this);
        pair.high = IR::RegOpnd::New(symPair.high, pairType, this);
    }
    return pair;
}
// After int64 splitting, any sym live on a loop back edge that was replaced by
// a low/high pair must have both halves marked live on the back edge too.
void Func::Int64SplitExtendLoopLifetime(Loop* loop)
{
    if (m_int64SymPairMap)
    {
        BVSparse<JitArenaAllocator> *liveOnBackEdgeSyms = loop->regAlloc.liveOnBackEdgeSyms;
        FOREACH_BITSET_IN_SPARSEBV(symId, liveOnBackEdgeSyms)
        {
            Int64SymPair pair;
            if (m_int64SymPairMap->TryGetValue(symId, &pair))
            {
                // If we have replaced any sym that was live on the back edge for 2 other syms
                // these 2 syms needs to be live on back edge as well.
                liveOnBackEdgeSyms->Set(pair.low->m_id);
                liveOnBackEdgeSyms->Set(pair.high->m_id);
            }
        } NEXT_BITSET_IN_SPARSEBV;
    }
}
  838. #endif
  839. #ifdef _M_ARM
// ARM only: which register addresses the locals area. Functions with try
// regions (or the ForceLocalsPtr test flag) use a dedicated alternate locals
// pointer instead of SP, since SP may move/be restored across EH.
RegNum
Func::GetLocalsPointer() const
{
#ifdef DBG
    if (Js::Configuration::Global.flags.IsEnabled(Js::ForceLocalsPtrFlag))
    {
        return ALT_LOCALS_PTR;
    }
#endif

    if (GetJITFunctionBody()->HasTry())
    {
        return ALT_LOCALS_PTR;
    }

    return RegSP;
}
  855. #endif
// Record that a slot-array access requires a bounds check, keyed by the slot
// array's stack sym. Only the maximum accessed slot id is kept per sym, since
// checking it covers all lower slots.
void Func::AddSlotArrayCheck(IR::SymOpnd *fieldOpnd)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->slotArrayCheckTable == nullptr)
    {
        this->slotArrayCheckTable = SlotArrayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    // For slot accesses the property id is the slot index.
    uint32 slot = propertySym->m_propertyId;
    uint32 *pSlotId = this->slotArrayCheckTable->FindOrInsert(slot, propertySym->m_stackSym->m_id);
    // Raise the recorded slot id if this access is beyond it (or the entry was
    // the (uint32)-1 sentinel).
    if (pSlotId && (*pSlotId == (uint32)-1 || *pSlotId < slot))
    {
        *pSlotId = propertySym->m_propertyId;
    }
}
// Record that a frame-display access needs a range check: the frame display
// itself must have at least `frameDisplaySlot` entries, and (optionally) the
// slot array found at that entry must have at least `slotId` slots.
// slotId == (uint32)-1 means no inner slot-array check is required.
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->frameDisplayCheckTable == nullptr)
    {
        this->frameDisplayCheckTable = FrameDisplayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    FrameDisplayCheckRecord **record = this->frameDisplayCheckTable->FindOrInsertNew(propertySym->m_stackSym->m_id);
    if (*record == nullptr)
    {
        *record = JitAnew(m_alloc, FrameDisplayCheckRecord);
    }

    // Track the maximum frame-display slot accessed through this sym.
    uint32 frameDisplaySlot = propertySym->m_propertyId;
    if ((*record)->table == nullptr || (*record)->slotId < frameDisplaySlot)
    {
        (*record)->slotId = frameDisplaySlot;
    }

    if (slotId != (uint32)-1)
    {
        // Also track the maximum slot accessed within the scope at this entry.
        if ((*record)->table == nullptr)
        {
            (*record)->table = SlotArrayCheckTable::New(m_alloc, 4);
        }
        uint32 *pSlotId = (*record)->table->FindOrInsert(slotId, frameDisplaySlot);
        if (pSlotId && *pSlotId < slotId)
        {
            *pSlotId = slotId;
        }
    }
}
void Func::InitLocalClosureSyms()
{
    Assert(this->m_localClosureSym == nullptr);

    // Allocate stack space for closure pointers. Do this only if we're jitting for stack closures, and
    // tell bailout that these are not byte code symbols so that we don't try to encode them in the bailout record,
    // as they don't have normal lifetimes.
    // (Passing (Js::RegSlot)-1 as the byte-code reg when doing stack frame
    // display achieves that; otherwise the real reg slot is used.)
    Js::RegSlot regSlot = GetJITFunctionBody()->GetLocalClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }

    // The param scope gets its own closure sym only when it is not merged
    // with the body scope.
    regSlot = this->GetJITFunctionBody()->GetParamClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        Assert(this->GetParamClosureSym() == nullptr && !this->GetJITFunctionBody()->IsParamAndBodyScopeMerged());
        this->m_paramClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot) - 1 : regSlot,
                                   this);
    }

    regSlot = GetJITFunctionBody()->GetLocalFrameDisplayReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localFrameDisplaySym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }
}
  942. bool
  943. Func::IsTrackCompoundedIntOverflowDisabled() const
  944. {
  945. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsTrackCompoundedIntOverflowDisabled()) || m_output.IsTrackCompoundedIntOverflowDisabled();
  946. }
  947. bool
  948. Func::IsArrayCheckHoistDisabled() const
  949. {
  950. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsArrayCheckHoistDisabled(IsLoopBody())) || m_output.IsArrayCheckHoistDisabled();
  951. }
  952. bool
  953. Func::IsStackArgOptDisabled() const
  954. {
  955. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsStackArgOptDisabled()) || m_output.IsStackArgOptDisabled();
  956. }
  957. bool
  958. Func::IsSwitchOptDisabled() const
  959. {
  960. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsSwitchOptDisabled()) || m_output.IsSwitchOptDisabled();
  961. }
  962. bool
  963. Func::IsAggressiveIntTypeSpecDisabled() const
  964. {
  965. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsAggressiveIntTypeSpecDisabled(IsLoopBody())) || m_output.IsAggressiveIntTypeSpecDisabled();
  966. }
// Whether JIT code for this function may be emitted into the pre-reserved
// heap page segment (CFG-enabled builds only).
// NOTE on the unusual preprocessor layout below: the boolean expression is
// left open at the end of the common conditions, and each architecture arm
// (#if _M_IX86 / #elif _M_X64 / #else) supplies its own closing terms and ')'.
bool Func::CanAllocInPreReservedHeapPageSegment ()
{
#ifdef _CONTROL_FLOW_GUARD
    return PHASE_FORCE1(Js::PreReservedHeapAllocPhase) || (!PHASE_OFF1(Js::PreReservedHeapAllocPhase) &&
        !IsJitInDebugMode() && GetThreadContextInfo()->IsCFGEnabled()
        //&& !GetScriptContext()->IsScriptContextInDebugMode()
#if _M_IX86
        // x86: only for full-jit, and only when the codegen allocators
        // (in-proc or OOP, as appropriate) can create a pre-reserved segment.
        && m_workItem->GetJitMode() == ExecutionMode::FullJit
#if ENABLE_OOP_NATIVE_CODEGEN
        && (JITManager::GetJITManager()->IsJITServer()
            ? GetOOPCodeGenAllocators()->canCreatePreReservedSegment
            : GetInProcCodeGenAllocators()->canCreatePreReservedSegment)
#else
        && GetInProcCodeGenAllocators()->canCreatePreReservedSegment
#endif
        );
#elif _M_X64
        && true);
#else
        && false); //Not yet implemented for architectures other than x86 and amd64.
#endif //_M_ARCH
#else
    return false;
#endif//_CONTROL_FLOW_GUARD
}
  992. ///----------------------------------------------------------------------------
  993. ///
  994. /// Func::GetInstrCount
  995. ///
  996. /// Returns the number of instrs.
  997. /// Note: It counts all instrs for now, including labels, etc.
  998. ///
  999. ///----------------------------------------------------------------------------
  1000. uint32
  1001. Func::GetInstrCount()
  1002. {
  1003. uint instrCount = 0;
  1004. FOREACH_INSTR_IN_FUNC(instr, this)
  1005. {
  1006. instrCount++;
  1007. }NEXT_INSTR_IN_FUNC;
  1008. return instrCount;
  1009. }
  1010. ///----------------------------------------------------------------------------
  1011. ///
  1012. /// Func::NumberInstrs
  1013. ///
  1014. /// Number each instruction in order of appearance in the function.
  1015. ///
  1016. ///----------------------------------------------------------------------------
  1017. void
  1018. Func::NumberInstrs()
  1019. {
  1020. #if DBG_DUMP
  1021. Assert(this->IsTopFunc());
  1022. Assert(!this->hasInstrNumber);
  1023. this->hasInstrNumber = true;
  1024. #endif
  1025. uint instrCount = 1;
  1026. FOREACH_INSTR_IN_FUNC(instr, this)
  1027. {
  1028. instr->SetNumber(instrCount++);
  1029. }
  1030. NEXT_INSTR_IN_FUNC;
  1031. }
  1032. ///----------------------------------------------------------------------------
  1033. ///
  1034. /// Func::IsInPhase
  1035. ///
  1036. /// Determines whether the function is currently in the provided phase
  1037. ///
  1038. ///----------------------------------------------------------------------------
  1039. #if DBG
  1040. bool
  1041. Func::IsInPhase(Js::Phase tag)
  1042. {
  1043. return this->GetTopFunc()->currentPhases.Contains(tag);
  1044. }
  1045. #endif
  1046. ///----------------------------------------------------------------------------
  1047. ///
  1048. /// Func::BeginPhase
  1049. ///
  1050. /// Takes care of the profiler
  1051. ///
  1052. ///----------------------------------------------------------------------------
void
Func::BeginPhase(Js::Phase tag)
{
#ifdef DBG
    // Track the phase stack (consumed by IsInPhase / EndProfiler) and honor
    // the per-phase debug-break flag.
    this->GetTopFunc()->currentPhases.Push(tag);

    if (PHASE_DEBUGBREAK_ON_PHASE_BEGIN(tag, this))
    {
        __debugbreak();
    }
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileBegin(tag);
    }
#endif
}
  1072. ///----------------------------------------------------------------------------
  1073. ///
  1074. /// Func::EndPhase
  1075. ///
  1076. /// Takes care of the profiler and dumper
  1077. ///
  1078. ///----------------------------------------------------------------------------
// Close out a phase: pop (and verify) the phase stack in DBG builds and stop
// the codegen profiler timer when profiling is enabled.
void
Func::EndProfiler(Js::Phase tag)
{
#ifdef DBG
    Assert(this->GetTopFunc()->currentPhases.Count() > 0);
    Js::Phase popped = this->GetTopFunc()->currentPhases.Pop();
    // Phases must end in LIFO order.
    Assert(tag == popped);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileEnd(tag);
    }
#endif
}
// End a phase: stop profiling, optionally dump the IR, record phase-ordering
// state (post-lower/regalloc/peeps/layout/final-lower), and run the
// post-lower IR sanity check in DBG builds.
void
Func::EndPhase(Js::Phase tag, bool dump)
{
    this->EndProfiler(tag);
#if DBG_DUMP
    if(dump && (PHASE_DUMP(tag, this)
        || PHASE_DUMP(Js::BackEndPhase, this)))
    {
        Output::Print(_u("-----------------------------------------------------------------------------\n"));

        if (IsLoopBody())
        {
            Output::Print(_u("************   IR after %s (%S) Loop %d ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()),
                m_workItem->GetLoopNumber());
        }
        else
        {
            Output::Print(_u("************   IR after %s (%S)  ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()));
        }
        this->Dump(Js::Configuration::Global.flags.AsmDiff? IRDumpFlags_AsmDumpMode : IRDumpFlags_None);
    }
#endif

#if DBG
    // Record where we are in the backend pipeline and assert the expected
    // phase ordering (Lower -> [RegAlloc] -> Peeps -> Layout -> FinalLower).
    if (tag == Js::LowererPhase)
    {
        Assert(!this->isPostLower);
        this->isPostLower = true;
    }
    else if (tag == Js::RegAllocPhase)
    {
        Assert(!this->isPostRegAlloc);
        this->isPostRegAlloc = true;
    }
    else if (tag == Js::PeepsPhase)
    {
        Assert(this->isPostLower && !this->isPostLayout);
        this->isPostPeeps = true;
    }
    else if (tag == Js::LayoutPhase)
    {
        Assert(this->isPostPeeps && !this->isPostLayout);
        this->isPostLayout = true;
    }
    else if (tag == Js::FinalLowerPhase)
    {
        Assert(this->isPostLayout && !this->isPostFinalLower);
        this->isPostFinalLower = true;
    }
    if (this->isPostLower)
    {
#ifndef _M_ARM    // Need to verify ARM is clean.
        DbCheckPostLower dbCheck(this);

        dbCheck.Check();
#endif
    }
    this->m_alloc->MergeDelayFreeList();
#endif
}
  1157. Func const *
  1158. Func::GetTopFunc() const
  1159. {
  1160. Func const * func = this;
  1161. while (!func->IsTopFunc())
  1162. {
  1163. func = func->parentFunc;
  1164. }
  1165. return func;
  1166. }
  1167. Func *
  1168. Func::GetTopFunc()
  1169. {
  1170. Func * func = this;
  1171. while (!func->IsTopFunc())
  1172. {
  1173. func = func->parentFunc;
  1174. }
  1175. return func;
  1176. }
  1177. StackSym *
  1178. Func::EnsureLoopParamSym()
  1179. {
  1180. if (this->m_loopParamSym == nullptr)
  1181. {
  1182. this->m_loopParamSym = StackSym::New(TyMachPtr, this);
  1183. }
  1184. return this->m_loopParamSym;
  1185. }
  1186. void
  1187. Func::UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount)
  1188. {
  1189. if (maxInlineeArgOutCount < inlineeArgOutCount)
  1190. {
  1191. maxInlineeArgOutCount = inlineeArgOutCount;
  1192. }
  1193. }
  1194. void
  1195. Func::BeginClone(Lowerer * lowerer, JitArenaAllocator *alloc)
  1196. {
  1197. Assert(this->IsTopFunc());
  1198. AssertMsg(m_cloner == nullptr, "Starting new clone while one is in progress");
  1199. m_cloner = JitAnew(alloc, Cloner, lowerer, alloc);
  1200. if (m_cloneMap == nullptr)
  1201. {
  1202. m_cloneMap = JitAnew(alloc, InstrMap, alloc, 7);
  1203. }
  1204. }
  1205. void
  1206. Func::EndClone()
  1207. {
  1208. Assert(this->IsTopFunc());
  1209. if (m_cloner)
  1210. {
  1211. m_cloner->Finish();
  1212. JitAdelete(m_cloner->alloc, m_cloner);
  1213. m_cloner = nullptr;
  1214. }
  1215. }
  1216. IR::SymOpnd *
  1217. Func::GetInlineeOpndAtOffset(int32 offset)
  1218. {
  1219. Assert(IsInlinee());
  1220. StackSym *stackSym = CreateInlineeStackSym();
  1221. this->SetArgOffset(stackSym, stackSym->m_offset + offset);
  1222. Assert(stackSym->m_offset >= 0);
  1223. return IR::SymOpnd::New(stackSym, 0, TyMachReg, this);
  1224. }
  1225. StackSym *
  1226. Func::CreateInlineeStackSym()
  1227. {
  1228. // Make sure this is an inlinee and that GlobOpt has initialized the offset
  1229. // in the inlinee's frame.
  1230. Assert(IsInlinee());
  1231. Assert(m_inlineeFrameStartSym->m_offset != -1);
  1232. StackSym *stackSym = m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
  1233. stackSym->m_isInlinedArgSlot = true;
  1234. stackSym->m_offset = m_inlineeFrameStartSym->m_offset;
  1235. stackSym->m_allocated = true;
  1236. return stackSym;
  1237. }
  1238. uint16
  1239. Func::GetArgUsedForBranch() const
  1240. {
  1241. // this value can change while JITing, so or these together
  1242. return GetJITFunctionBody()->GetArgUsedForBranch() | GetJITOutput()->GetArgUsedForBranch();
  1243. }
  1244. intptr_t
  1245. Func::GetJittedLoopIterationsSinceLastBailoutAddress() const
  1246. {
  1247. Assert(this->m_workItem->Type() == JsLoopBodyWorkItemType);
  1248. return m_workItem->GetJittedLoopIterationsSinceLastBailoutAddr();
  1249. }
  1250. intptr_t
  1251. Func::GetWeakFuncRef() const
  1252. {
  1253. // TODO: OOP JIT figure out if this can be null
  1254. return m_workItem->GetJITTimeInfo()->GetWeakFuncRef();
  1255. }
  1256. intptr_t
  1257. Func::GetRuntimeInlineCache(const uint index) const
  1258. {
  1259. if(m_runtimeInfo != nullptr && m_runtimeInfo->HasClonedInlineCaches())
  1260. {
  1261. intptr_t inlineCache = m_runtimeInfo->GetClonedInlineCache(index);
  1262. if(inlineCache)
  1263. {
  1264. return inlineCache;
  1265. }
  1266. }
  1267. return GetJITFunctionBody()->GetInlineCache(index);
  1268. }
  1269. JITTimePolymorphicInlineCache *
  1270. Func::GetRuntimePolymorphicInlineCache(const uint index) const
  1271. {
  1272. if (this->m_polymorphicInlineCacheInfo && this->m_polymorphicInlineCacheInfo->HasInlineCaches())
  1273. {
  1274. return this->m_polymorphicInlineCacheInfo->GetInlineCache(index);
  1275. }
  1276. return nullptr;
  1277. }
  1278. byte
  1279. Func::GetPolyCacheUtilToInitialize(const uint index) const
  1280. {
  1281. return this->GetRuntimePolymorphicInlineCache(index) ? this->GetPolyCacheUtil(index) : PolymorphicInlineCacheUtilizationMinValue;
  1282. }
  1283. byte
  1284. Func::GetPolyCacheUtil(const uint index) const
  1285. {
  1286. return this->m_polymorphicInlineCacheInfo->GetUtil(index);
  1287. }
  1288. ObjTypeSpecFldInfo*
  1289. Func::GetObjTypeSpecFldInfo(const uint index) const
  1290. {
  1291. if (GetJITFunctionBody()->GetInlineCacheCount() == 0)
  1292. {
  1293. Assert(UNREACHED);
  1294. return nullptr;
  1295. }
  1296. return GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(index);
  1297. }
  1298. ObjTypeSpecFldInfo*
  1299. Func::GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const
  1300. {
  1301. Assert(propertyInfoId < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
  1302. return GetTopFunc()->m_globalObjTypeSpecFldInfoArray[propertyInfoId];
  1303. }
  1304. void
  1305. Func::EnsurePinnedTypeRefs()
  1306. {
  1307. if (this->pinnedTypeRefs == nullptr)
  1308. {
  1309. this->pinnedTypeRefs = JitAnew(this->m_alloc, TypeRefSet, this->m_alloc);
  1310. }
  1311. }
  1312. void
  1313. Func::PinTypeRef(void* typeRef)
  1314. {
  1315. EnsurePinnedTypeRefs();
  1316. this->pinnedTypeRefs->AddNew(typeRef);
  1317. }
  1318. void
  1319. Func::EnsureSingleTypeGuards()
  1320. {
  1321. if (this->singleTypeGuards == nullptr)
  1322. {
  1323. this->singleTypeGuards = JitAnew(this->m_alloc, TypePropertyGuardDictionary, this->m_alloc);
  1324. }
  1325. }
  1326. Js::JitTypePropertyGuard*
  1327. Func::GetOrCreateSingleTypeGuard(intptr_t typeAddr)
  1328. {
  1329. EnsureSingleTypeGuards();
  1330. Js::JitTypePropertyGuard* guard = nullptr;
  1331. if (!this->singleTypeGuards->TryGetValue(typeAddr, &guard))
  1332. {
  1333. // Property guards are allocated by NativeCodeData::Allocator so that their lifetime extends as long as the EntryPointInfo is alive.
  1334. guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitTypePropertyGuard, typeAddr, this->indexedPropertyGuardCount++);
  1335. this->singleTypeGuards->Add(typeAddr, guard);
  1336. }
  1337. else
  1338. {
  1339. Assert(guard->GetTypeAddr() == typeAddr);
  1340. }
  1341. return guard;
  1342. }
  1343. void
  1344. Func::EnsureEquivalentTypeGuards()
  1345. {
  1346. if (this->equivalentTypeGuards == nullptr)
  1347. {
  1348. this->equivalentTypeGuards = JitAnew(this->m_alloc, EquivalentTypeGuardList, this->m_alloc);
  1349. }
  1350. }
// Create (and register) a new equivalent-type guard for the given type, with
// a fresh equivalent-type cache attached. The cache allocator differs between
// OOP and in-proc JIT.
Js::JitEquivalentTypeGuard*
Func::CreateEquivalentTypeGuard(JITTypeHolder type, uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();

    Js::JitEquivalentTypeGuard* guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitEquivalentTypeGuard, type->GetAddr(), this->indexedPropertyGuardCount++, objTypeSpecFldId);

    // If we want to hard code the address of the cache, we will need to go back to allocating it from the native code data allocator.
    // We would then need to maintain consistency (double write) to both the recycler allocated cache and the one on the heap.
    Js::EquivalentTypeCache* cache = nullptr;
    if (this->IsOOPJIT())
    {
        cache = JitAnewZ(this->m_alloc, Js::EquivalentTypeCache);
    }
    else
    {
        cache = NativeCodeDataNewZNoFixup(GetTransferDataAllocator(), Js::EquivalentTypeCache);
    }
    guard->SetCache(cache);

    // Give the cache a back-pointer to the guard so that the guard can be cleared at runtime if necessary.
    cache->SetGuard(guard);
    this->equivalentTypeGuards->Prepend(guard);

    return guard;
}
  1373. void
  1374. Func::EnsurePropertyGuardsByPropertyId()
  1375. {
  1376. if (this->propertyGuardsByPropertyId == nullptr)
  1377. {
  1378. this->propertyGuardsByPropertyId = JitAnew(this->m_alloc, PropertyGuardByPropertyIdMap, this->m_alloc);
  1379. }
  1380. }
  1381. void
  1382. Func::EnsureCtorCachesByPropertyId()
  1383. {
  1384. if (this->ctorCachesByPropertyId == nullptr)
  1385. {
  1386. this->ctorCachesByPropertyId = JitAnew(this->m_alloc, CtorCachesByPropertyIdMap, this->m_alloc);
  1387. }
  1388. }
  1389. void
  1390. Func::LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard)
  1391. {
  1392. Assert(guard != nullptr);
  1393. Assert(guard->GetValue() != NULL);
  1394. Assert(this->propertyGuardsByPropertyId != nullptr);
  1395. IndexedPropertyGuardSet* set;
  1396. if (!this->propertyGuardsByPropertyId->TryGetValue(propertyId, &set))
  1397. {
  1398. set = JitAnew(this->m_alloc, IndexedPropertyGuardSet, this->m_alloc);
  1399. this->propertyGuardsByPropertyId->Add(propertyId, set);
  1400. }
  1401. set->Item(guard);
  1402. }
  1403. void
  1404. Func::LinkCtorCacheToPropertyId(Js::PropertyId propertyId, JITTimeConstructorCache* cache)
  1405. {
  1406. Assert(cache != nullptr);
  1407. Assert(this->ctorCachesByPropertyId != nullptr);
  1408. CtorCacheSet* set;
  1409. if (!this->ctorCachesByPropertyId->TryGetValue(propertyId, &set))
  1410. {
  1411. set = JitAnew(this->m_alloc, CtorCacheSet, this->m_alloc);
  1412. this->ctorCachesByPropertyId->Add(propertyId, set);
  1413. }
  1414. set->Item(cache->GetRuntimeCacheAddr());
  1415. }
  1416. JITTimeConstructorCache* Func::GetConstructorCache(const Js::ProfileId profiledCallSiteId)
  1417. {
  1418. Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
  1419. Assert(this->constructorCaches != nullptr);
  1420. return this->constructorCaches[profiledCallSiteId];
  1421. }
  1422. void Func::SetConstructorCache(const Js::ProfileId profiledCallSiteId, JITTimeConstructorCache* constructorCache)
  1423. {
  1424. Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
  1425. Assert(constructorCache != nullptr);
  1426. Assert(this->constructorCaches != nullptr);
  1427. Assert(this->constructorCaches[profiledCallSiteId] == nullptr);
  1428. this->constructorCacheCount++;
  1429. this->constructorCaches[profiledCallSiteId] = constructorCache;
  1430. }
  1431. void Func::EnsurePropertiesWrittenTo()
  1432. {
  1433. if (this->propertiesWrittenTo == nullptr)
  1434. {
  1435. this->propertiesWrittenTo = JitAnew(this->m_alloc, PropertyIdSet, this->m_alloc);
  1436. }
  1437. }
  1438. void Func::EnsureCallSiteToArgumentsOffsetFixupMap()
  1439. {
  1440. if (this->callSiteToArgumentsOffsetFixupMap == nullptr)
  1441. {
  1442. this->callSiteToArgumentsOffsetFixupMap = JitAnew(this->m_alloc, CallSiteToArgumentsOffsetFixupMap, this->m_alloc);
  1443. }
  1444. }
  1445. IR::LabelInstr *
  1446. Func::GetFuncStartLabel()
  1447. {
  1448. return m_funcStartLabel;
  1449. }
  1450. IR::LabelInstr *
  1451. Func::EnsureFuncStartLabel()
  1452. {
  1453. if(m_funcStartLabel == nullptr)
  1454. {
  1455. m_funcStartLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1456. }
  1457. return m_funcStartLabel;
  1458. }
  1459. IR::LabelInstr *
  1460. Func::GetFuncEndLabel()
  1461. {
  1462. return m_funcEndLabel;
  1463. }
  1464. IR::LabelInstr *
  1465. Func::EnsureFuncEndLabel()
  1466. {
  1467. if(m_funcEndLabel == nullptr)
  1468. {
  1469. m_funcEndLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1470. }
  1471. return m_funcEndLabel;
  1472. }
  1473. void
  1474. Func::EnsureStackArgWithFormalsTracker()
  1475. {
  1476. if (stackArgWithFormalsTracker == nullptr)
  1477. {
  1478. stackArgWithFormalsTracker = JitAnew(m_alloc, StackArgWithFormalsTracker, m_alloc);
  1479. }
  1480. }
  1481. BOOL
  1482. Func::IsFormalsArraySym(SymID symId)
  1483. {
  1484. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsArraySyms() == nullptr)
  1485. {
  1486. return false;
  1487. }
  1488. return stackArgWithFormalsTracker->GetFormalsArraySyms()->Test(symId);
  1489. }
  1490. void
  1491. Func::TrackFormalsArraySym(SymID symId)
  1492. {
  1493. EnsureStackArgWithFormalsTracker();
  1494. stackArgWithFormalsTracker->SetFormalsArraySyms(symId);
  1495. }
  1496. void
  1497. Func::TrackStackSymForFormalIndex(Js::ArgSlot formalsIndex, StackSym * sym)
  1498. {
  1499. EnsureStackArgWithFormalsTracker();
  1500. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1501. stackArgWithFormalsTracker->SetStackSymInFormalsIndexMap(sym, formalsIndex, formalsCount);
  1502. }
  1503. StackSym *
  1504. Func::GetStackSymForFormal(Js::ArgSlot formalsIndex)
  1505. {
  1506. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1507. {
  1508. return nullptr;
  1509. }
  1510. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1511. StackSym ** formalsIndexToStackSymMap = stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap();
  1512. AssertMsg(formalsIndex < formalsCount, "OutOfRange ? ");
  1513. return formalsIndexToStackSymMap[formalsIndex];
  1514. }
  1515. bool
  1516. Func::HasStackSymForFormal(Js::ArgSlot formalsIndex)
  1517. {
  1518. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1519. {
  1520. return false;
  1521. }
  1522. return GetStackSymForFormal(formalsIndex) != nullptr;
  1523. }
  1524. void
  1525. Func::SetScopeObjSym(StackSym * sym)
  1526. {
  1527. EnsureStackArgWithFormalsTracker();
  1528. stackArgWithFormalsTracker->SetScopeObjSym(sym);
  1529. }
  1530. StackSym *
  1531. Func::GetNativeCodeDataSym() const
  1532. {
  1533. Assert(IsOOPJIT());
  1534. return m_nativeCodeDataSym;
  1535. }
  1536. void
  1537. Func::SetNativeCodeDataSym(StackSym * opnd)
  1538. {
  1539. Assert(IsOOPJIT());
  1540. m_nativeCodeDataSym = opnd;
  1541. }
  1542. StackSym*
  1543. Func::GetScopeObjSym()
  1544. {
  1545. if (stackArgWithFormalsTracker == nullptr)
  1546. {
  1547. return nullptr;
  1548. }
  1549. return stackArgWithFormalsTracker->GetScopeObjSym();
  1550. }
  1551. BVSparse<JitArenaAllocator> *
  1552. StackArgWithFormalsTracker::GetFormalsArraySyms()
  1553. {
  1554. return formalsArraySyms;
  1555. }
  1556. void
  1557. StackArgWithFormalsTracker::SetFormalsArraySyms(SymID symId)
  1558. {
  1559. if (formalsArraySyms == nullptr)
  1560. {
  1561. formalsArraySyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1562. }
  1563. formalsArraySyms->Set(symId);
  1564. }
  1565. StackSym **
  1566. StackArgWithFormalsTracker::GetFormalsIndexToStackSymMap()
  1567. {
  1568. return formalsIndexToStackSymMap;
  1569. }
  1570. void
  1571. StackArgWithFormalsTracker::SetStackSymInFormalsIndexMap(StackSym * sym, Js::ArgSlot formalsIndex, Js::ArgSlot formalsCount)
  1572. {
  1573. if(formalsIndexToStackSymMap == nullptr)
  1574. {
  1575. formalsIndexToStackSymMap = JitAnewArrayZ(alloc, StackSym*, formalsCount);
  1576. }
  1577. AssertMsg(formalsIndex < formalsCount, "Out of range ?");
  1578. formalsIndexToStackSymMap[formalsIndex] = sym;
  1579. }
  1580. void
  1581. StackArgWithFormalsTracker::SetScopeObjSym(StackSym * sym)
  1582. {
  1583. m_scopeObjSym = sym;
  1584. }
  1585. StackSym *
  1586. StackArgWithFormalsTracker::GetScopeObjSym()
  1587. {
  1588. return m_scopeObjSym;
  1589. }
  1590. void
  1591. Cloner::AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone)
  1592. {
  1593. if (!this->instrFirst)
  1594. {
  1595. this->instrFirst = instrClone;
  1596. }
  1597. this->instrLast = instrClone;
  1598. }
  1599. void
  1600. Cloner::Finish()
  1601. {
  1602. this->RetargetClonedBranches();
  1603. if (this->lowerer)
  1604. {
  1605. lowerer->LowerRange(this->instrFirst, this->instrLast, false, false);
  1606. }
  1607. }
void
Cloner::RetargetClonedBranches()
{
    // Re-point each cloned branch instruction at its cloned target.
    // No-op unless retargeting was requested for this clone session.
    if (!this->fRetargetClonedBranch)
    {
        return;
    }
    FOREACH_INSTR_IN_RANGE(instr, this->instrFirst, this->instrLast)
    {
        if (instr->IsBranchInstr())
        {
            instr->AsBranchInstr()->RetargetClonedBranch();
        }
    }
    NEXT_INSTR_IN_RANGE;
}
  1624. void Func::ThrowIfScriptClosed()
  1625. {
  1626. if (GetScriptContextInfo()->IsClosed())
  1627. {
  1628. // Should not be jitting something in the foreground when the script context is actually closed
  1629. Assert(IsBackgroundJIT() || !GetScriptContext()->IsActuallyClosed());
  1630. throw Js::OperationAbortedException();
  1631. }
  1632. }
// Returns an indirect operand addressing `address` relative to a hoisted
// constant-address register, reusing an existing hoisted load when `address`
// is within a 32-bit displacement of one; otherwise emits a new load of
// `largeConstOpnd` near the function entry. Returns nullptr when hoisting
// is no longer allowed (post-lower).
IR::IndirOpnd * Func::GetConstantAddressIndirOpnd(intptr_t address, IR::Opnd * largeConstOpnd, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode)
{
    // Only the top func owns the hoisted constant-address loads.
    Assert(this->GetTopFunc() == this);
    if (!canHoistConstantAddressLoad)
    {
        // We can't hoist constant address load after lower, as we can't mark the sym as
        // live on back edge
        return nullptr;
    }
    // Search for an already-hoisted address whose distance from `address`
    // fits in a signed 32-bit displacement; `offset` receives that distance.
    int offset = 0;
    IR::RegOpnd ** foundRegOpnd = this->constantAddressRegOpnd.Find([address, &offset](IR::RegOpnd * regOpnd)
    {
        // Every entry in the list is the single def of a load whose src is
        // either an address or an integer constant.
        Assert(regOpnd->m_sym->IsSingleDef());
        Assert(regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() || regOpnd->m_sym->m_instrDef->GetSrc1()->IsIntConstOpnd());
        void * curr = regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() ?
            regOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address :
            (void *)regOpnd->m_sym->m_instrDef->GetSrc1()->AsIntConstOpnd()->GetValue();
        ptrdiff_t diff = (uintptr_t)address - (uintptr_t)curr;
        if (!Math::FitsInDWord(diff))
        {
            return false;
        }
        offset = (int)diff;
        return true;
    });
    IR::RegOpnd * addressRegOpnd;
    if (foundRegOpnd != nullptr)
    {
        addressRegOpnd = *foundRegOpnd;
    }
    else
    {
        // No reusable base: load the constant into a fresh machine-pointer
        // register and register it for future reuse. `offset` is untouched
        // by a failed Find, hence still 0 here.
        Assert(offset == 0);
        addressRegOpnd = IR::RegOpnd::New(TyMachPtr, this);
        IR::Instr *const newInstr =
            IR::Instr::New(
                loadOpCode,
                addressRegOpnd,
                largeConstOpnd,
                this);
        this->constantAddressRegOpnd.Prepend(addressRegOpnd);
        // Chain the loads: insert before the previous constant-address load,
        // or at the function entry when this is the first one.
        IR::Instr * insertBeforeInstr = this->lastConstantAddressRegLoadInstr;
        if (insertBeforeInstr == nullptr)
        {
            insertBeforeInstr = this->GetFunctionEntryInsertionPoint();
            this->lastConstantAddressRegLoadInstr = newInstr;
        }
        insertBeforeInstr->InsertBefore(newInstr);
    }
    // true => the base register holds a known constant address.
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(addressRegOpnd, offset, type, this, true);
#if DBG_DUMP
    // TODO: michhol make intptr_t
    indirOpnd->SetAddrKind(kind, (void*)address);
#endif
    return indirOpnd;
}
  1689. void Func::MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv)
  1690. {
  1691. Assert(this->GetTopFunc() == this);
  1692. this->constantAddressRegOpnd.Iterate([bv](IR::RegOpnd * regOpnd)
  1693. {
  1694. bv->Set(regOpnd->m_sym->m_id);
  1695. });
  1696. }
  1697. IR::Instr *
  1698. Func::GetFunctionEntryInsertionPoint()
  1699. {
  1700. Assert(this->GetTopFunc() == this);
  1701. IR::Instr * insertInsert = this->lastConstantAddressRegLoadInstr;
  1702. if (insertInsert != nullptr)
  1703. {
  1704. return insertInsert->m_next;
  1705. }
  1706. insertInsert = this->m_headInstr;
  1707. if (this->HasTry())
  1708. {
  1709. // Insert it inside the root region
  1710. insertInsert = insertInsert->m_next;
  1711. Assert(insertInsert->IsLabelInstr() && insertInsert->AsLabelInstr()->GetRegion()->GetType() == RegionTypeRoot);
  1712. }
  1713. return insertInsert->m_next;
  1714. }
// Boxes a double into a Js::Var for use by jitted code.
Js::Var
Func::AllocateNumber(double value)
{
    Js::Var number = nullptr;
#if FLOATVAR
    // FLOATVAR builds encode the number directly in the Var, so no
    // allocator or script context is needed.
    number = Js::JavascriptNumber::NewCodeGenInstance((double)value, nullptr);
#else
    if (!IsOOPJIT()) // in-proc jit
    {
        number = Js::JavascriptNumber::NewCodeGenInstance(GetNumberAllocator(), (double)value, GetScriptContext());
    }
    else // OOP JIT
    {
        // Out-of-process JIT: allocate in the target process through the
        // cross-process number allocator.
        number = GetXProcNumberAllocator()->AllocateNumber(this, value);
    }
#endif
    return number;
}
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// Prints "Function <display name> (<debug number set>)" without a trailing newline.
void
Func::DumpFullFunctionName()
{
    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
    Output::Print(_u("Function %s (%s)"), GetJITFunctionBody()->GetDisplayName(), GetDebugNumberSet(debugStringBuffer));
}
#endif
  1741. void
  1742. Func::UpdateForInLoopMaxDepth(uint forInLoopMaxDepth)
  1743. {
  1744. Assert(this->IsTopFunc());
  1745. this->m_forInLoopMaxDepth = max(this->m_forInLoopMaxDepth, forInLoopMaxDepth);
  1746. }
  1747. int
  1748. Func::GetForInEnumeratorArrayOffset() const
  1749. {
  1750. Func const* topFunc = this->GetTopFunc();
  1751. Assert(this->m_forInLoopBaseDepth + this->GetJITFunctionBody()->GetForInLoopDepth() <= topFunc->m_forInLoopMaxDepth);
  1752. return topFunc->m_forInEnumeratorArrayOffset
  1753. + this->m_forInLoopBaseDepth * sizeof(Js::ForInObjectEnumerator);
  1754. }
#if DBG_DUMP
///----------------------------------------------------------------------------
///
/// Func::DumpHeader
///
///     Print the banner line for a function dump: separator, function name,
///     instruction count, and (when known) the emitted code size.
///
///----------------------------------------------------------------------------
void
Func::DumpHeader()
{
    Output::Print(_u("-----------------------------------------------------------------------------\n"));
    DumpFullFunctionName();
    Output::SkipToColumn(50);
    Output::Print(_u("Instr Count:%d"), GetInstrCount());
    // Code size is only known after encoding; omit it when not yet set.
    if(m_codeSize > 0)
    {
        Output::Print(_u("\t\tSize:%d\n\n"), m_codeSize);
    }
    else
    {
        Output::Print(_u("\n\n"));
    }
}
///----------------------------------------------------------------------------
///
/// Func::Dump
///
///     Dump the function header followed by every instruction in the func.
///
///----------------------------------------------------------------------------
void
Func::Dump(IRDumpFlags flags)
{
    this->DumpHeader();
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        // Dump the glob-opt annotation (if any) before the instruction itself.
        instr->DumpGlobOptInstrString();
        instr->Dump(flags);
    }NEXT_INSTR_IN_FUNC;
    Output::Flush();
}
  1793. void
  1794. Func::Dump()
  1795. {
  1796. this->Dump(IRDumpFlags_None);
  1797. }
  1798. #endif
  1799. #if DBG_DUMP || defined(ENABLE_IR_VIEWER)
  1800. LPCSTR
  1801. Func::GetVtableName(INT_PTR address)
  1802. {
  1803. #if DBG
  1804. if (vtableMap == nullptr)
  1805. {
  1806. vtableMap = VirtualTableRegistry::CreateVtableHashMap(this->m_alloc);
  1807. };
  1808. LPCSTR name = vtableMap->Lookup(address, nullptr);
  1809. if (name)
  1810. {
  1811. if (strncmp(name, "class ", _countof("class ") - 1) == 0)
  1812. {
  1813. name += _countof("class ") - 1;
  1814. }
  1815. }
  1816. return name;
  1817. #else
  1818. return "";
  1819. #endif
  1820. }
  1821. #endif
  1822. #if DBG_DUMP | defined(VTUNE_PROFILING)
  1823. bool Func::DoRecordNativeMap() const
  1824. {
  1825. #if defined(VTUNE_PROFILING)
  1826. if (VTuneChakraProfile::isJitProfilingActive)
  1827. {
  1828. return true;
  1829. }
  1830. #endif
  1831. #if DBG_DUMP
  1832. return PHASE_DUMP(Js::EncoderPhase, this) && Js::Configuration::Global.flags.Verbose;
  1833. #else
  1834. return false;
  1835. #endif
  1836. }
  1837. #endif
  1838. #ifdef PERF_HINT
  1839. void WritePerfHint(PerfHints hint, Func* func, uint byteCodeOffset /*= Js::Constants::NoByteCodeOffset*/)
  1840. {
  1841. if (!func->IsOOPJIT())
  1842. {
  1843. WritePerfHint(hint, (Js::FunctionBody*)func->GetJITFunctionBody()->GetAddr(), byteCodeOffset);
  1844. }
  1845. }
  1846. #endif