Func.cpp 64 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Base/EtwTrace.h"
  7. #include "Base/ScriptContextProfiler.h"
  8. #ifdef VTUNE_PROFILING
  9. #include "Base/VTuneChakraProfile.h"
  10. #endif
  11. #include "Library/ForInObjectEnumerator.h"
// Construct the JIT-time representation of one function being compiled.
// A Func is created for the top-level work item and recursively for each
// inlinee (parentFunc != nullptr); inlinees share the top Func's symbol
// table, global obj-type-spec array, and inlinee-id counter.
// NOTE(review): member-initializer order must match the declaration order
// in Func.h — do not reorder entries in this list.
Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
    ThreadContextInfo * threadContextInfo,
    ScriptContextInfo * scriptContextInfo,
    JITOutputIDL * outputData,
    Js::EntryPointInfo* epInfo,
    const FunctionJITRuntimeInfo *const runtimeInfo,
    JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
#if !FLOATVAR
    CodeGenNumberAllocator * numberAllocator,
#endif
    Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc,
    uint postCallByteCodeOffset, Js::RegSlot returnValueRegSlot, const bool isInlinedConstructor,
    Js::ProfileId callSiteIdInParentFunc, bool isGetterSetter) :
    m_alloc(alloc),
    m_workItem(workItem),
    m_output(outputData),
    m_entryPointInfo(epInfo),
    m_threadContextInfo(threadContextInfo),
    m_scriptContextInfo(scriptContextInfo),
    m_runtimeInfo(runtimeInfo),
    m_polymorphicInlineCacheInfo(polymorphicInlineCacheInfo),
    m_codeGenAllocators(codeGenAllocators),
    m_inlineeId(0),
    pinnedTypeRefs(nullptr),
    singleTypeGuards(nullptr),
    equivalentTypeGuards(nullptr),
    propertyGuardsByPropertyId(nullptr),
    ctorCachesByPropertyId(nullptr),
    callSiteToArgumentsOffsetFixupMap(nullptr),
    indexedPropertyGuardCount(0),
    propertiesWrittenTo(nullptr),
    lazyBailoutProperties(alloc),
    anyPropertyMayBeWrittenTo(false),
#ifdef PROFILE_EXEC
    m_codeGenProfiler(codeGenProfiler),
#endif
    m_isBackgroundJIT(isBackgroundJIT),
    m_cloner(nullptr),
    m_cloneMap(nullptr),
    m_loopParamSym(nullptr),
    m_funcObjSym(nullptr),
    m_localClosureSym(nullptr),
    m_paramClosureSym(nullptr),
    m_localFrameDisplaySym(nullptr),
    m_inlineeFrameDisplaySyms(nullptr),
    m_bailoutReturnValueSym(nullptr),
    m_hasBailedOutSym(nullptr),
    m_inlineeFrameStartSym(nullptr),
    m_regsUsed(0),
    m_fg(nullptr),
    m_labelCount(0),
    m_argSlotsForFunctionsCalled(0),
    m_hasCalls(false),
    m_hasInlineArgsOpt(false),
    m_canDoInlineArgsOpt(true),
    m_doFastPaths(false),
    hasBailout(false),
    firstIRTemp(0),
    hasBailoutInEHRegion(false),
    hasInstrNumber(false),
    maintainByteCodeOffset(true),
    frameSize(0),
    // The top-level Func is its own topFunc; inlinees chain to the root.
    topFunc(parentFunc ? parentFunc->topFunc : this),
    parentFunc(parentFunc),
    argObjSyms(nullptr),
    m_nonTempLocalVars(nullptr),
    hasAnyStackNestedFunc(false),
    hasMarkTempObjects(false),
    postCallByteCodeOffset(postCallByteCodeOffset),
    maxInlineeArgOutSize(0),
    returnValueRegSlot(returnValueRegSlot),
    firstActualStackOffset(-1),
    m_localVarSlotsOffset(Js::Constants::InvalidOffset),
    m_hasLocalVarChangedOffset(Js::Constants::InvalidOffset),
    // Sentinel: "no actual count yet" (all-ones ArgSlot value).
    actualCount((Js::ArgSlot) - 1),
    tryCatchNestingLevel(0),
    m_localStackHeight(0),
    tempSymDouble(nullptr),
    tempSymBool(nullptr),
    hasInlinee(false),
    thisOrParentInlinerHasArguments(false),
    hasStackArgs(false),
    hasImplicitParamLoad(false),
    hasThrow(false),
    hasNonSimpleParams(false),
    hasUnoptimizedArgumentsAccess(false),
    applyTargetInliningRemovedArgumentsAccess(false),
    hasImplicitCalls(false),
    hasTempObjectProducingInstr(false),
    isInlinedConstructor(isInlinedConstructor),
#if !FLOATVAR
    numberAllocator(numberAllocator),
#endif
    loopCount(0),
    callSiteIdInParentFunc(callSiteIdInParentFunc),
    isGetterSetter(isGetterSetter),
    frameInfo(nullptr),
    isTJLoopBody(false),
    m_nativeCodeDataSym(nullptr),
    isFlowGraphValid(false),
    legalizePostRegAlloc(false),
#if DBG
    m_callSiteCount(0),
#endif
    stackNestedFunc(false),
    stackClosure(false)
#if defined(_M_ARM32_OR_ARM64)
    , m_ArgumentsOffset(0)
    , m_epilogLabel(nullptr)
#endif
    , m_funcStartLabel(nullptr)
    , m_funcEndLabel(nullptr)
#if DBG
    , hasCalledSetDoFastPaths(false)
    , allowRemoveBailOutArgInstr(false)
    , currentPhases(alloc)
    , isPostLower(false)
    , isPostRegAlloc(false)
    , isPostPeeps(false)
    , isPostLayout(false)
    , isPostFinalLower(false)
    , vtableMap(nullptr)
#endif
    , m_yieldOffsetResumeLabelList(nullptr)
    , m_bailOutNoSaveLabel(nullptr)
    , constantAddressRegOpnd(alloc)
    , lastConstantAddressRegLoadInstr(nullptr)
    , m_totalJumpTableSizeInBytesForSwitchStatements(0)
    , slotArrayCheckTable(nullptr)
    , frameDisplayCheckTable(nullptr)
    , stackArgWithFormalsTracker(nullptr)
    , m_forInLoopBaseDepth(0)
    , m_forInEnumeratorArrayOffset(-1)
    , argInsCount(0)
    , m_globalObjTypeSpecFldInfoArray(nullptr)
#if LOWER_SPLIT_INT64
    , m_int64SymPairMap(nullptr)
#endif
#ifdef RECYCLER_WRITE_BARRIER_JIT
    , m_lowerer(nullptr)
#endif
{
    Assert(this->IsInlined() == !!runtimeInfo);

    // Validate the counts handed over by the runtime (possibly another
    // process under OOP JIT) before trusting them: fail fast on any
    // inconsistency rather than jitting from corrupt metadata.
    AssertOrFailFast(!HasProfileInfo() || GetReadOnlyProfileInfo()->GetLoopCount() == GetJITFunctionBody()->GetLoopCount());
    Js::RegSlot tmpResult;
    AssertOrFailFast(!UInt32Math::Add(GetJITFunctionBody()->GetConstCount(), GetJITFunctionBody()->GetVarCount(), &tmpResult));
    AssertOrFailFast(GetJITFunctionBody()->IsAsmJsMode() || GetJITFunctionBody()->GetFirstTmpReg() <= GetJITFunctionBody()->GetLocalsCount());
    AssertOrFailFast(!IsLoopBody() || m_workItem->GetLoopNumber() < GetJITFunctionBody()->GetLoopCount());
    AssertOrFailFast(CONFIG_FLAG(Prejit) || CONFIG_ISENABLED(Js::ForceNativeFlag) || GetJITFunctionBody()->GetByteCodeLength() < (uint)CONFIG_FLAG(MaxJITFunctionBytecodeByteLength));

    GetJITFunctionBody()->EnsureConsistentConstCount();

    if (this->IsTopFunc())
    {
        // Initialize the output record; these offsets are patched later if
        // local var slots get allocated (see EnsureLocalVarSlots).
        outputData->hasJittedStackClosure = false;
        outputData->localVarSlotsOffset = m_localVarSlotsOffset;
        outputData->localVarChangedOffset = m_hasLocalVarChangedOffset;
    }

    if (this->IsInlined())
    {
        // Inlinee ids are handed out by the root Func, starting at 1.
        m_inlineeId = ++(GetTopFunc()->m_inlineeId);
    }
    bool doStackNestedFunc = GetJITFunctionBody()->DoStackNestedFunc();
    bool doStackClosure = GetJITFunctionBody()->DoStackClosure() && !PHASE_OFF(Js::FrameDisplayFastPathPhase, this) && !PHASE_OFF(Js::StackClosurePhase, this);
    Assert(!doStackClosure || doStackNestedFunc);
    this->stackClosure = doStackClosure && this->IsTopFunc();
    if (this->stackClosure)
    {
        // TODO: calculate on runtime side?
        m_output.SetHasJITStackClosure();
    }

    if (m_workItem->Type() == JsFunctionType &&
        GetJITFunctionBody()->DoBackendArgumentsOptimization() &&
        (!GetJITFunctionBody()->HasTry() || this->DoOptimizeTry()))
    {
        // doBackendArgumentsOptimization bit is set when there is no eval inside a function
        // as determined by the bytecode generator.
        SetHasStackArgs(true);
    }

    if (doStackNestedFunc && GetJITFunctionBody()->GetNestedCount() != 0 &&
        (this->IsTopFunc() || this->GetTopFunc()->m_workItem->Type() != JsLoopBodyWorkItemType)) // make sure none of the functions inlined in a jitted loop body allocate nested functions on the stack
    {
        Assert(!(this->IsJitInDebugMode() && !GetJITFunctionBody()->IsLibraryCode()));
        stackNestedFunc = true;
        this->GetTopFunc()->hasAnyStackNestedFunc = true;
    }

    if (GetJITFunctionBody()->HasOrParentHasArguments() || (parentFunc && parentFunc->thisOrParentInlinerHasArguments))
    {
        thisOrParentInlinerHasArguments = true;
    }

    if (parentFunc == nullptr)
    {
        // Top-level function: owns the symbol table shared by all inlinees.
        inlineDepth = 0;
        m_symTable = JitAnew(alloc, SymTable);
        m_symTable->Init(this);
        m_symTable->SetStartingID(static_cast<SymID>(workItem->GetJITFunctionBody()->GetLocalsCount() + 1));

        Assert(Js::Constants::NoByteCodeOffset == postCallByteCodeOffset);
        Assert(Js::Constants::NoRegister == returnValueRegSlot);

#if defined(_M_IX86) || defined(_M_X64)
        if (HasArgumentSlot())
        {
            // Pre-allocate the single argument slot we'll reserve for the arguments object.
            // For ARM, the argument slot is not part of the local but part of the register saves
            m_localStackHeight = MachArgsSlotOffset;
        }
#endif
    }
    else
    {
        inlineDepth = parentFunc->inlineDepth + 1;
        Assert(Js::Constants::NoByteCodeOffset != postCallByteCodeOffset);
    }

    this->constructorCacheCount = 0;
    this->constructorCaches = AnewArrayZ(this->m_alloc, JITTimeConstructorCache*, GetJITFunctionBody()->GetProfiledCallSiteCount());

#if DBG_DUMP
    m_codeSize = -1;
#endif

#if defined(_M_X64)
    m_spillSize = -1;
    m_argsSize = -1;
    m_savedRegSize = -1;
#endif

    if (this->IsJitInDebugMode())
    {
        // Track non-temp locals so the debugger can inspect them in memory.
        m_nonTempLocalVars = Anew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
    }

    if (GetJITFunctionBody()->IsCoroutine())
    {
        // Generators/async functions need labels to resume at yield offsets.
        m_yieldOffsetResumeLabelList = YieldOffsetResumeLabelList::New(this->m_alloc);
    }

    if (this->IsTopFunc())
    {
        m_globalObjTypeSpecFldInfoArray = JitAnewArrayZ(this->m_alloc, ObjTypeSpecFldInfo*, GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    }

    // Register this function's obj-type-spec field info in the root Func's
    // global array, validating ids against the array bound (fail fast on
    // out-of-range ids from the runtime).
    for (uint i = 0; i < GetJITFunctionBody()->GetInlineCacheCount(); ++i)
    {
        ObjTypeSpecFldInfo * info = GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(i);
        if (info != nullptr)
        {
            AssertOrFailFast(info->GetObjTypeSpecFldId() < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
            GetTopFunc()->m_globalObjTypeSpecFldInfoArray[info->GetObjTypeSpecFldId()] = info;
        }
    }

    canHoistConstantAddressLoad = !PHASE_OFF(Js::HoistConstAddrPhase, this);

    m_forInLoopMaxDepth = this->GetJITFunctionBody()->GetForInLoopDepth();
}
  256. bool
  257. Func::IsLoopBodyInTry() const
  258. {
  259. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTry;
  260. }
  261. bool
  262. Func::IsLoopBodyInTryFinally() const
  263. {
  264. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTryFinally;
  265. }
  266. /* static */
  267. void
  268. Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  269. ThreadContextInfo * threadContextInfo,
  270. ScriptContextInfo * scriptContextInfo,
  271. JITOutputIDL * outputData,
  272. Js::EntryPointInfo* epInfo, // for in-proc jit only
  273. const FunctionJITRuntimeInfo *const runtimeInfo,
  274. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
  275. #if !FLOATVAR
  276. CodeGenNumberAllocator * numberAllocator,
  277. #endif
  278. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT)
  279. {
  280. bool rejit;
  281. do
  282. {
  283. Func func(alloc, workItem, threadContextInfo,
  284. scriptContextInfo, outputData, epInfo, runtimeInfo,
  285. polymorphicInlineCacheInfo, codeGenAllocators,
  286. #if !FLOATVAR
  287. numberAllocator,
  288. #endif
  289. codeGenProfiler, isBackgroundJIT);
  290. try
  291. {
  292. func.TryCodegen();
  293. rejit = false;
  294. }
  295. catch (Js::RejitException ex)
  296. {
  297. // The work item needs to be rejitted, likely due to some optimization that was too aggressive
  298. switch (ex.Reason())
  299. {
  300. case RejitReason::AggressiveIntTypeSpecDisabled:
  301. outputData->disableAggressiveIntTypeSpec = TRUE;
  302. break;
  303. case RejitReason::InlineApplyDisabled:
  304. workItem->GetJITFunctionBody()->DisableInlineApply();
  305. outputData->disableInlineApply = TRUE;
  306. break;
  307. case RejitReason::InlineSpreadDisabled:
  308. workItem->GetJITFunctionBody()->DisableInlineSpread();
  309. outputData->disableInlineSpread = TRUE;
  310. break;
  311. case RejitReason::DisableStackArgOpt:
  312. outputData->disableStackArgOpt = TRUE;
  313. break;
  314. case RejitReason::DisableSwitchOptExpectingInteger:
  315. case RejitReason::DisableSwitchOptExpectingString:
  316. outputData->disableSwitchOpt = TRUE;
  317. break;
  318. case RejitReason::ArrayCheckHoistDisabled:
  319. case RejitReason::ArrayAccessHelperCallEliminationDisabled:
  320. outputData->disableArrayCheckHoist = TRUE;
  321. break;
  322. case RejitReason::TrackIntOverflowDisabled:
  323. outputData->disableTrackCompoundedIntOverflow = TRUE;
  324. break;
  325. case RejitReason::MemOpDisabled:
  326. outputData->disableMemOp = TRUE;
  327. break;
  328. case RejitReason::FailedEquivalentTypeCheck:
  329. // No disable flag. The thrower of the re-jit exception must guarantee that objtypespec is disabled where appropriate.
  330. break;
  331. default:
  332. Assume(UNREACHED);
  333. }
  334. if (PHASE_TRACE(Js::ReJITPhase, &func))
  335. {
  336. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  337. Output::Print(
  338. _u("Rejit (compile-time): function: %s (%s) reason: %S\n"),
  339. workItem->GetJITFunctionBody()->GetDisplayName(),
  340. workItem->GetJITTimeInfo()->GetDebugNumberSet(debugStringBuffer),
  341. ex.ReasonName());
  342. }
  343. rejit = true;
  344. }
  345. // Either the entry point has a reference to the number now, or we failed to code gen and we
  346. // don't need to numbers, we can flush the completed page now.
  347. //
  348. // If the number allocator is NULL then we are shutting down the thread context and so too the
  349. // code generator. The number allocator must be freed before the recycler (and thus before the
  350. // code generator) so we can't and don't need to flush it.
  351. // TODO: OOP JIT, allocator cleanup
  352. } while (rejit);
  353. }
///----------------------------------------------------------------------------
///
/// Func::TryCodegen
///
/// Attempt to Codegen this function.
///
/// Runs the full backend pipeline in order: IR build, inlining, flow graph +
/// global optimization, lowering, constant encoding, register allocation,
/// peephole optimization, layout, EH bailout patch-up, NOP insertion,
/// prolog/epilog, final lowering, and machine-code encoding. Throws
/// Js::RejitException (handled by Func::Codegen) when an optimization proves
/// too aggressive. For OOP JIT, finishes by serializing the native code data
/// into IDL buffers for the client process.
///
///----------------------------------------------------------------------------
void
Func::TryCodegen()
{
    Assert(!IsJitInDebugMode() || !GetJITFunctionBody()->HasTry());

    BEGIN_CODEGEN_PHASE(this, Js::BackEndPhase);
    {
        // IRBuilder
        BEGIN_CODEGEN_PHASE(this, Js::IRBuilderPhase);

#ifdef ASMJS_PLAT
        // asm.js bodies use a dedicated IR builder.
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            IRBuilderAsmJs asmIrBuilder(this);
            asmIrBuilder.Build();
        }
        else
#endif
        {
            IRBuilder irBuilder(this);
            irBuilder.Build();
        }

        END_CODEGEN_PHASE(this, Js::IRBuilderPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::IRBuilderPhase);
#endif /* IR_VIEWER */

        // Inlining runs directly on the byte-code-derived IR, before the
        // flow graph is built.
        BEGIN_CODEGEN_PHASE(this, Js::InlinePhase);
        InliningHeuristics heuristics(GetWorkItem()->GetJITTimeInfo(), this->IsLoopBody());
        Inline inliner(this, heuristics);
        inliner.Optimize();
        END_CODEGEN_PHASE(this, Js::InlinePhase);

        // Bail out of compilation if the script context shut down meanwhile.
        ThrowIfScriptClosed();

        // FlowGraph
        {
            // Scope for FlowGraph arena
            NoRecoverMemoryJitArenaAllocator fgAlloc(_u("BE-FlowGraph"), m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);

            BEGIN_CODEGEN_PHASE(this, Js::FGBuildPhase);
            this->m_fg = FlowGraph::New(this, &fgAlloc);
            this->m_fg->Build();
            END_CODEGEN_PHASE(this, Js::FGBuildPhase);

            // Global Optimization and Type Specialization
            BEGIN_CODEGEN_PHASE(this, Js::GlobOptPhase);
            GlobOpt globOpt(this);
            globOpt.Optimize();
            END_CODEGEN_PHASE(this, Js::GlobOptPhase);

            // Delete flowGraph now
            this->m_fg->Destroy();
            this->m_fg = nullptr;
        }

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::GlobOptPhase);
#endif /* IR_VIEWER */

        ThrowIfScriptClosed();

        // Lowering
        // The lowerer outlives its phase: later phases (interrupt probes,
        // EH bailout patch-up, prolog/epilog, final lower) reuse it.
        Lowerer lowerer(this);
        BEGIN_CODEGEN_PHASE(this, Js::LowererPhase);
        lowerer.Lower();
        END_CODEGEN_PHASE(this, Js::LowererPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::LowererPhase);
#endif /* IR_VIEWER */

        // Encode constants
        Security security(this);

        BEGIN_CODEGEN_PHASE(this, Js::EncodeConstantsPhase)
        security.EncodeLargeConstants();
        END_CODEGEN_PHASE(this, Js::EncodeConstantsPhase);

        if (GetJITFunctionBody()->DoInterruptProbe())
        {
            BEGIN_CODEGEN_PHASE(this, Js::InterruptProbePhase)
            lowerer.DoInterruptProbes();
            END_CODEGEN_PHASE(this, Js::InterruptProbePhase)
        }

        // Register Allocation
        BEGIN_CODEGEN_PHASE(this, Js::RegAllocPhase);
        LinearScan linearScan(this);
        linearScan.RegAlloc();
        END_CODEGEN_PHASE(this, Js::RegAllocPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::RegAllocPhase);
#endif /* IR_VIEWER */

        ThrowIfScriptClosed();

        // Peephole optimizations
        BEGIN_CODEGEN_PHASE(this, Js::PeepsPhase);
        Peeps peeps(this);
        peeps.PeepFunc();
        END_CODEGEN_PHASE(this, Js::PeepsPhase);

        // Layout
        BEGIN_CODEGEN_PHASE(this, Js::LayoutPhase);
        SimpleLayout layout(this);
        layout.Layout();
        END_CODEGEN_PHASE(this, Js::LayoutPhase);

        if (this->HasTry() && this->hasBailoutInEHRegion)
        {
            BEGIN_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
            lowerer.EHBailoutPatchUp();
            END_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
        }

        // Insert NOPs (moving this before prolog/epilog for AMD64 and possibly ARM).
        BEGIN_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
        security.InsertNOPs();
        END_CODEGEN_PHASE(this, Js::InsertNOPsPhase);

        // Prolog/Epilog
        BEGIN_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            lowerer.LowerPrologEpilogAsmJs();
        }
        else
        {
            lowerer.LowerPrologEpilog();
        }
        END_CODEGEN_PHASE(this, Js::PrologEpilogPhase);

        BEGIN_CODEGEN_PHASE(this, Js::FinalLowerPhase);
        lowerer.FinalLower();
        END_CODEGEN_PHASE(this, Js::FinalLowerPhase);

        // Encoder
        BEGIN_CODEGEN_PHASE(this, Js::EncoderPhase);
        Encoder encoder(this);
        encoder.Encode();
        END_CODEGEN_PHASE_NO_DUMP(this, Js::EncoderPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::EncoderPhase);
#endif /* IR_VIEWER */

    }
#if DBG_DUMP
    if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag))
    {
        // Redirect the asm dump to the configured file, then restore the
        // previous output target.
        FILE * oldFile = 0;
        FILE * asmFile = GetScriptContext()->GetNativeCodeGenerator()->asmFile;
        if (asmFile)
        {
            oldFile = Output::SetFile(asmFile);
        }

        this->Dump(IRDumpFlags_AsmDumpMode);

        Output::Flush();

        if (asmFile)
        {
            FILE *openedFile = Output::SetFile(oldFile);
            Assert(openedFile == asmFile);
        }
    }
#endif
    if (this->IsOOPJIT())
    {
        BEGIN_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);

        auto dataAllocator = this->GetNativeCodeDataAllocator();
        if (dataAllocator->allocCount > 0)
        {
            // First pass over the chunk list: run each chunk's fixup
            // callback so embedded pointers are translated for the client
            // process.
            NativeCodeData::DataChunk *chunk = (NativeCodeData::DataChunk*)dataAllocator->chunkList;
            NativeCodeData::DataChunk *next1 = chunk;
            while (next1)
            {
                if (next1->fixupFunc)
                {
                    next1->fixupFunc(next1->data, chunk);
                }
#if DBG
                if (CONFIG_FLAG(OOPJITFixupValidate))
                {
                    // Scan memory to see if there's missing pointer needs to be fixed up
                    // This can hit false positive if some data field happens to have value
                    // falls into the NativeCodeData memory range.
                    NativeCodeData::DataChunk *next2 = chunk;
                    while (next2)
                    {
                        for (unsigned int i = 0; i < next1->len / sizeof(void*); i++)
                        {
                            if (((void**)next1->data)[i] == (void*)next2->data)
                            {
                                NativeCodeData::VerifyExistFixupEntry((void*)next2->data, &((void**)next1->data)[i], next1->data);
                            }
                        }
                        next2 = next2->next;
                    }
                }
#endif
                next1 = next1->next;
            }

            // Allocate the IDL fixup table and flat data buffer that will be
            // marshalled back to the client process.
            JITOutputIDL* jitOutputData = m_output.GetOutputData();
            size_t allocSize = offsetof(NativeDataFixupTable, fixupRecords) + sizeof(NativeDataFixupRecord)* (dataAllocator->allocCount);
            jitOutputData->nativeDataFixupTable = (NativeDataFixupTable*)midl_user_allocate(allocSize);
            if (!jitOutputData->nativeDataFixupTable)
            {
                Js::Throw::OutOfMemory();
            }
            __analysis_assume(jitOutputData->nativeDataFixupTable);
            jitOutputData->nativeDataFixupTable->count = dataAllocator->allocCount;

            jitOutputData->buffer = (NativeDataBuffer*)midl_user_allocate(offsetof(NativeDataBuffer, data) + dataAllocator->totalSize);
            if (!jitOutputData->buffer)
            {
                Js::Throw::OutOfMemory();
            }
            __analysis_assume(jitOutputData->buffer);

            jitOutputData->buffer->len = dataAllocator->totalSize;

            // Second pass: copy each chunk's bytes into the flat buffer and
            // record one fixup entry per chunk.
            unsigned int len = 0;
            unsigned int count = 0;
            next1 = chunk;
            while (next1)
            {
                memcpy(jitOutputData->buffer->data + len, next1->data, next1->len);
                len += next1->len;

                jitOutputData->nativeDataFixupTable->fixupRecords[count].index = next1->allocIndex;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].length = next1->len;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].startOffset = next1->offset;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].updateList = next1->fixupList;

                count++;
                next1 = next1->next;
            }

#if DBG
            if (PHASE_TRACE1(Js::NativeCodeDataPhase))
            {
                Output::Print(_u("NativeCodeData Server Buffer: %p, len: %x, chunk head: %p\n"), jitOutputData->buffer->data, jitOutputData->buffer->len, chunk);
            }
#endif
        }
        END_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);
    }

    END_CODEGEN_PHASE(this, Js::BackEndPhase);
}
  578. ///----------------------------------------------------------------------------
  579. /// Func::StackAllocate
  580. /// Allocate stack space of given size.
  581. ///----------------------------------------------------------------------------
  582. int32
  583. Func::StackAllocate(int size)
  584. {
  585. Assert(this->IsTopFunc());
  586. int32 offset;
  587. #ifdef MD_GROW_LOCALS_AREA_UP
  588. // Locals have positive offsets and are allocated from bottom to top.
  589. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  590. offset = m_localStackHeight;
  591. m_localStackHeight += size;
  592. #else
  593. // Locals have negative offsets and are allocated from top to bottom.
  594. m_localStackHeight += size;
  595. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  596. offset = -m_localStackHeight;
  597. #endif
  598. return offset;
  599. }
  600. ///----------------------------------------------------------------------------
  601. ///
  602. /// Func::StackAllocate
  603. ///
  604. /// Allocate stack space for this symbol.
  605. ///
  606. ///----------------------------------------------------------------------------
  607. int32
  608. Func::StackAllocate(StackSym *stackSym, int size)
  609. {
  610. Assert(size > 0);
  611. if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym() || stackSym->IsAllocated())
  612. {
  613. return stackSym->m_offset;
  614. }
  615. Assert(stackSym->m_offset == 0);
  616. stackSym->m_allocated = true;
  617. stackSym->m_offset = StackAllocate(size);
  618. return stackSym->m_offset;
  619. }
  620. void
  621. Func::SetArgOffset(StackSym *stackSym, int32 offset)
  622. {
  623. AssertMsg(offset >= 0, "Why is the offset, negative?");
  624. stackSym->m_offset = offset;
  625. stackSym->m_allocated = true;
  626. }
///
/// Ensures that local var slots are created, if the function has locals.
/// Allocate stack space for locals used for debugging
/// (for local non-temp vars we write-through memory so that locals inspection can make use of that.).
/// On stack, after local slots we allocate space for metadata (in particular, whether any of the locals was changed in debugger).
///
void
Func::EnsureLocalVarSlots()
{
    // Debug-mode-only stack layout.
    Assert(IsJitInDebugMode());

    if (!this->HasLocalVarSlotCreated())
    {
        uint32 localSlotCount = GetJITFunctionBody()->GetNonTempLocalVarCount();
        if (localSlotCount && m_localVarSlotsOffset == Js::Constants::InvalidOffset)
        {
            // Allocate the slots.
            int32 size = localSlotCount * GetDiagLocalSlotSize();
            m_localVarSlotsOffset = StackAllocate(size);
            // The "locals changed" flag follows the slots, padded to alignment.
            m_hasLocalVarChangedOffset = StackAllocate(max(1, MachStackAlignment)); // Can't alloc less than StackAlignment bytes.

            Assert(m_workItem->Type() == JsFunctionType);

            // Publish the frame-adjusted offsets so the runtime can locate
            // the slots and the changed-flag.
            m_output.SetVarSlotsOffset(AdjustOffsetValue(m_localVarSlotsOffset));
            m_output.SetVarChangedOffset(AdjustOffsetValue(m_hasLocalVarChangedOffset));
        }
    }
}
// Computes and records the stack offset of this inlinee's first actual
// argument, derived from the ArgOut chain hanging off the InlineeStart instr.
void Func::SetFirstArgOffset(IR::Instr* inlineeStart)
{
    Assert(inlineeStart->m_func == this);
    Assert(!IsTopFunc());
    int32 lastOffset;

    IR::Instr* arg = inlineeStart->GetNextArg();
    if (arg)
    {
        const auto lastArgOutStackSym = arg->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
        lastOffset = lastArgOutStackSym->m_offset;
        Assert(lastArgOutStackSym->m_isSingleDef);
        // The previous ArgOut is linked through src2 of the last one; take
        // whichever of the two sits at the larger offset as the frame end.
        const auto secondLastArgOutOpnd = lastArgOutStackSym->m_instrDef->GetSrc2();
        if (secondLastArgOutOpnd->IsSymOpnd())
        {
            const auto secondLastOffset = secondLastArgOutOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_offset;
            if (secondLastOffset > lastOffset)
            {
                lastOffset = secondLastOffset;
            }
        }
        // One slot past the last ArgOut.
        lastOffset += MachPtr;
    }
    else
    {
        // No args at all: only expected in asm.js mode.
        Assert(this->GetTopFunc()->GetJITFunctionBody()->IsAsmJsMode());
        lastOffset = MachPtr;
    }
    // Walk back over the actuals plus the inlinee meta-arg slots.
    int32 firstActualStackOffset = lastOffset - ((this->actualCount + Js::Constants::InlineeMetaArgCount) * MachPtr);
    // If already computed, recomputation must agree.
    Assert((this->firstActualStackOffset == -1) || (this->firstActualStackOffset == firstActualStackOffset));
    this->firstActualStackOffset = firstActualStackOffset;
}
  683. int32
  684. Func::GetLocalVarSlotOffset(int32 slotId)
  685. {
  686. this->EnsureLocalVarSlots();
  687. Assert(m_localVarSlotsOffset != Js::Constants::InvalidOffset);
  688. int32 slotOffset = slotId * GetDiagLocalSlotSize();
  689. return m_localVarSlotsOffset + slotOffset;
  690. }
  691. void Func::OnAddSym(Sym* sym)
  692. {
  693. Assert(sym);
  694. if (this->IsJitInDebugMode() && this->IsNonTempLocalVar(sym->m_id))
  695. {
  696. Assert(m_nonTempLocalVars);
  697. m_nonTempLocalVars->Set(sym->m_id);
  698. }
  699. }
  700. ///
  701. /// Returns offset of the flag (1 byte) whether any local was changed (in debugger).
  702. /// If the function does not have any locals, returns -1.
  703. ///
  704. int32
  705. Func::GetHasLocalVarChangedOffset()
  706. {
  707. this->EnsureLocalVarSlots();
  708. return m_hasLocalVarChangedOffset;
  709. }
  710. bool
  711. Func::IsJitInDebugMode() const
  712. {
  713. return m_workItem->IsJitInDebugMode();
  714. }
  715. bool
  716. Func::IsNonTempLocalVar(uint32 slotIndex)
  717. {
  718. return GetJITFunctionBody()->IsNonTempLocalVar(slotIndex);
  719. }
// Converts a StackAllocate-style local offset into the form published to the
// runtime (used by locals inspection).
int32
Func::AdjustOffsetValue(int32 offset)
{
#ifdef MD_GROW_LOCALS_AREA_UP
    // Locals grow upward: negate and bias by StackSymBias.
    return -(offset + BailOutInfo::StackSymBias);
#else
    // Stack offset are negative, includes the PUSH EBP and return address
    return offset - (2 * MachPtr);
#endif
}
#ifdef MD_GROW_LOCALS_AREA_UP
// Note: this is called during jit-compile when we finalize bail out record.
// ("Ajust" is the long-standing spelling of this method; callers use it as-is.)
void
Func::AjustLocalVarSlotOffset()
{
    if (GetJITFunctionBody()->GetNonTempLocalVarCount())
    {
        // Turn positive SP-relative base locals offset into negative frame-pointer-relative offset
        // This is changing value for restoring the locals when read due to locals inspection.

        int localsOffset = m_localVarSlotsOffset - (m_localStackHeight + m_ArgumentsOffset);
        int valueChangeOffset = m_hasLocalVarChangedOffset - (m_localStackHeight + m_ArgumentsOffset);

        m_output.SetVarSlotsOffset(localsOffset);
        m_output.SetVarChangedOffset(valueChangeOffset);
    }
}
#endif
  746. bool
  747. Func::DoGlobOptsForGeneratorFunc() const
  748. {
  749. // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
  750. return !GetJITFunctionBody()->IsCoroutine();
  751. }
  752. bool
  753. Func::DoSimpleJitDynamicProfile() const
  754. {
  755. return IsSimpleJit() && !PHASE_OFF(Js::SimpleJitDynamicProfilePhase, GetTopFunc()) && !CONFIG_FLAG(NewSimpleJit);
  756. }
  757. void
  758. Func::SetDoFastPaths()
  759. {
  760. // Make sure we only call this once!
  761. Assert(!this->hasCalledSetDoFastPaths);
  762. bool doFastPaths = false;
  763. if(!PHASE_OFF(Js::FastPathPhase, this) && (!IsSimpleJit() || CONFIG_FLAG(NewSimpleJit)))
  764. {
  765. doFastPaths = true;
  766. }
  767. this->m_doFastPaths = doFastPaths;
  768. #ifdef DBG
  769. this->hasCalledSetDoFastPaths = true;
  770. #endif
  771. }
#if LOWER_SPLIT_INT64
// Returns the low/high 32-bit operand pair that represents the given 64-bit
// operand once int64 values are split during lowering. Sym pairs are cached
// in m_int64SymPairMap, which lives on the top func.
Int64RegPair Func::FindOrCreateInt64Pair(IR::Opnd* opnd)
{
    // Delegate to the top func, which owns the pair map.
    if (!this->IsTopFunc())
    {
        return GetTopFunc()->FindOrCreateInt64Pair(opnd);
    }
    AssertMsg(currentPhases.Top() == Js::LowererPhase, "New Int64 sym map is only allowed during lower");
    Int64RegPair pair;
    IRType pairType = opnd->GetType();
    if (opnd->IsInt64())
    {
        // Each half is a 32-bit value with the same signedness as the whole.
        pairType = IRType_IsSignedInt(pairType) ? TyInt32 : TyUint32;
    }

    if (opnd->IsIndirOpnd())
    {
        // High half is a copy of the same indir, 4 bytes further.
        IR::IndirOpnd* indir = opnd->AsIndirOpnd();
        indir->SetType(pairType);
        pair.low = indir;
        pair.high = indir->Copy(this)->AsIndirOpnd();
        pair.high->AsIndirOpnd()->SetOffset(indir->GetOffset() + 4);
        return pair;
    }

    // Only indir opnd can have a type other than int64
    Assert(opnd->IsInt64());

    if (opnd->IsImmediateOpnd())
    {
        // Constants split into their low and high 32 bits.
        int64 value = opnd->GetImmediateValue(this);
        pair.low = IR::IntConstOpnd::New((int32)value, pairType, this);
        pair.high = IR::IntConstOpnd::New((int32)(value >> 32), pairType, this);
        return pair;
    }

    Int64SymPair symPair;

    if (!m_int64SymPairMap)
    {
        m_int64SymPairMap = Anew(m_alloc, Int64SymPairMap, m_alloc);
    }
    StackSym* stackSym = opnd->GetStackSym();
    AssertOrFailFastMsg(stackSym, "Invalid int64 operand type");
    SymID symId = stackSym->m_id;
    if (!m_int64SymPairMap->TryGetValue(symId, &symPair))
    {
        if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym())
        {
            const bool isArg = stackSym->IsArgSlotSym();
            if (isArg)
            {
                // NOTE(review): both halves reuse the same arg slot number,
                // while the param case below uses slotNumber and
                // slotNumber + 1 — presumably intentional (arg slots are
                // addressed by offset, set just below); confirm.
                Js::ArgSlot slotNumber = stackSym->GetArgSlotNum();
                symPair.low = StackSym::NewArgSlotSym(slotNumber, this, pairType);
                symPair.high = StackSym::NewArgSlotSym(slotNumber, this, pairType);
            }
            else
            {
                Js::ArgSlot slotNumber = stackSym->GetParamSlotNum();
                symPair.low = StackSym::NewParamSlotSym(slotNumber, this, pairType);
                symPair.high = StackSym::NewParamSlotSym(slotNumber + 1, this, pairType);
            }
            // Arg/param slots are already laid out: the halves live at the
            // original offset and original offset + 4.
            symPair.low->m_allocated = true;
            symPair.low->m_offset = stackSym->m_offset;
            symPair.high->m_allocated = true;
            symPair.high->m_offset = stackSym->m_offset + 4;
        }
        else
        {
            symPair.low = StackSym::New(pairType, this);
            symPair.high = StackSym::New(pairType, this);
        }
        m_int64SymPairMap->Add(symId, symPair);
    }

    // Wrap the cached syms in the same operand kind as the input.
    if (opnd->IsSymOpnd())
    {
        pair.low = IR::SymOpnd::New(symPair.low, opnd->AsSymOpnd()->m_offset, pairType, this);
        pair.high = IR::SymOpnd::New(symPair.high, opnd->AsSymOpnd()->m_offset, pairType, this);
    }
    else
    {
        pair.low = IR::RegOpnd::New(symPair.low, pairType, this);
        pair.high = IR::RegOpnd::New(symPair.high, pairType, this);
    }
    return pair;
}
// After int64 split: any sym live on the loop back edge that was replaced by
// a low/high pair must have both halves marked live on the back edge too.
void Func::Int64SplitExtendLoopLifetime(Loop* loop)
{
    // The pair map lives on the top func.
    if (!this->IsTopFunc())
    {
        GetTopFunc()->Int64SplitExtendLoopLifetime(loop);
        return;
    }
    if (m_int64SymPairMap)
    {
        BVSparse<JitArenaAllocator> *liveOnBackEdgeSyms = loop->regAlloc.liveOnBackEdgeSyms;
        FOREACH_BITSET_IN_SPARSEBV(symId, liveOnBackEdgeSyms)
        {
            Int64SymPair pair;
            if (m_int64SymPairMap->TryGetValue(symId, &pair))
            {
                // If we have replaced any sym that was live on the back edge for 2 other syms
                // these 2 syms needs to be live on back edge as well.
                liveOnBackEdgeSyms->Set(pair.low->m_id);
                liveOnBackEdgeSyms->Set(pair.high->m_id);
            }
        } NEXT_BITSET_IN_SPARSEBV;
    }
}
#endif
#if defined(_M_ARM32_OR_ARM64)
// Register used to address locals on ARM/ARM64: the alternative locals
// pointer when the function has a try (or when forced via flag in debug
// builds), otherwise SP.
RegNum
Func::GetLocalsPointer() const
{
#ifdef DBG
    // Test hook: force use of the alternative locals pointer.
    if (Js::Configuration::Global.flags.IsEnabled(Js::ForceLocalsPtrFlag))
    {
        return ALT_LOCALS_PTR;
    }
#endif

    if (GetJITFunctionBody()->HasTry())
    {
        return ALT_LOCALS_PTR;
    }

    return RegSP;
}
#endif
// Record that loads through this slot-array sym need a range check. Per
// closure stack sym, the table keeps the highest slot index accessed so a
// single check against the maximum suffices.
void Func::AddSlotArrayCheck(IR::SymOpnd *fieldOpnd)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->slotArrayCheckTable == nullptr)
    {
        this->slotArrayCheckTable = SlotArrayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    uint32 slot = propertySym->m_propertyId;
    uint32 *pSlotId = this->slotArrayCheckTable->FindOrInsert(slot, propertySym->m_stackSym->m_id);
    // Keep the largest slot id seen; (uint32)-1 presumably acts as the
    // "no slot recorded yet" sentinel — confirm against FindOrInsert.
    if (pSlotId && (*pSlotId == (uint32)-1 || *pSlotId < slot))
    {
        *pSlotId = propertySym->m_propertyId;
    }
}
// Record that loads through this frame-display sym need range checks. Tracks
// the highest frame-display slot accessed for the sym and, when slotId is
// not (uint32)-1, the highest slot-array index accessed at that
// frame-display slot.
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->frameDisplayCheckTable == nullptr)
    {
        this->frameDisplayCheckTable = FrameDisplayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    FrameDisplayCheckRecord **record = this->frameDisplayCheckTable->FindOrInsertNew(propertySym->m_stackSym->m_id);
    if (*record == nullptr)
    {
        *record = JitAnew(m_alloc, FrameDisplayCheckRecord);
    }

    // Track the maximum frame-display slot seen for this sym.
    uint32 frameDisplaySlot = propertySym->m_propertyId;
    if ((*record)->table == nullptr || (*record)->slotId < frameDisplaySlot)
    {
        (*record)->slotId = frameDisplaySlot;
    }

    if (slotId != (uint32)-1)
    {
        // Also track the maximum slot-array index for this frame-display slot.
        if ((*record)->table == nullptr)
        {
            (*record)->table = SlotArrayCheckTable::New(m_alloc, 4);
        }
        uint32 *pSlotId = (*record)->table->FindOrInsert(slotId, frameDisplaySlot);
        if (pSlotId && *pSlotId < slotId)
        {
            *pSlotId = slotId;
        }
    }
}
// Creates stack syms for the local closure, param closure, and local frame
// display registers when the function body defines them.
void Func::InitLocalClosureSyms()
{
    Assert(this->m_localClosureSym == nullptr);

    // Allocate stack space for closure pointers. Do this only if we're jitting for stack closures, and
    // tell bailout that these are not byte code symbols so that we don't try to encode them in the bailout record,
    // as they don't have normal lifetimes.
    Js::RegSlot regSlot = GetJITFunctionBody()->GetLocalClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // (Js::RegSlot)-1 marks the sym as a non-byte-code symbol for bailout.
        this->m_localClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }

    regSlot = this->GetJITFunctionBody()->GetParamClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // A separate param closure only exists when param and body scopes
        // are not merged.
        Assert(this->GetParamClosureSym() == nullptr && !this->GetJITFunctionBody()->IsParamAndBodyScopeMerged());
        this->m_paramClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot) - 1 : regSlot,
                                   this);
    }

    regSlot = GetJITFunctionBody()->GetLocalFrameDisplayReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localFrameDisplaySym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }
}
  980. void
  981. Func::AddInlineeFrameDisplaySym(StackSym *inlineeFrameDisplaySym)
  982. {
  983. if (m_inlineeFrameDisplaySyms == nullptr)
  984. {
  985. m_inlineeFrameDisplaySyms = JitAnew(this->m_alloc, SList<StackSym*>, this->m_alloc);
  986. }
  987. m_inlineeFrameDisplaySyms->Prepend(inlineeFrameDisplaySym);
  988. }
  989. bool
  990. Func::IsTrackCompoundedIntOverflowDisabled() const
  991. {
  992. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsTrackCompoundedIntOverflowDisabled()) || m_output.IsTrackCompoundedIntOverflowDisabled();
  993. }
  994. bool
  995. Func::IsMemOpDisabled() const
  996. {
  997. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsMemOpDisabled()) || m_output.IsMemOpDisabled();
  998. }
  999. bool
  1000. Func::IsArrayCheckHoistDisabled() const
  1001. {
  1002. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsArrayCheckHoistDisabled(IsLoopBody())) || m_output.IsArrayCheckHoistDisabled();
  1003. }
  1004. bool
  1005. Func::IsStackArgOptDisabled() const
  1006. {
  1007. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsStackArgOptDisabled()) || m_output.IsStackArgOptDisabled();
  1008. }
  1009. bool
  1010. Func::IsSwitchOptDisabled() const
  1011. {
  1012. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsSwitchOptDisabled()) || m_output.IsSwitchOptDisabled();
  1013. }
  1014. bool
  1015. Func::IsAggressiveIntTypeSpecDisabled() const
  1016. {
  1017. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsAggressiveIntTypeSpecDisabled(IsLoopBody())) || m_output.IsAggressiveIntTypeSpecDisabled();
  1018. }
// Whether JIT output for this func may be allocated from the pre-reserved
// heap page segment. Only meaningful when control flow guard is enabled;
// always false otherwise. Note the unusual layout: the x86 #if supplies the
// tail of the return expression, and the #elif/#else branches supply
// alternate tails for other architectures.
bool Func::CanAllocInPreReservedHeapPageSegment ()
{
#ifdef _CONTROL_FLOW_GUARD
    return PHASE_FORCE1(Js::PreReservedHeapAllocPhase) || (!PHASE_OFF1(Js::PreReservedHeapAllocPhase) &&
        !IsJitInDebugMode()
#if _M_IX86
        && m_workItem->GetJitMode() == ExecutionMode::FullJit
#if ENABLE_OOP_NATIVE_CODEGEN
        // OOP JIT and in-proc JIT keep their allocators in different places.
        && (JITManager::GetJITManager()->IsJITServer()
            ? GetOOPCodeGenAllocators()->canCreatePreReservedSegment
            : GetInProcCodeGenAllocators()->canCreatePreReservedSegment)
#else
        && GetInProcCodeGenAllocators()->canCreatePreReservedSegment
#endif
        );
#elif TARGET_64
        && true);
#else
        && false); //Not yet implemented for architectures other than x86 and amd64.
#endif //_M_ARCH
#else
    return false;
#endif//_CONTROL_FLOW_GUARD
}
  1043. ///----------------------------------------------------------------------------
  1044. ///
  1045. /// Func::GetInstrCount
  1046. ///
  1047. /// Returns the number of instrs.
  1048. /// Note: It counts all instrs for now, including labels, etc.
  1049. ///
  1050. ///----------------------------------------------------------------------------
  1051. uint32
  1052. Func::GetInstrCount()
  1053. {
  1054. uint instrCount = 0;
  1055. FOREACH_INSTR_IN_FUNC(instr, this)
  1056. {
  1057. instrCount++;
  1058. }NEXT_INSTR_IN_FUNC;
  1059. return instrCount;
  1060. }
  1061. ///----------------------------------------------------------------------------
  1062. ///
  1063. /// Func::NumberInstrs
  1064. ///
  1065. /// Number each instruction in order of appearance in the function.
  1066. ///
  1067. ///----------------------------------------------------------------------------
  1068. void
  1069. Func::NumberInstrs()
  1070. {
  1071. #if DBG_DUMP
  1072. Assert(this->IsTopFunc());
  1073. Assert(!this->hasInstrNumber);
  1074. this->hasInstrNumber = true;
  1075. #endif
  1076. uint instrCount = 1;
  1077. FOREACH_INSTR_IN_FUNC(instr, this)
  1078. {
  1079. instr->SetNumber(instrCount++);
  1080. }
  1081. NEXT_INSTR_IN_FUNC;
  1082. }
  1083. #if DBG
  1084. BVSparse<JitArenaAllocator>* Func::GetByteCodeOffsetUses(uint offset) const
  1085. {
  1086. InstrByteCodeRegisterUses uses;
  1087. if (byteCodeRegisterUses->TryGetValue(offset, &uses))
  1088. {
  1089. return uses.bv;
  1090. }
  1091. return nullptr;
  1092. }
  1093. ///----------------------------------------------------------------------------
  1094. ///
  1095. /// Func::IsInPhase
  1096. ///
  1097. /// Determines whether the function is currently in the provided phase
  1098. ///
  1099. ///----------------------------------------------------------------------------
  1100. bool
  1101. Func::IsInPhase(Js::Phase tag)
  1102. {
  1103. return this->GetTopFunc()->currentPhases.Contains(tag);
  1104. }
  1105. #endif
///----------------------------------------------------------------------------
///
/// Func::BeginPhase
///
///     Takes care of the profiler
///     (and, in debug builds, the phase stack and the break-on-phase flag).
///
///----------------------------------------------------------------------------
void
Func::BeginPhase(Js::Phase tag)
{
#ifdef DBG
    // Track the active phase on the top func's phase stack.
    this->GetTopFunc()->currentPhases.Push(tag);

    if (PHASE_DEBUGBREAK_ON_PHASE_BEGIN(tag, this))
    {
        __debugbreak();
    }
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileBegin(tag);
    }
#endif
}
///----------------------------------------------------------------------------
///
/// Func::EndProfiler
///
///     Takes care of the profiler (and, in debug builds, pops and verifies
///     the phase stack). Called by EndPhase.
///
///----------------------------------------------------------------------------
void
Func::EndProfiler(Js::Phase tag)
{
#ifdef DBG
    // Phases must end in LIFO order.
    Assert(this->GetTopFunc()->currentPhases.Count() > 0);
    Js::Phase popped = this->GetTopFunc()->currentPhases.Pop();
    Assert(tag == popped);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileEnd(tag);
    }
#endif
}
// Ends a phase: stops profiling, optionally dumps the IR, and (in debug
// builds) advances the post-phase state flags and runs post-lower IR checks.
void
Func::EndPhase(Js::Phase tag, bool dump)
{
    this->EndProfiler(tag);
#if DBG_DUMP
    if(dump && (PHASE_DUMP(tag, this)
        || PHASE_DUMP(Js::BackEndPhase, this)))
    {
        Output::Print(_u("-----------------------------------------------------------------------------\n"));

        if (IsLoopBody())
        {
            Output::Print(_u("************ IR after %s (%S) Loop %d ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()),
                m_workItem->GetLoopNumber());
        }
        else
        {
            Output::Print(_u("************ IR after %s (%S) ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()));
        }
        this->Dump(Js::Configuration::Global.flags.AsmDiff? IRDumpFlags_AsmDumpMode : IRDumpFlags_None);
    }
#endif

    if (tag == Js::RegAllocPhase)
    {
        this->legalizePostRegAlloc = true;
    }
#if DBG
    // Record phase ordering; the asserts enforce the expected pipeline order
    // (Lower -> RegAlloc -> Peeps -> Layout -> FinalLower).
    if (tag == Js::LowererPhase)
    {
        Assert(!this->isPostLower);
        this->isPostLower = true;
    }
    else if (tag == Js::RegAllocPhase)
    {
        Assert(!this->isPostRegAlloc);
        this->isPostRegAlloc = true;
    }
    else if (tag == Js::PeepsPhase)
    {
        Assert(this->isPostLower && !this->isPostLayout);
        this->isPostPeeps = true;
    }
    else if (tag == Js::LayoutPhase)
    {
        Assert(this->isPostPeeps && !this->isPostLayout);
        this->isPostLayout = true;
    }
    else if (tag == Js::FinalLowerPhase)
    {
        Assert(this->isPostLayout && !this->isPostFinalLower);
        this->isPostFinalLower = true;
    }
    // Re-verify IR invariants after each post-lower phase.
    if (this->isPostLower)
    {
#ifndef _M_ARM // Need to verify ARM is clean.
        DbCheckPostLower dbCheck(this);

        dbCheck.Check();
#endif
    }
    this->m_alloc->MergeDelayFreeList();
#endif
}
  1221. StackSym *
  1222. Func::EnsureLoopParamSym()
  1223. {
  1224. if (this->m_loopParamSym == nullptr)
  1225. {
  1226. this->m_loopParamSym = StackSym::New(TyMachPtr, this);
  1227. }
  1228. return this->m_loopParamSym;
  1229. }
  1230. void
  1231. Func::UpdateMaxInlineeArgOutSize(uint inlineeArgOutSize)
  1232. {
  1233. if (this->maxInlineeArgOutSize < inlineeArgOutSize)
  1234. {
  1235. this->maxInlineeArgOutSize = inlineeArgOutSize;
  1236. }
  1237. }
  1238. void
  1239. Func::BeginClone(Lowerer * lowerer, JitArenaAllocator *alloc)
  1240. {
  1241. Assert(this->IsTopFunc());
  1242. AssertMsg(m_cloner == nullptr, "Starting new clone while one is in progress");
  1243. m_cloner = JitAnew(alloc, Cloner, lowerer, alloc);
  1244. if (m_cloneMap == nullptr)
  1245. {
  1246. m_cloneMap = JitAnew(alloc, InstrMap, alloc, 7);
  1247. }
  1248. }
  1249. void
  1250. Func::EndClone()
  1251. {
  1252. Assert(this->IsTopFunc());
  1253. if (m_cloner)
  1254. {
  1255. m_cloner->Finish();
  1256. JitAdelete(m_cloner->alloc, m_cloner);
  1257. m_cloner = nullptr;
  1258. }
  1259. }
  1260. IR::SymOpnd *
  1261. Func::GetInlineeOpndAtOffset(int32 offset)
  1262. {
  1263. Assert(IsInlinee());
  1264. StackSym *stackSym = CreateInlineeStackSym();
  1265. this->SetArgOffset(stackSym, stackSym->m_offset + offset);
  1266. Assert(stackSym->m_offset >= 0);
  1267. return IR::SymOpnd::New(stackSym, 0, TyMachReg, this);
  1268. }
// Creates (or fetches) the arg-slot sym that marks the start of this
// inlinee's frame, placed at the frame-start offset computed by GlobOpt.
StackSym *
Func::CreateInlineeStackSym()
{
    // Make sure this is an inlinee and that GlobOpt has initialized the offset
    // in the inlinee's frame.
    Assert(IsInlinee());
    Assert(m_inlineeFrameStartSym->m_offset != -1);

    // (Js::ArgSlot)-1 is the sentinel slot number for this marker sym.
    StackSym *stackSym = m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
    stackSym->m_isInlinedArgSlot = true;
    stackSym->m_offset = m_inlineeFrameStartSym->m_offset;
    stackSym->m_allocated = true;
    return stackSym;
}
  1282. uint16
  1283. Func::GetArgUsedForBranch() const
  1284. {
  1285. // this value can change while JITing, so or these together
  1286. return GetJITFunctionBody()->GetArgUsedForBranch() | GetJITOutput()->GetArgUsedForBranch();
  1287. }
  1288. intptr_t
  1289. Func::GetJittedLoopIterationsSinceLastBailoutAddress() const
  1290. {
  1291. Assert(this->m_workItem->Type() == JsLoopBodyWorkItemType);
  1292. return m_workItem->GetJittedLoopIterationsSinceLastBailoutAddr();
  1293. }
  1294. intptr_t
  1295. Func::GetWeakFuncRef() const
  1296. {
  1297. // TODO: OOP JIT figure out if this can be null
  1298. return m_workItem->GetJITTimeInfo()->GetWeakFuncRef();
  1299. }
  1300. intptr_t
  1301. Func::GetRuntimeInlineCache(const uint index) const
  1302. {
  1303. if(m_runtimeInfo != nullptr && m_runtimeInfo->HasClonedInlineCaches())
  1304. {
  1305. intptr_t inlineCache = m_runtimeInfo->GetClonedInlineCache(index);
  1306. if(inlineCache)
  1307. {
  1308. return inlineCache;
  1309. }
  1310. }
  1311. return GetJITFunctionBody()->GetInlineCache(index);
  1312. }
  1313. JITTimePolymorphicInlineCache *
  1314. Func::GetRuntimePolymorphicInlineCache(const uint index) const
  1315. {
  1316. if (this->m_polymorphicInlineCacheInfo && this->m_polymorphicInlineCacheInfo->HasInlineCaches())
  1317. {
  1318. return this->m_polymorphicInlineCacheInfo->GetInlineCache(index);
  1319. }
  1320. return nullptr;
  1321. }
  1322. byte
  1323. Func::GetPolyCacheUtilToInitialize(const uint index) const
  1324. {
  1325. return this->GetRuntimePolymorphicInlineCache(index) ? this->GetPolyCacheUtil(index) : PolymorphicInlineCacheUtilizationMinValue;
  1326. }
  1327. byte
  1328. Func::GetPolyCacheUtil(const uint index) const
  1329. {
  1330. return this->m_polymorphicInlineCacheInfo->GetUtil(index);
  1331. }
  1332. ObjTypeSpecFldInfo*
  1333. Func::GetObjTypeSpecFldInfo(const uint index) const
  1334. {
  1335. if (GetJITFunctionBody()->GetInlineCacheCount() == 0)
  1336. {
  1337. Assert(UNREACHED);
  1338. return nullptr;
  1339. }
  1340. return GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(index);
  1341. }
  1342. void
  1343. Func::ClearObjTypeSpecFldInfo(const uint index)
  1344. {
  1345. GetWorkItem()->GetJITTimeInfo()->ClearObjTypeSpecFldInfo(index);
  1346. }
  1347. ObjTypeSpecFldInfo*
  1348. Func::GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const
  1349. {
  1350. Assert(propertyInfoId < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
  1351. return GetTopFunc()->m_globalObjTypeSpecFldInfoArray[propertyInfoId];
  1352. }
  1353. void
  1354. Func::EnsurePinnedTypeRefs()
  1355. {
  1356. if (this->pinnedTypeRefs == nullptr)
  1357. {
  1358. this->pinnedTypeRefs = JitAnew(this->m_alloc, TypeRefSet, this->m_alloc);
  1359. }
  1360. }
  1361. void
  1362. Func::PinTypeRef(void* typeRef)
  1363. {
  1364. EnsurePinnedTypeRefs();
  1365. this->pinnedTypeRefs->AddNew(typeRef);
  1366. }
  1367. void
  1368. Func::EnsureSingleTypeGuards()
  1369. {
  1370. if (this->singleTypeGuards == nullptr)
  1371. {
  1372. this->singleTypeGuards = JitAnew(this->m_alloc, TypePropertyGuardDictionary, this->m_alloc);
  1373. }
  1374. }
// Returns the unique type-property guard for the given type address, creating
// and registering it on first request.
Js::JitTypePropertyGuard*
Func::GetOrCreateSingleTypeGuard(intptr_t typeAddr)
{
    EnsureSingleTypeGuards();

    Js::JitTypePropertyGuard* guard = nullptr;
    if (!this->singleTypeGuards->TryGetValue(typeAddr, &guard))
    {
        // Property guards are allocated by NativeCodeData::Allocator so that their lifetime extends as long as the EntryPointInfo is alive.
        guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitTypePropertyGuard, typeAddr, this->indexedPropertyGuardCount++);
        this->singleTypeGuards->Add(typeAddr, guard);
    }
    else
    {
        // Cached guard must refer to the same type.
        Assert(guard->GetTypeAddr() == typeAddr);
    }

    return guard;
}
  1392. void
  1393. Func::EnsureEquivalentTypeGuards()
  1394. {
  1395. AssertMsg(!PHASE_OFF(Js::EquivObjTypeSpecPhase, this), "Why do we have equivalent type guards if we don't do equivalent object type spec?");
  1396. if (this->equivalentTypeGuards == nullptr)
  1397. {
  1398. this->equivalentTypeGuards = JitAnew(this->m_alloc, EquivalentTypeGuardList, this->m_alloc);
  1399. }
  1400. }
  1401. Js::JitEquivalentTypeGuard*
  1402. Func::CreateEquivalentTypeGuard(JITTypeHolder type, uint32 objTypeSpecFldId)
  1403. {
  1404. EnsureEquivalentTypeGuards();
  1405. Js::JitEquivalentTypeGuard* guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitEquivalentTypeGuard, type->GetAddr(), this->indexedPropertyGuardCount++, objTypeSpecFldId);
  1406. this->InitializeEquivalentTypeGuard(guard);
  1407. return guard;
  1408. }
  1409. Js::JitPolyEquivalentTypeGuard*
  1410. Func::CreatePolyEquivalentTypeGuard(uint32 objTypeSpecFldId)
  1411. {
  1412. EnsureEquivalentTypeGuards();
  1413. Js::JitPolyEquivalentTypeGuard* guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitPolyEquivalentTypeGuard, this->indexedPropertyGuardCount++, objTypeSpecFldId);
  1414. this->InitializeEquivalentTypeGuard(guard);
  1415. return guard;
  1416. }
// Shared init for (poly) equivalent-type guards: allocate the equivalent-type
// cache, wire guard <-> cache, and register the guard on this func.
void
Func::InitializeEquivalentTypeGuard(Js::JitEquivalentTypeGuard * guard)
{
    // If we want to hard code the address of the cache, we will need to go back to allocating it from the native code data allocator.
    // We would then need to maintain consistency (double write) to both the recycler allocated cache and the one on the heap.
    Js::EquivalentTypeCache* cache = nullptr;
    if (this->IsOOPJIT())
    {
        // OOP JIT: arena-allocated on the JIT side.
        cache = JitAnewZ(this->m_alloc, Js::EquivalentTypeCache);
    }
    else
    {
        // In-proc: allocated from the transfer data allocator.
        cache = NativeCodeDataNewZNoFixup(GetTransferDataAllocator(), Js::EquivalentTypeCache);
    }
    guard->SetCache(cache);

    // Give the cache a back-pointer to the guard so that the guard can be cleared at runtime if necessary.
    cache->SetGuard(guard);
    this->equivalentTypeGuards->Prepend(guard);
}
  1436. void
  1437. Func::EnsurePropertyGuardsByPropertyId()
  1438. {
  1439. if (this->propertyGuardsByPropertyId == nullptr)
  1440. {
  1441. this->propertyGuardsByPropertyId = JitAnew(this->m_alloc, PropertyGuardByPropertyIdMap, this->m_alloc);
  1442. }
  1443. }
  1444. void
  1445. Func::EnsureCtorCachesByPropertyId()
  1446. {
  1447. if (this->ctorCachesByPropertyId == nullptr)
  1448. {
  1449. this->ctorCachesByPropertyId = JitAnew(this->m_alloc, CtorCachesByPropertyIdMap, this->m_alloc);
  1450. }
  1451. }
  1452. void
  1453. Func::LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard)
  1454. {
  1455. Assert(guard != nullptr);
  1456. Assert(guard->GetValue() != NULL);
  1457. Assert(this->propertyGuardsByPropertyId != nullptr);
  1458. IndexedPropertyGuardSet* set;
  1459. if (!this->propertyGuardsByPropertyId->TryGetValue(propertyId, &set))
  1460. {
  1461. set = JitAnew(this->m_alloc, IndexedPropertyGuardSet, this->m_alloc);
  1462. this->propertyGuardsByPropertyId->Add(propertyId, set);
  1463. }
  1464. set->Item(guard);
  1465. }
  1466. void
  1467. Func::LinkCtorCacheToPropertyId(Js::PropertyId propertyId, JITTimeConstructorCache* cache)
  1468. {
  1469. Assert(cache != nullptr);
  1470. Assert(this->ctorCachesByPropertyId != nullptr);
  1471. CtorCacheSet* set;
  1472. if (!this->ctorCachesByPropertyId->TryGetValue(propertyId, &set))
  1473. {
  1474. set = JitAnew(this->m_alloc, CtorCacheSet, this->m_alloc);
  1475. this->ctorCachesByPropertyId->Add(propertyId, set);
  1476. }
  1477. set->Item(cache->GetRuntimeCacheAddr());
  1478. }
  1479. JITTimeConstructorCache* Func::GetConstructorCache(const Js::ProfileId profiledCallSiteId)
  1480. {
  1481. AssertOrFailFast(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
  1482. Assert(this->constructorCaches != nullptr);
  1483. return this->constructorCaches[profiledCallSiteId];
  1484. }
  1485. void Func::SetConstructorCache(const Js::ProfileId profiledCallSiteId, JITTimeConstructorCache* constructorCache)
  1486. {
  1487. AssertOrFailFast(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
  1488. Assert(constructorCache != nullptr);
  1489. Assert(this->constructorCaches != nullptr);
  1490. Assert(this->constructorCaches[profiledCallSiteId] == nullptr);
  1491. this->constructorCacheCount++;
  1492. this->constructorCaches[profiledCallSiteId] = constructorCache;
  1493. }
  1494. void Func::EnsurePropertiesWrittenTo()
  1495. {
  1496. if (this->propertiesWrittenTo == nullptr)
  1497. {
  1498. this->propertiesWrittenTo = JitAnew(this->m_alloc, PropertyIdSet, this->m_alloc);
  1499. }
  1500. }
  1501. void Func::EnsureCallSiteToArgumentsOffsetFixupMap()
  1502. {
  1503. if (this->callSiteToArgumentsOffsetFixupMap == nullptr)
  1504. {
  1505. this->callSiteToArgumentsOffsetFixupMap = JitAnew(this->m_alloc, CallSiteToArgumentsOffsetFixupMap, this->m_alloc);
  1506. }
  1507. }
// Accessor: the function-start data label, or nullptr if never created
// (see EnsureFuncStartLabel).
IR::LabelInstr *
Func::GetFuncStartLabel()
{
    return m_funcStartLabel;
}
  1513. IR::LabelInstr *
  1514. Func::EnsureFuncStartLabel()
  1515. {
  1516. if(m_funcStartLabel == nullptr)
  1517. {
  1518. m_funcStartLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1519. m_funcStartLabel->m_isDataLabel = true;
  1520. }
  1521. return m_funcStartLabel;
  1522. }
// Accessor: the function-end data label, or nullptr if never created
// (see EnsureFuncEndLabel).
IR::LabelInstr *
Func::GetFuncEndLabel()
{
    return m_funcEndLabel;
}
  1528. IR::LabelInstr *
  1529. Func::EnsureFuncEndLabel()
  1530. {
  1531. if(m_funcEndLabel == nullptr)
  1532. {
  1533. m_funcEndLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1534. m_funcEndLabel->m_isDataLabel = true;
  1535. }
  1536. return m_funcEndLabel;
  1537. }
  1538. void
  1539. Func::EnsureStackArgWithFormalsTracker()
  1540. {
  1541. if (stackArgWithFormalsTracker == nullptr)
  1542. {
  1543. stackArgWithFormalsTracker = JitAnew(m_alloc, StackArgWithFormalsTracker, m_alloc);
  1544. }
  1545. }
  1546. BOOL
  1547. Func::IsFormalsArraySym(SymID symId)
  1548. {
  1549. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsArraySyms() == nullptr)
  1550. {
  1551. return false;
  1552. }
  1553. return stackArgWithFormalsTracker->GetFormalsArraySyms()->Test(symId);
  1554. }
  1555. void
  1556. Func::TrackFormalsArraySym(SymID symId)
  1557. {
  1558. EnsureStackArgWithFormalsTracker();
  1559. stackArgWithFormalsTracker->SetFormalsArraySyms(symId);
  1560. }
  1561. void
  1562. Func::TrackStackSymForFormalIndex(Js::ArgSlot formalsIndex, StackSym * sym)
  1563. {
  1564. EnsureStackArgWithFormalsTracker();
  1565. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1566. stackArgWithFormalsTracker->SetStackSymInFormalsIndexMap(sym, formalsIndex, formalsCount);
  1567. }
  1568. StackSym *
  1569. Func::GetStackSymForFormal(Js::ArgSlot formalsIndex)
  1570. {
  1571. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1572. {
  1573. return nullptr;
  1574. }
  1575. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1576. StackSym ** formalsIndexToStackSymMap = stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap();
  1577. AssertMsg(formalsIndex < formalsCount, "OutOfRange ? ");
  1578. return formalsIndexToStackSymMap[formalsIndex];
  1579. }
  1580. bool
  1581. Func::HasStackSymForFormal(Js::ArgSlot formalsIndex)
  1582. {
  1583. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1584. {
  1585. return false;
  1586. }
  1587. return GetStackSymForFormal(formalsIndex) != nullptr;
  1588. }
  1589. void
  1590. Func::SetScopeObjSym(StackSym * sym)
  1591. {
  1592. EnsureStackArgWithFormalsTracker();
  1593. stackArgWithFormalsTracker->SetScopeObjSym(sym);
  1594. }
// OOP JIT only: the sym referring to the native-code-data block (may be
// nullptr until SetNativeCodeDataSym is called).
StackSym *
Func::GetNativeCodeDataSym() const
{
    Assert(IsOOPJIT());
    return m_nativeCodeDataSym;
}
// OOP JIT only: record the sym referring to the native-code-data block.
void
Func::SetNativeCodeDataSym(StackSym * opnd)
{
    Assert(IsOOPJIT());
    m_nativeCodeDataSym = opnd;
}
  1607. StackSym*
  1608. Func::GetScopeObjSym()
  1609. {
  1610. if (stackArgWithFormalsTracker == nullptr)
  1611. {
  1612. return nullptr;
  1613. }
  1614. return stackArgWithFormalsTracker->GetScopeObjSym();
  1615. }
// Bit-vector of syms known to hold the formals array; nullptr until the
// first SetFormalsArraySyms call.
BVSparse<JitArenaAllocator> *
StackArgWithFormalsTracker::GetFormalsArraySyms()
{
    return formalsArraySyms;
}
  1621. void
  1622. StackArgWithFormalsTracker::SetFormalsArraySyms(SymID symId)
  1623. {
  1624. if (formalsArraySyms == nullptr)
  1625. {
  1626. formalsArraySyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1627. }
  1628. formalsArraySyms->Set(symId);
  1629. }
// Formal-index -> stack sym array; nullptr until the first
// SetStackSymInFormalsIndexMap call.
StackSym **
StackArgWithFormalsTracker::GetFormalsIndexToStackSymMap()
{
    return formalsIndexToStackSymMap;
}
  1635. void
  1636. StackArgWithFormalsTracker::SetStackSymInFormalsIndexMap(StackSym * sym, Js::ArgSlot formalsIndex, Js::ArgSlot formalsCount)
  1637. {
  1638. if(formalsIndexToStackSymMap == nullptr)
  1639. {
  1640. formalsIndexToStackSymMap = JitAnewArrayZ(alloc, StackSym*, formalsCount);
  1641. }
  1642. AssertMsg(formalsIndex < formalsCount, "Out of range ?");
  1643. formalsIndexToStackSymMap[formalsIndex] = sym;
  1644. }
// Record the sym holding the scope object.
void
StackArgWithFormalsTracker::SetScopeObjSym(StackSym * sym)
{
    m_scopeObjSym = sym;
}
// The recorded scope-object sym, or nullptr if none was set.
StackSym *
StackArgWithFormalsTracker::GetScopeObjSym()
{
    return m_scopeObjSym;
}
  1655. void
  1656. Cloner::AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone)
  1657. {
  1658. if (!this->instrFirst)
  1659. {
  1660. this->instrFirst = instrClone;
  1661. }
  1662. this->instrLast = instrClone;
  1663. }
  1664. void
  1665. Cloner::Finish()
  1666. {
  1667. this->RetargetClonedBranches();
  1668. if (this->lowerer)
  1669. {
  1670. lowerer->LowerRange(this->instrFirst, this->instrLast, false, false);
  1671. }
  1672. }
// After cloning a range, retarget each cloned branch instruction in the
// tracked [instrFirst, instrLast] range. No-op unless retargeting was
// requested for this cloner (fRetargetClonedBranch).
void
Cloner::RetargetClonedBranches()
{
    if (!this->fRetargetClonedBranch)
    {
        return;
    }
    FOREACH_INSTR_IN_RANGE(instr, this->instrFirst, this->instrLast)
    {
        if (instr->IsBranchInstr())
        {
            instr->AsBranchInstr()->RetargetClonedBranch();
        }
    }
    NEXT_INSTR_IN_RANGE;
}
  1689. void Func::ThrowIfScriptClosed()
  1690. {
  1691. if (GetScriptContextInfo()->IsClosed())
  1692. {
  1693. // Should not be jitting something in the foreground when the script context is actually closed
  1694. Assert(IsBackgroundJIT() || !GetScriptContext()->IsActuallyClosed());
  1695. throw Js::OperationAbortedException();
  1696. }
  1697. }
// Produce an IndirOpnd [reg + offset] addressing 'address'. Reuses an
// already-hoisted constant-address register when the delta from its constant
// to 'address' fits in a signed 32-bit displacement; otherwise hoists a new
// load of largeConstOpnd near the function entry. Returns nullptr once
// hoisting has been disabled (after lowering).
IR::IndirOpnd * Func::GetConstantAddressIndirOpnd(intptr_t address, IR::Opnd * largeConstOpnd, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode)
{
    Assert(this->GetTopFunc() == this);
    if (!canHoistConstantAddressLoad)
    {
        // We can't hoist constant address load after lower, as we can't mark the sym as
        // live on back edge
        return nullptr;
    }
    int offset = 0;
    // Search the hoisted constant-address regs for one whose single-def
    // constant is within a 32-bit displacement of 'address'; capture the
    // delta in 'offset' on success.
    IR::RegOpnd ** foundRegOpnd = this->constantAddressRegOpnd.Find([address, &offset](IR::RegOpnd * regOpnd)
    {
        Assert(regOpnd->m_sym->IsSingleDef());
        Assert(regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() || regOpnd->m_sym->m_instrDef->GetSrc1()->IsIntConstOpnd());
        // The reg's constant value comes from either an addr opnd or an int const opnd.
        void * curr = regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() ?
            regOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address :
            (void *)regOpnd->m_sym->m_instrDef->GetSrc1()->AsIntConstOpnd()->GetValue();
        ptrdiff_t diff = (uintptr_t)address - (uintptr_t)curr;
        if (!Math::FitsInDWord(diff))
        {
            return false;
        }
        offset = (int)diff;
        return true;
    });
    IR::RegOpnd * addressRegOpnd;
    if (foundRegOpnd != nullptr)
    {
        addressRegOpnd = *foundRegOpnd;
    }
    else
    {
        // No reusable base reg: load the large constant into a fresh reg and
        // insert the load at the function-entry insertion point, or right
        // before the previously recorded last constant-address load.
        Assert(offset == 0);
        addressRegOpnd = IR::RegOpnd::New(TyMachPtr, this);
        IR::Instr *const newInstr =
            IR::Instr::New(
                loadOpCode,
                addressRegOpnd,
                largeConstOpnd,
                this);
        this->constantAddressRegOpnd.Prepend(addressRegOpnd);
        IR::Instr * insertBeforeInstr = this->lastConstantAddressRegLoadInstr;
        if (insertBeforeInstr == nullptr)
        {
            insertBeforeInstr = this->GetFunctionEntryInsertionPoint();
            this->lastConstantAddressRegLoadInstr = newInstr;
        }
        insertBeforeInstr->InsertBefore(newInstr);
    }
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(addressRegOpnd, offset, type, this, true);
#if DBG_DUMP
    // TODO: michhol make intptr_t
    indirOpnd->SetAddrKind(kind, (void*)address);
#endif
    return indirOpnd;
}
// Set, in bv, the sym id of every hoisted constant-address register.
void Func::MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv)
{
    Assert(this->GetTopFunc() == this);
    this->constantAddressRegOpnd.Iterate([bv](IR::RegOpnd * regOpnd)
    {
        bv->Set(regOpnd->m_sym->m_id);
    });
}
// Return the instruction at which new entry-time code should be inserted
// (callers insert before the returned instruction). When constant-address
// loads have already been hoisted, new code goes right after the last one;
// otherwise it goes after the head instruction — and, in functions with try,
// after the root region's label so the inserted code stays inside the root
// region.
IR::Instr *
Func::GetFunctionEntryInsertionPoint()
{
    Assert(this->GetTopFunc() == this);
    IR::Instr * insertInsert = this->lastConstantAddressRegLoadInstr;
    if (insertInsert != nullptr)
    {
        return insertInsert->m_next;
    }
    insertInsert = this->m_headInstr;
    if (this->HasTry())
    {
        // Insert it inside the root region
        insertInsert = insertInsert->m_next;
        Assert(insertInsert->IsLabelInstr() && insertInsert->AsLabelInstr()->GetRegion()->GetType() == RegionTypeRoot);
    }
    return insertInsert->m_next;
}
// Allocate a boxed JavascriptNumber var for 'value' on behalf of JIT'd code.
// With FLOATVAR no allocator is needed; otherwise the in-proc path uses this
// func's number allocator/script context and the OOP path goes through the
// cross-process number allocator.
Js::Var
Func::AllocateNumber(double value)
{
    Js::Var number = nullptr;
#if FLOATVAR
    number = Js::JavascriptNumber::NewCodeGenInstance((double)value, nullptr);
#else
    if (!IsOOPJIT()) // in-proc jit
    {
        number = Js::JavascriptNumber::NewCodeGenInstance(GetNumberAllocator(), (double)value, GetScriptContext());
    }
    else // OOP JIT
    {
        number = GetXProcNumberAllocator()->AllocateNumber(this, value);
    }
#endif
    return number;
}
  1798. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// Print "Function <display-name> (<debug-number-set>)" to the dump output.
void
Func::DumpFullFunctionName()
{
    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
    Output::Print(_u("Function %s (%s)"), GetJITFunctionBody()->GetDisplayName(), GetDebugNumberSet(debugStringBuffer));
}
  1805. #endif
  1806. void
  1807. Func::UpdateForInLoopMaxDepth(uint forInLoopMaxDepth)
  1808. {
  1809. Assert(this->IsTopFunc());
  1810. this->m_forInLoopMaxDepth = max(this->m_forInLoopMaxDepth, forInLoopMaxDepth);
  1811. }
  1812. int
  1813. Func::GetForInEnumeratorArrayOffset() const
  1814. {
  1815. Func const* topFunc = this->GetTopFunc();
  1816. Assert(this->m_forInLoopBaseDepth + this->GetJITFunctionBody()->GetForInLoopDepth() <= topFunc->m_forInLoopMaxDepth);
  1817. return topFunc->m_forInEnumeratorArrayOffset
  1818. + this->m_forInLoopBaseDepth * sizeof(Js::ForInObjectEnumerator);
  1819. }
  1820. #if DBG_DUMP
  1821. ///----------------------------------------------------------------------------
  1822. ///
  1823. /// Func::DumpHeader
  1824. ///
  1825. ///----------------------------------------------------------------------------
  1826. void
  1827. Func::DumpHeader()
  1828. {
  1829. Output::Print(_u("-----------------------------------------------------------------------------\n"));
  1830. DumpFullFunctionName();
  1831. Output::SkipToColumn(50);
  1832. Output::Print(_u("Instr Count:%d"), GetInstrCount());
  1833. if(m_codeSize > 0)
  1834. {
  1835. Output::Print(_u("\t\tSize:%d\n\n"), m_codeSize);
  1836. }
  1837. else
  1838. {
  1839. Output::Print(_u("\n\n"));
  1840. }
  1841. }
  1842. ///----------------------------------------------------------------------------
  1843. ///
  1844. /// Func::Dump
  1845. ///
  1846. ///----------------------------------------------------------------------------
  1847. void
  1848. Func::Dump(IRDumpFlags flags)
  1849. {
  1850. this->DumpHeader();
  1851. FOREACH_INSTR_IN_FUNC(instr, this)
  1852. {
  1853. instr->DumpGlobOptInstrString();
  1854. instr->Dump(flags);
  1855. }NEXT_INSTR_IN_FUNC;
  1856. Output::Flush();
  1857. }
// Convenience overload: dump with no flags.
void
Func::Dump()
{
    this->Dump(IRDumpFlags_None);
}
  1863. #endif
  1864. #if DBG_DUMP || defined(ENABLE_IR_VIEWER)
  1865. LPCSTR
  1866. Func::GetVtableName(INT_PTR address)
  1867. {
  1868. #if DBG
  1869. if (vtableMap == nullptr)
  1870. {
  1871. vtableMap = VirtualTableRegistry::CreateVtableHashMap(this->m_alloc);
  1872. };
  1873. LPCSTR name = vtableMap->Lookup(address, nullptr);
  1874. if (name)
  1875. {
  1876. if (strncmp(name, "class ", _countof("class ") - 1) == 0)
  1877. {
  1878. name += _countof("class ") - 1;
  1879. }
  1880. }
  1881. return name;
  1882. #else
  1883. return "";
  1884. #endif
  1885. }
  1886. #endif
  1887. #if DBG_DUMP | defined(VTUNE_PROFILING)
// Whether the encoder should record a native map for this function: always
// when VTune JIT profiling is active; otherwise only under verbose
// encoder-phase dumping (DBG_DUMP builds), and never in other builds.
bool Func::DoRecordNativeMap() const
{
#if defined(VTUNE_PROFILING)
    if (VTuneChakraProfile::isJitProfilingActive)
    {
        return true;
    }
#endif
#if DBG_DUMP
    return PHASE_DUMP(Js::EncoderPhase, this) && Js::Configuration::Global.flags.Verbose;
#else
    return false;
#endif
}
  1902. #endif
  1903. #ifdef PERF_HINT
  1904. void WritePerfHint(PerfHints hint, Func* func, uint byteCodeOffset /*= Js::Constants::NoByteCodeOffset*/)
  1905. {
  1906. if (!func->IsOOPJIT())
  1907. {
  1908. WritePerfHint(hint, (Js::FunctionBody*)func->GetJITFunctionBody()->GetAddr(), byteCodeOffset);
  1909. }
  1910. }
  1911. #endif