Func.cpp 62 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
7177817791780178117821783178417851786178717881789179017911792179317941795179617971798179918001801180218031804180518061807180818091810181118121813181418151816181718181819182018211822182318241825182618271828182918301831183218331834183518361837183818391840184118421843184418451846184718481849185018511852185318541855185618571858185918601861186218631864186518661867186818691870187118721873187418751876187718781879188018811882188318841885188618871888188918901891189218931894189518961897189818991900190119021903190419051906190719081909191019111912191319141915191619171918191919201921192219231924192519261927192819291930193119321933193419351936193719381939194019411942194319441945194619471948194919501951195219531954195519561957195819591960196119621963196419651966196719681969197019711972197319741975197619771978197919801981198219831984198519861987198819891990199119921993199419951996199719981999200020012002200320042005200620072008200920102011201220132014201520162017201820192020202120222023202420252026202720282029203020312032203320342035203620372038203920402041204220432044204520462047204820492050205120522053205420552056205720582059206020612062206320642065206620672068206920702071207220732074207520762077207820792080208120822083208420852086208720882089209020912092209320942095209620972098209921002101210221032104210521062107210821092110211121122113211421152116211721182119212021212122212321242125
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Base/EtwTrace.h"
  7. #include "Base/ScriptContextProfiler.h"
  8. #ifdef VTUNE_PROFILING
  9. #include "Base/VTuneChakraProfile.h"
  10. #endif
  11. #include "Library/ForInObjectEnumerator.h"
// Func constructor: the per-function (or per-inlinee) compilation context for the
// backend. A Func is either the "top" function being jitted (parentFunc == nullptr)
// or an inlinee hanging off its parent. The top Func owns the symbol table, the
// global obj-type-spec field info array, and the stack layout state.
//
// alloc                    - JIT arena all backend allocations for this compile come from.
// workItem / outputData    - input work description and the IDL output record filled for the runtime.
// epInfo                   - entry point info (in-proc JIT only).
// runtimeInfo              - non-null exactly when this Func is inlined (asserted below).
// parentFunc               - inliner, or nullptr for the top-level Func.
// postCallByteCodeOffset / returnValueRegSlot - inlinee return plumbing; sentinel values for the top Func.
Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
    ThreadContextInfo * threadContextInfo,
    ScriptContextInfo * scriptContextInfo,
    JITOutputIDL * outputData,
    Js::EntryPointInfo* epInfo,
    const FunctionJITRuntimeInfo *const runtimeInfo,
    JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
#if !FLOATVAR
    CodeGenNumberAllocator * numberAllocator,
#endif
    Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc,
    uint postCallByteCodeOffset, Js::RegSlot returnValueRegSlot, const bool isInlinedConstructor,
    Js::ProfileId callSiteIdInParentFunc, bool isGetterSetter) :
    m_alloc(alloc),
    m_workItem(workItem),
    m_output(outputData),
    m_entryPointInfo(epInfo),
    m_threadContextInfo(threadContextInfo),
    m_scriptContextInfo(scriptContextInfo),
    m_runtimeInfo(runtimeInfo),
    m_polymorphicInlineCacheInfo(polymorphicInlineCacheInfo),
    m_codeGenAllocators(codeGenAllocators),
    m_inlineeId(0),
    // Type/property guard bookkeeping; lazily allocated during optimization.
    pinnedTypeRefs(nullptr),
    singleTypeGuards(nullptr),
    equivalentTypeGuards(nullptr),
    propertyGuardsByPropertyId(nullptr),
    ctorCachesByPropertyId(nullptr),
    callSiteToArgumentsOffsetFixupMap(nullptr),
    indexedPropertyGuardCount(0),
    propertiesWrittenTo(nullptr),
    lazyBailoutProperties(alloc),
    anyPropertyMayBeWrittenTo(false),
#ifdef PROFILE_EXEC
    m_codeGenProfiler(codeGenProfiler),
#endif
    m_isBackgroundJIT(isBackgroundJIT),
    m_cloner(nullptr),
    m_cloneMap(nullptr),
    m_loopParamSym(nullptr),
    m_funcObjSym(nullptr),
    m_localClosureSym(nullptr),
    m_paramClosureSym(nullptr),
    m_localFrameDisplaySym(nullptr),
    m_bailoutReturnValueSym(nullptr),
    m_hasBailedOutSym(nullptr),
    m_inlineeFrameStartSym(nullptr),
    m_regsUsed(0),
    m_fg(nullptr),
    m_labelCount(0),
    m_argSlotsForFunctionsCalled(0),
    m_isLeaf(false),
    m_hasCalls(false),
    m_hasInlineArgsOpt(false),
    m_canDoInlineArgsOpt(true),
    m_doFastPaths(false),
    hasBailout(false),
    hasBailoutInEHRegion(false),
    hasInstrNumber(false),
    maintainByteCodeOffset(true),
    frameSize(0),
    parentFunc(parentFunc),
    argObjSyms(nullptr),
    m_nonTempLocalVars(nullptr),
    hasAnyStackNestedFunc(false),
    hasMarkTempObjects(false),
    postCallByteCodeOffset(postCallByteCodeOffset),
    maxInlineeArgOutCount(0),
    returnValueRegSlot(returnValueRegSlot),
    firstActualStackOffset(-1),
    m_localVarSlotsOffset(Js::Constants::InvalidOffset),
    m_hasLocalVarChangedOffset(Js::Constants::InvalidOffset),
    actualCount((Js::ArgSlot) - 1),
    tryCatchNestingLevel(0),
    m_localStackHeight(0),
    tempSymDouble(nullptr),
    tempSymBool(nullptr),
    hasInlinee(false),
    thisOrParentInlinerHasArguments(false),
    hasStackArgs(false),
    hasImplicitParamLoad(false),
    hasThrow(false),
    hasNonSimpleParams(false),
    hasUnoptimizedArgumentsAccess(false),
    applyTargetInliningRemovedArgumentsAccess(false),
    hasImplicitCalls(false),
    hasTempObjectProducingInstr(false),
    isInlinedConstructor(isInlinedConstructor),
#if !FLOATVAR
    numberAllocator(numberAllocator),
#endif
    loopCount(0),
    callSiteIdInParentFunc(callSiteIdInParentFunc),
    isGetterSetter(isGetterSetter),
    frameInfo(nullptr),
    isTJLoopBody(false),
    m_nativeCodeDataSym(nullptr),
    isFlowGraphValid(false),
#if DBG
    m_callSiteCount(0),
#endif
    stackNestedFunc(false),
    stackClosure(false)
#if defined(_M_ARM32_OR_ARM64)
    , m_ArgumentsOffset(0)
    , m_epilogLabel(nullptr)
#endif
    , m_funcStartLabel(nullptr)
    , m_funcEndLabel(nullptr)
#if DBG
    , hasCalledSetDoFastPaths(false)
    , allowRemoveBailOutArgInstr(false)
    , currentPhases(alloc)
    , isPostLower(false)
    , isPostRegAlloc(false)
    , isPostPeeps(false)
    , isPostLayout(false)
    , isPostFinalLower(false)
    , vtableMap(nullptr)
#endif
    , m_yieldOffsetResumeLabelList(nullptr)
    , m_bailOutNoSaveLabel(nullptr)
    , constantAddressRegOpnd(alloc)
    , lastConstantAddressRegLoadInstr(nullptr)
    , m_totalJumpTableSizeInBytesForSwitchStatements(0)
    , slotArrayCheckTable(nullptr)
    , frameDisplayCheckTable(nullptr)
    , stackArgWithFormalsTracker(nullptr)
    , m_forInLoopBaseDepth(0)
    , m_forInEnumeratorArrayOffset(-1)
    , argInsCount(0)
    , m_globalObjTypeSpecFldInfoArray(nullptr)
#if LOWER_SPLIT_INT64
    , m_int64SymPairMap(nullptr)
#endif
#ifdef RECYCLER_WRITE_BARRIER_JIT
    , m_lowerer(nullptr)
#endif
{
    // runtimeInfo is supplied if and only if this Func is an inlinee.
    Assert(this->IsInlined() == !!runtimeInfo);

    // Fail-fast sanity checks: the profile info, register counts, and loop
    // numbers supplied by the (possibly out-of-process) runtime must be
    // internally consistent before we trust them in the backend.
    AssertOrFailFast(!HasProfileInfo() || GetReadOnlyProfileInfo()->GetLoopCount() == GetJITFunctionBody()->GetLoopCount());
    Js::RegSlot tmpResult;
    AssertOrFailFast(!UInt32Math::Add(GetJITFunctionBody()->GetConstCount(), GetJITFunctionBody()->GetVarCount(), &tmpResult));
    AssertOrFailFast(GetJITFunctionBody()->IsAsmJsMode() || GetJITFunctionBody()->GetFirstTmpReg() <= GetJITFunctionBody()->GetLocalsCount());
    AssertOrFailFast(!IsLoopBody() || m_workItem->GetLoopNumber() < GetJITFunctionBody()->GetLoopCount());
    AssertOrFailFast(CONFIG_FLAG(Prejit) || CONFIG_ISENABLED(Js::ForceNativeFlag) || GetJITFunctionBody()->GetByteCodeLength() < (uint)CONFIG_FLAG(MaxJITFunctionBytecodeByteLength));

    GetJITFunctionBody()->EnsureConsistentConstCount();

    if (this->IsTopFunc())
    {
        // Initialize the output record's debug-locals fields to "not present";
        // EnsureLocalVarSlots overwrites these later when jitting in debug mode.
        outputData->hasJittedStackClosure = false;
        outputData->localVarSlotsOffset = m_localVarSlotsOffset;
        outputData->localVarChangedOffset = m_hasLocalVarChangedOffset;
    }

    if (this->IsInlined())
    {
        // Inlinee ids are assigned sequentially by the top Func (id 0 is the top itself).
        m_inlineeId = ++(GetTopFunc()->m_inlineeId);
    }
    bool doStackNestedFunc = GetJITFunctionBody()->DoStackNestedFunc();
    bool doStackClosure = GetJITFunctionBody()->DoStackClosure() && !PHASE_OFF(Js::FrameDisplayFastPathPhase, this) && !PHASE_OFF(Js::StackClosurePhase, this);
    Assert(!doStackClosure || doStackNestedFunc);
    // Stack closures are only attempted for the top function of the compile.
    this->stackClosure = doStackClosure && this->IsTopFunc();
    if (this->stackClosure)
    {
        // TODO: calculate on runtime side?
        m_output.SetHasJITStackClosure();
    }

    if (m_workItem->Type() == JsFunctionType &&
        GetJITFunctionBody()->DoBackendArgumentsOptimization() &&
        !GetJITFunctionBody()->HasTry())
    {
        // doBackendArgumentsOptimization bit is set when there is no eval inside a function
        // as determined by the bytecode generator.
        SetHasStackArgs(true);
    }

    if (doStackNestedFunc && GetJITFunctionBody()->GetNestedCount() != 0 &&
        (this->IsTopFunc() || this->GetTopFunc()->m_workItem->Type() != JsLoopBodyWorkItemType)) // make sure none of the functions inlined in a jitted loop body allocate nested functions on the stack
    {
        Assert(!(this->IsJitInDebugMode() && !GetJITFunctionBody()->IsLibraryCode()));
        stackNestedFunc = true;
        this->GetTopFunc()->hasAnyStackNestedFunc = true;
    }

    // An arguments object anywhere up the inliner chain pessimizes this Func too.
    if (GetJITFunctionBody()->HasOrParentHasArguments() || (parentFunc && parentFunc->thisOrParentInlinerHasArguments))
    {
        thisOrParentInlinerHasArguments = true;
    }

    if (parentFunc == nullptr)
    {
        // Top-level Func: owns the symbol table; inlinee-only parameters must
        // carry their "no value" sentinels.
        inlineDepth = 0;
        m_symTable = JitAnew(alloc, SymTable);
        m_symTable->Init(this);
        m_symTable->SetStartingID(static_cast<SymID>(workItem->GetJITFunctionBody()->GetLocalsCount() + 1));

        Assert(Js::Constants::NoByteCodeOffset == postCallByteCodeOffset);
        Assert(Js::Constants::NoRegister == returnValueRegSlot);

#if defined(_M_IX86) || defined(_M_X64)
        if (HasArgumentSlot())
        {
            // Pre-allocate the single argument slot we'll reserve for the arguments object.
            // For ARM, the argument slot is not part of the local but part of the register saves
            m_localStackHeight = MachArgsSlotOffset;
        }
#endif
    }
    else
    {
        inlineDepth = parentFunc->inlineDepth + 1;
        Assert(Js::Constants::NoByteCodeOffset != postCallByteCodeOffset);
    }

    // One (initially null) constructor-cache slot per profiled call site.
    this->constructorCacheCount = 0;
    this->constructorCaches = AnewArrayZ(this->m_alloc, JITTimeConstructorCache*, GetJITFunctionBody()->GetProfiledCallSiteCount());

#if DBG_DUMP
    m_codeSize = -1;
#endif

#if defined(_M_X64)
    m_spillSize = -1;
    m_argsSize = -1;
    m_savedRegSize = -1;
#endif

    if (this->IsJitInDebugMode())
    {
        // Track non-temp locals so the debugger can inspect them (see EnsureLocalVarSlots).
        m_nonTempLocalVars = Anew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
    }

    if (GetJITFunctionBody()->IsCoroutine())
    {
        m_yieldOffsetResumeLabelList = YieldOffsetResumeLabelList::New(this->m_alloc);
    }

    if (this->IsTopFunc())
    {
        m_globalObjTypeSpecFldInfoArray = JitAnewArrayZ(this->m_alloc, ObjTypeSpecFldInfo*, GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    }

    // Register this function's obj-type-spec field infos in the top Func's
    // global array, indexed by their global id.
    for (uint i = 0; i < GetJITFunctionBody()->GetInlineCacheCount(); ++i)
    {
        ObjTypeSpecFldInfo * info = GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(i);
        if (info != nullptr)
        {
            Assert(info->GetObjTypeSpecFldId() < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
            GetTopFunc()->m_globalObjTypeSpecFldInfoArray[info->GetObjTypeSpecFldId()] = info;
        }
    }

    canHoistConstantAddressLoad = !PHASE_OFF(Js::HoistConstAddrPhase, this);

    m_forInLoopMaxDepth = this->GetJITFunctionBody()->GetForInLoopDepth();
}
  253. bool
  254. Func::IsLoopBodyInTry() const
  255. {
  256. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTry;
  257. }
  258. /* static */
  259. void
  260. Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  261. ThreadContextInfo * threadContextInfo,
  262. ScriptContextInfo * scriptContextInfo,
  263. JITOutputIDL * outputData,
  264. Js::EntryPointInfo* epInfo, // for in-proc jit only
  265. const FunctionJITRuntimeInfo *const runtimeInfo,
  266. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
  267. #if !FLOATVAR
  268. CodeGenNumberAllocator * numberAllocator,
  269. #endif
  270. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT)
  271. {
  272. bool rejit;
  273. do
  274. {
  275. Func func(alloc, workItem, threadContextInfo,
  276. scriptContextInfo, outputData, epInfo, runtimeInfo,
  277. polymorphicInlineCacheInfo, codeGenAllocators,
  278. #if !FLOATVAR
  279. numberAllocator,
  280. #endif
  281. codeGenProfiler, isBackgroundJIT);
  282. try
  283. {
  284. func.TryCodegen();
  285. rejit = false;
  286. }
  287. catch (Js::RejitException ex)
  288. {
  289. // The work item needs to be rejitted, likely due to some optimization that was too aggressive
  290. switch (ex.Reason())
  291. {
  292. case RejitReason::AggressiveIntTypeSpecDisabled:
  293. outputData->disableAggressiveIntTypeSpec = TRUE;
  294. break;
  295. case RejitReason::InlineApplyDisabled:
  296. workItem->GetJITFunctionBody()->DisableInlineApply();
  297. outputData->disableInlineApply = TRUE;
  298. break;
  299. case RejitReason::InlineSpreadDisabled:
  300. workItem->GetJITFunctionBody()->DisableInlineSpread();
  301. outputData->disableInlineSpread = TRUE;
  302. break;
  303. case RejitReason::DisableStackArgOpt:
  304. outputData->disableStackArgOpt = TRUE;
  305. break;
  306. case RejitReason::DisableSwitchOptExpectingInteger:
  307. case RejitReason::DisableSwitchOptExpectingString:
  308. outputData->disableSwitchOpt = TRUE;
  309. break;
  310. case RejitReason::ArrayCheckHoistDisabled:
  311. case RejitReason::ArrayAccessHelperCallEliminationDisabled:
  312. outputData->disableArrayCheckHoist = TRUE;
  313. break;
  314. case RejitReason::TrackIntOverflowDisabled:
  315. outputData->disableTrackCompoundedIntOverflow = TRUE;
  316. break;
  317. default:
  318. Assume(UNREACHED);
  319. }
  320. if (PHASE_TRACE(Js::ReJITPhase, &func))
  321. {
  322. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  323. Output::Print(
  324. _u("Rejit (compile-time): function: %s (%s) reason: %S\n"),
  325. workItem->GetJITFunctionBody()->GetDisplayName(),
  326. workItem->GetJITTimeInfo()->GetDebugNumberSet(debugStringBuffer),
  327. ex.ReasonName());
  328. }
  329. rejit = true;
  330. }
  331. // Either the entry point has a reference to the number now, or we failed to code gen and we
  332. // don't need to numbers, we can flush the completed page now.
  333. //
  334. // If the number allocator is NULL then we are shutting down the thread context and so too the
  335. // code generator. The number allocator must be freed before the recycler (and thus before the
  336. // code generator) so we can't and don't need to flush it.
  337. // TODO: OOP JIT, allocator cleanup
  338. } while (rejit);
  339. }
///----------------------------------------------------------------------------
///
/// Func::TryCodegen
///
/// Attempt to Codegen this function.
///
/// Runs the full backend pipeline on this Func, in order:
/// IRBuilder -> Inline -> FlowGraph/GlobOpt -> Lowerer -> constant encoding ->
/// register allocation -> peeps -> layout -> prolog/epilog -> final lower ->
/// Encoder, then (OOP JIT only) serializes the native-code data for the
/// client process. May throw Js::RejitException (handled by Func::Codegen).
///
///----------------------------------------------------------------------------
void
Func::TryCodegen()
{
    Assert(!IsJitInDebugMode() || !GetJITFunctionBody()->HasTry());

    BEGIN_CODEGEN_PHASE(this, Js::BackEndPhase);
    {
        // IRBuilder: translate bytecode into IR (asm.js bytecode has its own builder).
        BEGIN_CODEGEN_PHASE(this, Js::IRBuilderPhase);

#ifdef ASMJS_PLAT
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            IRBuilderAsmJs asmIrBuilder(this);
            asmIrBuilder.Build();
        }
        else
#endif
        {
            IRBuilder irBuilder(this);
            irBuilder.Build();
        }

        END_CODEGEN_PHASE(this, Js::IRBuilderPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::IRBuilderPhase);
#endif /* IR_VIEWER */

        // Inlining: splice callee IR into this function per the heuristics.
        BEGIN_CODEGEN_PHASE(this, Js::InlinePhase);

        InliningHeuristics heuristics(GetWorkItem()->GetJITTimeInfo(), this->IsLoopBody());
        Inline inliner(this, heuristics);
        inliner.Optimize();

        END_CODEGEN_PHASE(this, Js::InlinePhase);

        ThrowIfScriptClosed();

        // FlowGraph
        {
            // Scope for FlowGraph arena: the graph and everything GlobOpt
            // allocates in it are released when this scope ends.
            NoRecoverMemoryJitArenaAllocator fgAlloc(_u("BE-FlowGraph"), m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);

            BEGIN_CODEGEN_PHASE(this, Js::FGBuildPhase);

            this->m_fg = FlowGraph::New(this, &fgAlloc);
            this->m_fg->Build();

            END_CODEGEN_PHASE(this, Js::FGBuildPhase);

            // Global Optimization and Type Specialization
            BEGIN_CODEGEN_PHASE(this, Js::GlobOptPhase);

            GlobOpt globOpt(this);
            globOpt.Optimize();

            END_CODEGEN_PHASE(this, Js::GlobOptPhase);

            // Delete flowGraph now
            this->m_fg->Destroy();
            this->m_fg = nullptr;
        }

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::GlobOptPhase);
#endif /* IR_VIEWER */

        ThrowIfScriptClosed();

        // Lowering: machine-independent IR to machine-dependent IR.
        // Note: the lowerer instance is reused by several later phases below.
        Lowerer lowerer(this);
        BEGIN_CODEGEN_PHASE(this, Js::LowererPhase);
        lowerer.Lower();
        END_CODEGEN_PHASE(this, Js::LowererPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::LowererPhase);
#endif /* IR_VIEWER */

        // Encode constants (security hardening of large immediates).
        Security security(this);

        BEGIN_CODEGEN_PHASE(this, Js::EncodeConstantsPhase)
        security.EncodeLargeConstants();
        END_CODEGEN_PHASE(this, Js::EncodeConstantsPhase);

        if (GetJITFunctionBody()->DoInterruptProbe())
        {
            BEGIN_CODEGEN_PHASE(this, Js::InterruptProbePhase)
            lowerer.DoInterruptProbes();
            END_CODEGEN_PHASE(this, Js::InterruptProbePhase)
        }

        // Register Allocation
        BEGIN_CODEGEN_PHASE(this, Js::RegAllocPhase);

        LinearScan linearScan(this);
        linearScan.RegAlloc();

        END_CODEGEN_PHASE(this, Js::RegAllocPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::RegAllocPhase);
#endif /* IR_VIEWER */

        ThrowIfScriptClosed();

        // Peephole optimizations
        BEGIN_CODEGEN_PHASE(this, Js::PeepsPhase);

        Peeps peeps(this);
        peeps.PeepFunc();

        END_CODEGEN_PHASE(this, Js::PeepsPhase);

        // Layout
        BEGIN_CODEGEN_PHASE(this, Js::LayoutPhase);

        SimpleLayout layout(this);
        layout.Layout();

        END_CODEGEN_PHASE(this, Js::LayoutPhase);

        // Patch up bailouts inside exception-handling regions, which need
        // special treatment after layout.
        if (this->HasTry() && this->hasBailoutInEHRegion)
        {
            BEGIN_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
            lowerer.EHBailoutPatchUp();
            END_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
        }

        // Insert NOPs (moving this before prolog/epilog for AMD64 and possibly ARM).
        BEGIN_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
        security.InsertNOPs();
        END_CODEGEN_PHASE(this, Js::InsertNOPsPhase);

        // Prolog/Epilog
        BEGIN_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            lowerer.LowerPrologEpilogAsmJs();
        }
        else
        {
            lowerer.LowerPrologEpilog();
        }
        END_CODEGEN_PHASE(this, Js::PrologEpilogPhase);

        BEGIN_CODEGEN_PHASE(this, Js::FinalLowerPhase);
        lowerer.FinalLower();
        END_CODEGEN_PHASE(this, Js::FinalLowerPhase);

        // Encoder: emit the final machine code bytes.
        BEGIN_CODEGEN_PHASE(this, Js::EncoderPhase);

        Encoder encoder(this);
        encoder.Encode();

        END_CODEGEN_PHASE_NO_DUMP(this, Js::EncoderPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::EncoderPhase);
#endif /* IR_VIEWER */
    }

#if DBG_DUMP
    // Optionally dump the generated assembly to the configured asm file.
    if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag))
    {
        FILE * oldFile = 0;
        FILE * asmFile = GetScriptContext()->GetNativeCodeGenerator()->asmFile;
        if (asmFile)
        {
            oldFile = Output::SetFile(asmFile);
        }

        this->Dump(IRDumpFlags_AsmDumpMode);

        Output::Flush();

        if (asmFile)
        {
            FILE *openedFile = Output::SetFile(oldFile);
            Assert(openedFile == asmFile);
        }
    }
#endif
    if (this->IsOOPJIT())
    {
        // Out-of-process JIT: run every chunk's fixup function, then flatten
        // the native-code data chunks into a single buffer plus a fixup table
        // that the client process uses to relocate pointers on its side.
        BEGIN_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);

        auto dataAllocator = this->GetNativeCodeDataAllocator();
        if (dataAllocator->allocCount > 0)
        {
            NativeCodeData::DataChunk *chunk = (NativeCodeData::DataChunk*)dataAllocator->chunkList;
            NativeCodeData::DataChunk *next1 = chunk;
            while (next1)
            {
                if (next1->fixupFunc)
                {
                    next1->fixupFunc(next1->data, chunk);
                }
#if DBG
                if (CONFIG_FLAG(OOPJITFixupValidate))
                {
                    // Scan memory to see if there's missing pointer needs to be fixed up
                    // This can hit false positive if some data field happens to have value
                    // falls into the NativeCodeData memory range.
                    NativeCodeData::DataChunk *next2 = chunk;
                    while (next2)
                    {
                        for (unsigned int i = 0; i < next1->len / sizeof(void*); i++)
                        {
                            if (((void**)next1->data)[i] == (void*)next2->data)
                            {
                                NativeCodeData::VerifyExistFixupEntry((void*)next2->data, &((void**)next1->data)[i], next1->data);
                            }
                        }
                        next2 = next2->next;
                    }
                }
#endif
                next1 = next1->next;
            }

            JITOutputIDL* jitOutputData = m_output.GetOutputData();
            // One fixup record per allocation; the table is a flexible-array struct.
            size_t allocSize = offsetof(NativeDataFixupTable, fixupRecords) + sizeof(NativeDataFixupRecord)* (dataAllocator->allocCount);
            jitOutputData->nativeDataFixupTable = (NativeDataFixupTable*)midl_user_allocate(allocSize);
            if (!jitOutputData->nativeDataFixupTable)
            {
                Js::Throw::OutOfMemory();
            }
            __analysis_assume(jitOutputData->nativeDataFixupTable);
            jitOutputData->nativeDataFixupTable->count = dataAllocator->allocCount;

            jitOutputData->buffer = (NativeDataBuffer*)midl_user_allocate(offsetof(NativeDataBuffer, data) + dataAllocator->totalSize);
            if (!jitOutputData->buffer)
            {
                Js::Throw::OutOfMemory();
            }
            __analysis_assume(jitOutputData->buffer);

            jitOutputData->buffer->len = dataAllocator->totalSize;
            unsigned int len = 0;
            unsigned int count = 0;
            next1 = chunk;
            // Second pass: copy each chunk's payload into the contiguous
            // buffer and record where it landed.
            while (next1)
            {
                memcpy(jitOutputData->buffer->data + len, next1->data, next1->len);
                len += next1->len;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].index = next1->allocIndex;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].length = next1->len;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].startOffset = next1->offset;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].updateList = next1->fixupList;

                count++;
                next1 = next1->next;
            }

#if DBG
            if (PHASE_TRACE1(Js::NativeCodeDataPhase))
            {
                Output::Print(_u("NativeCodeData Server Buffer: %p, len: %x, chunk head: %p\n"), jitOutputData->buffer->data, jitOutputData->buffer->len, chunk);
            }
#endif
        }
        END_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);
    }

    END_CODEGEN_PHASE(this, Js::BackEndPhase);
}
  564. ///----------------------------------------------------------------------------
  565. /// Func::StackAllocate
  566. /// Allocate stack space of given size.
  567. ///----------------------------------------------------------------------------
  568. int32
  569. Func::StackAllocate(int size)
  570. {
  571. Assert(this->IsTopFunc());
  572. int32 offset;
  573. #ifdef MD_GROW_LOCALS_AREA_UP
  574. // Locals have positive offsets and are allocated from bottom to top.
  575. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  576. offset = m_localStackHeight;
  577. m_localStackHeight += size;
  578. #else
  579. // Locals have negative offsets and are allocated from top to bottom.
  580. m_localStackHeight += size;
  581. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  582. offset = -m_localStackHeight;
  583. #endif
  584. return offset;
  585. }
  586. ///----------------------------------------------------------------------------
  587. ///
  588. /// Func::StackAllocate
  589. ///
  590. /// Allocate stack space for this symbol.
  591. ///
  592. ///----------------------------------------------------------------------------
  593. int32
  594. Func::StackAllocate(StackSym *stackSym, int size)
  595. {
  596. Assert(size > 0);
  597. if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym() || stackSym->IsAllocated())
  598. {
  599. return stackSym->m_offset;
  600. }
  601. Assert(stackSym->m_offset == 0);
  602. stackSym->m_allocated = true;
  603. stackSym->m_offset = StackAllocate(size);
  604. return stackSym->m_offset;
  605. }
  606. void
  607. Func::SetArgOffset(StackSym *stackSym, int32 offset)
  608. {
  609. AssertMsg(offset >= 0, "Why is the offset, negative?");
  610. stackSym->m_offset = offset;
  611. stackSym->m_allocated = true;
  612. }
///
/// Ensures that local var slots are created, if the function has locals.
/// Allocate stack space for locals used for debugging
/// (for local non-temp vars we write-through memory so that locals inspection can make use of that).
/// On stack, after local slots we allocate space for metadata (in particular, whether any of the locals was changed in debugger).
///
void
Func::EnsureLocalVarSlots()
{
    // Only meaningful when jitting under the debugger.
    Assert(IsJitInDebugMode());

    if (!this->HasLocalVarSlotCreated())
    {
        uint32 localSlotCount = GetJITFunctionBody()->GetNonTempLocalVarCount();
        // Allocate once: only when there are locals and no offset was assigned yet.
        if (localSlotCount && m_localVarSlotsOffset == Js::Constants::InvalidOffset)
        {
            // Allocate the slots.
            int32 size = localSlotCount * GetDiagLocalSlotSize();
            m_localVarSlotsOffset = StackAllocate(size);
            m_hasLocalVarChangedOffset = StackAllocate(max(1, MachStackAlignment)); // Can't alloc less than StackAlignment bytes.

            Assert(m_workItem->Type() == JsFunctionType);

            // Publish the (frame-adjusted) offsets to the JIT output so the
            // runtime/debugger can locate the slots.
            m_output.SetVarSlotsOffset(AdjustOffsetValue(m_localVarSlotsOffset));
            m_output.SetVarChangedOffset(AdjustOffsetValue(m_hasLocalVarChangedOffset));
        }
    }
}
// Computes the stack offset of the first actual argument of this inlinee,
// derived from the offsets of the last ArgOut syms feeding the InlineeStart.
void Func::SetFirstArgOffset(IR::Instr* inlineeStart)
{
    Assert(inlineeStart->m_func == this);
    // Only inlinees have a first-actual offset relative to the caller's frame.
    Assert(!IsTopFunc());
    int32 lastOffset;

    IR::Instr* arg = inlineeStart->GetNextArg();
    const auto lastArgOutStackSym = arg->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
    lastOffset = lastArgOutStackSym->m_offset;
    Assert(lastArgOutStackSym->m_isSingleDef);
    // The args form a chain through src2; the second-to-last ArgOut may sit at
    // a higher offset than the last one, so take the max of the two.
    const auto secondLastArgOutOpnd = lastArgOutStackSym->m_instrDef->GetSrc2();
    if (secondLastArgOutOpnd->IsSymOpnd())
    {
        const auto secondLastOffset = secondLastArgOutOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_offset;
        if (secondLastOffset > lastOffset)
        {
            lastOffset = secondLastOffset;
        }
    }
    // Step past the highest ArgOut slot, then back off the whole actuals area
    // (actual args plus the inlinee meta-args) to find the first actual.
    lastOffset += MachPtr;
    int32 firstActualStackOffset = lastOffset - ((this->actualCount + Js::Constants::InlineeMetaArgCount) * MachPtr);
    // Recomputation must agree with any previously recorded value.
    Assert((this->firstActualStackOffset == -1) || (this->firstActualStackOffset == firstActualStackOffset));
    this->firstActualStackOffset = firstActualStackOffset;
}
  661. int32
  662. Func::GetLocalVarSlotOffset(int32 slotId)
  663. {
  664. this->EnsureLocalVarSlots();
  665. Assert(m_localVarSlotsOffset != Js::Constants::InvalidOffset);
  666. int32 slotOffset = slotId * GetDiagLocalSlotSize();
  667. return m_localVarSlotsOffset + slotOffset;
  668. }
  669. void Func::OnAddSym(Sym* sym)
  670. {
  671. Assert(sym);
  672. if (this->IsJitInDebugMode() && this->IsNonTempLocalVar(sym->m_id))
  673. {
  674. Assert(m_nonTempLocalVars);
  675. m_nonTempLocalVars->Set(sym->m_id);
  676. }
  677. }
  678. ///
  679. /// Returns offset of the flag (1 byte) whether any local was changed (in debugger).
  680. /// If the function does not have any locals, returns -1.
  681. ///
  682. int32
  683. Func::GetHasLocalVarChangedOffset()
  684. {
  685. this->EnsureLocalVarSlots();
  686. return m_hasLocalVarChangedOffset;
  687. }
// True when this compilation is being done in debug mode (delegates to the work item).
bool
Func::IsJitInDebugMode() const
{
    return m_workItem->IsJitInDebugMode();
}
// True when the given register slot index belongs to a non-temp local var
// (per the function body's metadata).
bool
Func::IsNonTempLocalVar(uint32 slotIndex)
{
    return GetJITFunctionBody()->IsNonTempLocalVar(slotIndex);
}
// Converts a locals-area offset into the form recorded in the JIT output,
// which differs by target depending on the direction the locals area grows.
int32
Func::AdjustOffsetValue(int32 offset)
{
#ifdef MD_GROW_LOCALS_AREA_UP
    // Locals offsets are positive here; bias and negate so consumers can
    // distinguish them (see BailOutInfo::StackSymBias).
    return -(offset + BailOutInfo::StackSymBias);
#else
    // Stack offsets are negative; account for the saved frame pointer (PUSH EBP)
    // and the return address, each MachPtr in size.
    return offset - (2 * MachPtr);
#endif
}
#ifdef MD_GROW_LOCALS_AREA_UP
// Note: this is called during jit-compile when we finalize bail out record.
// (NOTE(review): the method name is misspelled — "Ajust" — but it matches the
// declaration, so it is kept for interface compatibility.)
void
Func::AjustLocalVarSlotOffset()
{
    if (GetJITFunctionBody()->GetNonTempLocalVarCount())
    {
        // Turn positive SP-relative base locals offset into negative frame-pointer-relative offset
        // This is changing value for restoring the locals when read due to locals inspection.
        int localsOffset = m_localVarSlotsOffset - (m_localStackHeight + m_ArgumentsOffset);
        int valueChangeOffset = m_hasLocalVarChangedOffset - (m_localStackHeight + m_ArgumentsOffset);

        m_output.SetVarSlotsOffset(localsOffset);
        m_output.SetVarChangedOffset(valueChangeOffset);
    }
}
#endif
  724. bool
  725. Func::DoGlobOptsForGeneratorFunc() const
  726. {
  727. // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
  728. return !GetJITFunctionBody()->IsCoroutine();
  729. }
// True when this simple-jit compilation should emit dynamic profile collection
// (only when the phase isn't disabled and the new simple jit is not in use).
bool
Func::DoSimpleJitDynamicProfile() const
{
    return IsSimpleJit() && !PHASE_OFF(Js::SimpleJitDynamicProfilePhase, GetTopFunc()) && !CONFIG_FLAG(NewSimpleJit);
}
  735. void
  736. Func::SetDoFastPaths()
  737. {
  738. // Make sure we only call this once!
  739. Assert(!this->hasCalledSetDoFastPaths);
  740. bool doFastPaths = false;
  741. if(!PHASE_OFF(Js::FastPathPhase, this) && (!IsSimpleJit() || CONFIG_FLAG(NewSimpleJit)))
  742. {
  743. doFastPaths = true;
  744. }
  745. this->m_doFastPaths = doFastPaths;
  746. #ifdef DBG
  747. this->hasCalledSetDoFastPaths = true;
  748. #endif
  749. }
#if LOWER_SPLIT_INT64
// Splits an int64 operand into a (low, high) pair of 32-bit operands for
// targets that lower 64-bit ops into pairs. Sym pairs are cached per SymID in
// m_int64SymPairMap so the same source sym always maps to the same pair.
Int64RegPair Func::FindOrCreateInt64Pair(IR::Opnd* opnd)
{
    AssertMsg(this->GetTopFunc()->currentPhases.Top() == Js::LowererPhase, "New Int64 sym map is only allowed during lower");
    Int64RegPair pair;
    IRType pairType = opnd->GetType();
    if (opnd->IsInt64())
    {
        // Each half keeps the signedness of the original 64-bit type.
        pairType = IRType_IsSignedInt(pairType) ? TyInt32 : TyUint32;
    }

    if (opnd->IsIndirOpnd())
    {
        // Memory operand: low half in place, high half 4 bytes further.
        IR::IndirOpnd* indir = opnd->AsIndirOpnd();
        indir->SetType(pairType);
        pair.low = indir;
        pair.high = indir->Copy(this)->AsIndirOpnd();
        pair.high->AsIndirOpnd()->SetOffset(indir->GetOffset() + 4);
        return pair;
    }

    // Only indir opnd can have a type other than int64
    Assert(opnd->IsInt64());

    if (opnd->IsImmediateOpnd())
    {
        // Constant: split the 64-bit value into two 32-bit constants.
        int64 value = opnd->GetImmediateValue(this);
        pair.low = IR::IntConstOpnd::New((int32)value, pairType, this);
        pair.high = IR::IntConstOpnd::New((int32)(value >> 32), pairType, this);
        return pair;
    }

    Int64SymPair symPair;

    if (!m_int64SymPairMap)
    {
        m_int64SymPairMap = Anew(m_alloc, Int64SymPairMap, m_alloc);
    }
    StackSym* stackSym = opnd->GetStackSym();
    AssertOrFailFastMsg(stackSym, "Invalid int64 operand type");
    SymID symId = stackSym->m_id;
    if (!m_int64SymPairMap->TryGetValue(symId, &symPair))
    {
        if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym())
        {
            // Arg/param slots: the pair occupies two consecutive slots, and
            // inherits the original sym's (already assigned) stack offset.
            const bool isArg = stackSym->IsArgSlotSym();
            if (isArg)
            {
                Js::ArgSlot slotNumber = stackSym->GetArgSlotNum();
                symPair.low = StackSym::NewArgSlotSym(slotNumber, this, pairType);
                symPair.high = StackSym::NewArgSlotSym(slotNumber + 1, this, pairType);
            }
            else
            {
                Js::ArgSlot slotNumber = stackSym->GetParamSlotNum();
                symPair.low = StackSym::NewParamSlotSym(slotNumber, this, pairType);
                symPair.high = StackSym::NewParamSlotSym(slotNumber + 1, this, pairType);
            }
            symPair.low->m_allocated = true;
            symPair.low->m_offset = stackSym->m_offset;
            symPair.high->m_allocated = true;
            symPair.high->m_offset = stackSym->m_offset + 4;
        }
        else
        {
            symPair.low = StackSym::New(pairType, this);
            symPair.high = StackSym::New(pairType, this);
        }
        m_int64SymPairMap->Add(symId, symPair);
    }

    // Wrap the cached sym pair in operands matching the input's operand kind.
    if (opnd->IsSymOpnd())
    {
        pair.low = IR::SymOpnd::New(symPair.low, opnd->AsSymOpnd()->m_offset, pairType, this);
        pair.high = IR::SymOpnd::New(symPair.high, opnd->AsSymOpnd()->m_offset, pairType, this);
    }
    else
    {
        pair.low = IR::RegOpnd::New(symPair.low, pairType, this);
        pair.high = IR::RegOpnd::New(symPair.high, pairType, this);
    }
    return pair;
}
// After int64 splitting, any sym live on a loop back edge that was split into
// a (low, high) pair needs both halves marked live on the back edge too.
void Func::Int64SplitExtendLoopLifetime(Loop* loop)
{
    if (m_int64SymPairMap)
    {
        BVSparse<JitArenaAllocator> *liveOnBackEdgeSyms = loop->regAlloc.liveOnBackEdgeSyms;
        FOREACH_BITSET_IN_SPARSEBV(symId, liveOnBackEdgeSyms)
        {
            Int64SymPair pair;
            if (m_int64SymPairMap->TryGetValue(symId, &pair))
            {
                // If we have replaced any sym that was live on the back edge for 2 other syms
                // these 2 syms needs to be live on back edge as well.
                liveOnBackEdgeSyms->Set(pair.low->m_id);
                liveOnBackEdgeSyms->Set(pair.high->m_id);
            }
        } NEXT_BITSET_IN_SPARSEBV;
    }
}
#endif
#ifdef _M_ARM
// Returns the register used to address the locals area on ARM: the alternate
// locals pointer when the function has try blocks (or when forced by a debug
// flag), otherwise SP.
RegNum
Func::GetLocalsPointer() const
{
#ifdef DBG
    // Test hook: force the alternate locals pointer.
    if (Js::Configuration::Global.flags.IsEnabled(Js::ForceLocalsPtrFlag))
    {
        return ALT_LOCALS_PTR;
    }
#endif

    // With try/catch the stack pointer can't reliably address locals.
    if (GetJITFunctionBody()->HasTry())
    {
        return ALT_LOCALS_PTR;
    }

    return RegSP;
}
#endif
// Records that the slot array addressed by this field access must be checked
// to be at least (slot + 1) entries long. Keeps the maximum slot id seen per
// stack sym in slotArrayCheckTable.
void Func::AddSlotArrayCheck(IR::SymOpnd *fieldOpnd)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->slotArrayCheckTable == nullptr)
    {
        this->slotArrayCheckTable = SlotArrayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    // For closure field accesses the property id is the slot index.
    uint32 slot = propertySym->m_propertyId;
    uint32 *pSlotId = this->slotArrayCheckTable->FindOrInsert(slot, propertySym->m_stackSym->m_id);

    // Grow the recorded requirement if this access needs a larger slot
    // ((uint32)-1 acts as "no slot recorded yet").
    if (pSlotId && (*pSlotId == (uint32)-1 || *pSlotId < slot))
    {
        *pSlotId = propertySym->m_propertyId;
    }
}
// Records a range check for a frame-display access: the frame display itself
// must have at least (propertyId + 1) scopes, and — when slotId is given — the
// scope's slot array must have at least (slotId + 1) slots.
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->frameDisplayCheckTable == nullptr)
    {
        this->frameDisplayCheckTable = FrameDisplayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    FrameDisplayCheckRecord **record = this->frameDisplayCheckTable->FindOrInsertNew(propertySym->m_stackSym->m_id);
    if (*record == nullptr)
    {
        *record = JitAnew(m_alloc, FrameDisplayCheckRecord);
    }

    // Track the deepest frame-display slot accessed through this sym.
    uint32 frameDisplaySlot = propertySym->m_propertyId;
    if ((*record)->table == nullptr || (*record)->slotId < frameDisplaySlot)
    {
        (*record)->slotId = frameDisplaySlot;
    }

    // (uint32)-1 means "no slot-array check requested for this access".
    if (slotId != (uint32)-1)
    {
        if ((*record)->table == nullptr)
        {
            (*record)->table = SlotArrayCheckTable::New(m_alloc, 4);
        }
        // Keep the maximum slot id required per frame-display slot.
        uint32 *pSlotId = (*record)->table->FindOrInsert(slotId, frameDisplaySlot);
        if (pSlotId && *pSlotId < slotId)
        {
            *pSlotId = slotId;
        }
    }
}
// Creates the stack syms for the local closure, param closure and local frame
// display registers, when the function body declares them.
void Func::InitLocalClosureSyms()
{
    Assert(this->m_localClosureSym == nullptr);

    // Allocate stack space for closure pointers. Do this only if we're jitting for stack closures, and
    // tell bailout that these are not byte code symbols so that we don't try to encode them in the bailout record,
    // as they don't have normal lifetimes.
    Js::RegSlot regSlot = GetJITFunctionBody()->GetLocalClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // Under stack frame display, pass (Js::RegSlot)-1 so the sym is not
        // treated as a byte-code register.
        this->m_localClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }

    regSlot = this->GetJITFunctionBody()->GetParamClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // A separate param closure only exists when param and body scopes are split.
        Assert(this->GetParamClosureSym() == nullptr && !this->GetJITFunctionBody()->IsParamAndBodyScopeMerged());
        this->m_paramClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot) - 1 : regSlot,
                                   this);
    }

    regSlot = GetJITFunctionBody()->GetLocalFrameDisplayReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localFrameDisplaySym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }
}
  949. bool
  950. Func::IsTrackCompoundedIntOverflowDisabled() const
  951. {
  952. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsTrackCompoundedIntOverflowDisabled()) || m_output.IsTrackCompoundedIntOverflowDisabled();
  953. }
  954. bool
  955. Func::IsArrayCheckHoistDisabled() const
  956. {
  957. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsArrayCheckHoistDisabled(IsLoopBody())) || m_output.IsArrayCheckHoistDisabled();
  958. }
  959. bool
  960. Func::IsStackArgOptDisabled() const
  961. {
  962. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsStackArgOptDisabled()) || m_output.IsStackArgOptDisabled();
  963. }
  964. bool
  965. Func::IsSwitchOptDisabled() const
  966. {
  967. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsSwitchOptDisabled()) || m_output.IsSwitchOptDisabled();
  968. }
  969. bool
  970. Func::IsAggressiveIntTypeSpecDisabled() const
  971. {
  972. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsAggressiveIntTypeSpecDisabled(IsLoopBody())) || m_output.IsAggressiveIntTypeSpecDisabled();
  973. }
// Whether jitted code for this func may be allocated in the pre-reserved heap
// page segment (CFG-enabled builds only).
// NOTE(review): the preprocessor structure below is delicate — the `#elif
// _M_X64` / `#else` arms close the boolean expression opened under `#if
// _M_IX86`; do not re-indent or reorder these arms.
bool Func::CanAllocInPreReservedHeapPageSegment ()
{
#ifdef _CONTROL_FLOW_GUARD
    return PHASE_FORCE1(Js::PreReservedHeapAllocPhase) || (!PHASE_OFF1(Js::PreReservedHeapAllocPhase) &&
        !IsJitInDebugMode() && GetThreadContextInfo()->IsCFGEnabled()
        //&& !GetScriptContext()->IsScriptContextInDebugMode()
#if _M_IX86
        // On x86, only full-jit code goes to the pre-reserved segment, and the
        // codegen allocators (in- or out-of-proc) must support it.
        && m_workItem->GetJitMode() == ExecutionMode::FullJit
#if ENABLE_OOP_NATIVE_CODEGEN
        && (JITManager::GetJITManager()->IsJITServer()
            ? GetOOPCodeGenAllocators()->canCreatePreReservedSegment
            : GetInProcCodeGenAllocators()->canCreatePreReservedSegment)
#else
        && GetInProcCodeGenAllocators()->canCreatePreReservedSegment
#endif
        );
#elif _M_X64
        && true);
#else
        && false); //Not yet implemented for architectures other than x86 and amd64.
#endif //_M_ARCH
#else
    return false;
#endif//_CONTROL_FLOW_GUARD
}
///----------------------------------------------------------------------------
///
/// Func::GetInstrCount
///
/// Returns the number of instrs.
/// Note: It counts all instrs for now, including labels, etc.
///
///----------------------------------------------------------------------------
uint32
Func::GetInstrCount()
{
    uint instrCount = 0;
    // Linear walk over the whole instruction list.
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instrCount++;
    }NEXT_INSTR_IN_FUNC;
    return instrCount;
}
///----------------------------------------------------------------------------
///
/// Func::NumberInstrs
///
/// Number each instruction in order of appearance in the function.
/// Numbers start at 1; 0 is left as "unnumbered".
///
///----------------------------------------------------------------------------
void
Func::NumberInstrs()
{
#if DBG_DUMP
    // Numbering happens once, on the top func only.
    Assert(this->IsTopFunc());
    Assert(!this->hasInstrNumber);
    this->hasInstrNumber = true;
#endif
    uint instrCount = 1;

    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instr->SetNumber(instrCount++);
    }
    NEXT_INSTR_IN_FUNC;
}
///----------------------------------------------------------------------------
///
/// Func::IsInPhase
///
/// Determines whether the function is currently in the provided phase
/// (anywhere on the phase stack, not just the innermost phase).
///
///----------------------------------------------------------------------------
#if DBG
bool
Func::IsInPhase(Js::Phase tag)
{
    return this->GetTopFunc()->currentPhases.Contains(tag);
}
#endif
///----------------------------------------------------------------------------
///
/// Func::BeginPhase
///
/// Pushes the phase onto the debug phase stack and notifies the profiler.
///
///----------------------------------------------------------------------------
void
Func::BeginPhase(Js::Phase tag)
{
#ifdef DBG
    // Phase nesting is tracked on the top func; EndProfiler pops/validates it.
    this->GetTopFunc()->currentPhases.Push(tag);

    if (PHASE_DEBUGBREAK_ON_PHASE_BEGIN(tag, this))
    {
        __debugbreak();
    }
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileBegin(tag);
    }
#endif
}
///----------------------------------------------------------------------------
///
/// Func::EndProfiler
///
/// Pops and validates the phase from the debug phase stack and notifies the
/// profiler. (Called from Func::EndPhase, which also handles dumping.)
///
///----------------------------------------------------------------------------
void
Func::EndProfiler(Js::Phase tag)
{
#ifdef DBG
    // The ended phase must match the innermost BeginPhase.
    Assert(this->GetTopFunc()->currentPhases.Count() > 0);
    Js::Phase popped = this->GetTopFunc()->currentPhases.Pop();
    Assert(tag == popped);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileEnd(tag);
    }
#endif
}
// Ends a phase: profiler bookkeeping, optional IR dump, and (in debug builds)
// phase-ordering state updates plus a post-lower IR sanity check.
void
Func::EndPhase(Js::Phase tag, bool dump)
{
    this->EndProfiler(tag);
#if DBG_DUMP
    if(dump && (PHASE_DUMP(tag, this)
        || PHASE_DUMP(Js::BackEndPhase, this)))
    {
        Output::Print(_u("-----------------------------------------------------------------------------\n"));

        if (IsLoopBody())
        {
            Output::Print(_u("************   IR after %s (%S) Loop %d   ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()),
                m_workItem->GetLoopNumber());
        }
        else
        {
            Output::Print(_u("************   IR after %s (%S)  ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()));
        }
        this->Dump(Js::Configuration::Global.flags.AsmDiff? IRDumpFlags_AsmDumpMode : IRDumpFlags_None);
    }
#endif

#if DBG
    // Track the back-end pipeline ordering: Lower -> RegAlloc -> Peeps ->
    // Layout -> FinalLower. Each transition asserts its prerequisites.
    if (tag == Js::LowererPhase)
    {
        Assert(!this->isPostLower);
        this->isPostLower = true;
    }
    else if (tag == Js::RegAllocPhase)
    {
        Assert(!this->isPostRegAlloc);
        this->isPostRegAlloc = true;
    }
    else if (tag == Js::PeepsPhase)
    {
        Assert(this->isPostLower && !this->isPostLayout);
        this->isPostPeeps = true;
    }
    else if (tag == Js::LayoutPhase)
    {
        Assert(this->isPostPeeps && !this->isPostLayout);
        this->isPostLayout = true;
    }
    else if (tag == Js::FinalLowerPhase)
    {
        Assert(this->isPostLayout && !this->isPostFinalLower);
        this->isPostFinalLower = true;
    }
    if (this->isPostLower)
    {
#ifndef _M_ARM    // Need to verify ARM is clean.
        // Validate IR invariants that must hold after lowering.
        DbCheckPostLower dbCheck(this);
        dbCheck.Check();
#endif
    }
    this->m_alloc->MergeDelayFreeList();
#endif
}
  1164. Func const *
  1165. Func::GetTopFunc() const
  1166. {
  1167. Func const * func = this;
  1168. while (!func->IsTopFunc())
  1169. {
  1170. func = func->parentFunc;
  1171. }
  1172. return func;
  1173. }
  1174. Func *
  1175. Func::GetTopFunc()
  1176. {
  1177. Func * func = this;
  1178. while (!func->IsTopFunc())
  1179. {
  1180. func = func->parentFunc;
  1181. }
  1182. return func;
  1183. }
  1184. StackSym *
  1185. Func::EnsureLoopParamSym()
  1186. {
  1187. if (this->m_loopParamSym == nullptr)
  1188. {
  1189. this->m_loopParamSym = StackSym::New(TyMachPtr, this);
  1190. }
  1191. return this->m_loopParamSym;
  1192. }
  1193. void
  1194. Func::UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount)
  1195. {
  1196. if (maxInlineeArgOutCount < inlineeArgOutCount)
  1197. {
  1198. maxInlineeArgOutCount = inlineeArgOutCount;
  1199. }
  1200. }
// Starts an instruction-cloning session (used by the lowerer). Only one clone
// may be in progress at a time; the instr map is created lazily and reused.
void
Func::BeginClone(Lowerer * lowerer, JitArenaAllocator *alloc)
{
    Assert(this->IsTopFunc());
    AssertMsg(m_cloner == nullptr, "Starting new clone while one is in progress");
    m_cloner = JitAnew(alloc, Cloner, lowerer, alloc);
    if (m_cloneMap == nullptr)
    {
        m_cloneMap = JitAnew(alloc, InstrMap, alloc, 7);
    }
}
  1212. void
  1213. Func::EndClone()
  1214. {
  1215. Assert(this->IsTopFunc());
  1216. if (m_cloner)
  1217. {
  1218. m_cloner->Finish();
  1219. JitAdelete(m_cloner->alloc, m_cloner);
  1220. m_cloner = nullptr;
  1221. }
  1222. }
// Returns a sym operand addressing the inlinee frame at the given byte offset
// from the frame start.
IR::SymOpnd *
Func::GetInlineeOpndAtOffset(int32 offset)
{
    Assert(IsInlinee());

    StackSym *stackSym = CreateInlineeStackSym();
    // CreateInlineeStackSym sets m_offset to the frame start; bias it by the
    // requested offset.
    this->SetArgOffset(stackSym, stackSym->m_offset + offset);
    Assert(stackSym->m_offset >= 0);

    return IR::SymOpnd::New(stackSym, 0, TyMachReg, this);
}
// Creates an arg-slot stack sym positioned at the start of this inlinee's frame.
StackSym *
Func::CreateInlineeStackSym()
{
    // Make sure this is an inlinee and that GlobOpt has initialized the offset
    // in the inlinee's frame.
    Assert(IsInlinee());
    Assert(m_inlineeFrameStartSym->m_offset != -1);

    // (Js::ArgSlot)-1 marks this as a synthetic (non-numbered) arg slot.
    StackSym *stackSym = m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
    stackSym->m_isInlinedArgSlot = true;
    stackSym->m_offset = m_inlineeFrameStartSym->m_offset;
    stackSym->m_allocated = true;
    return stackSym;
}
// Bitmask of arguments used for branching, merged from the function body and
// the current JIT output.
uint16
Func::GetArgUsedForBranch() const
{
    // this value can change while JITing, so or these together
    return GetJITFunctionBody()->GetArgUsedForBranch() | GetJITOutput()->GetArgUsedForBranch();
}
// Address of the loop-body work item's iterations-since-last-bailout counter;
// only valid for loop-body compilations.
intptr_t
Func::GetJittedLoopIterationsSinceLastBailoutAddress() const
{
    Assert(this->m_workItem->Type() == JsLoopBodyWorkItemType);

    return m_workItem->GetJittedLoopIterationsSinceLastBailoutAddr();
}
// Weak reference to the function object, as recorded in the jit-time info.
intptr_t
Func::GetWeakFuncRef() const
{
    // TODO: OOP JIT, figure out if this can be null
    return m_workItem->GetJITTimeInfo()->GetWeakFuncRef();
}
  1263. intptr_t
  1264. Func::GetRuntimeInlineCache(const uint index) const
  1265. {
  1266. if(m_runtimeInfo != nullptr && m_runtimeInfo->HasClonedInlineCaches())
  1267. {
  1268. intptr_t inlineCache = m_runtimeInfo->GetClonedInlineCache(index);
  1269. if(inlineCache)
  1270. {
  1271. return inlineCache;
  1272. }
  1273. }
  1274. return GetJITFunctionBody()->GetInlineCache(index);
  1275. }
  1276. JITTimePolymorphicInlineCache *
  1277. Func::GetRuntimePolymorphicInlineCache(const uint index) const
  1278. {
  1279. if (this->m_polymorphicInlineCacheInfo && this->m_polymorphicInlineCacheInfo->HasInlineCaches())
  1280. {
  1281. return this->m_polymorphicInlineCacheInfo->GetInlineCache(index);
  1282. }
  1283. return nullptr;
  1284. }
  1285. byte
  1286. Func::GetPolyCacheUtilToInitialize(const uint index) const
  1287. {
  1288. return this->GetRuntimePolymorphicInlineCache(index) ? this->GetPolyCacheUtil(index) : PolymorphicInlineCacheUtilizationMinValue;
  1289. }
// Raw utilization byte for the polymorphic cache at this index
// (caller must ensure m_polymorphicInlineCacheInfo is non-null).
byte
Func::GetPolyCacheUtil(const uint index) const
{
    return this->m_polymorphicInlineCacheInfo->GetUtil(index);
}
  1295. ObjTypeSpecFldInfo*
  1296. Func::GetObjTypeSpecFldInfo(const uint index) const
  1297. {
  1298. if (GetJITFunctionBody()->GetInlineCacheCount() == 0)
  1299. {
  1300. Assert(UNREACHED);
  1301. return nullptr;
  1302. }
  1303. return GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(index);
  1304. }
// Global (top-func) object-type-spec field info by property info id.
ObjTypeSpecFldInfo*
Func::GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const
{
    Assert(propertyInfoId < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    // The array lives on the top func; inlinees share it.
    return GetTopFunc()->m_globalObjTypeSpecFldInfoArray[propertyInfoId];
}
  1311. void
  1312. Func::EnsurePinnedTypeRefs()
  1313. {
  1314. if (this->pinnedTypeRefs == nullptr)
  1315. {
  1316. this->pinnedTypeRefs = JitAnew(this->m_alloc, TypeRefSet, this->m_alloc);
  1317. }
  1318. }
// Pins a type ref so it stays alive as long as this jitted code.
void
Func::PinTypeRef(void* typeRef)
{
    EnsurePinnedTypeRefs();
    this->pinnedTypeRefs->AddNew(typeRef);
}
// Lazily creates the typeAddr -> guard dictionary for single-type guards.
void
Func::EnsureSingleTypeGuards()
{
    if (this->singleTypeGuards == nullptr)
    {
        this->singleTypeGuards = JitAnew(this->m_alloc, TypePropertyGuardDictionary, this->m_alloc);
    }
}
// Returns the (unique) type property guard for the given type address,
// creating and registering it on first request.
Js::JitTypePropertyGuard*
Func::GetOrCreateSingleTypeGuard(intptr_t typeAddr)
{
    EnsureSingleTypeGuards();

    Js::JitTypePropertyGuard* guard = nullptr;
    if (!this->singleTypeGuards->TryGetValue(typeAddr, &guard))
    {
        // Property guards are allocated by NativeCodeData::Allocator so that their lifetime extends as long as the EntryPointInfo is alive.
        guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitTypePropertyGuard, typeAddr, this->indexedPropertyGuardCount++);
        this->singleTypeGuards->Add(typeAddr, guard);
    }
    else
    {
        Assert(guard->GetTypeAddr() == typeAddr);
    }

    return guard;
}
// Lazily creates the list holding equivalent-type guards for this func.
void
Func::EnsureEquivalentTypeGuards()
{
    if (this->equivalentTypeGuards == nullptr)
    {
        this->equivalentTypeGuards = JitAnew(this->m_alloc, EquivalentTypeGuardList, this->m_alloc);
    }
}
// Creates a new equivalent-type guard (with its backing cache) and registers
// it on this func's guard list.
Js::JitEquivalentTypeGuard*
Func::CreateEquivalentTypeGuard(JITTypeHolder type, uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();

    Js::JitEquivalentTypeGuard* guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitEquivalentTypeGuard, type->GetAddr(), this->indexedPropertyGuardCount++, objTypeSpecFldId);

    // If we want to hard code the address of the cache, we will need to go back to allocating it from the native code data allocator.
    // We would then need to maintain consistency (double write) to both the recycler allocated cache and the one on the heap.
    Js::EquivalentTypeCache* cache = nullptr;
    if (this->IsOOPJIT())
    {
        // OOP JIT: allocate in the jit arena; the cache is transferred separately.
        cache = JitAnewZ(this->m_alloc, Js::EquivalentTypeCache);
    }
    else
    {
        cache = NativeCodeDataNewZNoFixup(GetTransferDataAllocator(), Js::EquivalentTypeCache);
    }
    guard->SetCache(cache);

    // Give the cache a back-pointer to the guard so that the guard can be cleared at runtime if necessary.
    cache->SetGuard(guard);
    this->equivalentTypeGuards->Prepend(guard);

    return guard;
}
// Lazily creates the propertyId -> guard-set map.
void
Func::EnsurePropertyGuardsByPropertyId()
{
    if (this->propertyGuardsByPropertyId == nullptr)
    {
        this->propertyGuardsByPropertyId = JitAnew(this->m_alloc, PropertyGuardByPropertyIdMap, this->m_alloc);
    }
}
// Lazily creates the propertyId -> constructor-cache-set map.
void
Func::EnsureCtorCachesByPropertyId()
{
    if (this->ctorCachesByPropertyId == nullptr)
    {
        this->ctorCachesByPropertyId = JitAnew(this->m_alloc, CtorCachesByPropertyIdMap, this->m_alloc);
    }
}
// Associates an indexed property guard with a property id so the runtime can
// invalidate the guard when that property changes.
void
Func::LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard)
{
    Assert(guard != nullptr);
    Assert(guard->GetValue() != NULL);

    // Caller must have called EnsurePropertyGuardsByPropertyId.
    Assert(this->propertyGuardsByPropertyId != nullptr);

    IndexedPropertyGuardSet* set;
    if (!this->propertyGuardsByPropertyId->TryGetValue(propertyId, &set))
    {
        // First guard for this property id: create its set.
        set = JitAnew(this->m_alloc, IndexedPropertyGuardSet, this->m_alloc);
        this->propertyGuardsByPropertyId->Add(propertyId, set);
    }

    set->Item(guard);
}
// Associates a constructor cache with a property id so the runtime can
// invalidate the cache when that property changes.
void
Func::LinkCtorCacheToPropertyId(Js::PropertyId propertyId, JITTimeConstructorCache* cache)
{
    Assert(cache != nullptr);
    // Caller must have called EnsureCtorCachesByPropertyId.
    Assert(this->ctorCachesByPropertyId != nullptr);

    CtorCacheSet* set;
    if (!this->ctorCachesByPropertyId->TryGetValue(propertyId, &set))
    {
        // First cache for this property id: create its set.
        set = JitAnew(this->m_alloc, CtorCacheSet, this->m_alloc);
        this->ctorCachesByPropertyId->Add(propertyId, set);
    }

    set->Item(cache->GetRuntimeCacheAddr());
}
// Constructor cache for the given profiled call site (may be null if none was set).
JITTimeConstructorCache* Func::GetConstructorCache(const Js::ProfileId profiledCallSiteId)
{
    Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
    Assert(this->constructorCaches != nullptr);
    return this->constructorCaches[profiledCallSiteId];
}
// Records the constructor cache for a profiled call site; each site may be set
// at most once.
void Func::SetConstructorCache(const Js::ProfileId profiledCallSiteId, JITTimeConstructorCache* constructorCache)
{
    Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
    Assert(constructorCache != nullptr);
    Assert(this->constructorCaches != nullptr);
    Assert(this->constructorCaches[profiledCallSiteId] == nullptr);
    this->constructorCacheCount++;
    this->constructorCaches[profiledCallSiteId] = constructorCache;
}
// Lazily creates the set of property ids this function writes to.
void Func::EnsurePropertiesWrittenTo()
{
    if (this->propertiesWrittenTo == nullptr)
    {
        this->propertiesWrittenTo = JitAnew(this->m_alloc, PropertyIdSet, this->m_alloc);
    }
}
// Lazily creates the call-site -> arguments-offset fixup map.
void Func::EnsureCallSiteToArgumentsOffsetFixupMap()
{
    if (this->callSiteToArgumentsOffsetFixupMap == nullptr)
    {
        this->callSiteToArgumentsOffsetFixupMap = JitAnew(this->m_alloc, CallSiteToArgumentsOffsetFixupMap, this->m_alloc);
    }
}
// Label marking the function's entry, or null if none was created yet
// (see EnsureFuncStartLabel).
IR::LabelInstr *
Func::GetFuncStartLabel()
{
    return m_funcStartLabel;
}
  1457. IR::LabelInstr *
  1458. Func::EnsureFuncStartLabel()
  1459. {
  1460. if(m_funcStartLabel == nullptr)
  1461. {
  1462. m_funcStartLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1463. }
  1464. return m_funcStartLabel;
  1465. }
// Label marking the function's exit, or null if none was created yet
// (see EnsureFuncEndLabel).
IR::LabelInstr *
Func::GetFuncEndLabel()
{
    return m_funcEndLabel;
}
  1471. IR::LabelInstr *
  1472. Func::EnsureFuncEndLabel()
  1473. {
  1474. if(m_funcEndLabel == nullptr)
  1475. {
  1476. m_funcEndLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1477. }
  1478. return m_funcEndLabel;
  1479. }
// Lazily creates the tracker used by the stack-args-with-formals optimization.
void
Func::EnsureStackArgWithFormalsTracker()
{
    if (stackArgWithFormalsTracker == nullptr)
    {
        stackArgWithFormalsTracker = JitAnew(m_alloc, StackArgWithFormalsTracker, m_alloc);
    }
}
  1488. BOOL
  1489. Func::IsFormalsArraySym(SymID symId)
  1490. {
  1491. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsArraySyms() == nullptr)
  1492. {
  1493. return false;
  1494. }
  1495. return stackArgWithFormalsTracker->GetFormalsArraySyms()->Test(symId);
  1496. }
// Records the sym id as a formals-array sym.
void
Func::TrackFormalsArraySym(SymID symId)
{
    EnsureStackArgWithFormalsTracker();
    stackArgWithFormalsTracker->SetFormalsArraySyms(symId);
}
// Maps a formal's index to the stack sym holding it.
void
Func::TrackStackSymForFormalIndex(Js::ArgSlot formalsIndex, StackSym * sym)
{
    EnsureStackArgWithFormalsTracker();
    // In-params include "this"; formals count excludes it.
    Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
    stackArgWithFormalsTracker->SetStackSymInFormalsIndexMap(sym, formalsIndex, formalsCount);
}
  1510. StackSym *
  1511. Func::GetStackSymForFormal(Js::ArgSlot formalsIndex)
  1512. {
  1513. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1514. {
  1515. return nullptr;
  1516. }
  1517. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1518. StackSym ** formalsIndexToStackSymMap = stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap();
  1519. AssertMsg(formalsIndex < formalsCount, "OutOfRange ? ");
  1520. return formalsIndexToStackSymMap[formalsIndex];
  1521. }
  1522. bool
  1523. Func::HasStackSymForFormal(Js::ArgSlot formalsIndex)
  1524. {
  1525. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1526. {
  1527. return false;
  1528. }
  1529. return GetStackSymForFormal(formalsIndex) != nullptr;
  1530. }
  1531. void
  1532. Func::SetScopeObjSym(StackSym * sym)
  1533. {
  1534. EnsureStackArgWithFormalsTracker();
  1535. stackArgWithFormalsTracker->SetScopeObjSym(sym);
  1536. }
  1537. StackSym *
  1538. Func::GetNativeCodeDataSym() const
  1539. {
  1540. Assert(IsOOPJIT());
  1541. return m_nativeCodeDataSym;
  1542. }
  1543. void
  1544. Func::SetNativeCodeDataSym(StackSym * opnd)
  1545. {
  1546. Assert(IsOOPJIT());
  1547. m_nativeCodeDataSym = opnd;
  1548. }
  1549. StackSym*
  1550. Func::GetScopeObjSym()
  1551. {
  1552. if (stackArgWithFormalsTracker == nullptr)
  1553. {
  1554. return nullptr;
  1555. }
  1556. return stackArgWithFormalsTracker->GetScopeObjSym();
  1557. }
  1558. BVSparse<JitArenaAllocator> *
  1559. StackArgWithFormalsTracker::GetFormalsArraySyms()
  1560. {
  1561. return formalsArraySyms;
  1562. }
  1563. void
  1564. StackArgWithFormalsTracker::SetFormalsArraySyms(SymID symId)
  1565. {
  1566. if (formalsArraySyms == nullptr)
  1567. {
  1568. formalsArraySyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1569. }
  1570. formalsArraySyms->Set(symId);
  1571. }
  1572. StackSym **
  1573. StackArgWithFormalsTracker::GetFormalsIndexToStackSymMap()
  1574. {
  1575. return formalsIndexToStackSymMap;
  1576. }
  1577. void
  1578. StackArgWithFormalsTracker::SetStackSymInFormalsIndexMap(StackSym * sym, Js::ArgSlot formalsIndex, Js::ArgSlot formalsCount)
  1579. {
  1580. if(formalsIndexToStackSymMap == nullptr)
  1581. {
  1582. formalsIndexToStackSymMap = JitAnewArrayZ(alloc, StackSym*, formalsCount);
  1583. }
  1584. AssertMsg(formalsIndex < formalsCount, "Out of range ?");
  1585. formalsIndexToStackSymMap[formalsIndex] = sym;
  1586. }
  1587. void
  1588. StackArgWithFormalsTracker::SetScopeObjSym(StackSym * sym)
  1589. {
  1590. m_scopeObjSym = sym;
  1591. }
  1592. StackSym *
  1593. StackArgWithFormalsTracker::GetScopeObjSym()
  1594. {
  1595. return m_scopeObjSym;
  1596. }
  1597. void
  1598. Cloner::AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone)
  1599. {
  1600. if (!this->instrFirst)
  1601. {
  1602. this->instrFirst = instrClone;
  1603. }
  1604. this->instrLast = instrClone;
  1605. }
  1606. void
  1607. Cloner::Finish()
  1608. {
  1609. this->RetargetClonedBranches();
  1610. if (this->lowerer)
  1611. {
  1612. lowerer->LowerRange(this->instrFirst, this->instrLast, false, false);
  1613. }
  1614. }
  1615. void
  1616. Cloner::RetargetClonedBranches()
  1617. {
  1618. if (!this->fRetargetClonedBranch)
  1619. {
  1620. return;
  1621. }
  1622. FOREACH_INSTR_IN_RANGE(instr, this->instrFirst, this->instrLast)
  1623. {
  1624. if (instr->IsBranchInstr())
  1625. {
  1626. instr->AsBranchInstr()->RetargetClonedBranch();
  1627. }
  1628. }
  1629. NEXT_INSTR_IN_RANGE;
  1630. }
  1631. void Func::ThrowIfScriptClosed()
  1632. {
  1633. if (GetScriptContextInfo()->IsClosed())
  1634. {
  1635. // Should not be jitting something in the foreground when the script context is actually closed
  1636. Assert(IsBackgroundJIT() || !GetScriptContext()->IsActuallyClosed());
  1637. throw Js::OperationAbortedException();
  1638. }
  1639. }
// Returns an indirect operand [reg + offset] that addresses a constant address,
// reusing a previously hoisted constant-address register when 'address' is
// within a 32-bit displacement of that register's constant. Emits (and records)
// a new register load at the function entry when no reusable register exists.
// Returns nullptr once hoisting is no longer allowed. Top-level func only.
IR::IndirOpnd * Func::GetConstantAddressIndirOpnd(intptr_t address, IR::Opnd * largeConstOpnd, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode)
{
    Assert(this->GetTopFunc() == this);
    if (!canHoistConstantAddressLoad)
    {
        // We can't hoist constant address load after lower, as we can't mark the sym as
        // live on back edge
        return nullptr;
    }
    int offset = 0;
    // Search the hoisted constant-address registers for one whose constant is
    // reachable from 'address' with a DWord displacement; capture that offset.
    IR::RegOpnd ** foundRegOpnd = this->constantAddressRegOpnd.Find([address, &offset](IR::RegOpnd * regOpnd)
    {
        // Each tracked reg is single-def, loaded from either an address or an
        // int-const source operand.
        Assert(regOpnd->m_sym->IsSingleDef());
        Assert(regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() || regOpnd->m_sym->m_instrDef->GetSrc1()->IsIntConstOpnd());
        void * curr = regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() ?
            regOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address :
            (void *)regOpnd->m_sym->m_instrDef->GetSrc1()->AsIntConstOpnd()->GetValue();
        ptrdiff_t diff = (uintptr_t)address - (uintptr_t)curr;
        if (!Math::FitsInDWord(diff))
        {
            return false;
        }
        offset = (int)diff;
        return true;
    });
    IR::RegOpnd * addressRegOpnd;
    if (foundRegOpnd != nullptr)
    {
        addressRegOpnd = *foundRegOpnd;
    }
    else
    {
        // No reusable register: load the constant into a fresh reg, remember it
        // for future reuse, and insert the load at the function entry (or after
        // the most recent constant-address load).
        Assert(offset == 0);
        addressRegOpnd = IR::RegOpnd::New(TyMachPtr, this);
        IR::Instr *const newInstr =
            IR::Instr::New(
                loadOpCode,
                addressRegOpnd,
                largeConstOpnd,
                this);
        this->constantAddressRegOpnd.Prepend(addressRegOpnd);
        IR::Instr * insertBeforeInstr = this->lastConstantAddressRegLoadInstr;
        if (insertBeforeInstr == nullptr)
        {
            // First hoisted load: anchor at the entry point and remember it so
            // later loads cluster after it.
            insertBeforeInstr = this->GetFunctionEntryInsertionPoint();
            this->lastConstantAddressRegLoadInstr = newInstr;
        }
        insertBeforeInstr->InsertBefore(newInstr);
    }
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(addressRegOpnd, offset, type, this, true);
#if DBG_DUMP
    // TODO: michhol make intptr_t
    indirOpnd->SetAddrKind(kind, (void*)address);
#endif
    return indirOpnd;
}
  1696. void Func::MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv)
  1697. {
  1698. Assert(this->GetTopFunc() == this);
  1699. this->constantAddressRegOpnd.Iterate([bv](IR::RegOpnd * regOpnd)
  1700. {
  1701. bv->Set(regOpnd->m_sym->m_id);
  1702. });
  1703. }
  1704. IR::Instr *
  1705. Func::GetFunctionEntryInsertionPoint()
  1706. {
  1707. Assert(this->GetTopFunc() == this);
  1708. IR::Instr * insertInsert = this->lastConstantAddressRegLoadInstr;
  1709. if (insertInsert != nullptr)
  1710. {
  1711. return insertInsert->m_next;
  1712. }
  1713. insertInsert = this->m_headInstr;
  1714. if (this->HasTry())
  1715. {
  1716. // Insert it inside the root region
  1717. insertInsert = insertInsert->m_next;
  1718. Assert(insertInsert->IsLabelInstr() && insertInsert->AsLabelInstr()->GetRegion()->GetType() == RegionTypeRoot);
  1719. }
  1720. return insertInsert->m_next;
  1721. }
  1722. Js::Var
  1723. Func::AllocateNumber(double value)
  1724. {
  1725. Js::Var number = nullptr;
  1726. #if FLOATVAR
  1727. number = Js::JavascriptNumber::NewCodeGenInstance((double)value, nullptr);
  1728. #else
  1729. if (!IsOOPJIT()) // in-proc jit
  1730. {
  1731. number = Js::JavascriptNumber::NewCodeGenInstance(GetNumberAllocator(), (double)value, GetScriptContext());
  1732. }
  1733. else // OOP JIT
  1734. {
  1735. number = GetXProcNumberAllocator()->AllocateNumber(this, value);
  1736. }
  1737. #endif
  1738. return number;
  1739. }
  1740. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1741. void
  1742. Func::DumpFullFunctionName()
  1743. {
  1744. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1745. Output::Print(_u("Function %s (%s)"), GetJITFunctionBody()->GetDisplayName(), GetDebugNumberSet(debugStringBuffer));
  1746. }
  1747. #endif
  1748. void
  1749. Func::UpdateForInLoopMaxDepth(uint forInLoopMaxDepth)
  1750. {
  1751. Assert(this->IsTopFunc());
  1752. this->m_forInLoopMaxDepth = max(this->m_forInLoopMaxDepth, forInLoopMaxDepth);
  1753. }
  1754. int
  1755. Func::GetForInEnumeratorArrayOffset() const
  1756. {
  1757. Func const* topFunc = this->GetTopFunc();
  1758. Assert(this->m_forInLoopBaseDepth + this->GetJITFunctionBody()->GetForInLoopDepth() <= topFunc->m_forInLoopMaxDepth);
  1759. return topFunc->m_forInEnumeratorArrayOffset
  1760. + this->m_forInLoopBaseDepth * sizeof(Js::ForInObjectEnumerator);
  1761. }
  1762. #if DBG_DUMP
  1763. ///----------------------------------------------------------------------------
  1764. ///
  1765. /// Func::DumpHeader
  1766. ///
  1767. ///----------------------------------------------------------------------------
  1768. void
  1769. Func::DumpHeader()
  1770. {
  1771. Output::Print(_u("-----------------------------------------------------------------------------\n"));
  1772. DumpFullFunctionName();
  1773. Output::SkipToColumn(50);
  1774. Output::Print(_u("Instr Count:%d"), GetInstrCount());
  1775. if(m_codeSize > 0)
  1776. {
  1777. Output::Print(_u("\t\tSize:%d\n\n"), m_codeSize);
  1778. }
  1779. else
  1780. {
  1781. Output::Print(_u("\n\n"));
  1782. }
  1783. }
  1784. ///----------------------------------------------------------------------------
  1785. ///
  1786. /// Func::Dump
  1787. ///
  1788. ///----------------------------------------------------------------------------
  1789. void
  1790. Func::Dump(IRDumpFlags flags)
  1791. {
  1792. this->DumpHeader();
  1793. FOREACH_INSTR_IN_FUNC(instr, this)
  1794. {
  1795. instr->DumpGlobOptInstrString();
  1796. instr->Dump(flags);
  1797. }NEXT_INSTR_IN_FUNC;
  1798. Output::Flush();
  1799. }
  1800. void
  1801. Func::Dump()
  1802. {
  1803. this->Dump(IRDumpFlags_None);
  1804. }
  1805. #endif
  1806. #if DBG_DUMP || defined(ENABLE_IR_VIEWER)
  1807. LPCSTR
  1808. Func::GetVtableName(INT_PTR address)
  1809. {
  1810. #if DBG
  1811. if (vtableMap == nullptr)
  1812. {
  1813. vtableMap = VirtualTableRegistry::CreateVtableHashMap(this->m_alloc);
  1814. };
  1815. LPCSTR name = vtableMap->Lookup(address, nullptr);
  1816. if (name)
  1817. {
  1818. if (strncmp(name, "class ", _countof("class ") - 1) == 0)
  1819. {
  1820. name += _countof("class ") - 1;
  1821. }
  1822. }
  1823. return name;
  1824. #else
  1825. return "";
  1826. #endif
  1827. }
  1828. #endif
  1829. #if DBG_DUMP | defined(VTUNE_PROFILING)
  1830. bool Func::DoRecordNativeMap() const
  1831. {
  1832. #if defined(VTUNE_PROFILING)
  1833. if (VTuneChakraProfile::isJitProfilingActive)
  1834. {
  1835. return true;
  1836. }
  1837. #endif
  1838. #if DBG_DUMP
  1839. return PHASE_DUMP(Js::EncoderPhase, this) && Js::Configuration::Global.flags.Verbose;
  1840. #else
  1841. return false;
  1842. #endif
  1843. }
  1844. #endif
  1845. #ifdef PERF_HINT
  1846. void WritePerfHint(PerfHints hint, Func* func, uint byteCodeOffset /*= Js::Constants::NoByteCodeOffset*/)
  1847. {
  1848. if (!func->IsOOPJIT())
  1849. {
  1850. WritePerfHint(hint, (Js::FunctionBody*)func->GetJITFunctionBody()->GetAddr(), byteCodeOffset);
  1851. }
  1852. }
  1853. #endif