Func.cpp 65 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
7177817791780178117821783178417851786178717881789179017911792179317941795179617971798179918001801180218031804180518061807180818091810181118121813181418151816181718181819182018211822182318241825182618271828182918301831183218331834183518361837183818391840184118421843184418451846184718481849185018511852185318541855185618571858185918601861186218631864186518661867186818691870187118721873187418751876187718781879188018811882188318841885188618871888188918901891189218931894189518961897189818991900190119021903190419051906190719081909191019111912191319141915191619171918191919201921192219231924192519261927192819291930193119321933193419351936193719381939194019411942194319441945194619471948194919501951195219531954195519561957195819591960196119621963196419651966196719681969197019711972197319741975197619771978197919801981198219831984198519861987198819891990199119921993199419951996199719981999200020012002200320042005200620072008200920102011201220132014201520162017201820192020202120222023202420252026202720282029203020312032203320342035203620372038203920402041204220432044204520462047204820492050205120522053205420552056205720582059206020612062206320642065206620672068206920702071207220732074207520762077207820792080208120822083208420852086208720882089209020912092209320942095209620972098209921002101210221032104210521062107210821092110211121122113211421152116211721182119212021212122212321242125212621272128212921302131213221332134213521362137213821392140214121422143214421452146214721482149215021512152215321542155215621572158215921602161216221632164216521662167216821692170217121722173217421752176217721782179218021812182218321842185218621872188218921902191219221932194219521962197219821992200220122022203220422052206220722082209
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Base/EtwTrace.h"
  7. #include "Base/ScriptContextProfiler.h"
  8. #ifdef VTUNE_PROFILING
  9. #include "Base/VTuneChakraProfile.h"
  10. #endif
  11. #include "Library/ForInObjectEnumerator.h"
// Constructs a Func, the per-function unit of JIT compilation. A Func is either the
// top-level function being jitted (parentFunc == nullptr) or an inlinee nested under
// some top-level Func (parentFunc != nullptr).
Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
    ThreadContextInfo * threadContextInfo,
    ScriptContextInfo * scriptContextInfo,
    JITOutputIDL * outputData,
    Js::EntryPointInfo* epInfo,
    const FunctionJITRuntimeInfo *const runtimeInfo,
    JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
#if !FLOATVAR
    CodeGenNumberAllocator * numberAllocator,
#endif
    Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc,
    uint postCallByteCodeOffset, Js::RegSlot returnValueRegSlot, const bool isInlinedConstructor,
    Js::ProfileId callSiteIdInParentFunc, bool isGetterSetter) :
    // Context / work-item wiring.
    m_alloc(alloc),
    m_workItem(workItem),
    m_output(outputData),
    m_entryPointInfo(epInfo),
    m_threadContextInfo(threadContextInfo),
    m_scriptContextInfo(scriptContextInfo),
    m_runtimeInfo(runtimeInfo),
    m_polymorphicInlineCacheInfo(polymorphicInlineCacheInfo),
    m_codeGenAllocators(codeGenAllocators),
    m_inlineeId(0),
    // Object-type-spec / property-guard bookkeeping, allocated lazily.
    pinnedTypeRefs(nullptr),
    singleTypeGuards(nullptr),
    equivalentTypeGuards(nullptr),
    propertyGuardsByPropertyId(nullptr),
    ctorCachesByPropertyId(nullptr),
    callSiteToArgumentsOffsetFixupMap(nullptr),
    indexedPropertyGuardCount(0),
    propertiesWrittenTo(nullptr),
    lazyBailoutProperties(alloc),
    anyPropertyMayBeWrittenTo(false),
#ifdef PROFILE_EXEC
    m_codeGenProfiler(codeGenProfiler),
#endif
    m_isBackgroundJIT(isBackgroundJIT),
    m_cloner(nullptr),
    m_cloneMap(nullptr),
    // Well-known stack syms, created on demand during IR build / lowering.
    m_loopParamSym(nullptr),
    m_funcObjSym(nullptr),
    m_localClosureSym(nullptr),
    m_paramClosureSym(nullptr),
    m_localFrameDisplaySym(nullptr),
    m_bailoutReturnValueSym(nullptr),
    m_hasBailedOutSym(nullptr),
    m_inlineeFrameStartSym(nullptr),
    inlineeStart(nullptr),
    m_regsUsed(0),
    m_fg(nullptr),
    m_labelCount(0),
    m_argSlotsForFunctionsCalled(0),
    m_hasCalls(false),
    m_hasInlineArgsOpt(false),
    m_canDoInlineArgsOpt(true),
    unoptimizableArgumentsObjReference(0),
    unoptimizableArgumentsObjReferenceInInlinees(0),
    m_doFastPaths(false),
    hasBailout(false),
    firstIRTemp(0),
    hasBailoutInEHRegion(false),
    hasInstrNumber(false),
    maintainByteCodeOffset(true),
    frameSize(0),
    // The inlinee chain: topFunc is the root of the inline tree (self when top-level).
    topFunc(parentFunc ? parentFunc->topFunc : this),
    parentFunc(parentFunc),
    argObjSyms(nullptr),
    m_nonTempLocalVars(nullptr),
    hasAnyStackNestedFunc(false),
    hasMarkTempObjects(false),
    postCallByteCodeOffset(postCallByteCodeOffset),
    maxInlineeArgOutSize(0),
    returnValueRegSlot(returnValueRegSlot),
    firstActualStackOffset(-1),
    m_localVarSlotsOffset(Js::Constants::InvalidOffset),
    m_hasLocalVarChangedOffset(Js::Constants::InvalidOffset),
    // Sentinel: actual count unknown until call-site information is seen.
    actualCount((Js::ArgSlot) - 1),
    tryCatchNestingLevel(0),
    m_localStackHeight(0),
    tempSymDouble(nullptr),
    tempSymBool(nullptr),
    hasInlinee(false),
    thisOrParentInlinerHasArguments(false),
    hasStackArgs(false),
    hasArgLenAndConstOpt(false),
    hasImplicitParamLoad(false),
    hasThrow(false),
    hasNonSimpleParams(false),
    hasUnoptimizedArgumentsAccess(false),
    applyTargetInliningRemovedArgumentsAccess(false),
    hasImplicitCalls(false),
    hasTempObjectProducingInstr(false),
    isInlinedConstructor(isInlinedConstructor),
#if !FLOATVAR
    numberAllocator(numberAllocator),
#endif
    loopCount(0),
    callSiteIdInParentFunc(callSiteIdInParentFunc),
    isGetterSetter(isGetterSetter),
    cachedInlineeFrameInfo(nullptr),
    frameInfo(nullptr),
    isTJLoopBody(false),
    m_nativeCodeDataSym(nullptr),
    isFlowGraphValid(false),
    legalizePostRegAlloc(false),
#if DBG
    m_callSiteCount(0),
#endif
    stackNestedFunc(false),
    stackClosure(false)
#if defined(_M_ARM32_OR_ARM64)
    , m_ArgumentsOffset(0)
    , m_epilogLabel(nullptr)
#endif
    , m_funcStartLabel(nullptr)
    , m_funcEndLabel(nullptr)
#if DBG
    // Debug-only phase-tracking state used for asserts throughout the backend.
    , hasCalledSetDoFastPaths(false)
    , allowRemoveBailOutArgInstr(false)
    , currentPhases(alloc)
    , isPostLower(false)
    , isPostRegAlloc(false)
    , isPostPeeps(false)
    , isPostLayout(false)
    , isPostFinalLower(false)
    , vtableMap(nullptr)
#endif
    , m_yieldOffsetResumeLabelList(nullptr)
    , m_bailOutNoSaveLabel(nullptr)
    , constantAddressRegOpnd(alloc)
    , lastConstantAddressRegLoadInstr(nullptr)
    , m_totalJumpTableSizeInBytesForSwitchStatements(0)
    , frameDisplayCheckTable(nullptr)
    , stackArgWithFormalsTracker(nullptr)
    , m_forInLoopBaseDepth(0)
    , m_forInEnumeratorArrayOffset(-1)
    , argInsCount(0)
    , m_globalObjTypeSpecFldInfoArray(nullptr)
#if LOWER_SPLIT_INT64
    , m_int64SymPairMap(nullptr)
#endif
#ifdef RECYCLER_WRITE_BARRIER_JIT
    , m_lowerer(nullptr)
#endif
    , m_lazyBailOutRecordSlot(nullptr)
    , hasLazyBailOut(false)
{
    // Only inlinees carry runtime info.
    Assert(this->IsInlined() == !!runtimeInfo);

    // Fail fast (even in release builds) on inconsistencies between the profile
    // data / work item and the function body we are about to compile.
    AssertOrFailFast(!HasProfileInfo() || GetReadOnlyProfileInfo()->GetLoopCount() == GetJITFunctionBody()->GetLoopCount());
    Js::RegSlot tmpResult;
    // Verify const + var register counts do not overflow a uint32.
    AssertOrFailFast(!UInt32Math::Add(GetJITFunctionBody()->GetConstCount(), GetJITFunctionBody()->GetVarCount(), &tmpResult));
    AssertOrFailFast(GetJITFunctionBody()->IsAsmJsMode() || GetJITFunctionBody()->GetFirstTmpReg() <= GetJITFunctionBody()->GetLocalsCount());
    AssertOrFailFast(!IsLoopBody() || m_workItem->GetLoopNumber() < GetJITFunctionBody()->GetLoopCount());
    AssertOrFailFast(CONFIG_FLAG(Prejit) || CONFIG_ISENABLED(Js::ForceNativeFlag) || GetJITFunctionBody()->GetByteCodeLength() < (uint)CONFIG_FLAG(MaxJITFunctionBytecodeByteLength));
    GetJITFunctionBody()->EnsureConsistentConstCount();

    // The top-level Func initializes the shared output structure for the whole jit.
    if (this->IsTopFunc())
    {
        outputData->hasJittedStackClosure = false;
        outputData->localVarSlotsOffset = m_localVarSlotsOffset;
        outputData->localVarChangedOffset = m_hasLocalVarChangedOffset;
    }

    // Inlinee ids are assigned sequentially from the top func's counter.
    if (this->IsInlined())
    {
        m_inlineeId = ++(GetTopFunc()->m_inlineeId);
    }

    bool doStackNestedFunc = GetJITFunctionBody()->DoStackNestedFunc();
    bool doStackClosure = GetJITFunctionBody()->DoStackClosure() && !PHASE_OFF(Js::FrameDisplayFastPathPhase, this) && !PHASE_OFF(Js::StackClosurePhase, this);
    Assert(!doStackClosure || doStackNestedFunc);
    // Stack closures are only applied to the top-level function, not inlinees.
    this->stackClosure = doStackClosure && this->IsTopFunc();
    if (this->stackClosure)
    {
        // TODO: calculate on runtime side?
        m_output.SetHasJITStackClosure();
    }

    if (m_workItem->Type() == JsFunctionType &&
        GetJITFunctionBody()->DoBackendArgumentsOptimization() &&
        (!GetJITFunctionBody()->HasTry() || this->DoOptimizeTry()))
    {
        // doBackendArgumentsOptimization bit is set when there is no eval inside a function
        // as determined by the bytecode generator.
        SetHasStackArgs(true);
    }

    if (doStackNestedFunc && GetJITFunctionBody()->GetNestedCount() != 0 &&
        (this->IsTopFunc() || this->GetTopFunc()->m_workItem->Type() != JsLoopBodyWorkItemType)) // make sure none of the functions inlined in a jitted loop body allocate nested functions on the stack
    {
        Assert(!(this->IsJitInDebugMode() && !GetJITFunctionBody()->IsLibraryCode()));
        stackNestedFunc = true;
        this->GetTopFunc()->hasAnyStackNestedFunc = true;
    }

    // Propagate "arguments object in scope" down the inline chain.
    if (GetJITFunctionBody()->HasOrParentHasArguments() || (parentFunc && parentFunc->thisOrParentInlinerHasArguments))
    {
        thisOrParentInlinerHasArguments = true;
    }

    if (parentFunc == nullptr)
    {
        // Top-level function: owns the symbol table shared by all inlinees.
        inlineDepth = 0;
        m_symTable = JitAnew(alloc, SymTable);
        m_symTable->Init(this);
        m_symTable->SetStartingID(static_cast<SymID>(workItem->GetJITFunctionBody()->GetLocalsCount() + 1));

        Assert(Js::Constants::NoByteCodeOffset == postCallByteCodeOffset);
        Assert(Js::Constants::NoRegister == returnValueRegSlot);

#if defined(_M_IX86) || defined(_M_X64)
        if (HasArgumentSlot())
        {
            // Pre-allocate the single argument slot we'll reserve for the arguments object.
            // For ARM, the argument slot is not part of the local but part of the register saves
            m_localStackHeight = MachArgsSlotOffset;
        }
#endif
    }
    else
    {
        // Inlinee: one level deeper than its inliner.
        inlineDepth = parentFunc->inlineDepth + 1;
        Assert(Js::Constants::NoByteCodeOffset != postCallByteCodeOffset);
    }

    // One constructor-cache slot per profiled call site, zero-initialized.
    this->constructorCacheCount = 0;
    this->constructorCaches = AnewArrayZ(this->m_alloc, JITTimeConstructorCache*, GetJITFunctionBody()->GetProfiledCallSiteCount());

#if DBG_DUMP
    m_codeSize = -1;
#endif

#if defined(_M_X64)
    // -1 marks "not yet computed" for the x64 frame layout sizes.
    m_spillSize = -1;
    m_argsSize = -1;
    m_savedRegSize = -1;
#endif

    if (this->IsJitInDebugMode())
    {
        // Track non-temp locals so the debugger can inspect them (write-through slots).
        m_nonTempLocalVars = Anew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
    }

    if (GetJITFunctionBody()->IsCoroutine())
    {
        m_yieldOffsetResumeLabelList = YieldOffsetResumeLabelList::New(this->m_alloc);
    }

    if (this->IsTopFunc())
    {
        // The top func owns the flat array of obj-type-spec field info for the whole inline tree.
        m_globalObjTypeSpecFldInfoArray = JitAnewArrayZ(this->m_alloc, ObjTypeSpecFldInfo*, GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    }

    // Register this function's obj-type-spec field info into the top func's global array,
    // indexed by the info's global id.
    for (uint i = 0; i < GetJITFunctionBody()->GetInlineCacheCount(); ++i)
    {
        ObjTypeSpecFldInfo * info = GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(i);
        if (info != nullptr)
        {
            AssertOrFailFast(info->GetObjTypeSpecFldId() < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
            GetTopFunc()->m_globalObjTypeSpecFldInfoArray[info->GetObjTypeSpecFldId()] = info;
        }
    }

    canHoistConstantAddressLoad = !PHASE_OFF(Js::HoistConstAddrPhase, this);

    m_forInLoopMaxDepth = this->GetJITFunctionBody()->GetForInLoopDepth();
}
  261. bool
  262. Func::IsLoopBodyInTry() const
  263. {
  264. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTry;
  265. }
  266. bool
  267. Func::IsLoopBodyInTryFinally() const
  268. {
  269. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTryFinally;
  270. }
  271. /* static */
  272. void
  273. Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  274. ThreadContextInfo * threadContextInfo,
  275. ScriptContextInfo * scriptContextInfo,
  276. JITOutputIDL * outputData,
  277. Js::EntryPointInfo* epInfo, // for in-proc jit only
  278. const FunctionJITRuntimeInfo *const runtimeInfo,
  279. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
  280. #if !FLOATVAR
  281. CodeGenNumberAllocator * numberAllocator,
  282. #endif
  283. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT)
  284. {
  285. bool rejit;
  286. int rejitCounter = 0;
  287. do
  288. {
  289. Assert(rejitCounter < 25);
  290. Func func(alloc, workItem, threadContextInfo,
  291. scriptContextInfo, outputData, epInfo, runtimeInfo,
  292. polymorphicInlineCacheInfo, codeGenAllocators,
  293. #if !FLOATVAR
  294. numberAllocator,
  295. #endif
  296. codeGenProfiler, isBackgroundJIT);
  297. try
  298. {
  299. func.TryCodegen();
  300. rejit = false;
  301. }
  302. catch (Js::RejitException ex)
  303. {
  304. // The work item needs to be rejitted, likely due to some optimization that was too aggressive
  305. switch (ex.Reason())
  306. {
  307. case RejitReason::AggressiveIntTypeSpecDisabled:
  308. outputData->disableAggressiveIntTypeSpec = TRUE;
  309. break;
  310. case RejitReason::InlineApplyDisabled:
  311. workItem->GetJITFunctionBody()->DisableInlineApply();
  312. outputData->disableInlineApply = TRUE;
  313. break;
  314. case RejitReason::InlineSpreadDisabled:
  315. workItem->GetJITFunctionBody()->DisableInlineSpread();
  316. outputData->disableInlineSpread = TRUE;
  317. break;
  318. case RejitReason::DisableStackArgOpt:
  319. outputData->disableStackArgOpt = TRUE;
  320. break;
  321. case RejitReason::DisableStackArgLenAndConstOpt:
  322. break;
  323. case RejitReason::DisableSwitchOptExpectingInteger:
  324. case RejitReason::DisableSwitchOptExpectingString:
  325. outputData->disableSwitchOpt = TRUE;
  326. break;
  327. case RejitReason::ArrayCheckHoistDisabled:
  328. case RejitReason::ArrayAccessHelperCallEliminationDisabled:
  329. outputData->disableArrayCheckHoist = TRUE;
  330. break;
  331. case RejitReason::TrackIntOverflowDisabled:
  332. outputData->disableTrackCompoundedIntOverflow = TRUE;
  333. break;
  334. default:
  335. Assume(UNREACHED);
  336. }
  337. if (PHASE_TRACE(Js::ReJITPhase, &func))
  338. {
  339. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  340. Output::Print(
  341. _u("Rejit (compile-time): function: %s (%s) reason: %S\n"),
  342. workItem->GetJITFunctionBody()->GetDisplayName(),
  343. workItem->GetJITTimeInfo()->GetDebugNumberSet(debugStringBuffer),
  344. ex.ReasonName());
  345. }
  346. rejit = true;
  347. rejitCounter++;
  348. }
  349. // Either the entry point has a reference to the number now, or we failed to code gen and we
  350. // don't need to numbers, we can flush the completed page now.
  351. //
  352. // If the number allocator is NULL then we are shutting down the thread context and so too the
  353. // code generator. The number allocator must be freed before the recycler (and thus before the
  354. // code generator) so we can't and don't need to flush it.
  355. // TODO: OOP JIT, allocator cleanup
  356. } while (rejit);
  357. }
///----------------------------------------------------------------------------
///
/// Func::TryCodegen
///
/// Attempt to Codegen this function.
///
/// Runs the whole backend pipeline in phase order: IR build, inlining, flow
/// graph + global optimization, lowering, register allocation, peeps, layout,
/// prolog/epilog, final lowering, and encoding. Throws Js::RejitException when
/// an optimization must be disabled and the compilation retried (see
/// Func::Codegen), and may throw on script closure or OOM.
///
///----------------------------------------------------------------------------
void
Func::TryCodegen()
{
    Assert(!IsJitInDebugMode() || !GetJITFunctionBody()->HasTry());

    BEGIN_CODEGEN_PHASE(this, Js::BackEndPhase);
    {
        // IRBuilder
        BEGIN_CODEGEN_PHASE(this, Js::IRBuilderPhase);

#ifdef ASMJS_PLAT
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            // Asm.js has its own IR builder over its typed bytecode.
            IRBuilderAsmJs asmIrBuilder(this);
            asmIrBuilder.Build();
        }
        else
#endif
        {
            IRBuilder irBuilder(this);
            irBuilder.Build();
        }

        END_CODEGEN_PHASE(this, Js::IRBuilderPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::IRBuilderPhase);
#endif /* IR_VIEWER */

        // Inlining happens on the linear IR, before the flow graph is built.
        BEGIN_CODEGEN_PHASE(this, Js::InlinePhase);

        InliningHeuristics heuristics(GetWorkItem()->GetJITTimeInfo(), this->IsLoopBody());
        Inline inliner(this, heuristics);
        inliner.Optimize();

        END_CODEGEN_PHASE(this, Js::InlinePhase);

        ThrowIfScriptClosed();

        // FlowGraph
        {
            // Scope for FlowGraph arena — the arena (and with it the flow graph
            // memory) is released when this block exits.
            NoRecoverMemoryJitArenaAllocator fgAlloc(_u("BE-FlowGraph"), m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);

            BEGIN_CODEGEN_PHASE(this, Js::FGBuildPhase);

            this->m_fg = FlowGraph::New(this, &fgAlloc);
            this->m_fg->Build();

            END_CODEGEN_PHASE(this, Js::FGBuildPhase);

            // Global Optimization and Type Specialization
            BEGIN_CODEGEN_PHASE(this, Js::GlobOptPhase);

            GlobOpt globOpt(this);
            globOpt.Optimize();

            END_CODEGEN_PHASE(this, Js::GlobOptPhase);

            // Delete flowGraph now
            this->m_fg->Destroy();
            this->m_fg = nullptr;
        }

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::GlobOptPhase);
#endif /* IR_VIEWER */

        ThrowIfScriptClosed();

        // Lowering
        // NOTE: the lowerer instance outlives its phase; later phases
        // (interrupt probes, EH patch-up, prolog/epilog, final lower) reuse it.
        Lowerer lowerer(this);
        BEGIN_CODEGEN_PHASE(this, Js::LowererPhase);
        lowerer.Lower();
        END_CODEGEN_PHASE(this, Js::LowererPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::LowererPhase);
#endif /* IR_VIEWER */

        // Encode constants
        Security security(this);

        BEGIN_CODEGEN_PHASE(this, Js::EncodeConstantsPhase)
        security.EncodeLargeConstants();
        END_CODEGEN_PHASE(this, Js::EncodeConstantsPhase);

        if (GetJITFunctionBody()->DoInterruptProbe())
        {
            BEGIN_CODEGEN_PHASE(this, Js::InterruptProbePhase)
            lowerer.DoInterruptProbes();
            END_CODEGEN_PHASE(this, Js::InterruptProbePhase)
        }

        // Register Allocation
        BEGIN_CODEGEN_PHASE(this, Js::RegAllocPhase);

        LinearScan linearScan(this);
        linearScan.RegAlloc();

        END_CODEGEN_PHASE(this, Js::RegAllocPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::RegAllocPhase);
#endif /* IR_VIEWER */

        ThrowIfScriptClosed();

        // Peephole optimizations
        BEGIN_CODEGEN_PHASE(this, Js::PeepsPhase);

        Peeps peeps(this);
        peeps.PeepFunc();

        END_CODEGEN_PHASE(this, Js::PeepsPhase);

        // Layout
        BEGIN_CODEGEN_PHASE(this, Js::LayoutPhase);

        SimpleLayout layout(this);
        layout.Layout();

        END_CODEGEN_PHASE(this, Js::LayoutPhase);

        if (this->HasTry() && this->hasBailoutInEHRegion)
        {
            BEGIN_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
            lowerer.EHBailoutPatchUp();
            END_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
        }

        // Insert NOPs (moving this before prolog/epilog for AMD64 and possibly ARM).
        BEGIN_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
        security.InsertNOPs();
        END_CODEGEN_PHASE(this, Js::InsertNOPsPhase);

        // Prolog/Epilog
        BEGIN_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            lowerer.LowerPrologEpilogAsmJs();
        }
        else
        {
            lowerer.LowerPrologEpilog();
        }
        END_CODEGEN_PHASE(this, Js::PrologEpilogPhase);

        BEGIN_CODEGEN_PHASE(this, Js::FinalLowerPhase);
        lowerer.FinalLower();
        END_CODEGEN_PHASE(this, Js::FinalLowerPhase);

        // Encoder
        BEGIN_CODEGEN_PHASE(this, Js::EncoderPhase);

        Encoder encoder(this);
        encoder.Encode();

        END_CODEGEN_PHASE_NO_DUMP(this, Js::EncoderPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::EncoderPhase);
#endif /* IR_VIEWER */
    }

#if DBG_DUMP
    if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag))
    {
        // Redirect the asm dump to the configured file (if any), then restore.
        FILE * oldFile = 0;
        FILE * asmFile = GetScriptContext()->GetNativeCodeGenerator()->asmFile;
        if (asmFile)
        {
            oldFile = Output::SetFile(asmFile);
        }

        this->Dump(IRDumpFlags_AsmDumpMode);

        Output::Flush();

        if (asmFile)
        {
            FILE *openedFile = Output::SetFile(oldFile);
            Assert(openedFile == asmFile);
        }
    }
#endif
    if (this->IsOOPJIT())
    {
        // OOP JIT: serialize the native code data into a flat buffer plus a fixup
        // table so the client process can relocate the embedded pointers.
        BEGIN_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);

        auto dataAllocator = this->GetNativeCodeDataAllocator();
        if (dataAllocator->allocCount > 0)
        {
            NativeCodeData::DataChunk *chunk = (NativeCodeData::DataChunk*)dataAllocator->chunkList;
            NativeCodeData::DataChunk *next1 = chunk;
            while (next1)
            {
                if (next1->fixupFunc)
                {
                    next1->fixupFunc(next1->data, chunk);
                }
#if DBG
                if (CONFIG_FLAG(OOPJITFixupValidate))
                {
                    // Scan memory to see if there's missing pointer needs to be fixed up
                    // This can hit false positive if some data field happens to have value
                    // falls into the NativeCodeData memory range.
                    NativeCodeData::DataChunk *next2 = chunk;
                    while (next2)
                    {
                        for (unsigned int i = 0; i < next1->len / sizeof(void*); i++)
                        {
                            if (((void**)next1->data)[i] == (void*)next2->data)
                            {
                                NativeCodeData::VerifyExistFixupEntry((void*)next2->data, &((void**)next1->data)[i], next1->data);
                            }
                        }
                        next2 = next2->next;
                    }
                }
#endif
                next1 = next1->next;
            }

            JITOutputIDL* jitOutputData = m_output.GetOutputData();
            size_t allocSize = offsetof(NativeDataFixupTable, fixupRecords) + sizeof(NativeDataFixupRecord)* (dataAllocator->allocCount);
            jitOutputData->nativeDataFixupTable = (NativeDataFixupTable*)midl_user_allocate(allocSize);
            if (!jitOutputData->nativeDataFixupTable)
            {
                Js::Throw::OutOfMemory();
            }
            __analysis_assume(jitOutputData->nativeDataFixupTable);
            jitOutputData->nativeDataFixupTable->count = dataAllocator->allocCount;

            jitOutputData->buffer = (NativeDataBuffer*)midl_user_allocate(offsetof(NativeDataBuffer, data) + dataAllocator->totalSize);
            if (!jitOutputData->buffer)
            {
                Js::Throw::OutOfMemory();
            }
            __analysis_assume(jitOutputData->buffer);

            jitOutputData->buffer->len = dataAllocator->totalSize;

            // Copy each chunk into the contiguous buffer and record a fixup entry for it.
            unsigned int len = 0;
            unsigned int count = 0;
            next1 = chunk;
            while (next1)
            {
                memcpy(jitOutputData->buffer->data + len, next1->data, next1->len);
                len += next1->len;

                jitOutputData->nativeDataFixupTable->fixupRecords[count].index = next1->allocIndex;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].length = next1->len;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].startOffset = next1->offset;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].updateList = next1->fixupList;

                count++;
                next1 = next1->next;
            }

#if DBG
            if (PHASE_TRACE1(Js::NativeCodeDataPhase))
            {
                Output::Print(_u("NativeCodeData Server Buffer: %p, len: %x, chunk head: %p\n"), jitOutputData->buffer->data, jitOutputData->buffer->len, chunk);
            }
#endif
        }
        END_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);
    }

    END_CODEGEN_PHASE(this, Js::BackEndPhase);
}
  582. ///----------------------------------------------------------------------------
  583. /// Func::StackAllocate
  584. /// Allocate stack space of given size.
  585. ///----------------------------------------------------------------------------
  586. int32
  587. Func::StackAllocate(int size)
  588. {
  589. Assert(this->IsTopFunc());
  590. int32 offset;
  591. #ifdef MD_GROW_LOCALS_AREA_UP
  592. // Locals have positive offsets and are allocated from bottom to top.
  593. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  594. offset = m_localStackHeight;
  595. m_localStackHeight += size;
  596. #else
  597. // Locals have negative offsets and are allocated from top to bottom.
  598. m_localStackHeight += size;
  599. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  600. offset = -m_localStackHeight;
  601. #endif
  602. return offset;
  603. }
  604. ///----------------------------------------------------------------------------
  605. ///
  606. /// Func::StackAllocate
  607. ///
  608. /// Allocate stack space for this symbol.
  609. ///
  610. ///----------------------------------------------------------------------------
  611. int32
  612. Func::StackAllocate(StackSym *stackSym, int size)
  613. {
  614. Assert(size > 0);
  615. if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym() || stackSym->IsAllocated())
  616. {
  617. return stackSym->m_offset;
  618. }
  619. Assert(stackSym->m_offset == 0);
  620. stackSym->m_allocated = true;
  621. stackSym->m_offset = StackAllocate(size);
  622. return stackSym->m_offset;
  623. }
  624. void
  625. Func::SetArgOffset(StackSym *stackSym, int32 offset)
  626. {
  627. AssertMsg(offset >= 0, "Why is the offset, negative?");
  628. stackSym->m_offset = offset;
  629. stackSym->m_allocated = true;
  630. }
///
/// Ensures that local var slots are created, if the function has locals.
/// Allocate stack space for locals used for debugging
/// (for local non-temp vars we write-through memory so that locals inspection can make use of that).
/// On stack, after local slots we allocate space for metadata (in particular, whether any of the locals was changed in debugger).
///
void
Func::EnsureLocalVarSlots()
{
    // Only meaningful when jitting under the debugger.
    Assert(IsJitInDebugMode());

    if (!this->HasLocalVarSlotCreated())
    {
        uint32 localSlotCount = GetJITFunctionBody()->GetNonTempLocalVarCount();
        if (localSlotCount && m_localVarSlotsOffset == Js::Constants::InvalidOffset)
        {
            // Allocate the slots.
            int32 size = localSlotCount * GetDiagLocalSlotSize();
            m_localVarSlotsOffset = StackAllocate(size);
            // One flag slot after the locals area records whether any local
            // was changed in the debugger.
            m_hasLocalVarChangedOffset = StackAllocate(max(1, MachStackAlignment)); // Can't alloc less than StackAlignment bytes.

            Assert(m_workItem->Type() == JsFunctionType);

            // Publish the (frame-adjusted) offsets so the runtime/debugger
            // can locate these areas.
            m_output.SetVarSlotsOffset(AdjustOffsetValue(m_localVarSlotsOffset));
            m_output.SetVarChangedOffset(AdjustOffsetValue(m_hasLocalVarChangedOffset));
        }
    }
}
// Computes (and caches) the stack offset of this inlinee's first actual
// argument, derived from the offsets of the call's ArgOut slots.
void Func::SetFirstArgOffset(IR::Instr* inlineeStart)
{
    Assert(inlineeStart->m_func == this);
    Assert(!IsTopFunc());
    int32 lastOffset;

    IR::Instr* arg = inlineeStart->GetNextArg();
    if (arg)
    {
        // Find the end of the actuals area: the highest ArgOut slot offset
        // plus one machine word.
        const auto lastArgOutStackSym = arg->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
        lastOffset = lastArgOutStackSym->m_offset;
        Assert(lastArgOutStackSym->m_isSingleDef);
        const auto secondLastArgOutOpnd = lastArgOutStackSym->m_instrDef->GetSrc2();
        if (secondLastArgOutOpnd->IsSymOpnd())
        {
            // The ArgOut feeding src2 may sit at a higher offset than the
            // last ArgOut itself; take the maximum of the two.
            const auto secondLastOffset = secondLastArgOutOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_offset;
            if (secondLastOffset > lastOffset)
            {
                lastOffset = secondLastOffset;
            }
        }
        lastOffset += MachPtr;
    }
    else
    {
        // No actuals at all: only legal in asm.js mode.
        Assert(this->GetTopFunc()->GetJITFunctionBody()->IsAsmJsMode());
        lastOffset = MachPtr;
    }
    // Step back over the actuals plus the inlinee metadata words to reach
    // the first actual's offset.
    int32 firstActualStackOffset = lastOffset - ((this->actualCount + Js::Constants::InlineeMetaArgCount) * MachPtr);
    // Either never computed (-1) or recomputation must agree.
    Assert((this->firstActualStackOffset == -1) || (this->firstActualStackOffset == firstActualStackOffset));
    this->firstActualStackOffset = firstActualStackOffset;
}
  687. int32
  688. Func::GetLocalVarSlotOffset(int32 slotId)
  689. {
  690. this->EnsureLocalVarSlots();
  691. Assert(m_localVarSlotsOffset != Js::Constants::InvalidOffset);
  692. int32 slotOffset = slotId * GetDiagLocalSlotSize();
  693. return m_localVarSlotsOffset + slotOffset;
  694. }
  695. void Func::OnAddSym(Sym* sym)
  696. {
  697. Assert(sym);
  698. if (this->IsJitInDebugMode() && this->IsNonTempLocalVar(sym->m_id))
  699. {
  700. Assert(m_nonTempLocalVars);
  701. m_nonTempLocalVars->Set(sym->m_id);
  702. }
  703. }
///
/// Returns offset of the flag (1 byte) whether any local was changed (in debugger).
/// If the function does not have any locals, returns -1.
///
int32
Func::GetHasLocalVarChangedOffset()
{
    // Lazily allocate the debug locals area (which includes the flag slot).
    this->EnsureLocalVarSlots();
    return m_hasLocalVarChangedOffset;
}
// True when this compilation targets debugger (locals-inspection) mode;
// the decision is carried by the work item.
bool
Func::IsJitInDebugMode() const
{
    return m_workItem->IsJitInDebugMode();
}
// True if the given bytecode register slot is a non-temp local variable
// (delegates to the function body's slot classification).
bool
Func::IsNonTempLocalVar(uint32 slotIndex)
{
    return GetJITFunctionBody()->IsNonTempLocalVar(slotIndex);
}
// Converts a StackAllocate-relative offset into the form reported to the
// runtime (debugger/bailout), which differs per locals growth direction.
int32
Func::AdjustOffsetValue(int32 offset)
{
#ifdef MD_GROW_LOCALS_AREA_UP
        return -(offset + BailOutInfo::StackSymBias);
#else
        // Stack offsets are negative; account for the PUSH EBP and return address.
        return offset - (2 * MachPtr);
#endif
}
#ifdef MD_GROW_LOCALS_AREA_UP
// Note: this is called during jit-compile when we finalize bail out record.
// NOTE(review): method name misspells "Adjust"; kept as-is because renaming
// would break callers outside this file.
void
Func::AjustLocalVarSlotOffset()
{
    if (GetJITFunctionBody()->GetNonTempLocalVarCount())
    {
        // Turn positive SP-relative base locals offset into negative frame-pointer-relative offset
        // This is changing value for restoring the locals when read due to locals inspection.

        int localsOffset = m_localVarSlotsOffset - (m_localStackHeight + m_ArgumentsOffset);
        int valueChangeOffset = m_hasLocalVarChangedOffset - (m_localStackHeight + m_ArgumentsOffset);

        m_output.SetVarSlotsOffset(localsOffset);
        m_output.SetVarChangedOffset(valueChangeOffset);
    }
}
#endif
// Whether GlobOpt optimizations may run for this function.
bool
Func::DoGlobOptsForGeneratorFunc() const
{
    // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
    return !GetJITFunctionBody()->IsCoroutine();
}
// SimpleJit collects dynamic profile data only when the phase is enabled
// and the new SimpleJit scheme is not in use.
bool
Func::DoSimpleJitDynamicProfile() const
{
    return IsSimpleJit() && !PHASE_OFF(Js::SimpleJitDynamicProfilePhase, GetTopFunc()) && !CONFIG_FLAG(NewSimpleJit);
}
  761. void
  762. Func::SetDoFastPaths()
  763. {
  764. // Make sure we only call this once!
  765. Assert(!this->hasCalledSetDoFastPaths);
  766. bool doFastPaths = false;
  767. if(!PHASE_OFF(Js::FastPathPhase, this) && (!IsSimpleJit() || CONFIG_FLAG(NewSimpleJit)))
  768. {
  769. doFastPaths = true;
  770. }
  771. this->m_doFastPaths = doFastPaths;
  772. #ifdef DBG
  773. this->hasCalledSetDoFastPaths = true;
  774. #endif
  775. }
#if LOWER_SPLIT_INT64
// Splits a 64-bit operand into a {low, high} pair of 32-bit operands.
// Backing syms are created once per source sym and cached on the top func
// so repeated references reuse the same halves. Lower-phase only.
Int64RegPair Func::FindOrCreateInt64Pair(IR::Opnd* opnd)
{
    // The sym-pair map lives on the top-level func; delegate.
    if (!this->IsTopFunc())
    {
        return GetTopFunc()->FindOrCreateInt64Pair(opnd);
    }
    AssertMsg(currentPhases.Top() == Js::LowererPhase, "New Int64 sym map is only allowed during lower");
    Int64RegPair pair;
    IRType pairType = opnd->GetType();
    if (opnd->IsInt64())
    {
        // Each half is a 32-bit value with the same signedness as the whole.
        pairType = IRType_IsSignedInt(pairType) ? TyInt32 : TyUint32;
    }

    if (opnd->IsIndirOpnd())
    {
        // Memory operand: the pair is the same indir at +0 and +4.
        IR::IndirOpnd* indir = opnd->AsIndirOpnd();
        indir->SetType(pairType);
        pair.low = indir;
        pair.high = indir->Copy(this)->AsIndirOpnd();
        pair.high->AsIndirOpnd()->SetOffset(indir->GetOffset() + 4);
        return pair;
    }

    // Only indir opnd can have a type other than int64
    Assert(opnd->IsInt64());

    if (opnd->IsImmediateOpnd())
    {
        // Constants split arithmetically; no syms needed.
        int64 value = opnd->GetImmediateValue(this);
        pair.low = IR::IntConstOpnd::New((int32)value, pairType, this);
        pair.high = IR::IntConstOpnd::New((int32)(value >> 32), pairType, this);
        return pair;
    }

    Int64SymPair symPair;

    if (!m_int64SymPairMap)
    {
        m_int64SymPairMap = Anew(m_alloc, Int64SymPairMap, m_alloc);
    }
    StackSym* stackSym = opnd->GetStackSym();
    AssertOrFailFastMsg(stackSym, "Invalid int64 operand type");
    SymID symId = stackSym->m_id;
    if (!m_int64SymPairMap->TryGetValue(symId, &symPair))
    {
        if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym())
        {
            const bool isArg = stackSym->IsArgSlotSym();
            if (isArg)
            {
                Js::ArgSlot slotNumber = stackSym->GetArgSlotNum();
                symPair.low = StackSym::NewArgSlotSym(slotNumber, this, pairType);
                symPair.high = StackSym::NewArgSlotSym(slotNumber, this, pairType);
            }
            else
            {
                Js::ArgSlot slotNumber = stackSym->GetParamSlotNum();
                symPair.low = StackSym::NewParamSlotSym(slotNumber, this, pairType);
                symPair.high = StackSym::NewParamSlotSym(slotNumber + 1, this, pairType);
            }
            // Arg/param slots already have a location: the halves live at
            // +0 and +4 of the original 64-bit slot.
            symPair.low->m_allocated = true;
            symPair.low->m_offset = stackSym->m_offset;
            symPair.high->m_allocated = true;
            symPair.high->m_offset = stackSym->m_offset + 4;
        }
        else
        {
            symPair.low = StackSym::New(pairType, this);
            symPair.high = StackSym::New(pairType, this);
        }
        // Cache so later references to the same sym reuse the same halves.
        m_int64SymPairMap->Add(symId, symPair);
    }

    // Wrap the cached syms in operands matching the request's operand kind.
    if (opnd->IsSymOpnd())
    {
        pair.low = IR::SymOpnd::New(symPair.low, opnd->AsSymOpnd()->m_offset, pairType, this);
        pair.high = IR::SymOpnd::New(symPair.high, opnd->AsSymOpnd()->m_offset, pairType, this);
    }
    else
    {
        pair.low = IR::RegOpnd::New(symPair.low, pairType, this);
        pair.high = IR::RegOpnd::New(symPair.high, pairType, this);
    }
    return pair;
}
// After int64 splitting, any split sym that was live on the loop back edge
// must have both of its halves marked live on the back edge too.
void Func::Int64SplitExtendLoopLifetime(Loop* loop)
{
    // The sym-pair map lives on the top-level func; delegate.
    if (!this->IsTopFunc())
    {
        GetTopFunc()->Int64SplitExtendLoopLifetime(loop);
        return;
    }
    if (m_int64SymPairMap)
    {
        BVSparse<JitArenaAllocator> *liveOnBackEdgeSyms = loop->regAlloc.liveOnBackEdgeSyms;
        FOREACH_BITSET_IN_SPARSEBV(symId, liveOnBackEdgeSyms)
        {
            Int64SymPair pair;
            if (m_int64SymPairMap->TryGetValue(symId, &pair))
            {
                // If we replaced a sym that was live on the back edge with
                // two halves, both halves need to be live on the back edge
                // as well.
                liveOnBackEdgeSyms->Set(pair.low->m_id);
                liveOnBackEdgeSyms->Set(pair.high->m_id);
            }
        } NEXT_BITSET_IN_SPARSEBV;
    }
}
#endif
#if defined(_M_ARM32_OR_ARM64)
// Selects the register used as base for locals on ARM: a dedicated
// alternate locals pointer when the function has try (or when forced via
// flag in debug builds), otherwise SP.
RegNum
Func::GetLocalsPointer() const
{
#ifdef DBG
    if (Js::Configuration::Global.flags.IsEnabled(Js::ForceLocalsPtrFlag))
    {
        return ALT_LOCALS_PTR;
    }
#endif

    if (GetJITFunctionBody()->HasTry())
    {
        return ALT_LOCALS_PTR;
    }

    return RegSP;
}
#endif
// Records, per stack sym, the deepest frame-display slot accessed (and
// optionally the deepest index within that slot array) for the
// ClosureRangeCheck phase. No-op if that phase is off.
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    // The check table is shared across inlinees on the top func.
    Assert(IsTopFunc());
    if (this->frameDisplayCheckTable == nullptr)
    {
        this->frameDisplayCheckTable = FrameDisplayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    FrameDisplayCheckRecord **record = this->frameDisplayCheckTable->FindOrInsertNew(propertySym->m_stackSym->m_id);
    if (*record == nullptr)
    {
        *record = JitAnew(m_alloc, FrameDisplayCheckRecord);
    }

    // Track the maximum frame-display slot seen through this stack sym.
    uint32 frameDisplaySlot = propertySym->m_propertyId;
    if ((*record)->table == nullptr || (*record)->slotId < frameDisplaySlot)
    {
        (*record)->slotId = frameDisplaySlot;
    }

    // slotId == (uint32)-1 means "frame display only, no slot-array check".
    if (slotId != (uint32)-1)
    {
        if ((*record)->table == nullptr)
        {
            (*record)->table = SlotArrayCheckTable::New(m_alloc, 4);
        }
        // Keep the maximum slot index requested for this slot array.
        uint32 *pSlotId = (*record)->table->FindOrInsert(slotId, frameDisplaySlot);
        if (pSlotId && *pSlotId < slotId)
        {
            *pSlotId = slotId;
        }
    }
}
// Creates the stack syms backing the local closure, param closure and
// local frame display registers, when the function body declares them.
void Func::InitLocalClosureSyms()
{
    Assert(this->m_localClosureSym == nullptr);

    // Allocate stack space for closure pointers. Do this only if we're jitting for stack closures, and
    // tell bailout that these are not byte code symbols so that we don't try to encode them in the bailout record,
    // as they don't have normal lifetimes.
    // ((Js::RegSlot)-1 below is the "not a bytecode register" marker.)
    Js::RegSlot regSlot = GetJITFunctionBody()->GetLocalClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                this);
    }

    regSlot = this->GetJITFunctionBody()->GetParamClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // A separate param closure only exists when param and body scopes
        // are not merged.
        Assert(this->GetParamClosureSym() == nullptr && !this->GetJITFunctionBody()->IsParamAndBodyScopeMerged());
        this->m_paramClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                this->DoStackFrameDisplay() ? (Js::RegSlot) - 1 : regSlot,
                this);
    }

    regSlot = GetJITFunctionBody()->GetLocalFrameDisplayReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localFrameDisplaySym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                this);
    }
}
// Disabled either via the dynamic profile info or via a flag carried in the
// jit transfer data (m_output). Profile access is guarded by HasProfileInfo.
bool
Func::IsTrackCompoundedIntOverflowDisabled() const
{
    return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsTrackCompoundedIntOverflowDisabled()) || m_output.IsTrackCompoundedIntOverflowDisabled();
}
// Disabled via profile info (loop-body sensitive) or via the jit output flag.
bool
Func::IsArrayCheckHoistDisabled() const
{
    return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsArrayCheckHoistDisabled(IsLoopBody())) || m_output.IsArrayCheckHoistDisabled();
}
// Disabled via profile info or via the jit output flag.
bool
Func::IsStackArgOptDisabled() const
{
    return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsStackArgOptDisabled()) || m_output.IsStackArgOptDisabled();
}
// Disabled via profile info or via the jit output flag.
bool
Func::IsSwitchOptDisabled() const
{
    return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsSwitchOptDisabled()) || m_output.IsSwitchOptDisabled();
}
// Disabled via profile info (loop-body sensitive) or via the jit output flag.
bool
Func::IsAggressiveIntTypeSpecDisabled() const
{
    return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsAggressiveIntTypeSpecDisabled(IsLoopBody())) || m_output.IsAggressiveIntTypeSpecDisabled();
}
bool Func::CanAllocInPreReservedHeapPageSegment ()
{
#ifdef _CONTROL_FLOW_GUARD
    // CAUTION: the single 'return' expression below is stitched together
    // across preprocessor branches. The x86 branch appends extra conditions
    // and closes the parenthesis; the TARGET_64 / other-arch branches close
    // it with '&& true' / '&& false'. Edit with care.
    return PHASE_FORCE1(Js::PreReservedHeapAllocPhase) || (!PHASE_OFF1(Js::PreReservedHeapAllocPhase) &&
        !IsJitInDebugMode()
#if _M_IX86
        && m_workItem->GetJitMode() == ExecutionMode::FullJit
#if ENABLE_OOP_NATIVE_CODEGEN
        // Pick the allocator set matching in-proc vs. OOP JIT.
        && (JITManager::GetJITManager()->IsJITServer()
            ? GetOOPCodeGenAllocators()->canCreatePreReservedSegment
            : GetInProcCodeGenAllocators()->canCreatePreReservedSegment)
#else
        && GetInProcCodeGenAllocators()->canCreatePreReservedSegment
#endif
        );
#elif TARGET_64
        && true);
#else
        && false); //Not yet implemented for architectures other than x86 and amd64.
#endif //_M_ARCH
#else
    return false;
#endif//_CONTROL_FLOW_GUARD
}
///----------------------------------------------------------------------------
///
/// Func::GetInstrCount
///
///     Returns the number of instrs.
///     Note: It counts all instrs for now, including labels, etc.
///
///----------------------------------------------------------------------------
uint32
Func::GetInstrCount()
{
    uint instrCount = 0;

    // Linear walk over the whole instruction list.
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instrCount++;
    }NEXT_INSTR_IN_FUNC;

    return instrCount;
}
///----------------------------------------------------------------------------
///
/// Func::NumberInstrs
///
///     Number each instruction in order of appearance in the function.
///
///----------------------------------------------------------------------------
void
Func::NumberInstrs()
{
#if DBG_DUMP
    // Numbering happens once, on the top func.
    Assert(this->IsTopFunc());
    Assert(!this->hasInstrNumber);
    this->hasInstrNumber = true;
#endif
    // Numbers start at 1.
    uint instrCount = 1;

    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instr->SetNumber(instrCount++);
    }
    NEXT_INSTR_IN_FUNC;
}
#if DBG
// Returns the register-use bit vector recorded for the given bytecode
// offset, or nullptr if none was recorded. Debug builds only.
BVSparse<JitArenaAllocator>* Func::GetByteCodeOffsetUses(uint offset) const
{
    InstrByteCodeRegisterUses uses;
    if (byteCodeRegisterUses->TryGetValue(offset, &uses))
    {
        return uses.bv;
    }
    return nullptr;
}
///----------------------------------------------------------------------------
///
/// Func::IsInPhase
///
/// Determines whether the function is currently in the provided phase
///
///----------------------------------------------------------------------------
bool
Func::IsInPhase(Js::Phase tag)
{
    // The phase stack is maintained on the top func (see Begin/EndPhase).
    return this->GetTopFunc()->currentPhases.Contains(tag);
}
#endif
///----------------------------------------------------------------------------
///
/// Func::BeginPhase
///
/// Takes care of the profiler
///
///----------------------------------------------------------------------------
void
Func::BeginPhase(Js::Phase tag)
{
#ifdef DBG
    // Track the phase nesting on the top func; optionally break into the
    // debugger when configured for this phase.
    this->GetTopFunc()->currentPhases.Push(tag);

    if (PHASE_DEBUGBREAK_ON_PHASE_BEGIN(tag, this))
    {
        __debugbreak();
    }
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileBegin(tag);
    }
#endif
}
///----------------------------------------------------------------------------
///
/// Func::EndPhase
///
/// Takes care of the profiler and dumper
///
///----------------------------------------------------------------------------
// Pops the phase stack (debug builds) and stops the profiler timer for
// 'tag'. Must match a prior BeginPhase(tag).
void
Func::EndProfiler(Js::Phase tag)
{
#ifdef DBG
    Assert(this->GetTopFunc()->currentPhases.Count() > 0);
    Js::Phase popped = this->GetTopFunc()->currentPhases.Pop();
    Assert(tag == popped);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileEnd(tag);
    }
#endif
}
// Ends a phase: stops profiling, optionally dumps IR, and (in debug builds)
// updates the post-phase state flags and runs post-lower IR checks.
void
Func::EndPhase(Js::Phase tag, bool dump)
{
    this->EndProfiler(tag);
#if DBG_DUMP
    // Dump IR when requested for this phase (or for the whole backend).
    if(dump && (PHASE_DUMP(tag, this)
        || PHASE_DUMP(Js::BackEndPhase, this)))
    {
        Output::Print(_u("-----------------------------------------------------------------------------\n"));

        if (IsLoopBody())
        {
            Output::Print(_u("************   IR after %s (%S) Loop %d   ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()),
                m_workItem->GetLoopNumber());
        }
        else
        {
            Output::Print(_u("************   IR after %s (%S)   ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()));
        }
        this->Dump(Js::Configuration::Global.flags.AsmDiff? IRDumpFlags_AsmDumpMode : IRDumpFlags_None);
    }
#endif

    // After register allocation, subsequent IR edits must be legalized.
    if (tag == Js::RegAllocPhase)
    {
        this->legalizePostRegAlloc = true;
    }
#if DBG
    // Record backend pipeline progress; the asserts encode the expected
    // phase ordering (Lower -> Peeps -> Layout -> FinalLower).
    if (tag == Js::LowererPhase)
    {
        Assert(!this->isPostLower);
        this->isPostLower = true;
#ifndef _M_ARM    // Need to verify ARM is clean.
        DbCheckPostLower dbCheck(this);
        dbCheck.CheckNestedHelperCalls();
#endif
    }
    else if (tag == Js::RegAllocPhase)
    {
        Assert(!this->isPostRegAlloc);
        this->isPostRegAlloc = true;
    }
    else if (tag == Js::PeepsPhase)
    {
        Assert(this->isPostLower && !this->isPostLayout);
        this->isPostPeeps = true;
    }
    else if (tag == Js::LayoutPhase)
    {
        Assert(this->isPostPeeps && !this->isPostLayout);
        this->isPostLayout = true;
    }
    else if (tag == Js::FinalLowerPhase)
    {
        Assert(this->isPostLayout && !this->isPostFinalLower);
        this->isPostFinalLower = true;
    }
    // Re-verify IR invariants after any post-lower phase.
    if (this->isPostLower)
    {
#ifndef _M_ARM  // Need to verify ARM is clean.
        DbCheckPostLower dbCheck(this);
        dbCheck.Check();
#endif
    }
    this->m_alloc->MergeDelayFreeList();
#endif
}
  1196. StackSym *
  1197. Func::EnsureLoopParamSym()
  1198. {
  1199. if (this->m_loopParamSym == nullptr)
  1200. {
  1201. this->m_loopParamSym = StackSym::New(TyMachPtr, this);
  1202. }
  1203. return this->m_loopParamSym;
  1204. }
  1205. void
  1206. Func::UpdateMaxInlineeArgOutSize(uint inlineeArgOutSize)
  1207. {
  1208. if (this->maxInlineeArgOutSize < inlineeArgOutSize)
  1209. {
  1210. this->maxInlineeArgOutSize = inlineeArgOutSize;
  1211. }
  1212. }
// Starts an instruction-cloning session (top func only). The clone map is
// created once and reused across sessions.
void
Func::BeginClone(Lowerer * lowerer, JitArenaAllocator *alloc)
{
    Assert(this->IsTopFunc());
    AssertMsg(m_cloner == nullptr, "Starting new clone while one is in progress");
    m_cloner = JitAnew(alloc, Cloner, lowerer, alloc);
    if (m_cloneMap == nullptr)
    {
        m_cloneMap = JitAnew(alloc, InstrMap, alloc, 7);
    }
}
// Finishes the current cloning session (if any) and frees the cloner.
void
Func::EndClone()
{
    Assert(this->IsTopFunc());
    if (m_cloner)
    {
        m_cloner->Finish();
        JitAdelete(m_cloner->alloc, m_cloner);
        m_cloner = nullptr;
    }
}
// Returns a sym operand addressing the inlinee frame at the given offset
// from the inlinee frame start.
IR::SymOpnd *
Func::GetInlineeOpndAtOffset(int32 offset)
{
    Assert(IsInlinee());

    StackSym *stackSym = CreateInlineeStackSym();
    // Rebase the fresh sym by 'offset' from the inlinee frame start.
    this->SetArgOffset(stackSym, stackSym->m_offset + offset);
    Assert(stackSym->m_offset >= 0);

    return IR::SymOpnd::New(stackSym, 0, TyMachReg, this);
}
// Creates an inlined-arg-slot stack sym positioned at the start of this
// inlinee's frame (offset must already be set by GlobOpt).
StackSym *
Func::CreateInlineeStackSym()
{
    // Make sure this is an inlinee and that GlobOpt has initialized the offset
    // in the inlinee's frame.
    Assert(IsInlinee());
    Assert(m_inlineeFrameStartSym->m_offset != -1);

    StackSym *stackSym = m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
    stackSym->m_isInlinedArgSlot = true;
    stackSym->m_offset = m_inlineeFrameStartSym->m_offset;
    stackSym->m_allocated = true;

    return stackSym;
}
// Bitmask of arguments used for branching, combined from the function body
// and the jit output.
uint16
Func::GetArgUsedForBranch() const
{
    // this value can change while JITing, so or these together
    return GetJITFunctionBody()->GetArgUsedForBranch() | GetJITOutput()->GetArgUsedForBranch();
}
// Address of the loop-iterations-since-last-bailout counter; only valid
// for loop-body work items.
intptr_t
Func::GetJittedLoopIterationsSinceLastBailoutAddress() const
{
    Assert(this->m_workItem->Type() == JsLoopBodyWorkItemType);

    return m_workItem->GetJittedLoopIterationsSinceLastBailoutAddr();
}
// Address of the weak reference to the function, from the jit time info.
intptr_t
Func::GetWeakFuncRef() const
{
    // TODO: OOP JIT figure out if this can be null
    return m_workItem->GetJITTimeInfo()->GetWeakFuncRef();
}
  1275. intptr_t
  1276. Func::GetRuntimeInlineCache(const uint index) const
  1277. {
  1278. if(m_runtimeInfo != nullptr && m_runtimeInfo->HasClonedInlineCaches())
  1279. {
  1280. intptr_t inlineCache = m_runtimeInfo->GetClonedInlineCache(index);
  1281. if(inlineCache)
  1282. {
  1283. return inlineCache;
  1284. }
  1285. }
  1286. return GetJITFunctionBody()->GetInlineCache(index);
  1287. }
  1288. JITTimePolymorphicInlineCache *
  1289. Func::GetRuntimePolymorphicInlineCache(const uint index) const
  1290. {
  1291. if (this->m_polymorphicInlineCacheInfo && this->m_polymorphicInlineCacheInfo->HasInlineCaches())
  1292. {
  1293. return this->m_polymorphicInlineCacheInfo->GetInlineCache(index);
  1294. }
  1295. return nullptr;
  1296. }
// Utilization value to seed for the cache at 'index': the recorded value
// when a polymorphic cache exists, otherwise the minimum.
byte
Func::GetPolyCacheUtilToInitialize(const uint index) const
{
    return this->GetRuntimePolymorphicInlineCache(index) ? this->GetPolyCacheUtil(index) : PolymorphicInlineCacheUtilizationMinValue;
}
// Recorded utilization for the polymorphic cache at 'index'. Callers must
// ensure m_polymorphicInlineCacheInfo is non-null (see
// GetPolyCacheUtilToInitialize).
byte
Func::GetPolyCacheUtil(const uint index) const
{
    return this->m_polymorphicInlineCacheInfo->GetUtil(index);
}
// Object-type-spec field info for the given inline cache index; callers
// should not reach here when the function has no inline caches.
ObjTypeSpecFldInfo*
Func::GetObjTypeSpecFldInfo(const uint index) const
{
    if (GetJITFunctionBody()->GetInlineCacheCount() == 0)
    {
        Assert(UNREACHED);
        return nullptr;
    }

    return GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(index);
}
  1317. ObjTypeSpecFldInfo*
  1318. Func::GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const
  1319. {
  1320. Assert(propertyInfoId < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
  1321. return GetTopFunc()->m_globalObjTypeSpecFldInfoArray[propertyInfoId];
  1322. }
// Lazily creates the set of type references pinned for the lifetime of the
// jitted code.
void
Func::EnsurePinnedTypeRefs()
{
    if (this->pinnedTypeRefs == nullptr)
    {
        this->pinnedTypeRefs = JitAnew(this->m_alloc, TypeRefSet, this->m_alloc);
    }
}
// Records a type reference in the pinned set (creating the set on demand).
void
Func::PinTypeRef(void* typeRef)
{
    EnsurePinnedTypeRefs();
    this->pinnedTypeRefs->AddNew(typeRef);
}
// Lazily creates the typeAddr -> guard dictionary for single-type guards.
void
Func::EnsureSingleTypeGuards()
{
    if (this->singleTypeGuards == nullptr)
    {
        this->singleTypeGuards = JitAnew(this->m_alloc, TypePropertyGuardDictionary, this->m_alloc);
    }
}
// Returns the (unique) type property guard for 'typeAddr', creating and
// registering one on first request.
Js::JitTypePropertyGuard*
Func::GetOrCreateSingleTypeGuard(intptr_t typeAddr)
{
    EnsureSingleTypeGuards();

    Js::JitTypePropertyGuard* guard = nullptr;
    if (!this->singleTypeGuards->TryGetValue(typeAddr, &guard))
    {
        // Property guards are allocated by NativeCodeData::Allocator so that their lifetime extends as long as the EntryPointInfo is alive.
        guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitTypePropertyGuard, typeAddr, this->indexedPropertyGuardCount++);
        this->singleTypeGuards->Add(typeAddr, guard);
    }
    else
    {
        Assert(guard->GetTypeAddr() == typeAddr);
    }

    return guard;
}
// Lazily creates the list of equivalent-type guards; only valid while
// equivalent object type spec is enabled.
void
Func::EnsureEquivalentTypeGuards()
{
    AssertMsg(!PHASE_OFF(Js::EquivObjTypeSpecPhase, this), "Why do we have equivalent type guards if we don't do equivalent object type spec?");
    if (this->equivalentTypeGuards == nullptr)
    {
        this->equivalentTypeGuards = JitAnew(this->m_alloc, EquivalentTypeGuardList, this->m_alloc);
    }
}
// Allocates and registers a (monomorphic) equivalent-type guard seeded
// with the given type.
Js::JitEquivalentTypeGuard*
Func::CreateEquivalentTypeGuard(JITTypeHolder type, uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();

    Js::JitEquivalentTypeGuard* guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitEquivalentTypeGuard, type->GetAddr(), this->indexedPropertyGuardCount++, objTypeSpecFldId);
    this->InitializeEquivalentTypeGuard(guard);

    return guard;
}
// Allocates and registers a polymorphic equivalent-type guard.
Js::JitPolyEquivalentTypeGuard*
Func::CreatePolyEquivalentTypeGuard(uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();

    Js::JitPolyEquivalentTypeGuard* guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitPolyEquivalentTypeGuard, this->indexedPropertyGuardCount++, objTypeSpecFldId);
    this->InitializeEquivalentTypeGuard(guard);

    return guard;
}
// Attaches a freshly-created equivalent-type cache to the guard (allocation
// source depends on in-proc vs. OOP JIT) and enrolls the guard in this
// func's list.
void
Func::InitializeEquivalentTypeGuard(Js::JitEquivalentTypeGuard * guard)
{
    // If we want to hard code the address of the cache, we will need to go back to allocating it from the native code data allocator.
    // We would then need to maintain consistency (double write) to both the recycler allocated cache and the one on the heap.
    Js::EquivalentTypeCache* cache = nullptr;
    if (this->IsOOPJIT())
    {
        cache = JitAnewZ(this->m_alloc, Js::EquivalentTypeCache);
    }
    else
    {
        cache = NativeCodeDataNewZNoFixup(GetTransferDataAllocator(), Js::EquivalentTypeCache);
    }
    guard->SetCache(cache);

    // Give the cache a back-pointer to the guard so that the guard can be cleared at runtime if necessary.
    cache->SetGuard(guard);
    this->equivalentTypeGuards->Prepend(guard);
}
// Lazily creates the propertyId -> guard-set map.
void
Func::EnsurePropertyGuardsByPropertyId()
{
    if (this->propertyGuardsByPropertyId == nullptr)
    {
        this->propertyGuardsByPropertyId = JitAnew(this->m_alloc, PropertyGuardByPropertyIdMap, this->m_alloc);
    }
}
// Lazily creates the propertyId -> constructor-cache-set map.
void
Func::EnsureCtorCachesByPropertyId()
{
    if (this->ctorCachesByPropertyId == nullptr)
    {
        this->ctorCachesByPropertyId = JitAnew(this->m_alloc, CtorCachesByPropertyIdMap, this->m_alloc);
    }
}
// Associates an indexed property guard with a property id, creating the
// per-property set on first use. EnsurePropertyGuardsByPropertyId must have
// been called.
void
Func::LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard)
{
    Assert(guard != nullptr);
    Assert(guard->GetValue() != NULL);

    Assert(this->propertyGuardsByPropertyId != nullptr);

    IndexedPropertyGuardSet* set;
    if (!this->propertyGuardsByPropertyId->TryGetValue(propertyId, &set))
    {
        set = JitAnew(this->m_alloc, IndexedPropertyGuardSet, this->m_alloc);
        this->propertyGuardsByPropertyId->Add(propertyId, set);
    }

    set->Item(guard);
}
// Associates a constructor cache (by its runtime address) with a property
// id, creating the per-property set on first use.
// EnsureCtorCachesByPropertyId must have been called.
void
Func::LinkCtorCacheToPropertyId(Js::PropertyId propertyId, JITTimeConstructorCache* cache)
{
    Assert(cache != nullptr);
    Assert(this->ctorCachesByPropertyId != nullptr);

    CtorCacheSet* set;
    if (!this->ctorCachesByPropertyId->TryGetValue(propertyId, &set))
    {
        set = JitAnew(this->m_alloc, CtorCacheSet, this->m_alloc);
        this->ctorCachesByPropertyId->Add(propertyId, set);
    }

    set->Item(cache->GetRuntimeCacheAddr());
}
// Returns the constructor cache recorded for the given profiled call site
// (may be nullptr if none was set).
JITTimeConstructorCache* Func::GetConstructorCache(const Js::ProfileId profiledCallSiteId)
{
    AssertOrFailFast(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
    Assert(this->constructorCaches != nullptr);
    return this->constructorCaches[profiledCallSiteId];
}
// Records a constructor cache for a profiled call site; each slot may be
// set at most once.
void Func::SetConstructorCache(const Js::ProfileId profiledCallSiteId, JITTimeConstructorCache* constructorCache)
{
    AssertOrFailFast(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
    Assert(constructorCache != nullptr);
    Assert(this->constructorCaches != nullptr);
    Assert(this->constructorCaches[profiledCallSiteId] == nullptr);
    this->constructorCacheCount++;
    this->constructorCaches[profiledCallSiteId] = constructorCache;
}
// Lazily creates the set of property ids this function writes to.
void Func::EnsurePropertiesWrittenTo()
{
    if (this->propertiesWrittenTo == nullptr)
    {
        this->propertiesWrittenTo = JitAnew(this->m_alloc, PropertyIdSet, this->m_alloc);
    }
}
// Lazily creates the call-site -> arguments-offset fixup map.
void Func::EnsureCallSiteToArgumentsOffsetFixupMap()
{
    if (this->callSiteToArgumentsOffsetFixupMap == nullptr)
    {
        this->callSiteToArgumentsOffsetFixupMap = JitAnew(this->m_alloc, CallSiteToArgumentsOffsetFixupMap, this->m_alloc);
    }
}
// Returns the function start label, or nullptr if none has been created
// (see EnsureFuncStartLabel).
IR::LabelInstr *
Func::GetFuncStartLabel()
{
    return m_funcStartLabel;
}
  1483. IR::LabelInstr *
  1484. Func::EnsureFuncStartLabel()
  1485. {
  1486. if(m_funcStartLabel == nullptr)
  1487. {
  1488. m_funcStartLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1489. m_funcStartLabel->m_isDataLabel = true;
  1490. }
  1491. return m_funcStartLabel;
  1492. }
  1493. IR::LabelInstr *
  1494. Func::GetFuncEndLabel()
  1495. {
  1496. return m_funcEndLabel;
  1497. }
  1498. IR::LabelInstr *
  1499. Func::EnsureFuncEndLabel()
  1500. {
  1501. if(m_funcEndLabel == nullptr)
  1502. {
  1503. m_funcEndLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1504. m_funcEndLabel->m_isDataLabel = true;
  1505. }
  1506. return m_funcEndLabel;
  1507. }
  1508. void
  1509. Func::EnsureStackArgWithFormalsTracker()
  1510. {
  1511. if (stackArgWithFormalsTracker == nullptr)
  1512. {
  1513. stackArgWithFormalsTracker = JitAnew(m_alloc, StackArgWithFormalsTracker, m_alloc);
  1514. }
  1515. }
  1516. BOOL
  1517. Func::IsFormalsArraySym(SymID symId)
  1518. {
  1519. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsArraySyms() == nullptr)
  1520. {
  1521. return false;
  1522. }
  1523. return stackArgWithFormalsTracker->GetFormalsArraySyms()->Test(symId);
  1524. }
  1525. void
  1526. Func::TrackFormalsArraySym(SymID symId)
  1527. {
  1528. EnsureStackArgWithFormalsTracker();
  1529. stackArgWithFormalsTracker->SetFormalsArraySyms(symId);
  1530. }
  1531. void
  1532. Func::TrackStackSymForFormalIndex(Js::ArgSlot formalsIndex, StackSym * sym)
  1533. {
  1534. EnsureStackArgWithFormalsTracker();
  1535. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1536. stackArgWithFormalsTracker->SetStackSymInFormalsIndexMap(sym, formalsIndex, formalsCount);
  1537. }
  1538. StackSym *
  1539. Func::GetStackSymForFormal(Js::ArgSlot formalsIndex)
  1540. {
  1541. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1542. {
  1543. return nullptr;
  1544. }
  1545. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1546. StackSym ** formalsIndexToStackSymMap = stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap();
  1547. AssertMsg(formalsIndex < formalsCount, "OutOfRange ? ");
  1548. return formalsIndexToStackSymMap[formalsIndex];
  1549. }
  1550. bool
  1551. Func::HasStackSymForFormal(Js::ArgSlot formalsIndex)
  1552. {
  1553. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1554. {
  1555. return false;
  1556. }
  1557. return GetStackSymForFormal(formalsIndex) != nullptr;
  1558. }
  1559. void
  1560. Func::SetScopeObjSym(StackSym * sym)
  1561. {
  1562. EnsureStackArgWithFormalsTracker();
  1563. stackArgWithFormalsTracker->SetScopeObjSym(sym);
  1564. }
  1565. StackSym *
  1566. Func::GetNativeCodeDataSym() const
  1567. {
  1568. Assert(IsOOPJIT());
  1569. return m_nativeCodeDataSym;
  1570. }
  1571. void
  1572. Func::SetNativeCodeDataSym(StackSym * opnd)
  1573. {
  1574. Assert(IsOOPJIT());
  1575. m_nativeCodeDataSym = opnd;
  1576. }
  1577. StackSym*
  1578. Func::GetScopeObjSym()
  1579. {
  1580. if (stackArgWithFormalsTracker == nullptr)
  1581. {
  1582. return nullptr;
  1583. }
  1584. return stackArgWithFormalsTracker->GetScopeObjSym();
  1585. }
  1586. BVSparse<JitArenaAllocator> *
  1587. StackArgWithFormalsTracker::GetFormalsArraySyms()
  1588. {
  1589. return formalsArraySyms;
  1590. }
  1591. void
  1592. StackArgWithFormalsTracker::SetFormalsArraySyms(SymID symId)
  1593. {
  1594. if (formalsArraySyms == nullptr)
  1595. {
  1596. formalsArraySyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1597. }
  1598. formalsArraySyms->Set(symId);
  1599. }
  1600. StackSym **
  1601. StackArgWithFormalsTracker::GetFormalsIndexToStackSymMap()
  1602. {
  1603. return formalsIndexToStackSymMap;
  1604. }
  1605. void
  1606. StackArgWithFormalsTracker::SetStackSymInFormalsIndexMap(StackSym * sym, Js::ArgSlot formalsIndex, Js::ArgSlot formalsCount)
  1607. {
  1608. if(formalsIndexToStackSymMap == nullptr)
  1609. {
  1610. formalsIndexToStackSymMap = JitAnewArrayZ(alloc, StackSym*, formalsCount);
  1611. }
  1612. AssertMsg(formalsIndex < formalsCount, "Out of range ?");
  1613. formalsIndexToStackSymMap[formalsIndex] = sym;
  1614. }
  1615. void
  1616. StackArgWithFormalsTracker::SetScopeObjSym(StackSym * sym)
  1617. {
  1618. m_scopeObjSym = sym;
  1619. }
  1620. StackSym *
  1621. StackArgWithFormalsTracker::GetScopeObjSym()
  1622. {
  1623. return m_scopeObjSym;
  1624. }
// Notes a cloned instruction. Only the first and last clones are remembered,
// delimiting the cloned range that Finish()/RetargetClonedBranches() walk.
// instrOrig is not used by this implementation.
void
Cloner::AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone)
{
    if (!this->instrFirst)
    {
        // First clone seen: start of the cloned range.
        this->instrFirst = instrClone;
    }
    // Most recent clone is always the end of the range.
    this->instrLast = instrClone;
}
  1634. void
  1635. Cloner::Finish()
  1636. {
  1637. this->RetargetClonedBranches();
  1638. if (this->lowerer)
  1639. {
  1640. lowerer->LowerRange(this->instrFirst, this->instrLast, false, false);
  1641. }
  1642. }
// Walks the cloned instruction range [instrFirst, instrLast] and retargets
// each cloned branch (see IR::BranchInstr::RetargetClonedBranch). No-op
// unless retargeting was requested via fRetargetClonedBranch.
void
Cloner::RetargetClonedBranches()
{
    if (!this->fRetargetClonedBranch)
    {
        return;
    }
    FOREACH_INSTR_IN_RANGE(instr, this->instrFirst, this->instrLast)
    {
        if (instr->IsBranchInstr())
        {
            instr->AsBranchInstr()->RetargetClonedBranch();
        }
    }
    NEXT_INSTR_IN_RANGE;
}
  1659. void Func::ThrowIfScriptClosed()
  1660. {
  1661. if (GetScriptContextInfo()->IsClosed())
  1662. {
  1663. // Should not be jitting something in the foreground when the script context is actually closed
  1664. Assert(IsBackgroundJIT() || !GetScriptContext()->IsActuallyClosed());
  1665. throw Js::OperationAbortedException();
  1666. }
  1667. }
// Builds an IndirOpnd [reg + offset] that addresses `address`. Reuses an
// already-hoisted constant-address register when the delta from its constant
// fits in a signed 32-bit displacement; otherwise hoists a new load of
// `largeConstOpnd` into a fresh register at the function entry (or after the
// last such load). Returns nullptr when hoisting is no longer allowed.
// Must be called on the top func only (asserted).
IR::IndirOpnd * Func::GetConstantAddressIndirOpnd(intptr_t address, IR::Opnd * largeConstOpnd, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode)
{
    Assert(this->GetTopFunc() == this);
    if (!canHoistConstantAddressLoad)
    {
        // We can't hoist constant address load after lower, as we can't mark the sym as
        // live on back edge
        return nullptr;
    }
    int offset = 0;
    // Search the existing constant-address registers for one whose constant is
    // within a 32-bit displacement of `address`; on success `offset` holds the
    // delta to apply.
    IR::RegOpnd ** foundRegOpnd = this->constantAddressRegOpnd.Find([address, &offset](IR::RegOpnd * regOpnd)
    {
        // Each tracked register is single-def, loaded from either an address
        // or an int constant.
        Assert(regOpnd->m_sym->IsSingleDef());
        Assert(regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() || regOpnd->m_sym->m_instrDef->GetSrc1()->IsIntConstOpnd());
        void * curr = regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() ?
            regOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address :
            (void *)regOpnd->m_sym->m_instrDef->GetSrc1()->AsIntConstOpnd()->GetValue();
        ptrdiff_t diff = (uintptr_t)address - (uintptr_t)curr;
        if (!Math::FitsInDWord(diff))
        {
            return false;
        }
        offset = (int)diff;
        return true;
    });
    IR::RegOpnd * addressRegOpnd;
    if (foundRegOpnd != nullptr)
    {
        addressRegOpnd = *foundRegOpnd;
    }
    else
    {
        // No reusable register: hoist a new load and track it. offset stays 0,
        // so the indir addresses the constant directly.
        Assert(offset == 0);
        addressRegOpnd = IR::RegOpnd::New(TyMachPtr, this);
        IR::Instr *const newInstr =
            IR::Instr::New(
                loadOpCode,
                addressRegOpnd,
                largeConstOpnd,
                this);
        this->constantAddressRegOpnd.Prepend(addressRegOpnd);
        IR::Instr * insertBeforeInstr = this->lastConstantAddressRegLoadInstr;
        if (insertBeforeInstr == nullptr)
        {
            // First hoisted load: insert at the function entry and remember it
            // so later loads are appended after it.
            insertBeforeInstr = this->GetFunctionEntryInsertionPoint();
            this->lastConstantAddressRegLoadInstr = newInstr;
        }
        insertBeforeInstr->InsertBefore(newInstr);
    }
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(addressRegOpnd, offset, type, this, true);
#if DBG_DUMP
    // TODO: michhol make intptr_t
    indirOpnd->SetAddrKind(kind, (void*)address);
#endif
    return indirOpnd;
}
  1724. void Func::MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv)
  1725. {
  1726. Assert(this->GetTopFunc() == this);
  1727. this->constantAddressRegOpnd.Iterate([bv](IR::RegOpnd * regOpnd)
  1728. {
  1729. bv->Set(regOpnd->m_sym->m_id);
  1730. });
  1731. }
  1732. IR::Instr *
  1733. Func::GetFunctionEntryInsertionPoint()
  1734. {
  1735. Assert(this->GetTopFunc() == this);
  1736. IR::Instr * insertInsert = this->lastConstantAddressRegLoadInstr;
  1737. if (insertInsert != nullptr)
  1738. {
  1739. return insertInsert->m_next;
  1740. }
  1741. insertInsert = this->m_headInstr;
  1742. if (this->HasTry())
  1743. {
  1744. // Insert it inside the root region
  1745. insertInsert = insertInsert->m_next;
  1746. Assert(insertInsert->IsLabelInstr() && insertInsert->AsLabelInstr()->GetRegion()->GetType() == RegionTypeRoot);
  1747. }
  1748. return insertInsert->m_next;
  1749. }
// Allocates a boxed number Var for `value` for use by JIT-generated code.
// With FLOATVAR no allocator/script context is needed (nullptr is passed);
// otherwise the in-proc number allocator is used, or the cross-process
// allocator when running as OOP JIT.
Js::Var
Func::AllocateNumber(double value)
{
    Js::Var number = nullptr;
#if FLOATVAR
    number = Js::JavascriptNumber::NewCodeGenInstance((double)value, nullptr);
#else
    if (!IsOOPJIT()) // in-proc jit
    {
        number = Js::JavascriptNumber::NewCodeGenInstance(GetNumberAllocator(), (double)value, GetScriptContext());
    }
    else // OOP JIT
    {
        number = GetXProcNumberAllocator()->AllocateNumber(this, value);
    }
#endif
    return number;
}
  1768. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1769. void
  1770. Func::DumpFullFunctionName()
  1771. {
  1772. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1773. Output::Print(_u("Function %s (%s)"), GetJITFunctionBody()->GetDisplayName(), GetDebugNumberSet(debugStringBuffer));
  1774. }
  1775. #endif
  1776. void
  1777. Func::UpdateForInLoopMaxDepth(uint forInLoopMaxDepth)
  1778. {
  1779. Assert(this->IsTopFunc());
  1780. this->m_forInLoopMaxDepth = max(this->m_forInLoopMaxDepth, forInLoopMaxDepth);
  1781. }
  1782. int
  1783. Func::GetForInEnumeratorArrayOffset() const
  1784. {
  1785. Func const* topFunc = this->GetTopFunc();
  1786. Assert(this->m_forInLoopBaseDepth + this->GetJITFunctionBody()->GetForInLoopDepth() <= topFunc->m_forInLoopMaxDepth);
  1787. return topFunc->m_forInEnumeratorArrayOffset
  1788. + this->m_forInLoopBaseDepth * sizeof(Js::ForInObjectEnumerator);
  1789. }
  1790. void
  1791. Func::SetHasLazyBailOut()
  1792. {
  1793. this->hasLazyBailOut = true;
  1794. }
  1795. bool
  1796. Func::HasLazyBailOut() const
  1797. {
  1798. AssertMsg(
  1799. this->isPostRegAlloc,
  1800. "We don't know whether a function has lazy bailout until after RegAlloc"
  1801. );
  1802. return this->hasLazyBailOut;
  1803. }
  1804. void
  1805. Func::EnsureLazyBailOutRecordSlot()
  1806. {
  1807. if (this->m_lazyBailOutRecordSlot == nullptr)
  1808. {
  1809. this->m_lazyBailOutRecordSlot = StackSym::New(TyMachPtr, this);
  1810. this->StackAllocate(this->m_lazyBailOutRecordSlot, MachPtr);
  1811. }
  1812. }
  1813. StackSym *
  1814. Func::GetLazyBailOutRecordSlot() const
  1815. {
  1816. Assert(this->m_lazyBailOutRecordSlot != nullptr);
  1817. return this->m_lazyBailOutRecordSlot;
  1818. }
  1819. bool
  1820. Func::ShouldDoLazyBailOut() const
  1821. {
  1822. #if defined(_M_X64)
  1823. if (!PHASE_ON1(Js::LazyBailoutPhase) ||
  1824. this->GetJITFunctionBody()->IsAsmJsMode() || // don't have bailouts in asm.js
  1825. this->HasTry() || // lazy bailout in function with try/catch not supported for now
  1826. // `EHBailoutPatchUp` set a `hasBailedOut` bit to rethrow the exception in the interpreter
  1827. // if the instruction has ANY bailout. In the future, to implement lazy bailout with try/catch,
  1828. // we would need to change how this bit is generated.
  1829. this->IsLoopBody()) // don't do lazy bailout on jit'd loop body either
  1830. {
  1831. return false;
  1832. }
  1833. return true;
  1834. #else
  1835. return false;
  1836. #endif
  1837. }
  1838. #if DBG_DUMP
  1839. ///----------------------------------------------------------------------------
  1840. ///
  1841. /// Func::DumpHeader
  1842. ///
  1843. ///----------------------------------------------------------------------------
  1844. void
  1845. Func::DumpHeader()
  1846. {
  1847. Output::Print(_u("-----------------------------------------------------------------------------\n"));
  1848. DumpFullFunctionName();
  1849. Output::SkipToColumn(50);
  1850. Output::Print(_u("Instr Count:%d"), GetInstrCount());
  1851. if(m_codeSize > 0)
  1852. {
  1853. Output::Print(_u("\t\tSize:%d\n\n"), m_codeSize);
  1854. }
  1855. else
  1856. {
  1857. Output::Print(_u("\n\n"));
  1858. }
  1859. }
  1860. ///----------------------------------------------------------------------------
  1861. ///
  1862. /// Func::Dump
  1863. ///
  1864. ///----------------------------------------------------------------------------
// Dumps the header followed by every instruction in the function (including
// each instruction's glob-opt string), then flushes the output.
void
Func::Dump(IRDumpFlags flags)
{
    this->DumpHeader();
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instr->DumpGlobOptInstrString();
        instr->Dump(flags);
    }NEXT_INSTR_IN_FUNC;
    Output::Flush();
}
  1876. void
  1877. Func::Dump()
  1878. {
  1879. this->Dump(IRDumpFlags_None);
  1880. }
  1881. #endif
  1882. #if DBG_DUMP || defined(ENABLE_IR_VIEWER)
  1883. LPCSTR
  1884. Func::GetVtableName(INT_PTR address)
  1885. {
  1886. #if DBG
  1887. if (vtableMap == nullptr)
  1888. {
  1889. vtableMap = VirtualTableRegistry::CreateVtableHashMap(this->m_alloc);
  1890. };
  1891. LPCSTR name = vtableMap->Lookup(address, nullptr);
  1892. if (name)
  1893. {
  1894. if (strncmp(name, "class ", _countof("class ") - 1) == 0)
  1895. {
  1896. name += _countof("class ") - 1;
  1897. }
  1898. }
  1899. return name;
  1900. #else
  1901. return "";
  1902. #endif
  1903. }
  1904. #endif
  1905. #if DBG_DUMP | defined(VTUNE_PROFILING)
  1906. bool Func::DoRecordNativeMap() const
  1907. {
  1908. #if defined(VTUNE_PROFILING)
  1909. if (VTuneChakraProfile::isJitProfilingActive)
  1910. {
  1911. return true;
  1912. }
  1913. #endif
  1914. #if DBG_DUMP
  1915. return PHASE_DUMP(Js::EncoderPhase, this) && Js::Configuration::Global.flags.Verbose;
  1916. #else
  1917. return false;
  1918. #endif
  1919. }
  1920. #endif
  1921. #ifdef PERF_HINT
  1922. void WritePerfHint(PerfHints hint, Func* func, uint byteCodeOffset /*= Js::Constants::NoByteCodeOffset*/)
  1923. {
  1924. if (!func->IsOOPJIT())
  1925. {
  1926. WritePerfHint(hint, (Js::FunctionBody*)func->GetJITFunctionBody()->GetAddr(), byteCodeOffset);
  1927. }
  1928. }
  1929. #endif