Func.cpp 54 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Base/EtwTrace.h"
  7. #include "Base/ScriptContextProfiler.h"
  8. #ifdef VTUNE_PROFILING
  9. #include "Base/VTuneChakraProfile.h"
  10. #endif
// Constructs a Func: the per-compilation container for one jitted unit, which
// is either the top-level function / loop body being jitted, or an inlinee
// hanging off a parent Func. The member-initializer list zero/null-initializes
// the bulk of the state; the constructor body then derives stack-closure /
// stack-nested-function / stack-args decisions from the function body, wires
// an inlinee into its parent, and creates the symbol table for the top
// function.
//
//   alloc                  - backend arena for all per-compilation allocations
//   workItem               - the function or loop body being compiled
//   epInfo                 - entry point info (in-proc JIT only)
//   runtimeInfo            - non-null exactly when this Func is an inlinee
//                            (asserted below)
//   parentFunc             - the inliner, or nullptr for the top function
//   postCallByteCodeOffset, returnValueRegSlot, callSiteIdInParentFunc
//                          - inlinee bookkeeping; NoByteCodeOffset/NoRegister
//                            for the top function (asserted below)
Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
    ThreadContextInfo * threadContextInfo,
    ScriptContextInfo * scriptContextInfo,
    JITOutputIDL * outputData,
    Js::EntryPointInfo* epInfo,
    const FunctionJITRuntimeInfo *const runtimeInfo,
    JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, CodeGenAllocators *const codeGenAllocators,
    CodeGenNumberAllocator * numberAllocator,
    Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc,
    uint postCallByteCodeOffset, Js::RegSlot returnValueRegSlot, const bool isInlinedConstructor,
    Js::ProfileId callSiteIdInParentFunc, bool isGetterSetter) :
    m_alloc(alloc),
    m_workItem(workItem),
    m_output(outputData),
    m_entryPointInfo(epInfo),
    m_threadContextInfo(threadContextInfo),
    m_scriptContextInfo(scriptContextInfo),
    m_runtimeInfo(runtimeInfo),
    m_polymorphicInlineCacheInfo(polymorphicInlineCacheInfo),
    m_codeGenAllocators(codeGenAllocators),
    // Inlinee id 0 means "top function"; real ids are assigned in the body.
    m_inlineeId(0),
    // Object-type-spec / guard tables are allocated lazily on first use.
    pinnedTypeRefs(nullptr),
    singleTypeGuards(nullptr),
    equivalentTypeGuards(nullptr),
    propertyGuardsByPropertyId(nullptr),
    ctorCachesByPropertyId(nullptr),
    callSiteToArgumentsOffsetFixupMap(nullptr),
    indexedPropertyGuardCount(0),
    propertiesWrittenTo(nullptr),
    lazyBailoutProperties(alloc),
    anyPropertyMayBeWrittenTo(false),
#ifdef PROFILE_EXEC
    m_codeGenProfiler(codeGenProfiler),
#endif
    m_isBackgroundJIT(isBackgroundJIT),
    m_cloner(nullptr),
    m_cloneMap(nullptr),
    m_loopParamSym(nullptr),
    m_funcObjSym(nullptr),
    m_localClosureSym(nullptr),
    m_paramClosureSym(nullptr),
    m_localFrameDisplaySym(nullptr),
    m_bailoutReturnValueSym(nullptr),
    m_hasBailedOutSym(nullptr),
    m_inlineeFrameStartSym(nullptr),
    m_regsUsed(0),
    m_fg(nullptr),
    m_labelCount(0),
    m_argSlotsForFunctionsCalled(0),
    m_isLeaf(false),
    m_hasCalls(false),
    m_hasInlineArgsOpt(false),
    m_canDoInlineArgsOpt(true),
    m_doFastPaths(false),
    hasBailout(false),
    hasBailoutInEHRegion(false),
    hasInstrNumber(false),
    maintainByteCodeOffset(true),
    frameSize(0),
    parentFunc(parentFunc),
    argObjSyms(nullptr),
    m_nonTempLocalVars(nullptr),
    hasAnyStackNestedFunc(false),
    hasMarkTempObjects(false),
    postCallByteCodeOffset(postCallByteCodeOffset),
    maxInlineeArgOutCount(0),
    returnValueRegSlot(returnValueRegSlot),
    // -1 marks "not yet computed"; see SetFirstArgOffset.
    firstActualStackOffset(-1),
    m_localVarSlotsOffset(Js::Constants::InvalidOffset),
    m_hasLocalVarChangedOffset(Js::Constants::InvalidOffset),
    // (Js::ArgSlot)-1 is the "unknown actual count" sentinel.
    actualCount((Js::ArgSlot) - 1),
    tryCatchNestingLevel(0),
    m_localStackHeight(0),
    tempSymDouble(nullptr),
    tempSymBool(nullptr),
    hasInlinee(false),
    thisOrParentInlinerHasArguments(false),
    hasStackArgs(false),
    hasNonSimpleParams(false),
    hasUnoptimizedArgumentsAcccess(false),
    hasApplyTargetInlining(false),
    hasImplicitCalls(false),
    hasTempObjectProducingInstr(false),
    isInlinedConstructor(isInlinedConstructor),
    numberAllocator(numberAllocator),
    loopCount(0),
    callSiteIdInParentFunc(callSiteIdInParentFunc),
    isGetterSetter(isGetterSetter),
    frameInfo(nullptr),
    isTJLoopBody(false),
    isFlowGraphValid(false),
#if DBG
    m_callSiteCount(0),
#endif
    stackNestedFunc(false),
    stackClosure(false)
#if defined(_M_ARM32_OR_ARM64)
    , m_ArgumentsOffset(0)
    , m_epilogLabel(nullptr)
#endif
    , m_funcStartLabel(nullptr)
    , m_funcEndLabel(nullptr)
#ifdef _M_X64
    , m_prologEncoder(alloc)
#endif
#if DBG
    , hasCalledSetDoFastPaths(false)
    , allowRemoveBailOutArgInstr(false)
    , currentPhases(alloc)
    , isPostLower(false)
    , isPostRegAlloc(false)
    , isPostPeeps(false)
    , isPostLayout(false)
    , isPostFinalLower(false)
    , vtableMap(nullptr)
#endif
    , m_yieldOffsetResumeLabelList(nullptr)
    , m_bailOutNoSaveLabel(nullptr)
    , constantAddressRegOpnd(alloc)
    , lastConstantAddressRegLoadInstr(nullptr)
    , m_totalJumpTableSizeInBytesForSwitchStatements(0)
    , slotArrayCheckTable(nullptr)
    , frameDisplayCheckTable(nullptr)
    , stackArgWithFormalsTracker(nullptr)
    , argInsCount(0)
    , m_globalObjTypeSpecFldInfoArray(nullptr)
{
    // runtimeInfo is supplied exactly for inlinees.
    Assert(this->IsInlined() == !!runtimeInfo);
    if (this->IsTopFunc())
    {
        // Only the top function owns the writeable entry-point data.
        outputData->writeableEPData.hasJittedStackClosure = false;
        // TODO: (michhol) validate initial values
        outputData->writeableEPData.localVarSlotsOffset = 0;
        outputData->writeableEPData.localVarChangedOffset = 0;
    }
    if (this->IsInlined())
    {
        // Inlinee ids are allocated from a counter on the top function, so
        // they are unique across the whole inline tree (top func keeps id 0).
        m_inlineeId = ++(GetTopFunc()->m_inlineeId);
    }
    m_jnFunction = nullptr;
    bool doStackNestedFunc = GetJITFunctionBody()->DoStackNestedFunc();
    bool doStackClosure = GetJITFunctionBody()->DoStackClosure() && !PHASE_OFF(Js::FrameDisplayFastPathPhase, this) && !PHASE_OFF(Js::StackClosurePhase, this);
    // Stack closures imply stack-allocated nested functions.
    Assert(!doStackClosure || doStackNestedFunc);
    // Only the top function may put its closure on the stack.
    this->stackClosure = doStackClosure && this->IsTopFunc();
    if (this->stackClosure)
    {
        // TODO: calculate on runtime side?
        m_output.SetHasJITStackClosure();
    }
    if (GetJITFunctionBody()->DoBackendArgumentsOptimization() && !GetJITFunctionBody()->HasTry())
    {
        // doBackendArgumentsOptimization bit is set when there is no eval inside a function
        // as determined by the bytecode generator.
        SetHasStackArgs(true);
    }
    if (m_workItem->Type() == JsFunctionType)
    {
        if (doStackNestedFunc && GetJITFunctionBody()->GetNestedCount() != 0 &&
            this->GetTopFunc()->m_workItem->Type() != JsLoopBodyWorkItemType) // make sure none of the functions inlined in a jitted loop body allocate nested functions on the stack
        {
            Assert(!(this->IsJitInDebugMode() && !GetJITFunctionBody()->IsLibraryCode()));
            stackNestedFunc = true;
            this->GetTopFunc()->hasAnyStackNestedFunc = true;
        }
    }
    else
    {
        Assert(m_workItem->IsLoopBody());
    }
    // Propagate "uses the arguments object" down the inline chain so every
    // inlinee in the chain knows to keep the frame inspectable.
    if (GetJITFunctionBody()->HasOrParentHasArguments() || parentFunc && parentFunc->thisOrParentInlinerHasArguments)
    {
        thisOrParentInlinerHasArguments = true;
    }
    if (parentFunc == nullptr)
    {
        // Top function: owns the symbol table; sym ids start after the locals.
        inlineDepth = 0;
        m_symTable = JitAnew(alloc, SymTable);
        m_symTable->Init(this);
        m_symTable->SetStartingID(static_cast<SymID>(workItem->GetJITFunctionBody()->GetLocalsCount() + 1));
        Assert(Js::Constants::NoByteCodeOffset == postCallByteCodeOffset);
        Assert(Js::Constants::NoRegister == returnValueRegSlot);
#if defined(_M_IX86) || defined(_M_X64)
        if (HasArgumentSlot())
        {
            // Pre-allocate the single argument slot we'll reserve for the arguments object.
            // For ARM, the argument slot is not part of the local but part of the register saves
            m_localStackHeight = MachArgsSlotOffset;
        }
#endif
    }
    else
    {
        // Inlinee: shares the top function's symbol table (not set here).
        inlineDepth = parentFunc->inlineDepth + 1;
        Assert(Js::Constants::NoByteCodeOffset != postCallByteCodeOffset);
    }
    // One constructor-cache slot per profiled call site, filled lazily.
    this->constructorCacheCount = 0;
    this->constructorCaches = AnewArrayZ(this->m_alloc, JITTimeConstructorCache*, GetJITFunctionBody()->GetProfiledCallSiteCount());
#if DBG_DUMP
    m_codeSize = -1;
#endif
#if defined(_M_X64)
    m_spillSize = -1;
    m_argsSize = -1;
    m_savedRegSize = -1;
#endif
    if (this->IsJitInDebugMode())
    {
        // Track which non-temp locals get syms, for debugger locals inspection.
        m_nonTempLocalVars = Anew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
    }
    if (GetJITFunctionBody()->IsGenerator())
    {
        m_yieldOffsetResumeLabelList = YieldOffsetResumeLabelList::New(this->m_alloc);
    }
    if (this->IsTopFunc())
    {
        // Global obj-type-spec field info array is indexed by the global id,
        // so it lives on (and is sized by) the top function.
        m_globalObjTypeSpecFldInfoArray = JitAnewArrayZ(this->m_alloc, JITObjTypeSpecFldInfo*, GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    }
    // Register this function's obj-type-spec field infos into the top
    // function's global array, keyed by their global ids.
    for (uint i = 0; i < GetJITFunctionBody()->GetInlineCacheCount(); ++i)
    {
        JITObjTypeSpecFldInfo * info = GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(i);
        if (info != nullptr)
        {
            Assert(info->GetObjTypeSpecFldId() < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
            GetTopFunc()->m_globalObjTypeSpecFldInfoArray[info->GetObjTypeSpecFldId()] = info;
        }
    }
    canHoistConstantAddressLoad = !PHASE_OFF(Js::HoistConstAddrPhase, this);
}
  239. bool
  240. Func::IsLoopBodyInTry() const
  241. {
  242. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTry;
  243. }
  244. /* static */
  245. void
  246. Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  247. ThreadContextInfo * threadContextInfo,
  248. ScriptContextInfo * scriptContextInfo,
  249. JITOutputIDL * outputData,
  250. Js::EntryPointInfo* epInfo, // for in-proc jit only
  251. const FunctionJITRuntimeInfo *const runtimeInfo,
  252. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, CodeGenAllocators *const codeGenAllocators,
  253. CodeGenNumberAllocator * numberAllocator,
  254. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT)
  255. {
  256. bool rejit;
  257. do
  258. {
  259. Func func(alloc, workItem, threadContextInfo,
  260. scriptContextInfo, outputData, epInfo, runtimeInfo,
  261. polymorphicInlineCacheInfo, codeGenAllocators, numberAllocator,
  262. codeGenProfiler, isBackgroundJIT);
  263. try
  264. {
  265. func.TryCodegen();
  266. rejit = false;
  267. }
  268. catch (Js::RejitException ex)
  269. {
  270. // The work item needs to be rejitted, likely due to some optimization that was too aggressive
  271. if (ex.Reason() == RejitReason::AggressiveIntTypeSpecDisabled)
  272. {
  273. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableAggressiveIntTypeSpec(func.IsLoopBody());
  274. outputData->disableAggressiveIntTypeSpec = TRUE;
  275. }
  276. else if (ex.Reason() == RejitReason::InlineApplyDisabled)
  277. {
  278. workItem->GetJITFunctionBody()->DisableInlineApply();
  279. outputData->disableInlineApply = FALSE;
  280. }
  281. else if (ex.Reason() == RejitReason::InlineSpreadDisabled)
  282. {
  283. workItem->GetJITFunctionBody()->DisableInlineSpread();
  284. outputData->disableInlineSpread = FALSE;
  285. }
  286. else if (ex.Reason() == RejitReason::DisableStackArgOpt)
  287. {
  288. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableStackArgOpt();
  289. outputData->disableStackArgOpt = TRUE;
  290. }
  291. else if (ex.Reason() == RejitReason::DisableSwitchOptExpectingInteger ||
  292. ex.Reason() == RejitReason::DisableSwitchOptExpectingString)
  293. {
  294. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableSwitchOpt();
  295. outputData->disableSwitchOpt = TRUE;
  296. }
  297. else
  298. {
  299. Assert(ex.Reason() == RejitReason::TrackIntOverflowDisabled);
  300. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableTrackCompoundedIntOverflow();
  301. outputData->disableTrackCompoundedIntOverflow = TRUE;
  302. }
  303. if (PHASE_TRACE(Js::ReJITPhase, &func))
  304. {
  305. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  306. Output::Print(
  307. _u("Rejit (compile-time): function: %s (%s) reason: %S\n"),
  308. workItem->GetJITFunctionBody()->GetDisplayName(),
  309. workItem->GetJITTimeInfo()->GetDebugNumberSet(debugStringBuffer),
  310. ex.ReasonName());
  311. }
  312. rejit = true;
  313. }
  314. // Either the entry point has a reference to the number now, or we failed to code gen and we
  315. // don't need to numbers, we can flush the completed page now.
  316. //
  317. // If the number allocator is NULL then we are shutting down the thread context and so too the
  318. // code generator. The number allocator must be freed before the recycler (and thus before the
  319. // code generator) so we can't and don't need to flush it.
  320. // TODO: OOP JIT, allocator cleanup
  321. } while (rejit);
  322. }
///----------------------------------------------------------------------------
///
/// Func::TryCodegen
///
/// Attempt to Codegen this function.
///
/// Runs the full backend pipeline in strict order: IRBuilder -> Inline ->
/// FlowGraph/GlobOpt -> Lowerer -> constant encoding -> RegAlloc -> Peeps ->
/// Layout -> EH patch-up -> NOP insertion -> prolog/epilog -> final lower ->
/// Encoder. Throws Js::RejitException when an optimization must be disabled
/// and the compilation retried (see Func::Codegen). For OOP JIT, afterwards
/// serializes the native code data chunks plus a fixup table into the IDL
/// output buffer for the runtime process to relocate.
///
///----------------------------------------------------------------------------
void
Func::TryCodegen()
{
    // Debug-mode jit and try regions are mutually exclusive here.
    Assert(!IsJitInDebugMode() || !GetJITFunctionBody()->HasTry());
    BEGIN_CODEGEN_PHASE(this, Js::BackEndPhase);
    {
        // IRBuilder
        BEGIN_CODEGEN_PHASE(this, Js::IRBuilderPhase);
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            IRBuilderAsmJs asmIrBuilder(this);
            asmIrBuilder.Build();
        }
        else
        {
            IRBuilder irBuilder(this);
            irBuilder.Build();
        }
        END_CODEGEN_PHASE(this, Js::IRBuilderPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::IRBuilderPhase);
#endif /* IR_VIEWER */
        // Inlining must run before the flow graph is built.
        BEGIN_CODEGEN_PHASE(this, Js::InlinePhase);
        InliningHeuristics heuristics(GetWorkItem()->GetJITTimeInfo(), this->IsLoopBody());
        Inline inliner(this, heuristics);
        inliner.Optimize();
        END_CODEGEN_PHASE(this, Js::InlinePhase);
        // FlowGraph
        {
            // Scope for FlowGraph arena
            NoRecoverMemoryJitArenaAllocator fgAlloc(_u("BE-FlowGraph"), m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
            BEGIN_CODEGEN_PHASE(this, Js::FGBuildPhase);
            this->m_fg = FlowGraph::New(this, &fgAlloc);
            this->m_fg->Build();
            END_CODEGEN_PHASE(this, Js::FGBuildPhase);
            // Global Optimization and Type Specialization
            BEGIN_CODEGEN_PHASE(this, Js::GlobOptPhase);
            GlobOpt globOpt(this);
            globOpt.Optimize();
            END_CODEGEN_PHASE(this, Js::GlobOptPhase);
            // Delete flowGraph now
            this->m_fg->Destroy();
            this->m_fg = nullptr;
        }
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::GlobOptPhase);
#endif /* IR_VIEWER */
        // Lowering
        // Note: the lowerer instance is reused by several later phases
        // (interrupt probes, EH bailout patch-up, prolog/epilog, final lower).
        Lowerer lowerer(this);
        BEGIN_CODEGEN_PHASE(this, Js::LowererPhase);
        lowerer.Lower();
        END_CODEGEN_PHASE(this, Js::LowererPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::LowererPhase);
#endif /* IR_VIEWER */
        // Encode constants
        Security security(this);
        BEGIN_CODEGEN_PHASE(this, Js::EncodeConstantsPhase)
        security.EncodeLargeConstants();
        END_CODEGEN_PHASE(this, Js::EncodeConstantsPhase);
        if (GetJITFunctionBody()->DoInterruptProbe())
        {
            BEGIN_CODEGEN_PHASE(this, Js::InterruptProbePhase)
            lowerer.DoInterruptProbes();
            END_CODEGEN_PHASE(this, Js::InterruptProbePhase)
        }
        // Register Allocation
        BEGIN_CODEGEN_PHASE(this, Js::RegAllocPhase);
        LinearScan linearScan(this);
        linearScan.RegAlloc();
        END_CODEGEN_PHASE(this, Js::RegAllocPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::RegAllocPhase);
#endif /* IR_VIEWER */
        // Peephole optimizations
        BEGIN_CODEGEN_PHASE(this, Js::PeepsPhase);
        Peeps peeps(this);
        peeps.PeepFunc();
        END_CODEGEN_PHASE(this, Js::PeepsPhase);
        // Layout
        BEGIN_CODEGEN_PHASE(this, Js::LayoutPhase);
        SimpleLayout layout(this);
        layout.Layout();
        END_CODEGEN_PHASE(this, Js::LayoutPhase);
        // Patch up bailouts inside EH regions, only when both exist.
        if (this->HasTry() && this->hasBailoutInEHRegion)
        {
            BEGIN_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
            lowerer.EHBailoutPatchUp();
            END_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
        }
        // Insert NOPs (moving this before prolog/epilog for AMD64 and possibly ARM).
        BEGIN_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
        security.InsertNOPs();
        END_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
        // Prolog/Epilog
        BEGIN_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            lowerer.LowerPrologEpilogAsmJs();
        }
        else
        {
            lowerer.LowerPrologEpilog();
        }
        END_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
        BEGIN_CODEGEN_PHASE(this, Js::FinalLowerPhase);
        lowerer.FinalLower();
        END_CODEGEN_PHASE(this, Js::FinalLowerPhase);
        // Encoder
        BEGIN_CODEGEN_PHASE(this, Js::EncoderPhase);
        Encoder encoder(this);
        encoder.Encode();
        END_CODEGEN_PHASE_NO_DUMP(this, Js::EncoderPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::EncoderPhase);
#endif /* IR_VIEWER */
    }
    END_CODEGEN_PHASE(this, Js::BackEndPhase);
#if DBG_DUMP
    // Optionally dump the generated asm, redirecting Output to the asm file
    // for the duration of the dump.
    if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag))
    {
        FILE * oldFile = 0;
        FILE * asmFile = GetScriptContext()->GetNativeCodeGenerator()->asmFile;
        if (asmFile)
        {
            oldFile = Output::SetFile(asmFile);
        }
        this->Dump(IRDumpFlags_AsmDumpMode);
        Output::Flush();
        if (asmFile)
        {
            FILE *openedFile = Output::SetFile(oldFile);
            Assert(openedFile == asmFile);
        }
    }
#endif
    if (this->IsOOPJIT())
    {
        auto dataAllocator = this->GetNativeCodeDataAllocator();
        if (dataAllocator->allocCount > 0)
        {
            // fill in the fixup list by scanning the memory
            // todo: this should be done while generating code
            NativeCodeData::DataChunk *chunk = (NativeCodeData::DataChunk*)dataAllocator->chunkList;
            NativeCodeData::DataChunk *next1 = chunk;
            while (next1)
            {
                if (next1->fixupFunc)
                {
                    next1->fixupFunc(next1->data, chunk);
                }
#if DBG
                // Debug-only O(n^2) scan: verify every pointer in this chunk
                // that targets another chunk has a registered fixup entry.
                NativeCodeData::DataChunk *next2 = chunk;
                while (next2)
                {
                    for (unsigned int i = 0; i < next1->len / sizeof(void*); i++)
                    {
                        if (((void**)next1->data)[i] == (void*)next2->data)
                        {
                            NativeCodeData::VerifyExistFixupEntry((void*)next2->data, &((void**)next1->data)[i], next1->data);
                            //NativeCodeData::AddFixupEntry((void*)next2->data, &((void**)next1->data)[i], next1->data, chunk);
                        }
                    }
                    next2 = next2->next;
                }
#endif
                next1 = next1->next;
            }
            ////
            // Serialize chunks + per-chunk fixup records into the IDL output
            // buffers (allocated with midl_user_allocate so RPC can free them).
            JITOutputIDL* jitOutputData = m_output.GetOutputData();
            jitOutputData->nativeDataFixupTable = (NativeDataFixupTable*)midl_user_allocate(offsetof(NativeDataFixupTable, fixupRecords) + sizeof(NativeDataFixupRecord)* (dataAllocator->allocCount));
            jitOutputData->nativeDataFixupTable->count = dataAllocator->allocCount;
            jitOutputData->buffer = (NativeDataBuffer*)midl_user_allocate(offsetof(NativeDataBuffer, data) + dataAllocator->totalSize);
            jitOutputData->buffer->len = dataAllocator->totalSize;
            unsigned int len = 0;
            unsigned int count = 0;
            next1 = chunk;
            while (next1)
            {
                memcpy(jitOutputData->buffer->data + len, next1->data, next1->len);
                len += next1->len;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].index = next1->allocIndex;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].length = next1->len;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].startOffset = next1->offset;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].updateList = next1->fixupList;
                count++;
                next1 = next1->next;
            }
#if DBG
            if (PHASE_TRACE1(Js::NativeCodeDataPhase))
            {
                Output::Print(L"NativeCodeData Server Buffer: %p, len: %x, chunk head: %p\n", jitOutputData->buffer->data, jitOutputData->buffer->len, chunk);
            }
#endif
        }
    }
}
  527. ///----------------------------------------------------------------------------
  528. /// Func::StackAllocate
  529. /// Allocate stack space of given size.
  530. ///----------------------------------------------------------------------------
  531. int32
  532. Func::StackAllocate(int size)
  533. {
  534. Assert(this->IsTopFunc());
  535. int32 offset;
  536. #ifdef MD_GROW_LOCALS_AREA_UP
  537. // Locals have positive offsets and are allocated from bottom to top.
  538. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  539. offset = m_localStackHeight;
  540. m_localStackHeight += size;
  541. #else
  542. // Locals have negative offsets and are allocated from top to bottom.
  543. m_localStackHeight += size;
  544. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  545. offset = -m_localStackHeight;
  546. #endif
  547. return offset;
  548. }
  549. ///----------------------------------------------------------------------------
  550. ///
  551. /// Func::StackAllocate
  552. ///
  553. /// Allocate stack space for this symbol.
  554. ///
  555. ///----------------------------------------------------------------------------
  556. int32
  557. Func::StackAllocate(StackSym *stackSym, int size)
  558. {
  559. Assert(size > 0);
  560. if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym() || stackSym->IsAllocated())
  561. {
  562. return stackSym->m_offset;
  563. }
  564. Assert(stackSym->m_offset == 0);
  565. stackSym->m_allocated = true;
  566. stackSym->m_offset = StackAllocate(size);
  567. return stackSym->m_offset;
  568. }
  569. void
  570. Func::SetArgOffset(StackSym *stackSym, int32 offset)
  571. {
  572. AssertMsg(offset >= 0, "Why is the offset, negative?");
  573. stackSym->m_offset = offset;
  574. stackSym->m_allocated = true;
  575. }
  576. ///
  577. /// Ensures that local var slots are created, if the function has locals.
  578. /// Allocate stack space for locals used for debugging
  579. /// (for local non-temp vars we write-through memory so that locals inspection can make use of that.).
  580. // On stack, after local slots we allocate space for metadata (in particular, whether any the locals was changed in debugger).
  581. ///
  582. void
  583. Func::EnsureLocalVarSlots()
  584. {
  585. Assert(IsJitInDebugMode());
  586. if (!this->HasLocalVarSlotCreated())
  587. {
  588. uint32 localSlotCount = GetJITFunctionBody()->GetNonTempLocalVarCount();
  589. if (localSlotCount && m_localVarSlotsOffset == Js::Constants::InvalidOffset)
  590. {
  591. // Allocate the slots.
  592. int32 size = localSlotCount * GetDiagLocalSlotSize();
  593. m_localVarSlotsOffset = StackAllocate(size);
  594. m_hasLocalVarChangedOffset = StackAllocate(max(1, MachStackAlignment)); // Can't alloc less than StackAlignment bytes.
  595. Assert(m_workItem->Type() == JsFunctionType);
  596. m_output.SetVarSlotsOffset(AdjustOffsetValue(m_localVarSlotsOffset));
  597. m_output.SetVarChangedOffset(AdjustOffsetValue(m_hasLocalVarChangedOffset));
  598. }
  599. }
  600. }
  601. void Func::SetFirstArgOffset(IR::Instr* inlineeStart)
  602. {
  603. Assert(inlineeStart->m_func == this);
  604. Assert(!IsTopFunc());
  605. int32 lastOffset;
  606. IR::Instr* arg = inlineeStart->GetNextArg();
  607. const auto lastArgOutStackSym = arg->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
  608. lastOffset = lastArgOutStackSym->m_offset;
  609. Assert(lastArgOutStackSym->m_isSingleDef);
  610. const auto secondLastArgOutOpnd = lastArgOutStackSym->m_instrDef->GetSrc2();
  611. if (secondLastArgOutOpnd->IsSymOpnd())
  612. {
  613. const auto secondLastOffset = secondLastArgOutOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_offset;
  614. if (secondLastOffset > lastOffset)
  615. {
  616. lastOffset = secondLastOffset;
  617. }
  618. }
  619. lastOffset += MachPtr;
  620. int32 firstActualStackOffset = lastOffset - ((this->actualCount + Js::Constants::InlineeMetaArgCount) * MachPtr);
  621. Assert((this->firstActualStackOffset == -1) || (this->firstActualStackOffset == firstActualStackOffset));
  622. this->firstActualStackOffset = firstActualStackOffset;
  623. }
  624. int32
  625. Func::GetLocalVarSlotOffset(int32 slotId)
  626. {
  627. this->EnsureLocalVarSlots();
  628. Assert(m_localVarSlotsOffset != Js::Constants::InvalidOffset);
  629. int32 slotOffset = slotId * GetDiagLocalSlotSize();
  630. return m_localVarSlotsOffset + slotOffset;
  631. }
  632. void Func::OnAddSym(Sym* sym)
  633. {
  634. Assert(sym);
  635. if (this->IsJitInDebugMode() && this->IsNonTempLocalVar(sym->m_id))
  636. {
  637. Assert(m_nonTempLocalVars);
  638. m_nonTempLocalVars->Set(sym->m_id);
  639. }
  640. }
///
/// Returns offset of the flag (1 byte) whether any local was changed (in debugger).
/// If the function does not have any locals, returns -1.
///
int32
Func::GetHasLocalVarChangedOffset()
{
    // Lazily allocate the locals area so the offset is valid before returning it.
    this->EnsureLocalVarSlots();
    return m_hasLocalVarChangedOffset;
}
// True when this compilation is jitting under the debugger (delegates to the work item).
bool
Func::IsJitInDebugMode()
{
    return m_workItem->IsJitInDebugMode();
}
// True when the given slot index is a non-temporary local var per the function body metadata.
bool
Func::IsNonTempLocalVar(uint32 slotIndex)
{
    return GetJITFunctionBody()->IsNonTempLocalVar(slotIndex);
}
// Converts a stack-allocated offset into the form recorded in the JIT output,
// depending on which direction the locals area grows on this target.
int32
Func::AdjustOffsetValue(int32 offset)
{
#ifdef MD_GROW_LOCALS_AREA_UP
    // Locals grow upward: negate and bias (see BailOutInfo::StackSymBias).
    return -(offset + BailOutInfo::StackSymBias);
#else
    // Stack offset are negative, includes the PUSH EBP and return address
    return offset - (2 * MachPtr);
#endif
}
#ifdef MD_GROW_LOCALS_AREA_UP
// Note: this is called during jit-compile when we finalize bail out record.
// (NOTE(review): "Ajust" is a long-standing typo in this method's name; renaming
// would break callers, so it is kept as-is.)
void
Func::AjustLocalVarSlotOffset()
{
    if (GetJITFunctionBody()->GetNonTempLocalVarCount())
    {
        // Turn positive SP-relative base locals offset into negative frame-pointer-relative offset
        // This is changing value for restoring the locals when read due to locals inspection.
        int localsOffset = m_localVarSlotsOffset - (m_localStackHeight + m_ArgumentsOffset);
        int valueChangeOffset = m_hasLocalVarChangedOffset - (m_localStackHeight + m_ArgumentsOffset);

        // Publish the adjusted offsets on the entry point so the runtime can find them.
        Js::FunctionEntryPointInfo * entryPointInfo = static_cast<Js::FunctionEntryPointInfo*>(this->m_workItem->GetEntryPoint());
        Assert(entryPointInfo != nullptr);
        entryPointInfo->localVarSlotsOffset = localsOffset;
        entryPointInfo->localVarChangedOffset = valueChangeOffset;
    }
}
#endif
// Whether GlobOpt may run for this function.
bool
Func::DoGlobOptsForGeneratorFunc() const
{
    // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
    return !GetJITFunctionBody()->IsGenerator();
}
// Whether simple-jit code should collect dynamic profile data:
// only in (old) simple jit, when the phase is not disabled on the top func.
bool
Func::DoSimpleJitDynamicProfile() const
{
    return IsSimpleJit() && !PHASE_OFF(Js::SimpleJitDynamicProfilePhase, GetTopFunc()) && !CONFIG_FLAG(NewSimpleJit);
}
  700. void
  701. Func::SetDoFastPaths()
  702. {
  703. // Make sure we only call this once!
  704. Assert(!this->hasCalledSetDoFastPaths);
  705. bool doFastPaths = false;
  706. if(!PHASE_OFF(Js::FastPathPhase, this) && (!IsSimpleJit() || CONFIG_FLAG(NewSimpleJit)))
  707. {
  708. doFastPaths = true;
  709. }
  710. this->m_doFastPaths = doFastPaths;
  711. #ifdef DBG
  712. this->hasCalledSetDoFastPaths = true;
  713. #endif
  714. }
#ifdef _M_ARM
// Returns the register used to address locals on ARM: the alternate locals
// pointer when the function has a try (or when forced via flag in DBG builds),
// otherwise SP.
RegNum
Func::GetLocalsPointer() const
{
#ifdef DBG
    if (Js::Configuration::Global.flags.IsEnabled(Js::ForceLocalsPtrFlag))
    {
        return ALT_LOCALS_PTR;
    }
#endif

    if (GetJITFunctionBody()->HasTry())
    {
        return ALT_LOCALS_PTR;
    }

    return RegSP;
}
#endif
// Records (on the top func) that the slot array referenced by this field opnd
// must be range-checked at least up to the accessed slot id.
void Func::AddSlotArrayCheck(IR::SymOpnd *fieldOpnd)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->slotArrayCheckTable == nullptr)
    {
        this->slotArrayCheckTable = SlotArrayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    uint32 slot = propertySym->m_propertyId;
    uint32 *pSlotId = this->slotArrayCheckTable->FindOrInsert(slot, propertySym->m_stackSym->m_id);
    // Keep the maximum slot id seen for this stack sym ((uint32)-1 acts as "unset").
    if (pSlotId && (*pSlotId == (uint32)-1 || *pSlotId < slot))
    {
        *pSlotId = propertySym->m_propertyId;
    }
}
// Records (on the top func) that the frame display referenced by this field opnd
// must be range-checked: tracks the max frame-display slot accessed, and, when
// slotId is provided, the max slot accessed within that scope's slot array.
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->frameDisplayCheckTable == nullptr)
    {
        this->frameDisplayCheckTable = FrameDisplayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    FrameDisplayCheckRecord **record = this->frameDisplayCheckTable->FindOrInsertNew(propertySym->m_stackSym->m_id);
    if (*record == nullptr)
    {
        *record = JitAnew(m_alloc, FrameDisplayCheckRecord);
    }

    // Grow the recorded frame-display slot id to cover this access.
    uint32 frameDisplaySlot = propertySym->m_propertyId;
    if ((*record)->table == nullptr || (*record)->slotId < frameDisplaySlot)
    {
        (*record)->slotId = frameDisplaySlot;
    }

    // (uint32)-1 means "no slot-array access"; otherwise track the per-scope max slot.
    if (slotId != (uint32)-1)
    {
        if ((*record)->table == nullptr)
        {
            (*record)->table = SlotArrayCheckTable::New(m_alloc, 4);
        }
        uint32 *pSlotId = (*record)->table->FindOrInsert(slotId, frameDisplaySlot);
        if (pSlotId && *pSlotId < slotId)
        {
            *pSlotId = slotId;
        }
    }
}
// Creates the stack syms for the local closure, param closure and local frame
// display registers, when the function body declares them.
void Func::InitLocalClosureSyms()
{
    Assert(this->m_localClosureSym == nullptr);

    // Allocate stack space for closure pointers. Do this only if we're jitting for stack closures, and
    // tell bailout that these are not byte code symbols so that we don't try to encode them in the bailout record,
    // as they don't have normal lifetimes.
    // ((Js::RegSlot)-1 marks the sym as a non-bytecode register for bailout.)
    Js::RegSlot regSlot = GetJITFunctionBody()->GetLocalClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }

    regSlot = this->GetJITFunctionBody()->GetParamClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // A separate param closure only exists when param and body scopes are not merged.
        Assert(this->GetParamClosureSym() == nullptr && !this->GetJITFunctionBody()->IsParamAndBodyScopeMerged());
        this->m_paramClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }

    regSlot = GetJITFunctionBody()->GetLocalFrameDisplayReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localFrameDisplaySym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }
}
// Whether jitted code may be allocated in a pre-reserved heap page segment.
// Requires CFG support; currently only implemented for x86 (full jit only) and x64.
bool Func::CanAllocInPreReservedHeapPageSegment ()
{
#ifdef _CONTROL_FLOW_GUARD
    return PHASE_FORCE1(Js::PreReservedHeapAllocPhase) || (!PHASE_OFF1(Js::PreReservedHeapAllocPhase) &&
        !IsJitInDebugMode() && GetThreadContextInfo()->IsCFGEnabled()
        //&& !GetScriptContext()->IsScriptContextInDebugMode()
#if _M_IX86
        && m_workItem->GetJitMode() == ExecutionMode::FullJit && GetCodeGenAllocators()->canCreatePreReservedSegment);
#elif _M_X64
        && true);
#else
        && false); //Not yet implemented for architectures other than x86 and amd64.
#endif //_M_ARCH
#else
    return false;
#endif//_CONTROL_FLOW_GUARD
}
///----------------------------------------------------------------------------
///
/// Func::GetInstrCount
///
///     Returns the number of instrs.
///     Note: It counts all instrs for now, including labels, etc.
///
///----------------------------------------------------------------------------
uint32
Func::GetInstrCount()
{
    uint instrCount = 0;
    // Walk the entire instruction list; no caching, recomputed on every call.
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instrCount++;
    }
    NEXT_INSTR_IN_FUNC;
    return instrCount;
}
///----------------------------------------------------------------------------
///
/// Func::NumberInstrs
///
///     Number each instruction in order of appearance in the function.
///
///----------------------------------------------------------------------------
void
Func::NumberInstrs()
{
#if DBG_DUMP
    // Only the top func numbers instrs, and only once.
    Assert(this->IsTopFunc());
    Assert(!this->hasInstrNumber);
    this->hasInstrNumber = true;
#endif
    // Numbering is 1-based, in list order.
    uint instrCount = 1;
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instr->SetNumber(instrCount++);
    }
    NEXT_INSTR_IN_FUNC;
}
///----------------------------------------------------------------------------
///
/// Func::IsInPhase
///
///     Determines whether the function is currently in the provided phase
///
///----------------------------------------------------------------------------
#if DBG
bool
Func::IsInPhase(Js::Phase tag)
{
    // The phase stack lives on the top func (see BeginPhase/EndProfiler).
    return this->GetTopFunc()->currentPhases.Contains(tag);
}
#endif
///----------------------------------------------------------------------------
///
/// Func::BeginPhase
///
///     Pushes the phase on the debug phase stack and notifies the profiler.
///
///----------------------------------------------------------------------------
void
Func::BeginPhase(Js::Phase tag)
{
#ifdef DBG
    this->GetTopFunc()->currentPhases.Push(tag);
#endif

#ifdef PROFILE_EXEC
    // The profiler pointer must be present exactly when profiling is enabled by flag.
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileBegin(tag);
    }
#endif
}
///----------------------------------------------------------------------------
///
/// Func::EndProfiler
///
///     Pops and verifies the phase on the debug phase stack and notifies the
///     profiler. Called by EndPhase, which additionally handles dumping.
///
///----------------------------------------------------------------------------
void
Func::EndProfiler(Js::Phase tag)
{
#ifdef DBG
    // The ended phase must be the innermost one begun via BeginPhase.
    Assert(this->GetTopFunc()->currentPhases.Count() > 0);
    Js::Phase popped = this->GetTopFunc()->currentPhases.Pop();
    Assert(tag == popped);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileEnd(tag);
    }
#endif
}
// Ends a phase: unwinds the profiler, optionally dumps the IR, and (in DBG
// builds) advances the post-phase state flags and runs the post-lower checker.
void
Func::EndPhase(Js::Phase tag, bool dump)
{
    this->EndProfiler(tag);
#if DBG_DUMP
    if(dump && (PHASE_DUMP(tag, this)
        || PHASE_DUMP(Js::BackEndPhase, this)))
    {
        Output::Print(_u("-----------------------------------------------------------------------------\n"));

        if (IsLoopBody())
        {
            Output::Print(_u("************   IR after %s (%S) Loop %d  ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()),
                m_workItem->GetLoopNumber());
        }
        else
        {
            Output::Print(_u("************   IR after %s (%S)  ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()));
        }
        this->Dump(Js::Configuration::Global.flags.AsmDiff? IRDumpFlags_AsmDumpMode : IRDumpFlags_None);
    }
#endif

#if DBG
    // Track backend pipeline progress; each phase may run at most once and
    // in the expected order (Lower -> Peeps -> Layout -> FinalLower).
    if (tag == Js::LowererPhase)
    {
        Assert(!this->isPostLower);
        this->isPostLower = true;
    }
    else if (tag == Js::RegAllocPhase)
    {
        Assert(!this->isPostRegAlloc);
        this->isPostRegAlloc = true;
    }
    else if (tag == Js::PeepsPhase)
    {
        Assert(this->isPostLower && !this->isPostLayout);
        this->isPostPeeps = true;
    }
    else if (tag == Js::LayoutPhase)
    {
        Assert(this->isPostPeeps && !this->isPostLayout);
        this->isPostLayout = true;
    }
    else if (tag == Js::FinalLowerPhase)
    {
        Assert(this->isPostLayout && !this->isPostFinalLower);
        this->isPostFinalLower = true;
    }
    if (this->isPostLower)
    {
#ifndef _M_ARM    // Need to verify ARM is clean.
        DbCheckPostLower dbCheck(this);
        dbCheck.Check();
#endif
    }
#endif
}
  995. Func const *
  996. Func::GetTopFunc() const
  997. {
  998. Func const * func = this;
  999. while (!func->IsTopFunc())
  1000. {
  1001. func = func->parentFunc;
  1002. }
  1003. return func;
  1004. }
  1005. Func *
  1006. Func::GetTopFunc()
  1007. {
  1008. Func * func = this;
  1009. while (!func->IsTopFunc())
  1010. {
  1011. func = func->parentFunc;
  1012. }
  1013. return func;
  1014. }
  1015. StackSym *
  1016. Func::EnsureLoopParamSym()
  1017. {
  1018. if (this->m_loopParamSym == nullptr)
  1019. {
  1020. this->m_loopParamSym = StackSym::New(TyMachPtr, this);
  1021. }
  1022. return this->m_loopParamSym;
  1023. }
  1024. void
  1025. Func::UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount)
  1026. {
  1027. if (maxInlineeArgOutCount < inlineeArgOutCount)
  1028. {
  1029. maxInlineeArgOutCount = inlineeArgOutCount;
  1030. }
  1031. }
// Starts a cloning session on the top func; creates the Cloner and, on first
// use, the orig->clone instruction map.
void
Func::BeginClone(Lowerer * lowerer, JitArenaAllocator *alloc)
{
    Assert(this->IsTopFunc());
    AssertMsg(m_cloner == nullptr, "Starting new clone while one is in progress");
    m_cloner = JitAnew(alloc, Cloner, lowerer, alloc);
    if (m_cloneMap == nullptr)
    {
        m_cloneMap = JitAnew(alloc, InstrMap, alloc, 7);
    }
}
// Finishes the current cloning session (if any): finalizes the cloned range
// and releases the Cloner. The clone map is kept for reuse.
void
Func::EndClone()
{
    Assert(this->IsTopFunc());
    if (m_cloner)
    {
        m_cloner->Finish();
        JitAdelete(m_cloner->alloc, m_cloner);
        m_cloner = nullptr;
    }
}
// Returns a sym opnd addressing the inlinee frame at the given byte offset
// from the inlinee frame start.
IR::SymOpnd *
Func::GetInlineeOpndAtOffset(int32 offset)
{
    Assert(IsInlinee());

    StackSym *stackSym = CreateInlineeStackSym();
    this->SetArgOffset(stackSym, stackSym->m_offset + offset);
    Assert(stackSym->m_offset >= 0);

    return IR::SymOpnd::New(stackSym, 0, TyMachReg, this);
}
// Creates an (already-allocated) arg-slot sym positioned at the start of this
// inlinee's frame.
StackSym *
Func::CreateInlineeStackSym()
{
    // Make sure this is an inlinee and that GlobOpt has initialized the offset
    // in the inlinee's frame.
    Assert(IsInlinee());
    Assert(m_inlineeFrameStartSym->m_offset != -1);

    // (Js::ArgSlot)-1 requests an anonymous arg slot sym.
    StackSym *stackSym = m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
    stackSym->m_isInlinedArgSlot = true;
    stackSym->m_offset = m_inlineeFrameStartSym->m_offset;
    stackSym->m_allocated = true;
    return stackSym;
}
// Bitmask of arguments used for branching.
uint16
Func::GetArgUsedForBranch() const
{
    // this value can change while JITing, so or these together
    return GetJITFunctionBody()->GetArgUsedForBranch() | GetJITOutput()->GetArgUsedForBranch();
}
// Address of the loop-body iteration counter; valid only for loop-body work items.
intptr_t
Func::GetJittedLoopIterationsSinceLastBailoutAddress() const
{
    Assert(this->m_workItem->Type() == JsLoopBodyWorkItemType);

    return m_workItem->GetJittedLoopIterationsSinceLastBailoutAddr();
}
// Weak reference to the function object, from the jit-time info.
intptr_t
Func::GetWeakFuncRef() const
{
    // TODO: OOP JIT figure out if this can be null
    return m_workItem->GetJITTimeInfo()->GetWeakFuncRef();
}
  1094. intptr_t
  1095. Func::GetRuntimeInlineCache(const uint index) const
  1096. {
  1097. if(m_runtimeInfo != nullptr && m_runtimeInfo->HasClonedInlineCaches())
  1098. {
  1099. intptr_t inlineCache = m_runtimeInfo->GetClonedInlineCache(index);
  1100. if(inlineCache)
  1101. {
  1102. return inlineCache;
  1103. }
  1104. }
  1105. return GetJITFunctionBody()->GetInlineCache(index);
  1106. }
  1107. JITTimePolymorphicInlineCache *
  1108. Func::GetRuntimePolymorphicInlineCache(const uint index) const
  1109. {
  1110. if (this->m_polymorphicInlineCacheInfo && this->m_polymorphicInlineCacheInfo->HasInlineCaches())
  1111. {
  1112. return this->m_polymorphicInlineCacheInfo->GetInlineCache(index);
  1113. }
  1114. return nullptr;
  1115. }
// Utilization value to seed a polymorphic cache with: the recorded util when a
// runtime cache exists, otherwise the minimum.
byte
Func::GetPolyCacheUtilToInitialize(const uint index) const
{
    return this->GetRuntimePolymorphicInlineCache(index) ? this->GetPolyCacheUtil(index) : PolymorphicInlineCacheUtilizationMinValue;
}
// Recorded utilization of the polymorphic cache at the given index.
// NOTE(review): assumes m_polymorphicInlineCacheInfo is non-null here,
// unlike GetRuntimePolymorphicInlineCache which checks — confirm callers.
byte
Func::GetPolyCacheUtil(const uint index) const
{
    return this->m_polymorphicInlineCacheInfo->GetUtil(index);
}
// Returns the object-type-spec field info for the given index; should never be
// called when the function has no inline caches.
JITObjTypeSpecFldInfo*
Func::GetObjTypeSpecFldInfo(const uint index) const
{
    if (GetJITFunctionBody()->GetInlineCacheCount() == 0)
    {
        Assert(UNREACHED);
        return nullptr;
    }

    return GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(index);
}
// Returns the global obj-type-spec field info (stored on the top func) for the
// given property info id.
JITObjTypeSpecFldInfo*
Func::GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const
{
    Assert(propertyInfoId < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    return GetTopFunc()->m_globalObjTypeSpecFldInfoArray[propertyInfoId];
}
// Lazily creates the set of type refs pinned for the lifetime of this compile.
void
Func::EnsurePinnedTypeRefs()
{
    if (this->pinnedTypeRefs == nullptr)
    {
        this->pinnedTypeRefs = JitAnew(this->m_alloc, TypeRefSet, this->m_alloc);
    }
}
// Adds a type ref to the pinned set (creating the set on first use).
void
Func::PinTypeRef(void* typeRef)
{
    EnsurePinnedTypeRefs();
    this->pinnedTypeRefs->AddNew(typeRef);
}
// Lazily creates the typeAddr -> guard dictionary for single-type guards.
void
Func::EnsureSingleTypeGuards()
{
    if (this->singleTypeGuards == nullptr)
    {
        this->singleTypeGuards = JitAnew(this->m_alloc, TypePropertyGuardDictionary, this->m_alloc);
    }
}
// Returns the single-type guard for the given type address, creating and
// registering one on first request.
Js::JitTypePropertyGuard*
Func::GetOrCreateSingleTypeGuard(intptr_t typeAddr)
{
    EnsureSingleTypeGuards();

    Js::JitTypePropertyGuard* guard;
    if (!this->singleTypeGuards->TryGetValue(typeAddr, &guard))
    {
        // Property guards are allocated by NativeCodeData::Allocator so that their lifetime extends as long as the EntryPointInfo is alive.
        guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitTypePropertyGuard, typeAddr, this->indexedPropertyGuardCount++);
        this->singleTypeGuards->Add(typeAddr, guard);
    }
    else
    {
        // Cached guard must still refer to the same type.
        Assert(guard->GetTypeAddr() == typeAddr);
    }

    return guard;
}
// Lazily creates the list of equivalent-type guards for this compile.
void
Func::EnsureEquivalentTypeGuards()
{
    if (this->equivalentTypeGuards == nullptr)
    {
        this->equivalentTypeGuards = JitAnew(this->m_alloc, EquivalentTypeGuardList, this->m_alloc);
    }
}
// Creates a new equivalent-type guard for the given type, wires it to a fresh
// equivalent-type cache, and registers it on this func.
Js::JitEquivalentTypeGuard*
Func::CreateEquivalentTypeGuard(JITTypeHolder type, uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();

    Js::JitEquivalentTypeGuard* guard = NativeCodeDataNew(GetNativeCodeDataAllocator(), Js::JitEquivalentTypeGuard, type.t->GetAddr(), this->indexedPropertyGuardCount++, objTypeSpecFldId);

    // If we want to hard code the address of the cache, we will need to go back to allocating it from the native code data allocator.
    // We would then need to maintain consistency (double write) to both the recycler allocated cache and the one on the heap.
    // (OOP JIT allocates the cache from the jit arena; in-proc from the transfer-data allocator.)
    Js::EquivalentTypeCache* cache = nullptr;
    if (this->IsOOPJIT())
    {
        cache = JitAnewZ(this->m_alloc, Js::EquivalentTypeCache);
    }
    else
    {
        cache = NativeCodeDataNewZ(GetTransferDataAllocator(), Js::EquivalentTypeCache);
    }
    guard->SetCache(cache);

    // Give the cache a back-pointer to the guard so that the guard can be cleared at runtime if necessary.
    cache->SetGuard(guard);
    this->equivalentTypeGuards->Prepend(guard);

    return guard;
}
// Lazily creates the propertyId -> guard-set map.
void
Func::EnsurePropertyGuardsByPropertyId()
{
    if (this->propertyGuardsByPropertyId == nullptr)
    {
        this->propertyGuardsByPropertyId = JitAnew(this->m_alloc, PropertyGuardByPropertyIdMap, this->m_alloc);
    }
}
// Lazily creates the propertyId -> constructor-cache-set map.
void
Func::EnsureCtorCachesByPropertyId()
{
    if (this->ctorCachesByPropertyId == nullptr)
    {
        this->ctorCachesByPropertyId = JitAnew(this->m_alloc, CtorCachesByPropertyIdMap, this->m_alloc);
    }
}
// Associates an indexed property guard with a property id so the runtime can
// invalidate it when the property changes. Requires the map to already exist.
void
Func::LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard)
{
    Assert(guard != nullptr);
    Assert(guard->GetValue() != NULL);

    Assert(this->propertyGuardsByPropertyId != nullptr);

    IndexedPropertyGuardSet* set;
    if (!this->propertyGuardsByPropertyId->TryGetValue(propertyId, &set))
    {
        // First guard for this property id: create its set.
        set = JitAnew(this->m_alloc, IndexedPropertyGuardSet, this->m_alloc);
        this->propertyGuardsByPropertyId->Add(propertyId, set);
    }

    set->Item(guard);
}
// Associates a constructor cache (by its runtime address) with a property id.
// Requires the map to already exist.
void
Func::LinkCtorCacheToPropertyId(Js::PropertyId propertyId, JITTimeConstructorCache* cache)
{
    Assert(cache != nullptr);
    Assert(this->ctorCachesByPropertyId != nullptr);

    CtorCacheSet* set;
    if (!this->ctorCachesByPropertyId->TryGetValue(propertyId, &set))
    {
        // First cache for this property id: create its set.
        set = JitAnew(this->m_alloc, CtorCacheSet, this->m_alloc);
        this->ctorCachesByPropertyId->Add(propertyId, set);
    }

    set->Item(cache->GetRuntimeCacheAddr());
}
// Returns the constructor cache recorded for the given profiled call site (may be nullptr).
JITTimeConstructorCache* Func::GetConstructorCache(const Js::ProfileId profiledCallSiteId)
{
    Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
    Assert(this->constructorCaches != nullptr);
    return this->constructorCaches[profiledCallSiteId];
}
// Records the constructor cache for a profiled call site; each slot may be set only once.
void Func::SetConstructorCache(const Js::ProfileId profiledCallSiteId, JITTimeConstructorCache* constructorCache)
{
    Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
    Assert(constructorCache != nullptr);
    Assert(this->constructorCaches != nullptr);
    Assert(this->constructorCaches[profiledCallSiteId] == nullptr);
    this->constructorCacheCount++;
    this->constructorCaches[profiledCallSiteId] = constructorCache;
}
// Lazily creates the set of property ids this function writes to.
void Func::EnsurePropertiesWrittenTo()
{
    if (this->propertiesWrittenTo == nullptr)
    {
        this->propertiesWrittenTo = JitAnew(this->m_alloc, PropertyIdSet, this->m_alloc);
    }
}
// Lazily creates the call-site -> arguments-offset fixup map.
void Func::EnsureCallSiteToArgumentsOffsetFixupMap()
{
    if (this->callSiteToArgumentsOffsetFixupMap == nullptr)
    {
        this->callSiteToArgumentsOffsetFixupMap = JitAnew(this->m_alloc, CallSiteToArgumentsOffsetFixupMap, this->m_alloc);
    }
}
// Returns the function start label, or nullptr if none has been created yet.
IR::LabelInstr *
Func::GetFuncStartLabel()
{
    return m_funcStartLabel;
}
  1288. IR::LabelInstr *
  1289. Func::EnsureFuncStartLabel()
  1290. {
  1291. if(m_funcStartLabel == nullptr)
  1292. {
  1293. m_funcStartLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1294. }
  1295. return m_funcStartLabel;
  1296. }
// Returns the function end label, or nullptr if none has been created yet.
IR::LabelInstr *
Func::GetFuncEndLabel()
{
    return m_funcEndLabel;
}
  1302. IR::LabelInstr *
  1303. Func::EnsureFuncEndLabel()
  1304. {
  1305. if(m_funcEndLabel == nullptr)
  1306. {
  1307. m_funcEndLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1308. }
  1309. return m_funcEndLabel;
  1310. }
// Lazily creates the tracker for stack-args optimization with formals.
void
Func::EnsureStackArgWithFormalsTracker()
{
    if (stackArgWithFormalsTracker == nullptr)
    {
        stackArgWithFormalsTracker = JitAnew(m_alloc, StackArgWithFormalsTracker, m_alloc);
    }
}
  1319. BOOL
  1320. Func::IsFormalsArraySym(SymID symId)
  1321. {
  1322. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsArraySyms() == nullptr)
  1323. {
  1324. return false;
  1325. }
  1326. return stackArgWithFormalsTracker->GetFormalsArraySyms()->Test(symId);
  1327. }
// Records a sym id as a formals-array sym (creating the tracker on first use).
void
Func::TrackFormalsArraySym(SymID symId)
{
    EnsureStackArgWithFormalsTracker();
    stackArgWithFormalsTracker->SetFormalsArraySyms(symId);
}
// Maps a formal's index to its stack sym in the tracker.
void
Func::TrackStackSymForFormalIndex(Js::ArgSlot formalsIndex, StackSym * sym)
{
    EnsureStackArgWithFormalsTracker();
    // In-params include 'this'; formals count excludes it.
    Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
    stackArgWithFormalsTracker->SetStackSymInFormalsIndexMap(sym, formalsIndex, formalsCount);
}
// Returns the stack sym recorded for the given formal index, or nullptr when
// no tracker/map exists.
StackSym *
Func::GetStackSymForFormal(Js::ArgSlot formalsIndex)
{
    if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
    {
        return nullptr;
    }

    // In-params include 'this'; formals count excludes it.
    Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
    StackSym ** formalsIndexToStackSymMap = stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap();
    AssertMsg(formalsIndex < formalsCount, "OutOfRange ? ");
    return formalsIndexToStackSymMap[formalsIndex];
}
  1353. bool
  1354. Func::HasStackSymForFormal(Js::ArgSlot formalsIndex)
  1355. {
  1356. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1357. {
  1358. return false;
  1359. }
  1360. return GetStackSymForFormal(formalsIndex) != nullptr;
  1361. }
// Records the scope-object sym in the tracker (creating the tracker on first use).
void
Func::SetScopeObjSym(StackSym * sym)
{
    EnsureStackArgWithFormalsTracker();
    stackArgWithFormalsTracker->SetScopeObjSym(sym);
}
  1368. StackSym*
  1369. Func::GetScopeObjSym()
  1370. {
  1371. if (stackArgWithFormalsTracker == nullptr)
  1372. {
  1373. return nullptr;
  1374. }
  1375. return stackArgWithFormalsTracker->GetScopeObjSym();
  1376. }
// Bit vector of sym ids marked as formals-array syms (may be nullptr before first Set).
BVSparse<JitArenaAllocator> *
StackArgWithFormalsTracker::GetFormalsArraySyms()
{
    return formalsArraySyms;
}
// Marks a sym id as a formals-array sym, allocating the bit vector on first use.
void
StackArgWithFormalsTracker::SetFormalsArraySyms(SymID symId)
{
    if (formalsArraySyms == nullptr)
    {
        formalsArraySyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    }
    formalsArraySyms->Set(symId);
}
// Array mapping formal index -> stack sym (may be nullptr before first set).
StackSym **
StackArgWithFormalsTracker::GetFormalsIndexToStackSymMap()
{
    return formalsIndexToStackSymMap;
}
// Records the stack sym for a formal index, allocating the (zero-initialized)
// map sized to formalsCount on first use.
void
StackArgWithFormalsTracker::SetStackSymInFormalsIndexMap(StackSym * sym, Js::ArgSlot formalsIndex, Js::ArgSlot formalsCount)
{
    if(formalsIndexToStackSymMap == nullptr)
    {
        formalsIndexToStackSymMap = JitAnewArrayZ(alloc, StackSym*, formalsCount);
    }
    AssertMsg(formalsIndex < formalsCount, "Out of range ?");
    formalsIndexToStackSymMap[formalsIndex] = sym;
}
// Stores the scope-object sym.
void
StackArgWithFormalsTracker::SetScopeObjSym(StackSym * sym)
{
    m_scopeObjSym = sym;
}
// Returns the stored scope-object sym.
StackSym *
StackArgWithFormalsTracker::GetScopeObjSym()
{
    return m_scopeObjSym;
}
  1416. void
  1417. Cloner::AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone)
  1418. {
  1419. if (!this->instrFirst)
  1420. {
  1421. this->instrFirst = instrClone;
  1422. }
  1423. this->instrLast = instrClone;
  1424. }
// Finalizes a cloning session: retargets cloned branches and, when a lowerer
// was supplied, lowers the cloned instruction range.
void
Cloner::Finish()
{
    this->RetargetClonedBranches();
    if (this->lowerer)
    {
        lowerer->LowerRange(this->instrFirst, this->instrLast, false, false);
    }
}
// Retargets each cloned branch in the cloned range, if retargeting was requested.
void
Cloner::RetargetClonedBranches()
{
    if (!this->fRetargetClonedBranch)
    {
        return;
    }

    FOREACH_INSTR_IN_RANGE(instr, this->instrFirst, this->instrLast)
    {
        if (instr->IsBranchInstr())
        {
            instr->AsBranchInstr()->RetargetClonedBranch();
        }
    }
    NEXT_INSTR_IN_RANGE;
}
// Returns an indir opnd addressing the given constant address as
// [baseReg + offset], reusing an already-hoisted constant-address base register
// when the delta fits in a DWORD; otherwise hoists a new base load at function
// entry. Returns nullptr when hoisting is no longer allowed (post-lower).
IR::IndirOpnd * Func::GetConstantAddressIndirOpnd(intptr_t address, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode)
{
    Assert(this->GetTopFunc() == this);
    if (!canHoistConstantAddressLoad)
    {
        // We can't hoist constant address load after lower, as we can't mark the sym as
        // live on back edge
        return nullptr;
    }
    int offset = 0;
    // Look for an existing hoisted base whose address is within DWORD range.
    IR::RegOpnd ** foundRegOpnd = this->constantAddressRegOpnd.Find([address, &offset](IR::RegOpnd * regOpnd)
    {
        Assert(regOpnd->m_sym->IsSingleDef());
        void * curr = regOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address;
        ptrdiff_t diff = (intptr_t)address - (intptr_t)curr;
        if (!Math::FitsInDWord(diff))
        {
            return false;
        }

        offset = (int)diff;
        return true;
    });

    IR::RegOpnd * addressRegOpnd;
    if (foundRegOpnd != nullptr)
    {
        addressRegOpnd = *foundRegOpnd;
    }
    else
    {
        // No reusable base: emit a new load of the address into a fresh reg and
        // insert it at the function entry (after previously hoisted loads).
        Assert(offset == 0);
        addressRegOpnd = IR::RegOpnd::New(TyMachPtr, this);
        IR::Instr *const newInstr =
            IR::Instr::New(
                loadOpCode,
                addressRegOpnd,
                IR::AddrOpnd::New(address, kind, this, true),
                this);
        this->constantAddressRegOpnd.Prepend(addressRegOpnd);
        IR::Instr * insertBeforeInstr = this->lastConstantAddressRegLoadInstr;
        if (insertBeforeInstr == nullptr)
        {
            insertBeforeInstr = this->GetFunctionEntryInsertionPoint();
            this->lastConstantAddressRegLoadInstr = newInstr;
        }
        insertBeforeInstr->InsertBefore(newInstr);
    }
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(addressRegOpnd, offset, type, this, true);
#if DBG_DUMP
    // TODO: michhol make intptr_t
    indirOpnd->SetAddrKind(kind, (void*)address);
#endif
    return indirOpnd;
}
// Sets, in the given bit vector, the sym ids of all hoisted constant-address regs.
void Func::MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv)
{
    Assert(this->GetTopFunc() == this);
    this->constantAddressRegOpnd.Iterate([bv](IR::RegOpnd * regOpnd)
    {
        bv->Set(regOpnd->m_sym->m_id);
    });
}
  1511. IR::Instr *
  1512. Func::GetFunctionEntryInsertionPoint()
  1513. {
  1514. Assert(this->GetTopFunc() == this);
  1515. IR::Instr * insertInsert = this->lastConstantAddressRegLoadInstr;
  1516. if (insertInsert != nullptr)
  1517. {
  1518. return insertInsert->m_next;
  1519. }
  1520. insertInsert = this->m_headInstr;
  1521. if (this->HasTry())
  1522. {
  1523. // Insert it inside the root region
  1524. insertInsert = insertInsert->m_next;
  1525. Assert(insertInsert->IsLabelInstr() && insertInsert->AsLabelInstr()->GetRegion()->GetType() == RegionTypeRoot);
  1526. }
  1527. return insertInsert->m_next;
  1528. }
  1529. Js::Var
  1530. Func::AllocateNumber(double value)
  1531. {
  1532. Js::Var number = nullptr;
  1533. #if FLOATVAR
  1534. number = Js::JavascriptNumber::NewCodeGenInstance(GetNumberAllocator(), (double)value, nullptr);
  1535. #else
  1536. if (!IsOOPJIT()) // in-proc jit
  1537. {
  1538. number = Js::JavascriptNumber::NewCodeGenInstance(GetNumberAllocator(), (double)value, GetScriptContext());
  1539. }
  1540. else // OOP JIT
  1541. {
  1542. GetXProcNumberAllocator()->AllocateNumber(this->GetThreadContextInfo()->GetProcessHandle(),
  1543. value,
  1544. (Js::StaticType*)this->GetScriptContextInfo()->GetNumberTypeStaticAddr(),
  1545. (void*)this->GetScriptContextInfo()->GetVTableAddress(VTableValue::VtableJavascriptNumber));
  1546. }
  1547. #endif
  1548. return number;
  1549. }
  1550. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1551. void
  1552. Func::DumpFullFunctionName()
  1553. {
  1554. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1555. Output::Print(L"Function %s (%s)", GetJITFunctionBody()->GetDisplayName(), GetDebugNumberSet(debugStringBuffer));
  1556. }
  1557. #endif
  1558. #if DBG_DUMP
  1559. ///----------------------------------------------------------------------------
  1560. ///
  1561. /// Func::DumpHeader
  1562. ///
  1563. ///----------------------------------------------------------------------------
  1564. void
  1565. Func::DumpHeader()
  1566. {
  1567. Output::Print(_u("-----------------------------------------------------------------------------\n"));
  1568. DumpFullFunctionName();
  1569. Output::SkipToColumn(50);
  1570. Output::Print(_u("Instr Count:%d"), GetInstrCount());
  1571. if(m_codeSize > 0)
  1572. {
  1573. Output::Print(_u("\t\tSize:%d\n\n"), m_codeSize);
  1574. }
  1575. else
  1576. {
  1577. Output::Print(_u("\n\n"));
  1578. }
  1579. }
///----------------------------------------------------------------------------
///
/// Func::Dump
///
///     Dump the whole function: the header, then every instruction in order,
///     each preceded by its glob-opt instruction string (when one was
///     recorded), and finally flush the output stream.
///
///----------------------------------------------------------------------------
void
Func::Dump(IRDumpFlags flags)
{
    this->DumpHeader();

    // Iterate all instructions across the function via the project macro pair.
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instr->DumpGlobOptInstrString();
        instr->Dump(flags);
    }NEXT_INSTR_IN_FUNC;

    Output::Flush();
}
  1596. void
  1597. Func::Dump()
  1598. {
  1599. this->Dump(IRDumpFlags_None);
  1600. }
  1601. #endif
  1602. #if DBG_DUMP || defined(ENABLE_IR_VIEWER)
  1603. LPCSTR
  1604. Func::GetVtableName(INT_PTR address)
  1605. {
  1606. #if DBG
  1607. if (vtableMap == nullptr)
  1608. {
  1609. vtableMap = VirtualTableRegistry::CreateVtableHashMap(this->m_alloc);
  1610. };
  1611. LPCSTR name = vtableMap->Lookup(address, nullptr);
  1612. if (name)
  1613. {
  1614. if (strncmp(name, "class ", _countof("class ") - 1) == 0)
  1615. {
  1616. name += _countof("class ") - 1;
  1617. }
  1618. }
  1619. return name;
  1620. #else
  1621. return "";
  1622. #endif
  1623. }
  1624. #endif
  1625. #if DBG_DUMP | defined(VTUNE_PROFILING)
  1626. bool Func::DoRecordNativeMap() const
  1627. {
  1628. #if defined(VTUNE_PROFILING)
  1629. if (VTuneChakraProfile::isJitProfilingActive)
  1630. {
  1631. return true;
  1632. }
  1633. #endif
  1634. #if DBG_DUMP
  1635. return PHASE_DUMP(Js::EncoderPhase, this) && Js::Configuration::Global.flags.Verbose;
  1636. #else
  1637. return false;
  1638. #endif
  1639. }
  1640. #endif