Func.cpp 54 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Base/EtwTrace.h"
  7. #include "Base/ScriptContextProfiler.h"
  8. #ifdef VTUNE_PROFILING
  9. #include "Base/VTuneChakraProfile.h"
  10. #endif
// Func: the backend's per-function compilation context. One Func is created
// for the top-level jitted function (or jitted loop body), and one per
// inlinee; an inlinee is identified by a non-null runtimeInfo/parentFunc.
Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
    ThreadContextInfo * threadContextInfo,
    ScriptContextInfo * scriptContextInfo,
    JITOutputIDL * outputData,
    const FunctionJITRuntimeInfo *const runtimeInfo,
    JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, CodeGenAllocators *const codeGenAllocators,
    CodeGenNumberAllocator * numberAllocator,
    Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc,
    uint postCallByteCodeOffset, Js::RegSlot returnValueRegSlot, const bool isInlinedConstructor,
    Js::ProfileId callSiteIdInParentFunc, bool isGetterSetter) :
    m_alloc(alloc),
    m_workItem(workItem),
    m_output(outputData),
    m_threadContextInfo(threadContextInfo),
    m_scriptContextInfo(scriptContextInfo),
    m_runtimeInfo(runtimeInfo),
    m_polymorphicInlineCacheInfo(polymorphicInlineCacheInfo),
    m_codeGenAllocators(codeGenAllocators),
    m_inlineeId(0),
    pinnedTypeRefs(nullptr),
    singleTypeGuards(nullptr),
    equivalentTypeGuards(nullptr),
    propertyGuardsByPropertyId(nullptr),
    ctorCachesByPropertyId(nullptr),
    callSiteToArgumentsOffsetFixupMap(nullptr),
    indexedPropertyGuardCount(0),
    propertiesWrittenTo(nullptr),
    lazyBailoutProperties(alloc),
    anyPropertyMayBeWrittenTo(false),
#ifdef PROFILE_EXEC
    m_codeGenProfiler(codeGenProfiler),
#endif
    m_isBackgroundJIT(isBackgroundJIT),
    m_cloner(nullptr),
    m_cloneMap(nullptr),
    m_loopParamSym(nullptr),
    m_funcObjSym(nullptr),
    m_localClosureSym(nullptr),
    m_paramClosureSym(nullptr),
    m_localFrameDisplaySym(nullptr),
    m_bailoutReturnValueSym(nullptr),
    m_hasBailedOutSym(nullptr),
    m_inlineeFrameStartSym(nullptr),
    m_regsUsed(0),
    m_fg(nullptr),
    m_labelCount(0),
    m_argSlotsForFunctionsCalled(0),
    m_isLeaf(false),
    m_hasCalls(false),
    m_hasInlineArgsOpt(false),
    m_canDoInlineArgsOpt(true),
    m_doFastPaths(false),
    hasBailout(false),
    hasBailoutInEHRegion(false),
    hasInstrNumber(false),
    maintainByteCodeOffset(true),
    frameSize(0),
    parentFunc(parentFunc),
    argObjSyms(nullptr),
    m_nonTempLocalVars(nullptr),
    hasAnyStackNestedFunc(false),
    hasMarkTempObjects(false),
    postCallByteCodeOffset(postCallByteCodeOffset),
    maxInlineeArgOutCount(0),
    returnValueRegSlot(returnValueRegSlot),
    firstActualStackOffset(-1),
    m_localVarSlotsOffset(Js::Constants::InvalidOffset),
    m_hasLocalVarChangedOffset(Js::Constants::InvalidOffset),
    actualCount((Js::ArgSlot) - 1),
    tryCatchNestingLevel(0),
    m_localStackHeight(0),
    tempSymDouble(nullptr),
    tempSymBool(nullptr),
    hasInlinee(false),
    thisOrParentInlinerHasArguments(false),
    hasStackArgs(false),
    hasNonSimpleParams(false),
    hasUnoptimizedArgumentsAcccess(false),
    hasApplyTargetInlining(false),
    hasImplicitCalls(false),
    hasTempObjectProducingInstr(false),
    isInlinedConstructor(isInlinedConstructor),
    numberAllocator(numberAllocator),
    loopCount(0),
    callSiteIdInParentFunc(callSiteIdInParentFunc),
    isGetterSetter(isGetterSetter),
    frameInfo(nullptr),
    isTJLoopBody(false),
    isFlowGraphValid(false),
#if DBG
    m_callSiteCount(0),
#endif
    stackNestedFunc(false),
    stackClosure(false)
#if defined(_M_ARM32_OR_ARM64)
    , m_ArgumentsOffset(0)
    , m_epilogLabel(nullptr)
#endif
    , m_funcStartLabel(nullptr)
    , m_funcEndLabel(nullptr)
#ifdef _M_X64
    , m_prologEncoder(alloc)
#endif
#if DBG
    , hasCalledSetDoFastPaths(false)
    , allowRemoveBailOutArgInstr(false)
    , currentPhases(alloc)
    , isPostLower(false)
    , isPostRegAlloc(false)
    , isPostPeeps(false)
    , isPostLayout(false)
    , isPostFinalLower(false)
    , vtableMap(nullptr)
#endif
    , m_yieldOffsetResumeLabelList(nullptr)
    , m_bailOutNoSaveLabel(nullptr)
    , constantAddressRegOpnd(alloc)
    , lastConstantAddressRegLoadInstr(nullptr)
    , m_totalJumpTableSizeInBytesForSwitchStatements(0)
    , slotArrayCheckTable(nullptr)
    , frameDisplayCheckTable(nullptr)
    , stackArgWithFormalsTracker(nullptr)
    , argInsCount(0)
    , m_globalObjTypeSpecFldInfoArray(nullptr)
{
    // A Func is an inlinee exactly when per-call-site runtime info was supplied.
    Assert(this->IsInlined() == !!runtimeInfo);

    if (this->IsInlined())
    {
        // Inlinee ids are handed out by the top func so they are unique across
        // the whole inline tree.
        m_inlineeId = ++(GetTopFunc()->m_inlineeId);
    }

    m_jnFunction = nullptr;

    bool doStackNestedFunc = GetJITFunctionBody()->DoStackNestedFunc();
    bool doStackClosure = GetJITFunctionBody()->DoStackClosure() && !PHASE_OFF(Js::FrameDisplayFastPathPhase, this) && !PHASE_OFF(Js::StackClosurePhase, this);
    // Stack closure allocation implies stack nested function allocation.
    Assert(!doStackClosure || doStackNestedFunc);
    this->stackClosure = doStackClosure && this->IsTopFunc();
    if (this->stackClosure)
    {
        // TODO: calculate on runtime side?
        m_output.SetHasJITStackClosure();
    }

    if (GetJITFunctionBody()->DoBackendArgumentsOptimization() && !GetJITFunctionBody()->HasTry())
    {
        // doBackendArgumentsOptimization bit is set when there is no eval inside a function
        // as determined by the bytecode generator.
        SetHasStackArgs(true);
    }

    if (m_workItem->Type() == JsFunctionType)
    {
        if (doStackNestedFunc && GetJITFunctionBody()->GetNestedCount() != 0 &&
            this->GetTopFunc()->m_workItem->Type() != JsLoopBodyWorkItemType) // make sure none of the functions inlined in a jitted loop body allocate nested functions on the stack
        {
            Assert(!(this->IsJitInDebugMode() && !GetJITFunctionBody()->IsLibraryCode()));
            stackNestedFunc = true;
            this->GetTopFunc()->hasAnyStackNestedFunc = true;
        }
    }
    else
    {
        Assert(m_workItem->IsLoopBody());
    }

    // Propagate "uses arguments" down the inline chain so any inlinee knows an
    // enclosing inliner (or itself) touches the arguments object.
    if (GetJITFunctionBody()->HasOrParentHasArguments() || parentFunc && parentFunc->thisOrParentInlinerHasArguments)
    {
        thisOrParentInlinerHasArguments = true;
    }

    if (parentFunc == nullptr)
    {
        // Top func: owns the symbol table shared by all inlinees.
        inlineDepth = 0;
        m_symTable = JitAnew(alloc, SymTable);
        m_symTable->Init(this);
        m_symTable->SetStartingID(static_cast<SymID>(workItem->GetJITFunctionBody()->GetLocalsCount() + 1));

        Assert(Js::Constants::NoByteCodeOffset == postCallByteCodeOffset);
        Assert(Js::Constants::NoRegister == returnValueRegSlot);

#if defined(_M_IX86) || defined(_M_X64)
        if (HasArgumentSlot())
        {
            // Pre-allocate the single argument slot we'll reserve for the arguments object.
            // For ARM, the argument slot is not part of the local but part of the register saves
            m_localStackHeight = MachArgsSlotOffset;
        }
#endif
    }
    else
    {
        inlineDepth = parentFunc->inlineDepth + 1;
        Assert(Js::Constants::NoByteCodeOffset != postCallByteCodeOffset);
    }

    this->constructorCacheCount = 0;
    this->constructorCaches = AnewArrayZ(this->m_alloc, JITTimeConstructorCache*, GetJITFunctionBody()->GetProfiledCallSiteCount());

#if DBG_DUMP
    m_codeSize = -1;
#endif

#if defined(_M_X64)
    m_spillSize = -1;
    m_argsSize = -1;
    m_savedRegSize = -1;
#endif

    if (this->IsJitInDebugMode())
    {
        // Used to track non-temp locals for debugger inspection (see OnAddSym).
        m_nonTempLocalVars = Anew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
    }

    if (GetJITFunctionBody()->IsGenerator())
    {
        m_yieldOffsetResumeLabelList = YieldOffsetResumeLabelList::New(this->m_alloc);
    }

    if (this->IsTopFunc())
    {
        m_globalObjTypeSpecFldInfoArray = JitAnewArrayZ(this->m_alloc, JITObjTypeSpecFldInfo*, GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    }

    // Register this func's obj-type-spec field infos into the top func's global
    // array, indexed by their global id.
    for (uint i = 0; i < GetJITFunctionBody()->GetInlineCacheCount(); ++i)
    {
        JITObjTypeSpecFldInfo * info = GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(i);
        if (info != nullptr)
        {
            Assert(info->GetObjTypeSpecFldId() < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
            GetTopFunc()->m_globalObjTypeSpecFldInfoArray[info->GetObjTypeSpecFldId()] = info;
        }
    }

    canHoistConstantAddressLoad = !PHASE_OFF(Js::HoistConstAddrPhase, this);
}
  230. bool
  231. Func::IsLoopBodyInTry() const
  232. {
  233. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTry;
  234. }
  235. /* static */
  236. void
  237. Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  238. ThreadContextInfo * threadContextInfo,
  239. ScriptContextInfo * scriptContextInfo,
  240. JITOutputIDL * outputData,
  241. const FunctionJITRuntimeInfo *const runtimeInfo,
  242. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, CodeGenAllocators *const codeGenAllocators,
  243. CodeGenNumberAllocator * numberAllocator,
  244. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT)
  245. {
  246. bool rejit;
  247. do
  248. {
  249. Func func(alloc, workItem, threadContextInfo,
  250. scriptContextInfo, outputData, runtimeInfo,
  251. polymorphicInlineCacheInfo, codeGenAllocators, numberAllocator,
  252. codeGenProfiler, isBackgroundJIT);
  253. try
  254. {
  255. func.TryCodegen();
  256. rejit = false;
  257. }
  258. catch (Js::RejitException ex)
  259. {
  260. // The work item needs to be rejitted, likely due to some optimization that was too aggressive
  261. if (ex.Reason() == RejitReason::AggressiveIntTypeSpecDisabled)
  262. {
  263. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableAggressiveIntTypeSpec(func.IsLoopBody());
  264. outputData->disableAggressiveIntTypeSpec = TRUE;
  265. }
  266. else if (ex.Reason() == RejitReason::InlineApplyDisabled)
  267. {
  268. workItem->GetJITFunctionBody()->DisableInlineApply();
  269. outputData->disableInlineApply = FALSE;
  270. }
  271. else if (ex.Reason() == RejitReason::InlineSpreadDisabled)
  272. {
  273. workItem->GetJITFunctionBody()->DisableInlineSpread();
  274. outputData->disableInlineSpread = FALSE;
  275. }
  276. else if (ex.Reason() == RejitReason::DisableStackArgOpt)
  277. {
  278. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableStackArgOpt();
  279. outputData->disableStackArgOpt = TRUE;
  280. }
  281. else if (ex.Reason() == RejitReason::DisableSwitchOptExpectingInteger ||
  282. ex.Reason() == RejitReason::DisableSwitchOptExpectingString)
  283. {
  284. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableSwitchOpt();
  285. outputData->disableSwitchOpt = TRUE;
  286. }
  287. else
  288. {
  289. Assert(ex.Reason() == RejitReason::TrackIntOverflowDisabled);
  290. workItem->GetJITFunctionBody()->GetProfileInfo()->DisableTrackCompoundedIntOverflow();
  291. outputData->disableTrackCompoundedIntOverflow = TRUE;
  292. }
  293. if (PHASE_TRACE(Js::ReJITPhase, &func))
  294. {
  295. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  296. Output::Print(
  297. _u("Rejit (compile-time): function: %s (%s) reason: %S\n"),
  298. workItem->GetJITFunctionBody()->GetDisplayName(),
  299. workItem->GetJITTimeInfo()->GetDebugNumberSet(debugStringBuffer),
  300. ex.ReasonName());
  301. }
  302. rejit = true;
  303. }
  304. // Either the entry point has a reference to the number now, or we failed to code gen and we
  305. // don't need to numbers, we can flush the completed page now.
  306. //
  307. // If the number allocator is NULL then we are shutting down the thread context and so too the
  308. // code generator. The number allocator must be freed before the recycler (and thus before the
  309. // code generator) so we can't and don't need to flush it.
  310. // TODO: OOP JIT, allocator cleanup
  311. } while (rejit);
  312. }
///----------------------------------------------------------------------------
///
/// Func::TryCodegen
///
/// Attempt to Codegen this function.
///
/// Runs the full backend pipeline in order: IR build, inlining, flow graph +
/// global optimization, lowering, constant encoding, register allocation,
/// peeps, layout, prolog/epilog, final lower, and machine-code encoding.
/// Throws Js::RejitException when an optimization must be disabled and the
/// function re-jitted (handled by Func::Codegen).
///
///----------------------------------------------------------------------------
void
Func::TryCodegen()
{
    // Jitting under the debugger is not supported for functions with try.
    Assert(!IsJitInDebugMode() || !GetJITFunctionBody()->HasTry());

    BEGIN_CODEGEN_PHASE(this, Js::BackEndPhase);
    {
        // IRBuilder
        BEGIN_CODEGEN_PHASE(this, Js::IRBuilderPhase);

        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            IRBuilderAsmJs asmIrBuilder(this);
            asmIrBuilder.Build();
        }
        else
        {
            IRBuilder irBuilder(this);
            irBuilder.Build();
        }

        END_CODEGEN_PHASE(this, Js::IRBuilderPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::IRBuilderPhase);
#endif /* IR_VIEWER */

        BEGIN_CODEGEN_PHASE(this, Js::InlinePhase);

        InliningHeuristics heuristics(GetWorkItem()->GetJITTimeInfo(), this->IsLoopBody());
        Inline inliner(this, heuristics);
        inliner.Optimize();

        END_CODEGEN_PHASE(this, Js::InlinePhase);

        // FlowGraph
        {
            // Scope for FlowGraph arena: the flow graph (and GlobOpt's use of
            // it) lives only within this block; the arena is released on exit.
            NoRecoverMemoryJitArenaAllocator fgAlloc(_u("BE-FlowGraph"), m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);

            BEGIN_CODEGEN_PHASE(this, Js::FGBuildPhase);

            this->m_fg = FlowGraph::New(this, &fgAlloc);
            this->m_fg->Build();

            END_CODEGEN_PHASE(this, Js::FGBuildPhase);

            // Global Optimization and Type Specialization
            BEGIN_CODEGEN_PHASE(this, Js::GlobOptPhase);

            GlobOpt globOpt(this);
            globOpt.Optimize();

            END_CODEGEN_PHASE(this, Js::GlobOptPhase);

            // Delete flowGraph now
            this->m_fg->Destroy();
            this->m_fg = nullptr;
        }

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::GlobOptPhase);
#endif /* IR_VIEWER */

        // Lowering
        Lowerer lowerer(this);
        BEGIN_CODEGEN_PHASE(this, Js::LowererPhase);
        lowerer.Lower();
        END_CODEGEN_PHASE(this, Js::LowererPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::LowererPhase);
#endif /* IR_VIEWER */

        // Encode constants
        Security security(this);

        BEGIN_CODEGEN_PHASE(this, Js::EncodeConstantsPhase)
        security.EncodeLargeConstants();
        END_CODEGEN_PHASE(this, Js::EncodeConstantsPhase);

        if (GetJITFunctionBody()->DoInterruptProbe())
        {
            BEGIN_CODEGEN_PHASE(this, Js::InterruptProbePhase)
            lowerer.DoInterruptProbes();
            END_CODEGEN_PHASE(this, Js::InterruptProbePhase)
        }

        // Register Allocation
        BEGIN_CODEGEN_PHASE(this, Js::RegAllocPhase);

        LinearScan linearScan(this);
        linearScan.RegAlloc();

        END_CODEGEN_PHASE(this, Js::RegAllocPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::RegAllocPhase);
#endif /* IR_VIEWER */

        // Peephole optimizations
        BEGIN_CODEGEN_PHASE(this, Js::PeepsPhase);

        Peeps peeps(this);
        peeps.PeepFunc();

        END_CODEGEN_PHASE(this, Js::PeepsPhase);

        // Layout
        BEGIN_CODEGEN_PHASE(this, Js::LayoutPhase);

        SimpleLayout layout(this);
        layout.Layout();

        END_CODEGEN_PHASE(this, Js::LayoutPhase);

        if (this->HasTry() && this->hasBailoutInEHRegion)
        {
            BEGIN_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
            lowerer.EHBailoutPatchUp();
            END_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
        }

        // Insert NOPs (moving this before prolog/epilog for AMD64 and possibly ARM).
        BEGIN_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
        security.InsertNOPs();
        END_CODEGEN_PHASE(this, Js::InsertNOPsPhase);

        // Prolog/Epilog
        BEGIN_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            lowerer.LowerPrologEpilogAsmJs();
        }
        else
        {
            lowerer.LowerPrologEpilog();
        }
        END_CODEGEN_PHASE(this, Js::PrologEpilogPhase);

        BEGIN_CODEGEN_PHASE(this, Js::FinalLowerPhase);
        lowerer.FinalLower();
        END_CODEGEN_PHASE(this, Js::FinalLowerPhase);

        // Encoder
        BEGIN_CODEGEN_PHASE(this, Js::EncoderPhase);

        Encoder encoder(this);
        encoder.Encode();

        END_CODEGEN_PHASE_NO_DUMP(this, Js::EncoderPhase);

#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::EncoderPhase);
#endif /* IR_VIEWER */
    }
    END_CODEGEN_PHASE(this, Js::BackEndPhase);

#if DBG_DUMP
    // Optionally dump the generated code to the asm dump file.
    if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag))
    {
        FILE * oldFile = 0;
        FILE * asmFile = GetScriptContext()->GetNativeCodeGenerator()->asmFile;
        if (asmFile)
        {
            oldFile = Output::SetFile(asmFile);
        }

        this->Dump(IRDumpFlags_AsmDumpMode);

        Output::Flush();

        if (asmFile)
        {
            FILE *openedFile = Output::SetFile(oldFile);
            Assert(openedFile == asmFile);
        }
    }
#endif

    // For OOP JIT: serialize the native-code data (with pointer-fixup records)
    // into the IDL output buffer so the runtime process can relocate it.
    auto dataAllocator = this->GetNativeCodeDataAllocator();
    if (dataAllocator->allocCount > 0)
    {
        // fill in the fixup list by scanning the memory
        // todo: this should be done while generating code
        NativeCodeData::DataChunk *chunk = (NativeCodeData::DataChunk*)dataAllocator->chunkList;
        NativeCodeData::DataChunk *next1 = chunk;
        while (next1)
        {
            if (next1->fixupFunc)
            {
                next1->fixupFunc(next1->data, chunk);
            }
#if DBG
            // Verify that every pointer-sized slot that targets another chunk's
            // data has a corresponding fixup entry.
            NativeCodeData::DataChunk *next2 = chunk;
            while (next2)
            {
                for (unsigned int i = 0; i < next1->len / sizeof(void*); i++)
                {
                    if (((void**)next1->data)[i] == (void*)next2->data)
                    {
                        NativeCodeData::VerifyExistFixupEntry((void*)next2->data, &((void**)next1->data)[i], next1->data);
                        //NativeCodeData::AddFixupEntry((void*)next2->data, &((void**)next1->data)[i], next1->data, chunk);
                    }
                }
                next2 = next2->next;
            }
#endif
            next1 = next1->next;
        }

        ////
        JITOutputIDL* jitOutputData = m_output.GetOutputData();
        jitOutputData->nativeDataFixupTable = (NativeDataFixupTable*)midl_user_allocate(offsetof(NativeDataFixupTable, fixupRecords) + sizeof(NativeDataFixupRecord)* (dataAllocator->allocCount));
        jitOutputData->nativeDataFixupTable->count = dataAllocator->allocCount;

        jitOutputData->buffer = (NativeDataBuffer*)midl_user_allocate(offsetof(NativeDataBuffer, data) + dataAllocator->totalSize);
        jitOutputData->buffer->len = dataAllocator->totalSize;

        // Copy each chunk into the flat buffer and record its fixup metadata.
        unsigned int len = 0;
        unsigned int count = 0;
        next1 = chunk;
        while (next1)
        {
            memcpy(jitOutputData->buffer->data + len, next1->data, next1->len);
            len += next1->len;

            jitOutputData->nativeDataFixupTable->fixupRecords[count].index = next1->allocIndex;
            jitOutputData->nativeDataFixupTable->fixupRecords[count].length = next1->len;
            jitOutputData->nativeDataFixupTable->fixupRecords[count].startOffset = next1->offset;
            jitOutputData->nativeDataFixupTable->fixupRecords[count].updateList = next1->fixupList;

            count++;
            next1 = next1->next;
        }

#if DBG
        if (PHASE_TRACE1(Js::NativeCodeDataPhase))
        {
            Output::Print(L"NativeCodeData Server Buffer: %p, len: %x, chunk head: %p\n", jitOutputData->buffer->data, jitOutputData->buffer->len, chunk);
        }
#endif
    }
}
///----------------------------------------------------------------------------
/// Func::StackAllocate
/// Allocate stack space of given size.
/// Returns the frame-relative offset of the new allocation; only the top func
/// owns the stack frame, so only it may allocate. Alignment is
/// min(size, MachStackAlignment) so small allocations are not over-aligned.
///----------------------------------------------------------------------------
int32
Func::StackAllocate(int size)
{
    Assert(this->IsTopFunc());

    int32 offset;

#ifdef MD_GROW_LOCALS_AREA_UP
    // Locals have positive offsets and are allocated from bottom to top.
    m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));

    offset = m_localStackHeight;
    m_localStackHeight += size;
#else
    // Locals have negative offsets and are allocated from top to bottom.
    m_localStackHeight += size;
    m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
    offset = -m_localStackHeight;
#endif

    return offset;
}
  536. ///----------------------------------------------------------------------------
  537. ///
  538. /// Func::StackAllocate
  539. ///
  540. /// Allocate stack space for this symbol.
  541. ///
  542. ///----------------------------------------------------------------------------
  543. int32
  544. Func::StackAllocate(StackSym *stackSym, int size)
  545. {
  546. Assert(size > 0);
  547. if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym() || stackSym->IsAllocated())
  548. {
  549. return stackSym->m_offset;
  550. }
  551. Assert(stackSym->m_offset == 0);
  552. stackSym->m_allocated = true;
  553. stackSym->m_offset = StackAllocate(size);
  554. return stackSym->m_offset;
  555. }
  556. void
  557. Func::SetArgOffset(StackSym *stackSym, int32 offset)
  558. {
  559. AssertMsg(offset >= 0, "Why is the offset, negative?");
  560. stackSym->m_offset = offset;
  561. stackSym->m_allocated = true;
  562. }
  563. ///
  564. /// Ensures that local var slots are created, if the function has locals.
  565. /// Allocate stack space for locals used for debugging
  566. /// (for local non-temp vars we write-through memory so that locals inspection can make use of that.).
  567. // On stack, after local slots we allocate space for metadata (in particular, whether any the locals was changed in debugger).
  568. ///
  569. void
  570. Func::EnsureLocalVarSlots()
  571. {
  572. Assert(IsJitInDebugMode());
  573. if (!this->HasLocalVarSlotCreated())
  574. {
  575. uint32 localSlotCount = GetJITFunctionBody()->GetNonTempLocalVarCount();
  576. if (localSlotCount && m_localVarSlotsOffset == Js::Constants::InvalidOffset)
  577. {
  578. // Allocate the slots.
  579. int32 size = localSlotCount * GetDiagLocalSlotSize();
  580. m_localVarSlotsOffset = StackAllocate(size);
  581. m_hasLocalVarChangedOffset = StackAllocate(max(1, MachStackAlignment)); // Can't alloc less than StackAlignment bytes.
  582. Assert(m_workItem->Type() == JsFunctionType);
  583. m_output.SetVarSlotsOffset(AdjustOffsetValue(m_localVarSlotsOffset));
  584. m_output.SetVarChangedOffset(AdjustOffsetValue(m_hasLocalVarChangedOffset));
  585. }
  586. }
  587. }
// Computes (and records) the stack offset of this inlinee's first actual
// argument, derived from the offsets of the ArgOut syms chained below the
// given InlineeStart instruction.
void Func::SetFirstArgOffset(IR::Instr* inlineeStart)
{
    Assert(inlineeStart->m_func == this);
    Assert(!IsTopFunc());
    int32 lastOffset;

    IR::Instr* arg = inlineeStart->GetNextArg();
    const auto lastArgOutStackSym = arg->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
    lastOffset = lastArgOutStackSym->m_offset;
    Assert(lastArgOutStackSym->m_isSingleDef);
    const auto secondLastArgOutOpnd = lastArgOutStackSym->m_instrDef->GetSrc2();
    if (secondLastArgOutOpnd->IsSymOpnd())
    {
        // Take the larger of the last two ArgOut offsets — presumably the chain
        // order does not guarantee the last ArgOut has the highest offset.
        // TODO(review): confirm against ArgOut chain construction.
        const auto secondLastOffset = secondLastArgOutOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_offset;
        if (secondLastOffset > lastOffset)
        {
            lastOffset = secondLastOffset;
        }
    }
    // One slot past the highest ArgOut, minus the actuals plus the inlinee
    // meta-args, gives the offset of the first actual.
    lastOffset += MachPtr;
    int32 firstActualStackOffset = lastOffset - ((this->actualCount + Js::Constants::InlineeMetaArgCount) * MachPtr);
    // Either not yet computed (-1) or recomputation must agree.
    Assert((this->firstActualStackOffset == -1) || (this->firstActualStackOffset == firstActualStackOffset));
    this->firstActualStackOffset = firstActualStackOffset;
}
  611. int32
  612. Func::GetLocalVarSlotOffset(int32 slotId)
  613. {
  614. this->EnsureLocalVarSlots();
  615. Assert(m_localVarSlotsOffset != Js::Constants::InvalidOffset);
  616. int32 slotOffset = slotId * GetDiagLocalSlotSize();
  617. return m_localVarSlotsOffset + slotOffset;
  618. }
  619. void Func::OnAddSym(Sym* sym)
  620. {
  621. Assert(sym);
  622. if (this->IsJitInDebugMode() && this->IsNonTempLocalVar(sym->m_id))
  623. {
  624. Assert(m_nonTempLocalVars);
  625. m_nonTempLocalVars->Set(sym->m_id);
  626. }
  627. }
  628. ///
  629. /// Returns offset of the flag (1 byte) whether any local was changed (in debugger).
  630. /// If the function does not have any locals, returns -1.
  631. ///
  632. int32
  633. Func::GetHasLocalVarChangedOffset()
  634. {
  635. this->EnsureLocalVarSlots();
  636. return m_hasLocalVarChangedOffset;
  637. }
  638. bool
  639. Func::IsJitInDebugMode()
  640. {
  641. return m_workItem->IsJitInDebugMode();
  642. }
  643. bool
  644. Func::IsNonTempLocalVar(uint32 slotIndex)
  645. {
  646. return GetJITFunctionBody()->IsNonTempLocalVar(slotIndex);
  647. }
// Converts a stack sym offset into the value recorded for bailout restore.
// The adjustment direction depends on whether the target grows the locals
// area up (MD_GROW_LOCALS_AREA_UP) or down.
int32
Func::AdjustOffsetValue(int32 offset)
{
#ifdef MD_GROW_LOCALS_AREA_UP
    // Negate and bias so the encoded value is distinguishable from
    // frame-pointer-relative offsets.
    return -(offset + BailOutInfo::StackSymBias);
#else
    // Stack offset are negative, includes the PUSH EBP and return address
    return offset - (2 * MachPtr);
#endif
}
#ifdef MD_GROW_LOCALS_AREA_UP
// Note: this is called during jit-compile when we finalize bail out record.
// (Name "Ajust" is a historical typo kept for interface stability.)
void
Func::AjustLocalVarSlotOffset()
{
    if (GetJITFunctionBody()->GetNonTempLocalVarCount())
    {
        // Turn positive SP-relative base locals offset into negative frame-pointer-relative offset
        // This is changing value for restoring the locals when read due to locals inspection.
        int localsOffset = m_localVarSlotsOffset - (m_localStackHeight + m_ArgumentsOffset);
        int valueChangeOffset = m_hasLocalVarChangedOffset - (m_localStackHeight + m_ArgumentsOffset);

        // Publish the adjusted offsets on the entry point so runtime locals
        // inspection can locate the slots and the changed flag.
        Js::FunctionEntryPointInfo * entryPointInfo = static_cast<Js::FunctionEntryPointInfo*>(this->m_workItem->GetEntryPoint());
        Assert(entryPointInfo != nullptr);

        entryPointInfo->localVarSlotsOffset = localsOffset;
        entryPointInfo->localVarChangedOffset = valueChangeOffset;
    }
}
#endif
  676. bool
  677. Func::DoGlobOptsForGeneratorFunc() const
  678. {
  679. // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
  680. return !GetJITFunctionBody()->IsGenerator();
  681. }
// True when this simple-jit compile should also emit dynamic profile
// collection: requires simple jit, the SimpleJitDynamicProfile phase not
// being disabled on the top func, and the old simple-jit mode (NewSimpleJit off).
bool
Func::DoSimpleJitDynamicProfile() const
{
    return IsSimpleJit() && !PHASE_OFF(Js::SimpleJitDynamicProfilePhase, GetTopFunc()) && !CONFIG_FLAG(NewSimpleJit);
}
  687. void
  688. Func::SetDoFastPaths()
  689. {
  690. // Make sure we only call this once!
  691. Assert(!this->hasCalledSetDoFastPaths);
  692. bool doFastPaths = false;
  693. if(!PHASE_OFF(Js::FastPathPhase, this) && (!IsSimpleJit() || CONFIG_FLAG(NewSimpleJit)))
  694. {
  695. doFastPaths = true;
  696. }
  697. this->m_doFastPaths = doFastPaths;
  698. #ifdef DBG
  699. this->hasCalledSetDoFastPaths = true;
  700. #endif
  701. }
#ifdef _M_ARM
// Returns the register used to address the locals area on ARM.
// Functions with try blocks (or the debug override flag) use the alternative
// locals pointer register; everything else addresses locals off SP.
RegNum
Func::GetLocalsPointer() const
{
#ifdef DBG
    // Test hook: force the alternative locals pointer regardless of try blocks.
    if (Js::Configuration::Global.flags.IsEnabled(Js::ForceLocalsPtrFlag))
    {
        return ALT_LOCALS_PTR;
    }
#endif

    if (GetJITFunctionBody()->HasTry())
    {
        return ALT_LOCALS_PTR;
    }

    return RegSP;
}
#endif
// Records (on the top func) that a closure slot-array access through this
// field needs a range check, keeping the largest slot index requested per
// slot-array stack sym.
void Func::AddSlotArrayCheck(IR::SymOpnd *fieldOpnd)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    // Lazily create the table on first use.
    if (this->slotArrayCheckTable == nullptr)
    {
        this->slotArrayCheckTable = SlotArrayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    // For closure slot accesses the property id is the slot index.
    uint32 slot = propertySym->m_propertyId;
    // Keyed by the slot array's stack sym id; value is the max slot index seen.
    uint32 *pSlotId = this->slotArrayCheckTable->FindOrInsert(slot, propertySym->m_stackSym->m_id);
    if (pSlotId && (*pSlotId == (uint32)-1 || *pSlotId < slot))
    {
        *pSlotId = propertySym->m_propertyId;
    }
}
// Records (on the top func) that a frame-display access needs a range check.
// Tracks, per frame-display stack sym, the deepest frame-display slot touched
// and optionally (when slotId != -1) the max slot index within that scope's
// slot array.
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    // Lazily create the outer table on first use.
    if (this->frameDisplayCheckTable == nullptr)
    {
        this->frameDisplayCheckTable = FrameDisplayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    // One record per frame-display stack sym.
    FrameDisplayCheckRecord **record = this->frameDisplayCheckTable->FindOrInsertNew(propertySym->m_stackSym->m_id);
    if (*record == nullptr)
    {
        *record = JitAnew(m_alloc, FrameDisplayCheckRecord);
    }

    // The property id is the slot within the frame display; keep the max.
    uint32 frameDisplaySlot = propertySym->m_propertyId;
    if ((*record)->table == nullptr || (*record)->slotId < frameDisplaySlot)
    {
        (*record)->slotId = frameDisplaySlot;
    }

    // When a slot-array index was supplied, track its max per frame-display slot.
    if (slotId != (uint32)-1)
    {
        if ((*record)->table == nullptr)
        {
            (*record)->table = SlotArrayCheckTable::New(m_alloc, 4);
        }
        uint32 *pSlotId = (*record)->table->FindOrInsert(slotId, frameDisplaySlot);
        if (pSlotId && *pSlotId < slotId)
        {
            *pSlotId = slotId;
        }
    }
}
// Creates the stack syms for the local closure, the (unmerged) param closure,
// and the local frame display, when the body assigns registers to them.
void Func::InitLocalClosureSyms()
{
    Assert(this->m_localClosureSym == nullptr);

    // Allocate stack space for closure pointers. Do this only if we're jitting for stack closures, and
    // tell bailout that these are not byte code symbols so that we don't try to encode them in the bailout record,
    // as they don't have normal lifetimes.
    Js::RegSlot regSlot = GetJITFunctionBody()->GetLocalClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // Byte-code reg slot of -1 hides the sym from the bailout encoder when
        // doing stack frame display.
        this->m_localClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }

    regSlot = this->GetJITFunctionBody()->GetParamClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // A separate param closure only exists when param and body scopes are unmerged.
        Assert(this->GetParamClosureSym() == nullptr && !this->GetJITFunctionBody()->IsParamAndBodyScopeMerged());
        this->m_paramClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot) - 1 : regSlot,
                                   this);
    }

    regSlot = GetJITFunctionBody()->GetLocalFrameDisplayReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localFrameDisplaySym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }
}
// Whether jitted code for this func may be allocated in the pre-reserved heap
// page segment (a CFG-related optimization). Requires CFG to be enabled and
// excludes debug-mode compilations; architecture support varies below.
bool Func::CanAllocInPreReservedHeapPageSegment ()
{
#ifdef _CONTROL_FLOW_GUARD
    return PHASE_FORCE1(Js::PreReservedHeapAllocPhase) || (!PHASE_OFF1(Js::PreReservedHeapAllocPhase) &&
        !IsJitInDebugMode() && !GetScriptContext()->IsScriptContextInDebugMode() && GetScriptContext()->GetThreadContext()->IsCFGEnabled()
#if _M_IX86
        // x86: only for full-jit work items with a pre-reserved-segment-capable allocator.
        && m_workItem->GetJitMode() == ExecutionMode::FullJit && GetCodeGenAllocators()->canCreatePreReservedSegment);
#elif _M_X64
        && true);
#else
        && false); //Not yet implemented for architectures other than x86 and amd64.
#endif  //_M_ARCH
#else
    // Without CFG there is no pre-reserved segment to allocate from.
    return false;
#endif//_CONTROL_FLOW_GUARD
}
///----------------------------------------------------------------------------
///
/// Func::GetInstrCount
///
///     Returns the number of instrs.
///     Note: It counts all instrs for now, including labels, etc.
///
///----------------------------------------------------------------------------
uint32
Func::GetInstrCount()
{
    uint instrCount = 0;
    // Walks every instruction in this func (and inlinees) once.
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instrCount++;
    }NEXT_INSTR_IN_FUNC;

    return instrCount;
}
///----------------------------------------------------------------------------
///
/// Func::NumberInstrs
///
///     Number each instruction in order of appearance in the function.
///
///----------------------------------------------------------------------------
void
Func::NumberInstrs()
{
#if DBG_DUMP
    // Numbering happens once, on the top func only.
    Assert(this->IsTopFunc());
    Assert(!this->hasInstrNumber);
    this->hasInstrNumber = true;
#endif
    // Numbers start at 1 and increase in program order.
    uint instrCount = 1;

    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instr->SetNumber(instrCount++);
    }
    NEXT_INSTR_IN_FUNC;
}
///----------------------------------------------------------------------------
///
/// Func::IsInPhase
///
/// Determines whether the function is currently in the provided phase
/// (the phase stack lives on the top func).
///
///----------------------------------------------------------------------------
#if DBG
bool
Func::IsInPhase(Js::Phase tag)
{
    return this->GetTopFunc()->currentPhases.Contains(tag);
}
#endif
///----------------------------------------------------------------------------
///
/// Func::BeginPhase
///
/// Pushes the phase on the debug phase stack and starts the profiler timer
/// for the phase, when profiling is enabled.
///
///----------------------------------------------------------------------------
void
Func::BeginPhase(Js::Phase tag)
{
#ifdef DBG
    this->GetTopFunc()->currentPhases.Push(tag);
#endif

#ifdef PROFILE_EXEC
    // The profiler pointer must exist exactly when the -Profile flag is on.
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileBegin(tag);
    }
#endif
}
///----------------------------------------------------------------------------
///
/// Func::EndProfiler
///
/// Counterpart of BeginPhase: pops (and verifies) the phase from the debug
/// phase stack and stops the profiler timer for the phase. Called by EndPhase.
///
///----------------------------------------------------------------------------
void
Func::EndProfiler(Js::Phase tag)
{
#ifdef DBG
    // Phases must end in LIFO order.
    Assert(this->GetTopFunc()->currentPhases.Count() > 0);
    Js::Phase popped = this->GetTopFunc()->currentPhases.Pop();
    Assert(tag == popped);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileEnd(tag);
    }
#endif
}
// Ends a compilation phase: stops profiling, optionally dumps the IR, and
// (in debug builds) records phase-completion state and runs post-lower checks.
void
Func::EndPhase(Js::Phase tag, bool dump)
{
    this->EndProfiler(tag);
#if DBG_DUMP
    // Dump the IR after this phase when either the phase or the whole backend
    // is flagged for dumping.
    if(dump && (PHASE_DUMP(tag, this)
        || PHASE_DUMP(Js::BackEndPhase, this)))
    {
        Output::Print(_u("-----------------------------------------------------------------------------\n"));

        if (IsLoopBody())
        {
            Output::Print(_u("************ IR after %s (%S) Loop %d ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()),
                m_workItem->GetLoopNumber());
        }
        else
        {
            Output::Print(_u("************ IR after %s (%S) ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()));
        }
        this->Dump(Js::Configuration::Global.flags.AsmDiff? IRDumpFlags_AsmDumpMode : IRDumpFlags_None);
    }
#endif

#if DBG
    // Track which major phases have completed, asserting the expected ordering
    // (Lower -> Peeps -> Layout -> FinalLower) and no double-completion.
    if (tag == Js::LowererPhase)
    {
        Assert(!this->isPostLower);
        this->isPostLower = true;
    }
    else if (tag == Js::RegAllocPhase)
    {
        Assert(!this->isPostRegAlloc);
        this->isPostRegAlloc = true;
    }
    else if (tag == Js::PeepsPhase)
    {
        Assert(this->isPostLower && !this->isPostLayout);
        this->isPostPeeps = true;
    }
    else if (tag == Js::LayoutPhase)
    {
        Assert(this->isPostPeeps && !this->isPostLayout);
        this->isPostLayout = true;
    }
    else if (tag == Js::FinalLowerPhase)
    {
        Assert(this->isPostLayout && !this->isPostFinalLower);
        this->isPostFinalLower = true;
    }

    // Once lowered, verify machine-level IR invariants after every phase.
    if (this->isPostLower)
    {
#ifndef _M_ARM    // Need to verify ARM is clean.
        DbCheckPostLower dbCheck(this);
        dbCheck.Check();
#endif
    }
#endif
}
  981. Func const *
  982. Func::GetTopFunc() const
  983. {
  984. Func const * func = this;
  985. while (!func->IsTopFunc())
  986. {
  987. func = func->parentFunc;
  988. }
  989. return func;
  990. }
  991. Func *
  992. Func::GetTopFunc()
  993. {
  994. Func * func = this;
  995. while (!func->IsTopFunc())
  996. {
  997. func = func->parentFunc;
  998. }
  999. return func;
  1000. }
  1001. StackSym *
  1002. Func::EnsureLoopParamSym()
  1003. {
  1004. if (this->m_loopParamSym == nullptr)
  1005. {
  1006. this->m_loopParamSym = StackSym::New(TyMachPtr, this);
  1007. }
  1008. return this->m_loopParamSym;
  1009. }
  1010. void
  1011. Func::UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount)
  1012. {
  1013. if (maxInlineeArgOutCount < inlineeArgOutCount)
  1014. {
  1015. maxInlineeArgOutCount = inlineeArgOutCount;
  1016. }
  1017. }
  1018. void
  1019. Func::BeginClone(Lowerer * lowerer, JitArenaAllocator *alloc)
  1020. {
  1021. Assert(this->IsTopFunc());
  1022. AssertMsg(m_cloner == nullptr, "Starting new clone while one is in progress");
  1023. m_cloner = JitAnew(alloc, Cloner, lowerer, alloc);
  1024. if (m_cloneMap == nullptr)
  1025. {
  1026. m_cloneMap = JitAnew(alloc, InstrMap, alloc, 7);
  1027. }
  1028. }
  1029. void
  1030. Func::EndClone()
  1031. {
  1032. Assert(this->IsTopFunc());
  1033. if (m_cloner)
  1034. {
  1035. m_cloner->Finish();
  1036. JitAdelete(m_cloner->alloc, m_cloner);
  1037. m_cloner = nullptr;
  1038. }
  1039. }
// Builds a machine-reg sym opnd addressing this inlinee's frame area at the
// given byte offset from the frame start.
IR::SymOpnd *
Func::GetInlineeOpndAtOffset(int32 offset)
{
    Assert(IsInlinee());

    StackSym *stackSym = CreateInlineeStackSym();
    // Shift the sym from the frame start to the requested offset.
    this->SetArgOffset(stackSym, stackSym->m_offset + offset);
    Assert(stackSym->m_offset >= 0);

    return IR::SymOpnd::New(stackSym, 0, TyMachReg, this);
}
// Creates a pre-allocated arg-slot stack sym positioned at the start of this
// inlinee's frame area.
StackSym *
Func::CreateInlineeStackSym()
{
    // Make sure this is an inlinee and that GlobOpt has initialized the offset
    // in the inlinee's frame.
    Assert(IsInlinee());
    Assert(m_inlineeFrameStartSym->m_offset != -1);

    // Slot number -1 marks a synthesized (non-bytecode) arg slot.
    StackSym *stackSym = m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
    stackSym->m_isInlinedArgSlot = true;
    stackSym->m_offset = m_inlineeFrameStartSym->m_offset;
    stackSym->m_allocated = true;
    return stackSym;
}
  1062. uint16
  1063. Func::GetArgUsedForBranch() const
  1064. {
  1065. // this value can change while JITing, so or these together
  1066. return GetJITFunctionBody()->GetArgUsedForBranch() | GetJITOutput()->GetArgUsedForBranch();
  1067. }
  1068. intptr_t
  1069. Func::GetJittedLoopIterationsSinceLastBailoutAddress() const
  1070. {
  1071. Assert(this->m_workItem->Type() == JsLoopBodyWorkItemType);
  1072. return m_workItem->GetJittedLoopIterationsSinceLastBailoutAddr();
  1073. }
// Address of the weak reference to the function object.
intptr_t
Func::GetWeakFuncRef() const
{
    // TODO: OOP JIT figure out if this can be null
    return m_workItem->GetJITTimeInfo()->GetWeakFuncRef();
}
  1080. intptr_t
  1081. Func::GetRuntimeInlineCache(const uint index) const
  1082. {
  1083. if(m_runtimeInfo != nullptr && m_runtimeInfo->HasClonedInlineCaches())
  1084. {
  1085. intptr_t inlineCache = m_runtimeInfo->GetClonedInlineCache(index);
  1086. if(inlineCache)
  1087. {
  1088. return inlineCache;
  1089. }
  1090. }
  1091. return GetJITFunctionBody()->GetInlineCache(index);
  1092. }
  1093. JITTimePolymorphicInlineCache *
  1094. Func::GetRuntimePolymorphicInlineCache(const uint index) const
  1095. {
  1096. if (this->m_polymorphicInlineCacheInfo && this->m_polymorphicInlineCacheInfo->HasInlineCaches())
  1097. {
  1098. return this->m_polymorphicInlineCacheInfo->GetInlineCache(index);
  1099. }
  1100. return nullptr;
  1101. }
  1102. byte
  1103. Func::GetPolyCacheUtilToInitialize(const uint index) const
  1104. {
  1105. return this->GetRuntimePolymorphicInlineCache(index) ? this->GetPolyCacheUtil(index) : PolymorphicInlineCacheUtilizationMinValue;
  1106. }
// Recorded utilization byte for the polymorphic inline cache at `index`.
// Assumes m_polymorphicInlineCacheInfo is non-null (callers such as
// GetPolyCacheUtilToInitialize check via GetRuntimePolymorphicInlineCache first).
byte
Func::GetPolyCacheUtil(const uint index) const
{
    return this->m_polymorphicInlineCacheInfo->GetUtil(index);
}
// Object-type-specialization field info for the given inline cache index.
// A body with no inline caches cannot have such info; that path is unreachable.
JITObjTypeSpecFldInfo*
Func::GetObjTypeSpecFldInfo(const uint index) const
{
    if (GetJITFunctionBody()->GetInlineCacheCount() == 0)
    {
        Assert(UNREACHED);
        return nullptr;
    }

    return GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(index);
}
// Looks up the global obj-type-spec field info by id; the array lives on the
// top func and the id must be in range.
JITObjTypeSpecFldInfo*
Func::GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const
{
    Assert(propertyInfoId < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    return GetTopFunc()->m_globalObjTypeSpecFldInfoArray[propertyInfoId];
}
  1128. void
  1129. Func::EnsurePinnedTypeRefs()
  1130. {
  1131. if (this->pinnedTypeRefs == nullptr)
  1132. {
  1133. this->pinnedTypeRefs = JitAnew(this->m_alloc, TypeRefSet, this->m_alloc);
  1134. }
  1135. }
  1136. void
  1137. Func::PinTypeRef(void* typeRef)
  1138. {
  1139. EnsurePinnedTypeRefs();
  1140. this->pinnedTypeRefs->AddNew(typeRef);
  1141. }
  1142. void
  1143. Func::EnsureSingleTypeGuards()
  1144. {
  1145. if (this->singleTypeGuards == nullptr)
  1146. {
  1147. this->singleTypeGuards = JitAnew(this->m_alloc, TypePropertyGuardDictionary, this->m_alloc);
  1148. }
  1149. }
// Returns the (unique) type property guard for the given type address,
// creating and registering one on first request.
Js::JitTypePropertyGuard*
Func::GetOrCreateSingleTypeGuard(intptr_t typeAddr)
{
    EnsureSingleTypeGuards();

    Js::JitTypePropertyGuard* guard;
    if (!this->singleTypeGuards->TryGetValue(typeAddr, &guard))
    {
        // Property guards are allocated by NativeCodeData::Allocator so that their lifetime extends as long as the EntryPointInfo is alive.
        guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitTypePropertyGuard, typeAddr, this->indexedPropertyGuardCount++);
        this->singleTypeGuards->Add(typeAddr, guard);
    }
    else
    {
        // Cached guard must match the requested type.
        Assert(guard->GetTypeAddr() == typeAddr);
    }

    return guard;
}
  1167. void
  1168. Func::EnsureEquivalentTypeGuards()
  1169. {
  1170. if (this->equivalentTypeGuards == nullptr)
  1171. {
  1172. this->equivalentTypeGuards = JitAnew(this->m_alloc, EquivalentTypeGuardList, this->m_alloc);
  1173. }
  1174. }
// Creates a new equivalent-type guard for `type`, wires up its equivalent-type
// cache (arena-allocated for OOP JIT, native-code-data otherwise), and
// registers it on this func.
Js::JitEquivalentTypeGuard*
Func::CreateEquivalentTypeGuard(JITTypeHolder type, uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();

    Js::JitEquivalentTypeGuard* guard = NativeCodeDataNew(GetNativeCodeDataAllocator(), Js::JitEquivalentTypeGuard, type.t->GetAddr(), this->indexedPropertyGuardCount++, objTypeSpecFldId);

    // If we want to hard code the address of the cache, we will need to go back to allocating it from the native code data allocator.
    // We would then need to maintain consistency (double write) to both the recycler allocated cache and the one on the heap.
    Js::EquivalentTypeCache* cache = nullptr;
    if (this->IsOOPJIT())
    {
        cache = JitAnewZ(this->m_alloc, Js::EquivalentTypeCache);
    }
    else
    {
        cache = NativeCodeDataNewZ(GetTransferDataAllocator(), Js::EquivalentTypeCache);
    }
    guard->SetCache(cache);

    // Give the cache a back-pointer to the guard so that the guard can be cleared at runtime if necessary.
    cache->SetGuard(guard);
    this->equivalentTypeGuards->Prepend(guard);

    return guard;
}
  1197. void
  1198. Func::EnsurePropertyGuardsByPropertyId()
  1199. {
  1200. if (this->propertyGuardsByPropertyId == nullptr)
  1201. {
  1202. this->propertyGuardsByPropertyId = JitAnew(this->m_alloc, PropertyGuardByPropertyIdMap, this->m_alloc);
  1203. }
  1204. }
  1205. void
  1206. Func::EnsureCtorCachesByPropertyId()
  1207. {
  1208. if (this->ctorCachesByPropertyId == nullptr)
  1209. {
  1210. this->ctorCachesByPropertyId = JitAnew(this->m_alloc, CtorCachesByPropertyIdMap, this->m_alloc);
  1211. }
  1212. }
  1213. void
  1214. Func::LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard)
  1215. {
  1216. Assert(guard != nullptr);
  1217. Assert(guard->GetValue() != NULL);
  1218. Assert(this->propertyGuardsByPropertyId != nullptr);
  1219. IndexedPropertyGuardSet* set;
  1220. if (!this->propertyGuardsByPropertyId->TryGetValue(propertyId, &set))
  1221. {
  1222. set = JitAnew(this->m_alloc, IndexedPropertyGuardSet, this->m_alloc);
  1223. this->propertyGuardsByPropertyId->Add(propertyId, set);
  1224. }
  1225. set->Item(guard);
  1226. }
  1227. void
  1228. Func::LinkCtorCacheToPropertyId(Js::PropertyId propertyId, JITTimeConstructorCache* cache)
  1229. {
  1230. Assert(cache != nullptr);
  1231. Assert(this->ctorCachesByPropertyId != nullptr);
  1232. CtorCacheSet* set;
  1233. if (!this->ctorCachesByPropertyId->TryGetValue(propertyId, &set))
  1234. {
  1235. set = JitAnew(this->m_alloc, CtorCacheSet, this->m_alloc);
  1236. this->ctorCachesByPropertyId->Add(propertyId, set);
  1237. }
  1238. set->Item(cache->GetRuntimeCacheAddr());
  1239. }
  1240. JITTimeConstructorCache* Func::GetConstructorCache(const Js::ProfileId profiledCallSiteId)
  1241. {
  1242. Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
  1243. Assert(this->constructorCaches != nullptr);
  1244. return this->constructorCaches[profiledCallSiteId];
  1245. }
  1246. void Func::SetConstructorCache(const Js::ProfileId profiledCallSiteId, JITTimeConstructorCache* constructorCache)
  1247. {
  1248. Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
  1249. Assert(constructorCache != nullptr);
  1250. Assert(this->constructorCaches != nullptr);
  1251. Assert(this->constructorCaches[profiledCallSiteId] == nullptr);
  1252. this->constructorCacheCount++;
  1253. this->constructorCaches[profiledCallSiteId] = constructorCache;
  1254. }
  1255. void Func::EnsurePropertiesWrittenTo()
  1256. {
  1257. if (this->propertiesWrittenTo == nullptr)
  1258. {
  1259. this->propertiesWrittenTo = JitAnew(this->m_alloc, PropertyIdSet, this->m_alloc);
  1260. }
  1261. }
  1262. void Func::EnsureCallSiteToArgumentsOffsetFixupMap()
  1263. {
  1264. if (this->callSiteToArgumentsOffsetFixupMap == nullptr)
  1265. {
  1266. this->callSiteToArgumentsOffsetFixupMap = JitAnew(this->m_alloc, CallSiteToArgumentsOffsetFixupMap, this->m_alloc);
  1267. }
  1268. }
  1269. IR::LabelInstr *
  1270. Func::GetFuncStartLabel()
  1271. {
  1272. return m_funcStartLabel;
  1273. }
  1274. IR::LabelInstr *
  1275. Func::EnsureFuncStartLabel()
  1276. {
  1277. if(m_funcStartLabel == nullptr)
  1278. {
  1279. m_funcStartLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1280. }
  1281. return m_funcStartLabel;
  1282. }
  1283. IR::LabelInstr *
  1284. Func::GetFuncEndLabel()
  1285. {
  1286. return m_funcEndLabel;
  1287. }
  1288. IR::LabelInstr *
  1289. Func::EnsureFuncEndLabel()
  1290. {
  1291. if(m_funcEndLabel == nullptr)
  1292. {
  1293. m_funcEndLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1294. }
  1295. return m_funcEndLabel;
  1296. }
  1297. void
  1298. Func::EnsureStackArgWithFormalsTracker()
  1299. {
  1300. if (stackArgWithFormalsTracker == nullptr)
  1301. {
  1302. stackArgWithFormalsTracker = JitAnew(m_alloc, StackArgWithFormalsTracker, m_alloc);
  1303. }
  1304. }
  1305. BOOL
  1306. Func::IsFormalsArraySym(SymID symId)
  1307. {
  1308. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsArraySyms() == nullptr)
  1309. {
  1310. return false;
  1311. }
  1312. return stackArgWithFormalsTracker->GetFormalsArraySyms()->Test(symId);
  1313. }
  1314. void
  1315. Func::TrackFormalsArraySym(SymID symId)
  1316. {
  1317. EnsureStackArgWithFormalsTracker();
  1318. stackArgWithFormalsTracker->SetFormalsArraySyms(symId);
  1319. }
  1320. void
  1321. Func::TrackStackSymForFormalIndex(Js::ArgSlot formalsIndex, StackSym * sym)
  1322. {
  1323. EnsureStackArgWithFormalsTracker();
  1324. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1325. stackArgWithFormalsTracker->SetStackSymInFormalsIndexMap(sym, formalsIndex, formalsCount);
  1326. }
  1327. StackSym *
  1328. Func::GetStackSymForFormal(Js::ArgSlot formalsIndex)
  1329. {
  1330. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1331. {
  1332. return nullptr;
  1333. }
  1334. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1335. StackSym ** formalsIndexToStackSymMap = stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap();
  1336. AssertMsg(formalsIndex < formalsCount, "OutOfRange ? ");
  1337. return formalsIndexToStackSymMap[formalsIndex];
  1338. }
  1339. bool
  1340. Func::HasStackSymForFormal(Js::ArgSlot formalsIndex)
  1341. {
  1342. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1343. {
  1344. return false;
  1345. }
  1346. return GetStackSymForFormal(formalsIndex) != nullptr;
  1347. }
  1348. void
  1349. Func::SetScopeObjSym(StackSym * sym)
  1350. {
  1351. EnsureStackArgWithFormalsTracker();
  1352. stackArgWithFormalsTracker->SetScopeObjSym(sym);
  1353. }
  1354. StackSym*
  1355. Func::GetScopeObjSym()
  1356. {
  1357. if (stackArgWithFormalsTracker == nullptr)
  1358. {
  1359. return nullptr;
  1360. }
  1361. return stackArgWithFormalsTracker->GetScopeObjSym();
  1362. }
  1363. BVSparse<JitArenaAllocator> *
  1364. StackArgWithFormalsTracker::GetFormalsArraySyms()
  1365. {
  1366. return formalsArraySyms;
  1367. }
  1368. void
  1369. StackArgWithFormalsTracker::SetFormalsArraySyms(SymID symId)
  1370. {
  1371. if (formalsArraySyms == nullptr)
  1372. {
  1373. formalsArraySyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1374. }
  1375. formalsArraySyms->Set(symId);
  1376. }
  1377. StackSym **
  1378. StackArgWithFormalsTracker::GetFormalsIndexToStackSymMap()
  1379. {
  1380. return formalsIndexToStackSymMap;
  1381. }
  1382. void
  1383. StackArgWithFormalsTracker::SetStackSymInFormalsIndexMap(StackSym * sym, Js::ArgSlot formalsIndex, Js::ArgSlot formalsCount)
  1384. {
  1385. if(formalsIndexToStackSymMap == nullptr)
  1386. {
  1387. formalsIndexToStackSymMap = JitAnewArrayZ(alloc, StackSym*, formalsCount);
  1388. }
  1389. AssertMsg(formalsIndex < formalsCount, "Out of range ?");
  1390. formalsIndexToStackSymMap[formalsIndex] = sym;
  1391. }
  1392. void
  1393. StackArgWithFormalsTracker::SetScopeObjSym(StackSym * sym)
  1394. {
  1395. m_scopeObjSym = sym;
  1396. }
  1397. StackSym *
  1398. StackArgWithFormalsTracker::GetScopeObjSym()
  1399. {
  1400. return m_scopeObjSym;
  1401. }
// Extends the tracked [instrFirst, instrLast] range of cloned instructions.
// Note: instrOrig is currently unused here; only the clone is recorded.
void
Cloner::AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone)
{
    // First clone seen becomes the range start.
    if (!this->instrFirst)
    {
        this->instrFirst = instrClone;
    }
    this->instrLast = instrClone;
}
  1411. void
  1412. Cloner::Finish()
  1413. {
  1414. this->RetargetClonedBranches();
  1415. if (this->lowerer)
  1416. {
  1417. lowerer->LowerRange(this->instrFirst, this->instrLast, false, false);
  1418. }
  1419. }
// Retargets every branch in the cloned instruction range to its cloned label,
// when the session requested branch retargeting.
void
Cloner::RetargetClonedBranches()
{
    if (!this->fRetargetClonedBranch)
    {
        return;
    }

    FOREACH_INSTR_IN_RANGE(instr, this->instrFirst, this->instrLast)
    {
        if (instr->IsBranchInstr())
        {
            instr->AsBranchInstr()->RetargetClonedBranch();
        }
    }
    NEXT_INSTR_IN_RANGE;
}
// Returns an indir opnd [baseReg + offset] addressing `address`, reusing an
// already-hoisted constant-address base register when the delta fits in 32
// bits; otherwise hoists a new load of `address` near the function entry.
// Returns nullptr when hoisting is no longer allowed (post-lower).
IR::IndirOpnd * Func::GetConstantAddressIndirOpnd(intptr_t address, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode)
{
    Assert(this->GetTopFunc() == this);
    if (!canHoistConstantAddressLoad)
    {
        // We can't hoist constant address load after lower, as we can't mark the sym as
        // live on back edge
        return nullptr;
    }
    int offset = 0;
    // Search existing hoisted address loads for one whose address is within a
    // 32-bit displacement of the requested address.
    IR::RegOpnd ** foundRegOpnd = this->constantAddressRegOpnd.Find([address, &offset](IR::RegOpnd * regOpnd)
    {
        Assert(regOpnd->m_sym->IsSingleDef());
        void * curr = regOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address;
        ptrdiff_t diff = (intptr_t)address - (intptr_t)curr;
        if (!Math::FitsInDWord(diff))
        {
            return false;
        }

        offset = (int)diff;
        return true;
    });

    IR::RegOpnd * addressRegOpnd;
    if (foundRegOpnd != nullptr)
    {
        addressRegOpnd = *foundRegOpnd;
    }
    else
    {
        // No reusable base: hoist a fresh load of the exact address.
        Assert(offset == 0);
        addressRegOpnd = IR::RegOpnd::New(TyMachPtr, this);
        IR::Instr *const newInstr =
            IR::Instr::New(
                loadOpCode,
                addressRegOpnd,
                IR::AddrOpnd::New(address, kind, this, true),
                this);
        this->constantAddressRegOpnd.Prepend(addressRegOpnd);
        // Chain the load after the previous hoisted load, or at the function
        // entry insertion point for the first one.
        IR::Instr * insertBeforeInstr = this->lastConstantAddressRegLoadInstr;
        if (insertBeforeInstr == nullptr)
        {
            insertBeforeInstr = this->GetFunctionEntryInsertionPoint();
            this->lastConstantAddressRegLoadInstr = newInstr;
        }
        insertBeforeInstr->InsertBefore(newInstr);
    }
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(addressRegOpnd, offset, type, this, true);
#if DBG_DUMP
    // TODO: michhol make intptr_t
    indirOpnd->SetAddrKind(kind, (void*)address);
#endif
    return indirOpnd;
}
  1489. void Func::MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv)
  1490. {
  1491. Assert(this->GetTopFunc() == this);
  1492. this->constantAddressRegOpnd.Iterate([bv](IR::RegOpnd * regOpnd)
  1493. {
  1494. bv->Set(regOpnd->m_sym->m_id);
  1495. });
  1496. }
  1497. IR::Instr *
  1498. Func::GetFunctionEntryInsertionPoint()
  1499. {
  1500. Assert(this->GetTopFunc() == this);
  1501. IR::Instr * insertInsert = this->lastConstantAddressRegLoadInstr;
  1502. if (insertInsert != nullptr)
  1503. {
  1504. return insertInsert->m_next;
  1505. }
  1506. insertInsert = this->m_headInstr;
  1507. if (this->HasTry())
  1508. {
  1509. // Insert it inside the root region
  1510. insertInsert = insertInsert->m_next;
  1511. Assert(insertInsert->IsLabelInstr() && insertInsert->AsLabelInstr()->GetRegion()->GetType() == RegionTypeRoot);
  1512. }
  1513. return insertInsert->m_next;
  1514. }
// Allocates a boxed JavaScript number for `value` during code generation.
// With FLOATVAR, numbers are NaN-boxed and no heap allocation is needed;
// otherwise in-proc JIT allocates via the number allocator, and OOP JIT
// allocates in the target process via the cross-process allocator.
Js::Var
Func::AllocateNumber(double value)
{
    Js::Var number = nullptr;
#if FLOATVAR
    number = Js::JavascriptNumber::NewCodeGenInstance(GetNumberAllocator(), (double)value, GetScriptContext());
#else
    if (!IsOOPJIT()) // in-proc jit
    {
        number = Js::JavascriptNumber::NewCodeGenInstance(GetNumberAllocator(), (double)value, GetScriptContext());
    }
    else // OOP JIT
    {
        // NOTE(review): the return value of AllocateNumber isn't assigned to
        // `number` here, so this path appears to return nullptr — confirm
        // against callers/the allocator's contract.
        GetXProcNumberAllocator()->AllocateNumber(this->GetThreadContextInfo()->GetProcessHandle(),
            value,
            (Js::StaticType*)this->GetScriptContextInfo()->GetNumberTypeStaticAddr(),
            (void*)this->GetScriptContextInfo()->GetVTableAddress(VTableValue::VtableJavascriptNumber));
    }
#endif

    return number;
}
  1536. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1537. void
  1538. Func::DumpFullFunctionName()
  1539. {
  1540. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1541. Output::Print(L"Function %s (%s)", GetJITFunctionBody()->GetDisplayName(), GetDebugNumberSet(debugStringBuffer));
  1542. }
  1543. #endif
#if DBG_DUMP

///----------------------------------------------------------------------------
///
/// Func::DumpHeader
///
///     Prints the dump banner: separator line, function name, instruction
///     count, and (when known) the emitted code size.
///
///----------------------------------------------------------------------------
void
Func::DumpHeader()
{
    Output::Print(_u("-----------------------------------------------------------------------------\n"));
    DumpFullFunctionName();

    Output::SkipToColumn(50);
    Output::Print(_u("Instr Count:%d"), GetInstrCount());

    // m_codeSize is only meaningful after encoding.
    if(m_codeSize > 0)
    {
        Output::Print(_u("\t\tSize:%d\n\n"), m_codeSize);
    }
    else
    {
        Output::Print(_u("\n\n"));
    }
}
///----------------------------------------------------------------------------
///
/// Func::Dump
///
///     Dumps the header followed by every instruction (with its glob-opt
///     string, when available), honoring the given dump flags.
///
///----------------------------------------------------------------------------
void
Func::Dump(IRDumpFlags flags)
{
    this->DumpHeader();

    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instr->DumpGlobOptInstrString();
        instr->Dump(flags);
    }NEXT_INSTR_IN_FUNC;

    Output::Flush();
}
  1582. void
  1583. Func::Dump()
  1584. {
  1585. this->Dump(IRDumpFlags_None);
  1586. }
  1587. #endif
  1588. #if DBG_DUMP || defined(ENABLE_IR_VIEWER)
  1589. LPCSTR
  1590. Func::GetVtableName(INT_PTR address)
  1591. {
  1592. #if DBG
  1593. if (vtableMap == nullptr)
  1594. {
  1595. vtableMap = VirtualTableRegistry::CreateVtableHashMap(this->m_alloc);
  1596. };
  1597. LPCSTR name = vtableMap->Lookup(address, nullptr);
  1598. if (name)
  1599. {
  1600. if (strncmp(name, "class ", _countof("class ") - 1) == 0)
  1601. {
  1602. name += _countof("class ") - 1;
  1603. }
  1604. }
  1605. return name;
  1606. #else
  1607. return "";
  1608. #endif
  1609. }
  1610. #endif
  1611. #if DBG_DUMP | defined(VTUNE_PROFILING)
  1612. bool Func::DoRecordNativeMap() const
  1613. {
  1614. #if defined(VTUNE_PROFILING)
  1615. if (VTuneChakraProfile::isJitProfilingActive)
  1616. {
  1617. return true;
  1618. }
  1619. #endif
  1620. #if DBG_DUMP
  1621. return PHASE_DUMP(Js::EncoderPhase, this) && Js::Configuration::Global.flags.Verbose;
  1622. #else
  1623. return false;
  1624. #endif
  1625. }
  1626. #endif