2
0

Func.cpp 64 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Base/EtwTrace.h"
  7. #include "Base/ScriptContextProfiler.h"
  8. #ifdef VTUNE_PROFILING
  9. #include "Base/VTuneChakraProfile.h"
  10. #endif
  11. #include "Library/ForInObjectEnumerator.h"
//
// Func is the per-function unit of JIT compilation state: one Func for the
// top-level function being jitted, plus one per inlinee, linked through
// parentFunc/topFunc. The constructor only wires up state and validates the
// incoming work item; actual code generation happens in Func::Codegen /
// Func::TryCodegen. Several structures (symbol table, inlinee id counter,
// global obj-type-spec-field array) live only on the top Func and are
// reached via GetTopFunc().
//
Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
    ThreadContextInfo * threadContextInfo,
    ScriptContextInfo * scriptContextInfo,
    JITOutputIDL * outputData,
    Js::EntryPointInfo* epInfo,
    const FunctionJITRuntimeInfo *const runtimeInfo,
    JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
#if !FLOATVAR
    CodeGenNumberAllocator * numberAllocator,
#endif
    Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc,
    uint postCallByteCodeOffset, Js::RegSlot returnValueRegSlot, const bool isInlinedConstructor,
    Js::ProfileId callSiteIdInParentFunc, bool isGetterSetter) :
    // --- Core compilation context handed in by the caller ---
    m_alloc(alloc),
    m_workItem(workItem),
    m_output(outputData),
    m_entryPointInfo(epInfo),
    m_threadContextInfo(threadContextInfo),
    m_scriptContextInfo(scriptContextInfo),
    m_runtimeInfo(runtimeInfo),
    m_polymorphicInlineCacheInfo(polymorphicInlineCacheInfo),
    m_codeGenAllocators(codeGenAllocators),
    // --- Type/property guard bookkeeping; lazily allocated on demand ---
    m_inlineeId(0),
    pinnedTypeRefs(nullptr),
    singleTypeGuards(nullptr),
    equivalentTypeGuards(nullptr),
    propertyGuardsByPropertyId(nullptr),
    ctorCachesByPropertyId(nullptr),
    callSiteToArgumentsOffsetFixupMap(nullptr),
    indexedPropertyGuardCount(0),
    propertiesWrittenTo(nullptr),
    lazyBailoutProperties(alloc),
    anyPropertyMayBeWrittenTo(false),
#ifdef PROFILE_EXEC
    m_codeGenProfiler(codeGenProfiler),
#endif
    m_isBackgroundJIT(isBackgroundJIT),
    // --- IR/codegen working state, populated during the backend phases ---
    m_cloner(nullptr),
    m_cloneMap(nullptr),
    m_loopParamSym(nullptr),
    m_funcObjSym(nullptr),
    m_localClosureSym(nullptr),
    m_paramClosureSym(nullptr),
    m_localFrameDisplaySym(nullptr),
    m_bailoutReturnValueSym(nullptr),
    m_hasBailedOutSym(nullptr),
    m_inlineeFrameStartSym(nullptr),
    inlineeStart(nullptr),
    m_regsUsed(0),
    m_fg(nullptr),
    m_labelCount(0),
    m_argSlotsForFunctionsCalled(0),
    m_hasCalls(false),
    m_hasInlineArgsOpt(false),
    m_canDoInlineArgsOpt(true),
    unoptimizableArgumentsObjReference(0),
    m_doFastPaths(false),
    hasBailout(false),
    firstIRTemp(0),
    hasBailoutInEHRegion(false),
    hasInstrNumber(false),
    maintainByteCodeOffset(true),
    frameSize(0),
    // An inlinee inherits the root Func from its parent; a root points to itself.
    topFunc(parentFunc ? parentFunc->topFunc : this),
    parentFunc(parentFunc),
    argObjSyms(nullptr),
    m_nonTempLocalVars(nullptr),
    hasAnyStackNestedFunc(false),
    hasMarkTempObjects(false),
    postCallByteCodeOffset(postCallByteCodeOffset),
    maxInlineeArgOutSize(0),
    returnValueRegSlot(returnValueRegSlot),
    firstActualStackOffset(-1),
    m_localVarSlotsOffset(Js::Constants::InvalidOffset),
    m_hasLocalVarChangedOffset(Js::Constants::InvalidOffset),
    // Sentinel "unknown actual count" (max ArgSlot value).
    actualCount((Js::ArgSlot) - 1),
    tryCatchNestingLevel(0),
    m_localStackHeight(0),
    tempSymDouble(nullptr),
    tempSymBool(nullptr),
    hasInlinee(false),
    thisOrParentInlinerHasArguments(false),
    hasStackArgs(false),
    hasArgLenAndConstOpt(false),
    hasImplicitParamLoad(false),
    hasThrow(false),
    hasNonSimpleParams(false),
    hasUnoptimizedArgumentsAccess(false),
    applyTargetInliningRemovedArgumentsAccess(false),
    hasImplicitCalls(false),
    hasTempObjectProducingInstr(false),
    isInlinedConstructor(isInlinedConstructor),
#if !FLOATVAR
    numberAllocator(numberAllocator),
#endif
    loopCount(0),
    callSiteIdInParentFunc(callSiteIdInParentFunc),
    isGetterSetter(isGetterSetter),
    cachedInlineeFrameInfo(nullptr),
    frameInfo(nullptr),
    isTJLoopBody(false),
    m_nativeCodeDataSym(nullptr),
    isFlowGraphValid(false),
    legalizePostRegAlloc(false),
#if DBG
    m_callSiteCount(0),
#endif
    stackNestedFunc(false),
    stackClosure(false)
#if defined(_M_ARM32_OR_ARM64)
    , m_ArgumentsOffset(0)
    , m_epilogLabel(nullptr)
#endif
    , m_funcStartLabel(nullptr)
    , m_funcEndLabel(nullptr)
#if DBG
    // Debug-only phase/state tracking used for assertions throughout the backend.
    , hasCalledSetDoFastPaths(false)
    , allowRemoveBailOutArgInstr(false)
    , currentPhases(alloc)
    , isPostLower(false)
    , isPostRegAlloc(false)
    , isPostPeeps(false)
    , isPostLayout(false)
    , isPostFinalLower(false)
    , vtableMap(nullptr)
#endif
    , m_yieldOffsetResumeLabelList(nullptr)
    , m_bailOutNoSaveLabel(nullptr)
    , constantAddressRegOpnd(alloc)
    , lastConstantAddressRegLoadInstr(nullptr)
    , m_totalJumpTableSizeInBytesForSwitchStatements(0)
    , slotArrayCheckTable(nullptr)
    , frameDisplayCheckTable(nullptr)
    , stackArgWithFormalsTracker(nullptr)
    , m_forInLoopBaseDepth(0)
    , m_forInEnumeratorArrayOffset(-1)
    , argInsCount(0)
    , m_globalObjTypeSpecFldInfoArray(nullptr)
#if LOWER_SPLIT_INT64
    , m_int64SymPairMap(nullptr)
#endif
#ifdef RECYCLER_WRITE_BARRIER_JIT
    , m_lowerer(nullptr)
#endif
{
    // An inlinee must carry runtime info; the top-level function must not.
    Assert(this->IsInlined() == !!runtimeInfo);
    // Validate the work item against the function body: these fail fast (even
    // in release) because inconsistent counts indicate corrupted/hostile data
    // coming across the JIT boundary.
    AssertOrFailFast(!HasProfileInfo() || GetReadOnlyProfileInfo()->GetLoopCount() == GetJITFunctionBody()->GetLoopCount());
    Js::RegSlot tmpResult;
    // Const + var register counts must not overflow a RegSlot.
    AssertOrFailFast(!UInt32Math::Add(GetJITFunctionBody()->GetConstCount(), GetJITFunctionBody()->GetVarCount(), &tmpResult));
    AssertOrFailFast(GetJITFunctionBody()->IsAsmJsMode() || GetJITFunctionBody()->GetFirstTmpReg() <= GetJITFunctionBody()->GetLocalsCount());
    AssertOrFailFast(!IsLoopBody() || m_workItem->GetLoopNumber() < GetJITFunctionBody()->GetLoopCount());
    AssertOrFailFast(CONFIG_FLAG(Prejit) || CONFIG_ISENABLED(Js::ForceNativeFlag) || GetJITFunctionBody()->GetByteCodeLength() < (uint)CONFIG_FLAG(MaxJITFunctionBytecodeByteLength));
    GetJITFunctionBody()->EnsureConsistentConstCount();
    if (this->IsTopFunc())
    {
        // Seed the output data; these offsets are filled in for real if/when
        // debug local var slots are allocated (see EnsureLocalVarSlots).
        outputData->hasJittedStackClosure = false;
        outputData->localVarSlotsOffset = m_localVarSlotsOffset;
        outputData->localVarChangedOffset = m_hasLocalVarChangedOffset;
    }
    if (this->IsInlined())
    {
        // Inlinee ids are assigned sequentially from the top Func's counter.
        m_inlineeId = ++(GetTopFunc()->m_inlineeId);
    }
    bool doStackNestedFunc = GetJITFunctionBody()->DoStackNestedFunc();
    bool doStackClosure = GetJITFunctionBody()->DoStackClosure() && !PHASE_OFF(Js::FrameDisplayFastPathPhase, this) && !PHASE_OFF(Js::StackClosurePhase, this);
    Assert(!doStackClosure || doStackNestedFunc);
    this->stackClosure = doStackClosure && this->IsTopFunc();
    if (this->stackClosure)
    {
        // TODO: calculate on runtime side?
        m_output.SetHasJITStackClosure();
    }
    if (m_workItem->Type() == JsFunctionType &&
        GetJITFunctionBody()->DoBackendArgumentsOptimization() &&
        (!GetJITFunctionBody()->HasTry() || this->DoOptimizeTry()))
    {
        // doBackendArgumentsOptimization bit is set when there is no eval inside a function
        // as determined by the bytecode generator.
        SetHasStackArgs(true);
    }
    if (doStackNestedFunc && GetJITFunctionBody()->GetNestedCount() != 0 &&
        (this->IsTopFunc() || this->GetTopFunc()->m_workItem->Type() != JsLoopBodyWorkItemType)) // make sure none of the functions inlined in a jitted loop body allocate nested functions on the stack
    {
        Assert(!(this->IsJitInDebugMode() && !GetJITFunctionBody()->IsLibraryCode()));
        stackNestedFunc = true;
        this->GetTopFunc()->hasAnyStackNestedFunc = true;
    }
    // Propagate "somewhere in the inline chain there is an arguments object"
    // down from the parent.
    if (GetJITFunctionBody()->HasOrParentHasArguments() || (parentFunc && parentFunc->thisOrParentInlinerHasArguments))
    {
        thisOrParentInlinerHasArguments = true;
    }
    if (parentFunc == nullptr)
    {
        // Top-level function: owns the symbol table shared by all inlinees.
        inlineDepth = 0;
        m_symTable = JitAnew(alloc, SymTable);
        m_symTable->Init(this);
        m_symTable->SetStartingID(static_cast<SymID>(workItem->GetJITFunctionBody()->GetLocalsCount() + 1));
        Assert(Js::Constants::NoByteCodeOffset == postCallByteCodeOffset);
        Assert(Js::Constants::NoRegister == returnValueRegSlot);
#if defined(_M_IX86) || defined(_M_X64)
        if (HasArgumentSlot())
        {
            // Pre-allocate the single argument slot we'll reserve for the arguments object.
            // For ARM, the argument slot is not part of the local but part of the register saves
            m_localStackHeight = MachArgsSlotOffset;
        }
#endif
    }
    else
    {
        inlineDepth = parentFunc->inlineDepth + 1;
        Assert(Js::Constants::NoByteCodeOffset != postCallByteCodeOffset);
    }
    this->constructorCacheCount = 0;
    this->constructorCaches = AnewArrayZ(this->m_alloc, JITTimeConstructorCache*, GetJITFunctionBody()->GetProfiledCallSiteCount());
#if DBG_DUMP
    m_codeSize = -1;
#endif
#if defined(_M_X64)
    m_spillSize = -1;
    m_argsSize = -1;
    m_savedRegSize = -1;
#endif
    if (this->IsJitInDebugMode())
    {
        // Track non-temp locals so the debugger can inspect them (see EnsureLocalVarSlots).
        m_nonTempLocalVars = Anew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
    }
    if (GetJITFunctionBody()->IsCoroutine())
    {
        m_yieldOffsetResumeLabelList = YieldOffsetResumeLabelList::New(this->m_alloc);
    }
    if (this->IsTopFunc())
    {
        m_globalObjTypeSpecFldInfoArray = JitAnewArrayZ(this->m_alloc, ObjTypeSpecFldInfo*, GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
    }
    // Register this function's obj-type-spec field info into the top Func's
    // global array, indexed by the (validated) global id.
    for (uint i = 0; i < GetJITFunctionBody()->GetInlineCacheCount(); ++i)
    {
        ObjTypeSpecFldInfo * info = GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(i);
        if (info != nullptr)
        {
            AssertOrFailFast(info->GetObjTypeSpecFldId() < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
            GetTopFunc()->m_globalObjTypeSpecFldInfoArray[info->GetObjTypeSpecFldId()] = info;
        }
    }
    canHoistConstantAddressLoad = !PHASE_OFF(Js::HoistConstAddrPhase, this);
    m_forInLoopMaxDepth = this->GetJITFunctionBody()->GetForInLoopDepth();
}
  259. bool
  260. Func::IsLoopBodyInTry() const
  261. {
  262. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTry;
  263. }
  264. bool
  265. Func::IsLoopBodyInTryFinally() const
  266. {
  267. return IsLoopBody() && m_workItem->GetLoopHeader()->isInTryFinally;
  268. }
  269. /* static */
  270. void
  271. Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
  272. ThreadContextInfo * threadContextInfo,
  273. ScriptContextInfo * scriptContextInfo,
  274. JITOutputIDL * outputData,
  275. Js::EntryPointInfo* epInfo, // for in-proc jit only
  276. const FunctionJITRuntimeInfo *const runtimeInfo,
  277. JITTimePolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, void * const codeGenAllocators,
  278. #if !FLOATVAR
  279. CodeGenNumberAllocator * numberAllocator,
  280. #endif
  281. Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT)
  282. {
  283. bool rejit;
  284. int rejitCounter = 0;
  285. do
  286. {
  287. Assert(rejitCounter < 25);
  288. Func func(alloc, workItem, threadContextInfo,
  289. scriptContextInfo, outputData, epInfo, runtimeInfo,
  290. polymorphicInlineCacheInfo, codeGenAllocators,
  291. #if !FLOATVAR
  292. numberAllocator,
  293. #endif
  294. codeGenProfiler, isBackgroundJIT);
  295. try
  296. {
  297. func.TryCodegen();
  298. rejit = false;
  299. }
  300. catch (Js::RejitException ex)
  301. {
  302. // The work item needs to be rejitted, likely due to some optimization that was too aggressive
  303. switch (ex.Reason())
  304. {
  305. case RejitReason::AggressiveIntTypeSpecDisabled:
  306. outputData->disableAggressiveIntTypeSpec = TRUE;
  307. break;
  308. case RejitReason::InlineApplyDisabled:
  309. workItem->GetJITFunctionBody()->DisableInlineApply();
  310. outputData->disableInlineApply = TRUE;
  311. break;
  312. case RejitReason::InlineSpreadDisabled:
  313. workItem->GetJITFunctionBody()->DisableInlineSpread();
  314. outputData->disableInlineSpread = TRUE;
  315. break;
  316. case RejitReason::DisableStackArgOpt:
  317. outputData->disableStackArgOpt = TRUE;
  318. break;
  319. case RejitReason::DisableStackArgLenAndConstOpt:
  320. break;
  321. case RejitReason::DisableSwitchOptExpectingInteger:
  322. case RejitReason::DisableSwitchOptExpectingString:
  323. outputData->disableSwitchOpt = TRUE;
  324. break;
  325. case RejitReason::ArrayCheckHoistDisabled:
  326. case RejitReason::ArrayAccessHelperCallEliminationDisabled:
  327. outputData->disableArrayCheckHoist = TRUE;
  328. break;
  329. case RejitReason::TrackIntOverflowDisabled:
  330. outputData->disableTrackCompoundedIntOverflow = TRUE;
  331. break;
  332. default:
  333. Assume(UNREACHED);
  334. }
  335. if (PHASE_TRACE(Js::ReJITPhase, &func))
  336. {
  337. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  338. Output::Print(
  339. _u("Rejit (compile-time): function: %s (%s) reason: %S\n"),
  340. workItem->GetJITFunctionBody()->GetDisplayName(),
  341. workItem->GetJITTimeInfo()->GetDebugNumberSet(debugStringBuffer),
  342. ex.ReasonName());
  343. }
  344. rejit = true;
  345. rejitCounter++;
  346. }
  347. // Either the entry point has a reference to the number now, or we failed to code gen and we
  348. // don't need to numbers, we can flush the completed page now.
  349. //
  350. // If the number allocator is NULL then we are shutting down the thread context and so too the
  351. // code generator. The number allocator must be freed before the recycler (and thus before the
  352. // code generator) so we can't and don't need to flush it.
  353. // TODO: OOP JIT, allocator cleanup
  354. } while (rejit);
  355. }
///----------------------------------------------------------------------------
///
/// Func::TryCodegen
///
/// Attempt to Codegen this function.
///
/// Runs the full backend pipeline in strict order:
///   IRBuilder -> Inline -> FlowGraph/GlobOpt -> Lower -> encode constants ->
///   RegAlloc -> Peeps -> Layout -> EH patch-up -> NOP insertion ->
///   Prolog/Epilog -> FinalLower -> Encoder -> (OOP JIT) native data fixups.
/// Throws Js::RejitException when an optimization must be disabled and the
/// function recompiled (handled by Func::Codegen).
///
///----------------------------------------------------------------------------
void
Func::TryCodegen()
{
    // Functions with try blocks are not jitted in debug mode.
    Assert(!IsJitInDebugMode() || !GetJITFunctionBody()->HasTry());
    BEGIN_CODEGEN_PHASE(this, Js::BackEndPhase);
    {
        // IRBuilder
        BEGIN_CODEGEN_PHASE(this, Js::IRBuilderPhase);
#ifdef ASMJS_PLAT
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            IRBuilderAsmJs asmIrBuilder(this);
            asmIrBuilder.Build();
        }
        else
#endif
        {
            IRBuilder irBuilder(this);
            irBuilder.Build();
        }
        END_CODEGEN_PHASE(this, Js::IRBuilderPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::IRBuilderPhase);
#endif /* IR_VIEWER */
        // Inlining: must run before flow graph construction so inlinee IR is
        // part of the graph.
        BEGIN_CODEGEN_PHASE(this, Js::InlinePhase);
        InliningHeuristics heuristics(GetWorkItem()->GetJITTimeInfo(), this->IsLoopBody());
        Inline inliner(this, heuristics);
        inliner.Optimize();
        END_CODEGEN_PHASE(this, Js::InlinePhase);
        // Bail out early if the script closed while we were inlining.
        ThrowIfScriptClosed();
        // FlowGraph
        {
            // Scope for FlowGraph arena
            NoRecoverMemoryJitArenaAllocator fgAlloc(_u("BE-FlowGraph"), m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
            BEGIN_CODEGEN_PHASE(this, Js::FGBuildPhase);
            this->m_fg = FlowGraph::New(this, &fgAlloc);
            this->m_fg->Build();
            END_CODEGEN_PHASE(this, Js::FGBuildPhase);
            // Global Optimization and Type Specialization
            BEGIN_CODEGEN_PHASE(this, Js::GlobOptPhase);
            GlobOpt globOpt(this);
            globOpt.Optimize();
            END_CODEGEN_PHASE(this, Js::GlobOptPhase);
            // Delete flowGraph now
            this->m_fg->Destroy();
            this->m_fg = nullptr;
        }
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::GlobOptPhase);
#endif /* IR_VIEWER */
        ThrowIfScriptClosed();
        // Lowering
        Lowerer lowerer(this);
        BEGIN_CODEGEN_PHASE(this, Js::LowererPhase);
        lowerer.Lower();
        END_CODEGEN_PHASE(this, Js::LowererPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::LowererPhase);
#endif /* IR_VIEWER */
        // Encode constants
        Security security(this);
        BEGIN_CODEGEN_PHASE(this, Js::EncodeConstantsPhase)
        security.EncodeLargeConstants();
        END_CODEGEN_PHASE(this, Js::EncodeConstantsPhase);
        if (GetJITFunctionBody()->DoInterruptProbe())
        {
            BEGIN_CODEGEN_PHASE(this, Js::InterruptProbePhase)
            lowerer.DoInterruptProbes();
            END_CODEGEN_PHASE(this, Js::InterruptProbePhase)
        }
        // Register Allocation
        BEGIN_CODEGEN_PHASE(this, Js::RegAllocPhase);
        LinearScan linearScan(this);
        linearScan.RegAlloc();
        END_CODEGEN_PHASE(this, Js::RegAllocPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::RegAllocPhase);
#endif /* IR_VIEWER */
        ThrowIfScriptClosed();
        // Peephole optimizations
        BEGIN_CODEGEN_PHASE(this, Js::PeepsPhase);
        Peeps peeps(this);
        peeps.PeepFunc();
        END_CODEGEN_PHASE(this, Js::PeepsPhase);
        // Layout
        BEGIN_CODEGEN_PHASE(this, Js::LayoutPhase);
        SimpleLayout layout(this);
        layout.Layout();
        END_CODEGEN_PHASE(this, Js::LayoutPhase);
        // Patch up bailouts inside exception-handling regions, if any.
        if (this->HasTry() && this->hasBailoutInEHRegion)
        {
            BEGIN_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
            lowerer.EHBailoutPatchUp();
            END_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
        }
        // Insert NOPs (moving this before prolog/epilog for AMD64 and possibly ARM).
        BEGIN_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
        security.InsertNOPs();
        END_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
        // Prolog/Epilog
        BEGIN_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
        if (GetJITFunctionBody()->IsAsmJsMode())
        {
            lowerer.LowerPrologEpilogAsmJs();
        }
        else
        {
            lowerer.LowerPrologEpilog();
        }
        END_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
        BEGIN_CODEGEN_PHASE(this, Js::FinalLowerPhase);
        lowerer.FinalLower();
        END_CODEGEN_PHASE(this, Js::FinalLowerPhase);
        // Encoder
        BEGIN_CODEGEN_PHASE(this, Js::EncoderPhase);
        Encoder encoder(this);
        encoder.Encode();
        END_CODEGEN_PHASE_NO_DUMP(this, Js::EncoderPhase);
#ifdef IR_VIEWER
        IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::EncoderPhase);
#endif /* IR_VIEWER */
    }
#if DBG_DUMP
    // Optional assembly dump, redirected to the configured asm file if set.
    if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag))
    {
        FILE * oldFile = 0;
        FILE * asmFile = GetScriptContext()->GetNativeCodeGenerator()->asmFile;
        if (asmFile)
        {
            oldFile = Output::SetFile(asmFile);
        }
        this->Dump(IRDumpFlags_AsmDumpMode);
        Output::Flush();
        if (asmFile)
        {
            FILE *openedFile = Output::SetFile(oldFile);
            Assert(openedFile == asmFile);
        }
    }
#endif
    if (this->IsOOPJIT())
    {
        // OOP JIT: serialize the native code data chunks into a flat buffer
        // plus a fixup table so the runtime process can relocate internal
        // pointers after copying the buffer across the process boundary.
        BEGIN_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);
        auto dataAllocator = this->GetNativeCodeDataAllocator();
        if (dataAllocator->allocCount > 0)
        {
            NativeCodeData::DataChunk *chunk = (NativeCodeData::DataChunk*)dataAllocator->chunkList;
            NativeCodeData::DataChunk *next1 = chunk;
            // First pass: run each chunk's fixup callback.
            while (next1)
            {
                if (next1->fixupFunc)
                {
                    next1->fixupFunc(next1->data, chunk);
                }
#if DBG
                if (CONFIG_FLAG(OOPJITFixupValidate))
                {
                    // Scan memory to see if there's missing pointer needs to be fixed up
                    // This can hit false positive if some data field happens to have value
                    // falls into the NativeCodeData memory range.
                    NativeCodeData::DataChunk *next2 = chunk;
                    while (next2)
                    {
                        for (unsigned int i = 0; i < next1->len / sizeof(void*); i++)
                        {
                            if (((void**)next1->data)[i] == (void*)next2->data)
                            {
                                NativeCodeData::VerifyExistFixupEntry((void*)next2->data, &((void**)next1->data)[i], next1->data);
                            }
                        }
                        next2 = next2->next;
                    }
                }
#endif
                next1 = next1->next;
            }
            JITOutputIDL* jitOutputData = m_output.GetOutputData();
            // Allocate the fixup table (flexible array of one record per chunk)
            // and the flat data buffer via the MIDL allocator, since both are
            // marshalled back to the runtime process.
            size_t allocSize = offsetof(NativeDataFixupTable, fixupRecords) + sizeof(NativeDataFixupRecord)* (dataAllocator->allocCount);
            jitOutputData->nativeDataFixupTable = (NativeDataFixupTable*)midl_user_allocate(allocSize);
            if (!jitOutputData->nativeDataFixupTable)
            {
                Js::Throw::OutOfMemory();
            }
            __analysis_assume(jitOutputData->nativeDataFixupTable);
            jitOutputData->nativeDataFixupTable->count = dataAllocator->allocCount;
            jitOutputData->buffer = (NativeDataBuffer*)midl_user_allocate(offsetof(NativeDataBuffer, data) + dataAllocator->totalSize);
            if (!jitOutputData->buffer)
            {
                Js::Throw::OutOfMemory();
            }
            __analysis_assume(jitOutputData->buffer);
            jitOutputData->buffer->len = dataAllocator->totalSize;
            // Second pass: copy chunk payloads into the flat buffer and record
            // each chunk's fixup metadata.
            unsigned int len = 0;
            unsigned int count = 0;
            next1 = chunk;
            while (next1)
            {
                memcpy(jitOutputData->buffer->data + len, next1->data, next1->len);
                len += next1->len;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].index = next1->allocIndex;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].length = next1->len;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].startOffset = next1->offset;
                jitOutputData->nativeDataFixupTable->fixupRecords[count].updateList = next1->fixupList;
                count++;
                next1 = next1->next;
            }
#if DBG
            if (PHASE_TRACE1(Js::NativeCodeDataPhase))
            {
                Output::Print(_u("NativeCodeData Server Buffer: %p, len: %x, chunk head: %p\n"), jitOutputData->buffer->data, jitOutputData->buffer->len, chunk);
            }
#endif
        }
        END_CODEGEN_PHASE(this, Js::NativeCodeDataPhase);
    }
    END_CODEGEN_PHASE(this, Js::BackEndPhase);
}
  580. ///----------------------------------------------------------------------------
  581. /// Func::StackAllocate
  582. /// Allocate stack space of given size.
  583. ///----------------------------------------------------------------------------
  584. int32
  585. Func::StackAllocate(int size)
  586. {
  587. Assert(this->IsTopFunc());
  588. int32 offset;
  589. #ifdef MD_GROW_LOCALS_AREA_UP
  590. // Locals have positive offsets and are allocated from bottom to top.
  591. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  592. offset = m_localStackHeight;
  593. m_localStackHeight += size;
  594. #else
  595. // Locals have negative offsets and are allocated from top to bottom.
  596. m_localStackHeight += size;
  597. m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
  598. offset = -m_localStackHeight;
  599. #endif
  600. return offset;
  601. }
  602. ///----------------------------------------------------------------------------
  603. ///
  604. /// Func::StackAllocate
  605. ///
  606. /// Allocate stack space for this symbol.
  607. ///
  608. ///----------------------------------------------------------------------------
  609. int32
  610. Func::StackAllocate(StackSym *stackSym, int size)
  611. {
  612. Assert(size > 0);
  613. if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym() || stackSym->IsAllocated())
  614. {
  615. return stackSym->m_offset;
  616. }
  617. Assert(stackSym->m_offset == 0);
  618. stackSym->m_allocated = true;
  619. stackSym->m_offset = StackAllocate(size);
  620. return stackSym->m_offset;
  621. }
  622. void
  623. Func::SetArgOffset(StackSym *stackSym, int32 offset)
  624. {
  625. AssertMsg(offset >= 0, "Why is the offset, negative?");
  626. stackSym->m_offset = offset;
  627. stackSym->m_allocated = true;
  628. }
///
/// Ensures that local var slots are created, if the function has locals.
/// Allocate stack space for locals used for debugging
/// (for local non-temp vars we write-through memory so that locals inspection can make use of that).
/// On stack, after local slots we allocate space for metadata (in particular, whether any of the locals was changed in debugger).
///
void
Func::EnsureLocalVarSlots()
{
    Assert(IsJitInDebugMode());

    if (!this->HasLocalVarSlotCreated())
    {
        uint32 localSlotCount = GetJITFunctionBody()->GetNonTempLocalVarCount();
        if (localSlotCount && m_localVarSlotsOffset == Js::Constants::InvalidOffset)
        {
            // Allocate the slots.
            int32 size = localSlotCount * GetDiagLocalSlotSize();
            m_localVarSlotsOffset = StackAllocate(size);
            m_hasLocalVarChangedOffset = StackAllocate(max(1, MachStackAlignment)); // Can't alloc less than StackAlignment bytes.

            Assert(m_workItem->Type() == JsFunctionType);

            // Publish the (frame-pointer-adjusted) offsets so the runtime can find the slots.
            m_output.SetVarSlotsOffset(AdjustOffsetValue(m_localVarSlotsOffset));
            m_output.SetVarChangedOffset(AdjustOffsetValue(m_hasLocalVarChangedOffset));
        }
    }
}
// Computes the stack offset of an inlinee's first actual argument by walking
// back from the highest-offset ArgOut feeding the InlineeStart instruction.
void Func::SetFirstArgOffset(IR::Instr* inlineeStart)
{
    Assert(inlineeStart->m_func == this);
    Assert(!IsTopFunc());
    int32 lastOffset;

    IR::Instr* arg = inlineeStart->GetNextArg();
    if (arg)
    {
        const auto lastArgOutStackSym = arg->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
        lastOffset = lastArgOutStackSym->m_offset;
        Assert(lastArgOutStackSym->m_isSingleDef);
        // The ArgOut chain is linked through src2; the second-to-last ArgOut can
        // sit at a higher stack offset than the last one, so take the max.
        const auto secondLastArgOutOpnd = lastArgOutStackSym->m_instrDef->GetSrc2();
        if (secondLastArgOutOpnd->IsSymOpnd())
        {
            const auto secondLastOffset = secondLastArgOutOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_offset;
            if (secondLastOffset > lastOffset)
            {
                lastOffset = secondLastOffset;
            }
        }
        // One slot past the highest ArgOut.
        lastOffset += MachPtr;
    }
    else
    {
        // No ArgOuts at all: only expected for asm.js-mode inlinees.
        Assert(this->GetTopFunc()->GetJITFunctionBody()->IsAsmJsMode());
        lastOffset = MachPtr;
    }
    // Step back over the actuals plus the inlinee meta-arg slots.
    int32 firstActualStackOffset = lastOffset - ((this->actualCount + Js::Constants::InlineeMetaArgCount) * MachPtr);
    // Either computed for the first time, or recomputed to the same value.
    Assert((this->firstActualStackOffset == -1) || (this->firstActualStackOffset == firstActualStackOffset));
    this->firstActualStackOffset = firstActualStackOffset;
}
  685. int32
  686. Func::GetLocalVarSlotOffset(int32 slotId)
  687. {
  688. this->EnsureLocalVarSlots();
  689. Assert(m_localVarSlotsOffset != Js::Constants::InvalidOffset);
  690. int32 slotOffset = slotId * GetDiagLocalSlotSize();
  691. return m_localVarSlotsOffset + slotOffset;
  692. }
  693. void Func::OnAddSym(Sym* sym)
  694. {
  695. Assert(sym);
  696. if (this->IsJitInDebugMode() && this->IsNonTempLocalVar(sym->m_id))
  697. {
  698. Assert(m_nonTempLocalVars);
  699. m_nonTempLocalVars->Set(sym->m_id);
  700. }
  701. }
  702. ///
  703. /// Returns offset of the flag (1 byte) whether any local was changed (in debugger).
  704. /// If the function does not have any locals, returns -1.
  705. ///
  706. int32
  707. Func::GetHasLocalVarChangedOffset()
  708. {
  709. this->EnsureLocalVarSlots();
  710. return m_hasLocalVarChangedOffset;
  711. }
  712. bool
  713. Func::IsJitInDebugMode() const
  714. {
  715. return m_workItem->IsJitInDebugMode();
  716. }
  717. bool
  718. Func::IsNonTempLocalVar(uint32 slotIndex)
  719. {
  720. return GetJITFunctionBody()->IsNonTempLocalVar(slotIndex);
  721. }
// Converts a raw locals-area offset into the form published to the runtime
// (direction and bias depend on how the target grows its locals area).
int32
Func::AdjustOffsetValue(int32 offset)
{
#ifdef MD_GROW_LOCALS_AREA_UP
    // Locals grow upward on this target: flip the sign and apply the bailout bias.
    return -(offset + BailOutInfo::StackSymBias);
#else
    // Stack offset are negative, includes the PUSH EBP and return address
    return offset - (2 * MachPtr);
#endif
}
#ifdef MD_GROW_LOCALS_AREA_UP
// Note: this is called during jit-compile when we finalize bail out record.
// NOTE(review): the method name is missing a 'd' ("Ajust" vs "Adjust");
// renaming would touch callers outside this file, so it is left as-is here.
void
Func::AjustLocalVarSlotOffset()
{
    if (GetJITFunctionBody()->GetNonTempLocalVarCount())
    {
        // Turn positive SP-relative base locals offset into negative frame-pointer-relative offset
        // This is changing value for restoring the locals when read due to locals inspection.
        int localsOffset = m_localVarSlotsOffset - (m_localStackHeight + m_ArgumentsOffset);
        int valueChangeOffset = m_hasLocalVarChangedOffset - (m_localStackHeight + m_ArgumentsOffset);

        m_output.SetVarSlotsOffset(localsOffset);
        m_output.SetVarChangedOffset(valueChangeOffset);
    }
}
#endif
  748. bool
  749. Func::DoGlobOptsForGeneratorFunc() const
  750. {
  751. // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
  752. return !GetJITFunctionBody()->IsCoroutine();
  753. }
  754. bool
  755. Func::DoSimpleJitDynamicProfile() const
  756. {
  757. return IsSimpleJit() && !PHASE_OFF(Js::SimpleJitDynamicProfilePhase, GetTopFunc()) && !CONFIG_FLAG(NewSimpleJit);
  758. }
  759. void
  760. Func::SetDoFastPaths()
  761. {
  762. // Make sure we only call this once!
  763. Assert(!this->hasCalledSetDoFastPaths);
  764. bool doFastPaths = false;
  765. if(!PHASE_OFF(Js::FastPathPhase, this) && (!IsSimpleJit() || CONFIG_FLAG(NewSimpleJit)))
  766. {
  767. doFastPaths = true;
  768. }
  769. this->m_doFastPaths = doFastPaths;
  770. #ifdef DBG
  771. this->hasCalledSetDoFastPaths = true;
  772. #endif
  773. }
#if LOWER_SPLIT_INT64
// Splits an int64 operand into a (low, high) pair of 32-bit operands, creating
// and caching the backing syms on the top func as needed.
Int64RegPair Func::FindOrCreateInt64Pair(IR::Opnd* opnd)
{
    if (!this->IsTopFunc())
    {
        // The int64 sym pair map lives on the top func only.
        return GetTopFunc()->FindOrCreateInt64Pair(opnd);
    }
    AssertMsg(currentPhases.Top() == Js::LowererPhase, "New Int64 sym map is only allowed during lower");
    Int64RegPair pair;
    IRType pairType = opnd->GetType();
    if (opnd->IsInt64())
    {
        // Each half is a 32-bit value carrying the signedness of the original.
        pairType = IRType_IsSignedInt(pairType) ? TyInt32 : TyUint32;
    }

    if (opnd->IsIndirOpnd())
    {
        // Memory operand: low half at the original address, high half 4 bytes above.
        IR::IndirOpnd* indir = opnd->AsIndirOpnd();
        indir->SetType(pairType);
        pair.low = indir;
        pair.high = indir->Copy(this)->AsIndirOpnd();
        pair.high->AsIndirOpnd()->SetOffset(indir->GetOffset() + 4);
        return pair;
    }

    // Only indir opnd can have a type other than int64
    Assert(opnd->IsInt64());

    if (opnd->IsImmediateOpnd())
    {
        // Constant: just split the bits.
        int64 value = opnd->GetImmediateValue(this);
        pair.low = IR::IntConstOpnd::New((int32)value, pairType, this);
        pair.high = IR::IntConstOpnd::New((int32)(value >> 32), pairType, this);
        return pair;
    }

    Int64SymPair symPair;

    if (!m_int64SymPairMap)
    {
        m_int64SymPairMap = Anew(m_alloc, Int64SymPairMap, m_alloc);
    }
    StackSym* stackSym = opnd->GetStackSym();
    AssertOrFailFastMsg(stackSym, "Invalid int64 operand type");
    SymID symId = stackSym->m_id;
    if (!m_int64SymPairMap->TryGetValue(symId, &symPair))
    {
        if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym())
        {
            const bool isArg = stackSym->IsArgSlotSym();
            if (isArg)
            {
                // NOTE(review): both halves use the same arg slot number here,
                // while the param case below uses slotNumber and slotNumber + 1
                // — confirm this asymmetry is intentional.
                Js::ArgSlot slotNumber = stackSym->GetArgSlotNum();
                symPair.low = StackSym::NewArgSlotSym(slotNumber, this, pairType);
                symPair.high = StackSym::NewArgSlotSym(slotNumber, this, pairType);
            }
            else
            {
                Js::ArgSlot slotNumber = stackSym->GetParamSlotNum();
                symPair.low = StackSym::NewParamSlotSym(slotNumber, this, pairType);
                symPair.high = StackSym::NewParamSlotSym(slotNumber + 1, this, pairType);
            }
            // Arg/param slots already have fixed stack locations: the low half
            // reuses the original offset, the high half sits 4 bytes above it.
            symPair.low->m_allocated = true;
            symPair.low->m_offset = stackSym->m_offset;
            symPair.high->m_allocated = true;
            symPair.high->m_offset = stackSym->m_offset + 4;
        }
        else
        {
            symPair.low = StackSym::New(pairType, this);
            symPair.high = StackSym::New(pairType, this);
        }
        m_int64SymPairMap->Add(symId, symPair);
    }

    // Wrap the cached syms in operands of the same kind as the original operand.
    if (opnd->IsSymOpnd())
    {
        pair.low = IR::SymOpnd::New(symPair.low, opnd->AsSymOpnd()->m_offset, pairType, this);
        pair.high = IR::SymOpnd::New(symPair.high, opnd->AsSymOpnd()->m_offset, pairType, this);
    }
    else
    {
        pair.low = IR::RegOpnd::New(symPair.low, pairType, this);
        pair.high = IR::RegOpnd::New(symPair.high, pairType, this);
    }
    return pair;
}
// After int64 splitting, any sym that was live on a loop's back edge and was
// split into a pair must have both halves marked live on the back edge too.
void Func::Int64SplitExtendLoopLifetime(Loop* loop)
{
    if (!this->IsTopFunc())
    {
        // The int64 sym pair map lives on the top func only.
        GetTopFunc()->Int64SplitExtendLoopLifetime(loop);
        return;
    }
    if (m_int64SymPairMap)
    {
        BVSparse<JitArenaAllocator> *liveOnBackEdgeSyms = loop->regAlloc.liveOnBackEdgeSyms;
        FOREACH_BITSET_IN_SPARSEBV(symId, liveOnBackEdgeSyms)
        {
            Int64SymPair pair;
            if (m_int64SymPairMap->TryGetValue(symId, &pair))
            {
                // If we have replaced any sym that was live on the back edge for 2 other syms
                // these 2 syms needs to be live on back edge as well.
                liveOnBackEdgeSyms->Set(pair.low->m_id);
                liveOnBackEdgeSyms->Set(pair.high->m_id);
            }
        } NEXT_BITSET_IN_SPARSEBV;
    }
}
#endif
#if defined(_M_ARM32_OR_ARM64)
// Returns the register used to address the locals area on ARM targets.
// Functions with a try region use the alternate locals pointer instead of SP.
RegNum
Func::GetLocalsPointer() const
{
#ifdef DBG
    // Test hook: force use of the alternate locals pointer.
    if (Js::Configuration::Global.flags.IsEnabled(Js::ForceLocalsPtrFlag))
    {
        return ALT_LOCALS_PTR;
    }
#endif

    if (GetJITFunctionBody()->HasTry())
    {
        return ALT_LOCALS_PTR;
    }

    return RegSP;
}
#endif
// Records that accesses through the slot array rooted at this field's stack
// sym need a range check covering at least the accessed slot. The table keeps,
// per stack sym, the largest slot index seen.
void Func::AddSlotArrayCheck(IR::SymOpnd *fieldOpnd)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->slotArrayCheckTable == nullptr)
    {
        this->slotArrayCheckTable = SlotArrayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    // For closure slot accesses the property id is the slot index.
    uint32 slot = propertySym->m_propertyId;
    uint32 *pSlotId = this->slotArrayCheckTable->FindOrInsert(slot, propertySym->m_stackSym->m_id);
    // A non-null pSlotId appears to mean the sym already had an entry; widen
    // its recorded slot to the max ((uint32)-1 looks like a "no slot yet"
    // sentinel) — TODO confirm FindOrInsert's return contract.
    if (pSlotId && (*pSlotId == (uint32)-1 || *pSlotId < slot))
    {
        *pSlotId = propertySym->m_propertyId;
    }
}
// Records that walks of the frame display rooted at this sym need range
// checks: the frame display itself up to the accessed scope slot, and, when
// slotId != (uint32)-1, the slot array at that scope up to 'slotId'.
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->frameDisplayCheckTable == nullptr)
    {
        this->frameDisplayCheckTable = FrameDisplayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    FrameDisplayCheckRecord **record = this->frameDisplayCheckTable->FindOrInsertNew(propertySym->m_stackSym->m_id);
    if (*record == nullptr)
    {
        *record = JitAnew(m_alloc, FrameDisplayCheckRecord);
    }

    // For frame display accesses the property id is the scope slot index.
    uint32 frameDisplaySlot = propertySym->m_propertyId;
    if ((*record)->table == nullptr || (*record)->slotId < frameDisplaySlot)
    {
        // Widen the recorded frame-display depth to the deepest slot seen.
        (*record)->slotId = frameDisplaySlot;
    }

    if (slotId != (uint32)-1)
    {
        // Also track the maximum slot accessed within this scope's slot array.
        if ((*record)->table == nullptr)
        {
            (*record)->table = SlotArrayCheckTable::New(m_alloc, 4);
        }
        uint32 *pSlotId = (*record)->table->FindOrInsert(slotId, frameDisplaySlot);
        if (pSlotId && *pSlotId < slotId)
        {
            *pSlotId = slotId;
        }
    }
}
void Func::InitLocalClosureSyms()
{
    Assert(this->m_localClosureSym == nullptr);

    // Allocate stack space for closure pointers. Do this only if we're jitting for stack closures, and
    // tell bailout that these are not byte code symbols so that we don't try to encode them in the bailout record,
    // as they don't have normal lifetimes.
    Js::RegSlot regSlot = GetJITFunctionBody()->GetLocalClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // (Js::RegSlot)-1 marks the sym as not byte-code-mapped when doing stack closures.
        this->m_localClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }

    regSlot = this->GetJITFunctionBody()->GetParamClosureReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        // A separate param-scope closure only exists when param and body scopes are not merged.
        Assert(this->GetParamClosureSym() == nullptr && !this->GetJITFunctionBody()->IsParamAndBodyScopeMerged());
        this->m_paramClosureSym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot) - 1 : regSlot,
                                   this);
    }

    regSlot = GetJITFunctionBody()->GetLocalFrameDisplayReg();
    if (regSlot != Js::Constants::NoRegister)
    {
        this->m_localFrameDisplaySym =
            StackSym::FindOrCreate(static_cast<SymID>(regSlot),
                                   this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
                                   this);
    }
}
  982. bool
  983. Func::IsTrackCompoundedIntOverflowDisabled() const
  984. {
  985. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsTrackCompoundedIntOverflowDisabled()) || m_output.IsTrackCompoundedIntOverflowDisabled();
  986. }
  987. bool
  988. Func::IsArrayCheckHoistDisabled() const
  989. {
  990. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsArrayCheckHoistDisabled(IsLoopBody())) || m_output.IsArrayCheckHoistDisabled();
  991. }
  992. bool
  993. Func::IsStackArgOptDisabled() const
  994. {
  995. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsStackArgOptDisabled()) || m_output.IsStackArgOptDisabled();
  996. }
  997. bool
  998. Func::IsSwitchOptDisabled() const
  999. {
  1000. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsSwitchOptDisabled()) || m_output.IsSwitchOptDisabled();
  1001. }
  1002. bool
  1003. Func::IsAggressiveIntTypeSpecDisabled() const
  1004. {
  1005. return (HasProfileInfo() && GetReadOnlyProfileInfo()->IsAggressiveIntTypeSpecDisabled(IsLoopBody())) || m_output.IsAggressiveIntTypeSpecDisabled();
  1006. }
// Whether code for this function may be allocated in a pre-reserved heap page
// segment (CFG-enabled builds only). Note the unusual preprocessor layout:
// the #if _M_IX86 / #elif TARGET_64 / #else branches each COMPLETE the
// parenthesized return expression begun above them.
bool Func::CanAllocInPreReservedHeapPageSegment ()
{
#ifdef _CONTROL_FLOW_GUARD
    return PHASE_FORCE1(Js::PreReservedHeapAllocPhase) || (!PHASE_OFF1(Js::PreReservedHeapAllocPhase) &&
        !IsJitInDebugMode()
#if _M_IX86
        // x86: full JIT only, and the active allocator set must support pre-reserved segments.
        && m_workItem->GetJitMode() == ExecutionMode::FullJit
#if ENABLE_OOP_NATIVE_CODEGEN
        && (JITManager::GetJITManager()->IsJITServer()
            ? GetOOPCodeGenAllocators()->canCreatePreReservedSegment
            : GetInProcCodeGenAllocators()->canCreatePreReservedSegment)
#else
        && GetInProcCodeGenAllocators()->canCreatePreReservedSegment
#endif
        );
#elif TARGET_64
        && true);
#else
        && false); //Not yet implemented for architectures other than x86 and amd64.
#endif //_M_ARCH
#else
    return false;
#endif//_CONTROL_FLOW_GUARD
}
///----------------------------------------------------------------------------
///
/// Func::GetInstrCount
///
///     Returns the number of instrs.
///     Note: It counts all instrs for now, including labels, etc.
///
///----------------------------------------------------------------------------
uint32
Func::GetInstrCount()
{
    uint instrCount = 0;

    // Linear walk over every instruction in the func (and its inlinees).
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instrCount++;
    }NEXT_INSTR_IN_FUNC;

    return instrCount;
}
///----------------------------------------------------------------------------
///
/// Func::NumberInstrs
///
///     Number each instruction in order of appearance in the function.
///
///----------------------------------------------------------------------------
void
Func::NumberInstrs()
{
#if DBG_DUMP
    Assert(this->IsTopFunc());
    Assert(!this->hasInstrNumber);
    this->hasInstrNumber = true;
#endif
    // Numbering starts at 1.
    uint instrCount = 1;

    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instr->SetNumber(instrCount++);
    }
    NEXT_INSTR_IN_FUNC;
}
#if DBG
// Returns the bitvector of byte-code registers recorded as used at 'offset',
// or nullptr if no uses were recorded for that offset.
BVSparse<JitArenaAllocator>* Func::GetByteCodeOffsetUses(uint offset) const
{
    InstrByteCodeRegisterUses uses;
    if (byteCodeRegisterUses->TryGetValue(offset, &uses))
    {
        return uses.bv;
    }
    return nullptr;
}
///----------------------------------------------------------------------------
///
/// Func::IsInPhase
///
///     Determines whether the function is currently in the provided phase
///
///----------------------------------------------------------------------------
bool
Func::IsInPhase(Js::Phase tag)
{
    // Phases nest, so check the whole phase stack rather than just the top.
    return this->GetTopFunc()->currentPhases.Contains(tag);
}
#endif
///----------------------------------------------------------------------------
///
/// Func::BeginPhase
///
///     Takes care of the profiler
///
///----------------------------------------------------------------------------
void
Func::BeginPhase(Js::Phase tag)
{
#ifdef DBG
    // Track the phase on the top func so IsInPhase/EndProfiler can validate nesting.
    this->GetTopFunc()->currentPhases.Push(tag);

    if (PHASE_DEBUGBREAK_ON_PHASE_BEGIN(tag, this))
    {
        __debugbreak();
    }
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileBegin(tag);
    }
#endif
}
///----------------------------------------------------------------------------
///
/// Func::EndProfiler
///
///     Pops the current phase and takes care of the profiler.
///     (Banner previously mislabeled this as Func::EndPhase.)
///
///----------------------------------------------------------------------------
void
Func::EndProfiler(Js::Phase tag)
{
#ifdef DBG
    // The phase being ended must be the one most recently begun.
    Assert(this->GetTopFunc()->currentPhases.Count() > 0);
    Js::Phase popped = this->GetTopFunc()->currentPhases.Pop();
    Assert(tag == popped);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileEnd(tag);
    }
#endif
}
// Ends a backend phase: stops profiling, optionally dumps the IR, and (in DBG
// builds) updates/validates the pipeline-progress flags.
void
Func::EndPhase(Js::Phase tag, bool dump)
{
    this->EndProfiler(tag);
#if DBG_DUMP
    // Dump the IR when either this phase's dump flag or the whole-backend dump flag is set.
    if(dump && (PHASE_DUMP(tag, this)
        || PHASE_DUMP(Js::BackEndPhase, this)))
    {
        Output::Print(_u("-----------------------------------------------------------------------------\n"));

        if (IsLoopBody())
        {
            Output::Print(_u("************ IR after %s (%S) Loop %d ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()),
                m_workItem->GetLoopNumber());
        }
        else
        {
            Output::Print(_u("************ IR after %s (%S) ************\n"),
                Js::PhaseNames[tag],
                ExecutionModeName(m_workItem->GetJitMode()));
        }
        this->Dump(Js::Configuration::Global.flags.AsmDiff? IRDumpFlags_AsmDumpMode : IRDumpFlags_None);
    }
#endif

    if (tag == Js::RegAllocPhase)
    {
        this->legalizePostRegAlloc = true;
    }
#if DBG
    // Record pipeline progress and assert the expected phase ordering
    // (Lower -> Peeps -> Layout -> FinalLower; RegAlloc tracked independently).
    if (tag == Js::LowererPhase)
    {
        Assert(!this->isPostLower);
        this->isPostLower = true;
    }
    else if (tag == Js::RegAllocPhase)
    {
        Assert(!this->isPostRegAlloc);
        this->isPostRegAlloc = true;
    }
    else if (tag == Js::PeepsPhase)
    {
        Assert(this->isPostLower && !this->isPostLayout);
        this->isPostPeeps = true;
    }
    else if (tag == Js::LayoutPhase)
    {
        Assert(this->isPostPeeps && !this->isPostLayout);
        this->isPostLayout = true;
    }
    else if (tag == Js::FinalLowerPhase)
    {
        Assert(this->isPostLayout && !this->isPostFinalLower);
        this->isPostFinalLower = true;
    }
    if (this->isPostLower)
    {
#ifndef _M_ARM // Need to verify ARM is clean.
        // Sanity-check the IR invariants that must hold after lowering.
        DbCheckPostLower dbCheck(this);
        dbCheck.Check();
#endif
    }
    // Reclaim allocations whose free was deferred during the phase.
    this->m_alloc->MergeDelayFreeList();
#endif
}
  1209. StackSym *
  1210. Func::EnsureLoopParamSym()
  1211. {
  1212. if (this->m_loopParamSym == nullptr)
  1213. {
  1214. this->m_loopParamSym = StackSym::New(TyMachPtr, this);
  1215. }
  1216. return this->m_loopParamSym;
  1217. }
  1218. void
  1219. Func::UpdateMaxInlineeArgOutSize(uint inlineeArgOutSize)
  1220. {
  1221. if (this->maxInlineeArgOutSize < inlineeArgOutSize)
  1222. {
  1223. this->maxInlineeArgOutSize = inlineeArgOutSize;
  1224. }
  1225. }
  1226. void
  1227. Func::BeginClone(Lowerer * lowerer, JitArenaAllocator *alloc)
  1228. {
  1229. Assert(this->IsTopFunc());
  1230. AssertMsg(m_cloner == nullptr, "Starting new clone while one is in progress");
  1231. m_cloner = JitAnew(alloc, Cloner, lowerer, alloc);
  1232. if (m_cloneMap == nullptr)
  1233. {
  1234. m_cloneMap = JitAnew(alloc, InstrMap, alloc, 7);
  1235. }
  1236. }
  1237. void
  1238. Func::EndClone()
  1239. {
  1240. Assert(this->IsTopFunc());
  1241. if (m_cloner)
  1242. {
  1243. m_cloner->Finish();
  1244. JitAdelete(m_cloner->alloc, m_cloner);
  1245. m_cloner = nullptr;
  1246. }
  1247. }
  1248. IR::SymOpnd *
  1249. Func::GetInlineeOpndAtOffset(int32 offset)
  1250. {
  1251. Assert(IsInlinee());
  1252. StackSym *stackSym = CreateInlineeStackSym();
  1253. this->SetArgOffset(stackSym, stackSym->m_offset + offset);
  1254. Assert(stackSym->m_offset >= 0);
  1255. return IR::SymOpnd::New(stackSym, 0, TyMachReg, this);
  1256. }
StackSym *
Func::CreateInlineeStackSym()
{
    // Make sure this is an inlinee and that GlobOpt has initialized the offset
    // in the inlinee's frame.
    Assert(IsInlinee());
    Assert(m_inlineeFrameStartSym->m_offset != -1);

    // (Js::ArgSlot)-1 requests an anonymous arg slot sym, which is then pinned
    // to the start of the inlinee frame.
    StackSym *stackSym = m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
    stackSym->m_isInlinedArgSlot = true;
    stackSym->m_offset = m_inlineeFrameStartSym->m_offset;
    stackSym->m_allocated = true;
    return stackSym;
}
  1270. uint16
  1271. Func::GetArgUsedForBranch() const
  1272. {
  1273. // this value can change while JITing, so or these together
  1274. return GetJITFunctionBody()->GetArgUsedForBranch() | GetJITOutput()->GetArgUsedForBranch();
  1275. }
  1276. intptr_t
  1277. Func::GetJittedLoopIterationsSinceLastBailoutAddress() const
  1278. {
  1279. Assert(this->m_workItem->Type() == JsLoopBodyWorkItemType);
  1280. return m_workItem->GetJittedLoopIterationsSinceLastBailoutAddr();
  1281. }
  1282. intptr_t
  1283. Func::GetWeakFuncRef() const
  1284. {
  1285. // TODO: OOP JIT figure out if this can be null
  1286. return m_workItem->GetJITTimeInfo()->GetWeakFuncRef();
  1287. }
  1288. intptr_t
  1289. Func::GetRuntimeInlineCache(const uint index) const
  1290. {
  1291. if(m_runtimeInfo != nullptr && m_runtimeInfo->HasClonedInlineCaches())
  1292. {
  1293. intptr_t inlineCache = m_runtimeInfo->GetClonedInlineCache(index);
  1294. if(inlineCache)
  1295. {
  1296. return inlineCache;
  1297. }
  1298. }
  1299. return GetJITFunctionBody()->GetInlineCache(index);
  1300. }
  1301. JITTimePolymorphicInlineCache *
  1302. Func::GetRuntimePolymorphicInlineCache(const uint index) const
  1303. {
  1304. if (this->m_polymorphicInlineCacheInfo && this->m_polymorphicInlineCacheInfo->HasInlineCaches())
  1305. {
  1306. return this->m_polymorphicInlineCacheInfo->GetInlineCache(index);
  1307. }
  1308. return nullptr;
  1309. }
  1310. byte
  1311. Func::GetPolyCacheUtilToInitialize(const uint index) const
  1312. {
  1313. return this->GetRuntimePolymorphicInlineCache(index) ? this->GetPolyCacheUtil(index) : PolymorphicInlineCacheUtilizationMinValue;
  1314. }
  1315. byte
  1316. Func::GetPolyCacheUtil(const uint index) const
  1317. {
  1318. return this->m_polymorphicInlineCacheInfo->GetUtil(index);
  1319. }
  1320. ObjTypeSpecFldInfo*
  1321. Func::GetObjTypeSpecFldInfo(const uint index) const
  1322. {
  1323. if (GetJITFunctionBody()->GetInlineCacheCount() == 0)
  1324. {
  1325. Assert(UNREACHED);
  1326. return nullptr;
  1327. }
  1328. return GetWorkItem()->GetJITTimeInfo()->GetObjTypeSpecFldInfo(index);
  1329. }
  1330. ObjTypeSpecFldInfo*
  1331. Func::GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const
  1332. {
  1333. Assert(propertyInfoId < GetTopFunc()->GetWorkItem()->GetJITTimeInfo()->GetGlobalObjTypeSpecFldInfoCount());
  1334. return GetTopFunc()->m_globalObjTypeSpecFldInfoArray[propertyInfoId];
  1335. }
  1336. void
  1337. Func::EnsurePinnedTypeRefs()
  1338. {
  1339. if (this->pinnedTypeRefs == nullptr)
  1340. {
  1341. this->pinnedTypeRefs = JitAnew(this->m_alloc, TypeRefSet, this->m_alloc);
  1342. }
  1343. }
  1344. void
  1345. Func::PinTypeRef(void* typeRef)
  1346. {
  1347. EnsurePinnedTypeRefs();
  1348. this->pinnedTypeRefs->AddNew(typeRef);
  1349. }
  1350. void
  1351. Func::EnsureSingleTypeGuards()
  1352. {
  1353. if (this->singleTypeGuards == nullptr)
  1354. {
  1355. this->singleTypeGuards = JitAnew(this->m_alloc, TypePropertyGuardDictionary, this->m_alloc);
  1356. }
  1357. }
// Returns the (monomorphic) type property guard for 'typeAddr', creating and
// registering a new one on first request.
Js::JitTypePropertyGuard*
Func::GetOrCreateSingleTypeGuard(intptr_t typeAddr)
{
    EnsureSingleTypeGuards();

    Js::JitTypePropertyGuard* guard = nullptr;
    if (!this->singleTypeGuards->TryGetValue(typeAddr, &guard))
    {
        // Property guards are allocated by NativeCodeData::Allocator so that their lifetime extends as long as the EntryPointInfo is alive.
        // Each new guard also consumes the next indexed-guard slot.
        guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitTypePropertyGuard, typeAddr, this->indexedPropertyGuardCount++);
        this->singleTypeGuards->Add(typeAddr, guard);
    }
    else
    {
        Assert(guard->GetTypeAddr() == typeAddr);
    }

    return guard;
}
  1375. void
  1376. Func::EnsureEquivalentTypeGuards()
  1377. {
  1378. AssertMsg(!PHASE_OFF(Js::EquivObjTypeSpecPhase, this), "Why do we have equivalent type guards if we don't do equivalent object type spec?");
  1379. if (this->equivalentTypeGuards == nullptr)
  1380. {
  1381. this->equivalentTypeGuards = JitAnew(this->m_alloc, EquivalentTypeGuardList, this->m_alloc);
  1382. }
  1383. }
// Creates an equivalent-type guard for 'type' and registers it (with its
// cache) via InitializeEquivalentTypeGuard.
Js::JitEquivalentTypeGuard*
Func::CreateEquivalentTypeGuard(JITTypeHolder type, uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();
    Js::JitEquivalentTypeGuard* guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitEquivalentTypeGuard, type->GetAddr(), this->indexedPropertyGuardCount++, objTypeSpecFldId);
    this->InitializeEquivalentTypeGuard(guard);
    return guard;
}
// Creates a polymorphic equivalent-type guard (no single type address) and
// registers it via InitializeEquivalentTypeGuard.
Js::JitPolyEquivalentTypeGuard*
Func::CreatePolyEquivalentTypeGuard(uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();
    Js::JitPolyEquivalentTypeGuard* guard = NativeCodeDataNewNoFixup(GetNativeCodeDataAllocator(), Js::JitPolyEquivalentTypeGuard, this->indexedPropertyGuardCount++, objTypeSpecFldId);
    this->InitializeEquivalentTypeGuard(guard);
    return guard;
}
// Attaches an equivalent-type cache to the guard, links cache and guard to
// each other, and adds the guard to this function's list.
void
Func::InitializeEquivalentTypeGuard(Js::JitEquivalentTypeGuard * guard)
{
    // If we want to hard code the address of the cache, we will need to go back to allocating it from the native code data allocator.
    // We would then need to maintain consistency (double write) to both the recycler allocated cache and the one on the heap.
    Js::EquivalentTypeCache* cache = nullptr;
    if (this->IsOOPJIT())
    {
        // OOP JIT: the cache lives in the JIT arena and is transferred separately.
        cache = JitAnewZ(this->m_alloc, Js::EquivalentTypeCache);
    }
    else
    {
        // In-proc JIT: allocate directly from the transfer data allocator.
        cache = NativeCodeDataNewZNoFixup(GetTransferDataAllocator(), Js::EquivalentTypeCache);
    }
    guard->SetCache(cache);

    // Give the cache a back-pointer to the guard so that the guard can be cleared at runtime if necessary.
    cache->SetGuard(guard);
    this->equivalentTypeGuards->Prepend(guard);
}
  1419. void
  1420. Func::EnsurePropertyGuardsByPropertyId()
  1421. {
  1422. if (this->propertyGuardsByPropertyId == nullptr)
  1423. {
  1424. this->propertyGuardsByPropertyId = JitAnew(this->m_alloc, PropertyGuardByPropertyIdMap, this->m_alloc);
  1425. }
  1426. }
  1427. void
  1428. Func::EnsureCtorCachesByPropertyId()
  1429. {
  1430. if (this->ctorCachesByPropertyId == nullptr)
  1431. {
  1432. this->ctorCachesByPropertyId = JitAnew(this->m_alloc, CtorCachesByPropertyIdMap, this->m_alloc);
  1433. }
  1434. }
  1435. void
  1436. Func::LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard)
  1437. {
  1438. Assert(guard != nullptr);
  1439. Assert(guard->GetValue() != NULL);
  1440. Assert(this->propertyGuardsByPropertyId != nullptr);
  1441. IndexedPropertyGuardSet* set;
  1442. if (!this->propertyGuardsByPropertyId->TryGetValue(propertyId, &set))
  1443. {
  1444. set = JitAnew(this->m_alloc, IndexedPropertyGuardSet, this->m_alloc);
  1445. this->propertyGuardsByPropertyId->Add(propertyId, set);
  1446. }
  1447. set->Item(guard);
  1448. }
  1449. void
  1450. Func::LinkCtorCacheToPropertyId(Js::PropertyId propertyId, JITTimeConstructorCache* cache)
  1451. {
  1452. Assert(cache != nullptr);
  1453. Assert(this->ctorCachesByPropertyId != nullptr);
  1454. CtorCacheSet* set;
  1455. if (!this->ctorCachesByPropertyId->TryGetValue(propertyId, &set))
  1456. {
  1457. set = JitAnew(this->m_alloc, CtorCacheSet, this->m_alloc);
  1458. this->ctorCachesByPropertyId->Add(propertyId, set);
  1459. }
  1460. set->Item(cache->GetRuntimeCacheAddr());
  1461. }
  1462. JITTimeConstructorCache* Func::GetConstructorCache(const Js::ProfileId profiledCallSiteId)
  1463. {
  1464. Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
  1465. Assert(this->constructorCaches != nullptr);
  1466. return this->constructorCaches[profiledCallSiteId];
  1467. }
  1468. void Func::SetConstructorCache(const Js::ProfileId profiledCallSiteId, JITTimeConstructorCache* constructorCache)
  1469. {
  1470. Assert(profiledCallSiteId < GetJITFunctionBody()->GetProfiledCallSiteCount());
  1471. Assert(constructorCache != nullptr);
  1472. Assert(this->constructorCaches != nullptr);
  1473. Assert(this->constructorCaches[profiledCallSiteId] == nullptr);
  1474. this->constructorCacheCount++;
  1475. this->constructorCaches[profiledCallSiteId] = constructorCache;
  1476. }
  1477. void Func::EnsurePropertiesWrittenTo()
  1478. {
  1479. if (this->propertiesWrittenTo == nullptr)
  1480. {
  1481. this->propertiesWrittenTo = JitAnew(this->m_alloc, PropertyIdSet, this->m_alloc);
  1482. }
  1483. }
  1484. void Func::EnsureCallSiteToArgumentsOffsetFixupMap()
  1485. {
  1486. if (this->callSiteToArgumentsOffsetFixupMap == nullptr)
  1487. {
  1488. this->callSiteToArgumentsOffsetFixupMap = JitAnew(this->m_alloc, CallSiteToArgumentsOffsetFixupMap, this->m_alloc);
  1489. }
  1490. }
  1491. IR::LabelInstr *
  1492. Func::GetFuncStartLabel()
  1493. {
  1494. return m_funcStartLabel;
  1495. }
  1496. IR::LabelInstr *
  1497. Func::EnsureFuncStartLabel()
  1498. {
  1499. if(m_funcStartLabel == nullptr)
  1500. {
  1501. m_funcStartLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1502. m_funcStartLabel->m_isDataLabel = true;
  1503. }
  1504. return m_funcStartLabel;
  1505. }
  1506. IR::LabelInstr *
  1507. Func::GetFuncEndLabel()
  1508. {
  1509. return m_funcEndLabel;
  1510. }
  1511. IR::LabelInstr *
  1512. Func::EnsureFuncEndLabel()
  1513. {
  1514. if(m_funcEndLabel == nullptr)
  1515. {
  1516. m_funcEndLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1517. m_funcEndLabel->m_isDataLabel = true;
  1518. }
  1519. return m_funcEndLabel;
  1520. }
  1521. void
  1522. Func::EnsureStackArgWithFormalsTracker()
  1523. {
  1524. if (stackArgWithFormalsTracker == nullptr)
  1525. {
  1526. stackArgWithFormalsTracker = JitAnew(m_alloc, StackArgWithFormalsTracker, m_alloc);
  1527. }
  1528. }
  1529. BOOL
  1530. Func::IsFormalsArraySym(SymID symId)
  1531. {
  1532. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsArraySyms() == nullptr)
  1533. {
  1534. return false;
  1535. }
  1536. return stackArgWithFormalsTracker->GetFormalsArraySyms()->Test(symId);
  1537. }
  1538. void
  1539. Func::TrackFormalsArraySym(SymID symId)
  1540. {
  1541. EnsureStackArgWithFormalsTracker();
  1542. stackArgWithFormalsTracker->SetFormalsArraySyms(symId);
  1543. }
  1544. void
  1545. Func::TrackStackSymForFormalIndex(Js::ArgSlot formalsIndex, StackSym * sym)
  1546. {
  1547. EnsureStackArgWithFormalsTracker();
  1548. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1549. stackArgWithFormalsTracker->SetStackSymInFormalsIndexMap(sym, formalsIndex, formalsCount);
  1550. }
  1551. StackSym *
  1552. Func::GetStackSymForFormal(Js::ArgSlot formalsIndex)
  1553. {
  1554. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1555. {
  1556. return nullptr;
  1557. }
  1558. Js::ArgSlot formalsCount = GetJITFunctionBody()->GetInParamsCount() - 1;
  1559. StackSym ** formalsIndexToStackSymMap = stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap();
  1560. AssertMsg(formalsIndex < formalsCount, "OutOfRange ? ");
  1561. return formalsIndexToStackSymMap[formalsIndex];
  1562. }
  1563. bool
  1564. Func::HasStackSymForFormal(Js::ArgSlot formalsIndex)
  1565. {
  1566. if (stackArgWithFormalsTracker == nullptr || stackArgWithFormalsTracker->GetFormalsIndexToStackSymMap() == nullptr)
  1567. {
  1568. return false;
  1569. }
  1570. return GetStackSymForFormal(formalsIndex) != nullptr;
  1571. }
  1572. void
  1573. Func::SetScopeObjSym(StackSym * sym)
  1574. {
  1575. EnsureStackArgWithFormalsTracker();
  1576. stackArgWithFormalsTracker->SetScopeObjSym(sym);
  1577. }
  1578. StackSym *
  1579. Func::GetNativeCodeDataSym() const
  1580. {
  1581. Assert(IsOOPJIT());
  1582. return m_nativeCodeDataSym;
  1583. }
  1584. void
  1585. Func::SetNativeCodeDataSym(StackSym * opnd)
  1586. {
  1587. Assert(IsOOPJIT());
  1588. m_nativeCodeDataSym = opnd;
  1589. }
  1590. StackSym*
  1591. Func::GetScopeObjSym()
  1592. {
  1593. if (stackArgWithFormalsTracker == nullptr)
  1594. {
  1595. return nullptr;
  1596. }
  1597. return stackArgWithFormalsTracker->GetScopeObjSym();
  1598. }
  1599. BVSparse<JitArenaAllocator> *
  1600. StackArgWithFormalsTracker::GetFormalsArraySyms()
  1601. {
  1602. return formalsArraySyms;
  1603. }
  1604. void
  1605. StackArgWithFormalsTracker::SetFormalsArraySyms(SymID symId)
  1606. {
  1607. if (formalsArraySyms == nullptr)
  1608. {
  1609. formalsArraySyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1610. }
  1611. formalsArraySyms->Set(symId);
  1612. }
  1613. StackSym **
  1614. StackArgWithFormalsTracker::GetFormalsIndexToStackSymMap()
  1615. {
  1616. return formalsIndexToStackSymMap;
  1617. }
  1618. void
  1619. StackArgWithFormalsTracker::SetStackSymInFormalsIndexMap(StackSym * sym, Js::ArgSlot formalsIndex, Js::ArgSlot formalsCount)
  1620. {
  1621. if(formalsIndexToStackSymMap == nullptr)
  1622. {
  1623. formalsIndexToStackSymMap = JitAnewArrayZ(alloc, StackSym*, formalsCount);
  1624. }
  1625. AssertMsg(formalsIndex < formalsCount, "Out of range ?");
  1626. formalsIndexToStackSymMap[formalsIndex] = sym;
  1627. }
  1628. void
  1629. StackArgWithFormalsTracker::SetScopeObjSym(StackSym * sym)
  1630. {
  1631. m_scopeObjSym = sym;
  1632. }
  1633. StackSym *
  1634. StackArgWithFormalsTracker::GetScopeObjSym()
  1635. {
  1636. return m_scopeObjSym;
  1637. }
  1638. void
  1639. Cloner::AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone)
  1640. {
  1641. if (!this->instrFirst)
  1642. {
  1643. this->instrFirst = instrClone;
  1644. }
  1645. this->instrLast = instrClone;
  1646. }
  1647. void
  1648. Cloner::Finish()
  1649. {
  1650. this->RetargetClonedBranches();
  1651. if (this->lowerer)
  1652. {
  1653. lowerer->LowerRange(this->instrFirst, this->instrLast, false, false);
  1654. }
  1655. }
// Walks the cloned instruction range and retargets each cloned branch (so it
// points at the cloned copy of its target rather than the original). No-op
// unless retargeting was requested for this clone.
void
Cloner::RetargetClonedBranches()
{
    if (!this->fRetargetClonedBranch)
    {
        return;
    }
    FOREACH_INSTR_IN_RANGE(instr, this->instrFirst, this->instrLast)
    {
        if (instr->IsBranchInstr())
        {
            instr->AsBranchInstr()->RetargetClonedBranch();
        }
    }
    NEXT_INSTR_IN_RANGE;
}
  1672. void Func::ThrowIfScriptClosed()
  1673. {
  1674. if (GetScriptContextInfo()->IsClosed())
  1675. {
  1676. // Should not be jitting something in the foreground when the script context is actually closed
  1677. Assert(IsBackgroundJIT() || !GetScriptContext()->IsActuallyClosed());
  1678. throw Js::OperationAbortedException();
  1679. }
  1680. }
// Produces an indirect operand [reg + offset] that addresses 'address'.
// Reuses a previously hoisted constant-address register when 'address' lies
// within a signed 32-bit displacement of its value; otherwise emits a new
// load of 'largeConstOpnd' into a fresh register near the function entry.
// Returns nullptr once constant-address hoisting has been disabled.
IR::IndirOpnd * Func::GetConstantAddressIndirOpnd(intptr_t address, IR::Opnd * largeConstOpnd, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode)
{
    Assert(this->GetTopFunc() == this);
    if (!canHoistConstantAddressLoad)
    {
        // We can't hoist constant address load after lower, as we can't mark the sym as
        // live on back edge
        return nullptr;
    }
    int offset = 0;
    // Search existing constant-address regs for one whose loaded constant is
    // within a DWORD displacement of 'address'; the delta is captured in 'offset'.
    IR::RegOpnd ** foundRegOpnd = this->constantAddressRegOpnd.Find([address, &offset](IR::RegOpnd * regOpnd)
    {
        // Each tracked reg has a single def that loads either an address or an int constant.
        Assert(regOpnd->m_sym->IsSingleDef());
        Assert(regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() || regOpnd->m_sym->m_instrDef->GetSrc1()->IsIntConstOpnd());
        void * curr = regOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd() ?
            regOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address :
            (void *)regOpnd->m_sym->m_instrDef->GetSrc1()->AsIntConstOpnd()->GetValue();
        ptrdiff_t diff = (uintptr_t)address - (uintptr_t)curr;
        if (!Math::FitsInDWord(diff))
        {
            return false;
        }
        offset = (int)diff;
        return true;
    });
    IR::RegOpnd * addressRegOpnd;
    if (foundRegOpnd != nullptr)
    {
        addressRegOpnd = *foundRegOpnd;
    }
    else
    {
        // No reusable register: materialize the constant into a new reg and
        // track it for future reuse.
        Assert(offset == 0);
        addressRegOpnd = IR::RegOpnd::New(TyMachPtr, this);
        IR::Instr *const newInstr =
            IR::Instr::New(
                loadOpCode,
                addressRegOpnd,
                largeConstOpnd,
                this);
        this->constantAddressRegOpnd.Prepend(addressRegOpnd);
        // Keep all constant-address loads clustered: insert before the last
        // such load, or at the function entry if this is the first one.
        IR::Instr * insertBeforeInstr = this->lastConstantAddressRegLoadInstr;
        if (insertBeforeInstr == nullptr)
        {
            insertBeforeInstr = this->GetFunctionEntryInsertionPoint();
            this->lastConstantAddressRegLoadInstr = newInstr;
        }
        insertBeforeInstr->InsertBefore(newInstr);
    }
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(addressRegOpnd, offset, type, this, true);
#if DBG_DUMP
    // TODO: michhol make intptr_t
    indirOpnd->SetAddrKind(kind, (void*)address);
#endif
    return indirOpnd;
}
  1737. void Func::MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv)
  1738. {
  1739. Assert(this->GetTopFunc() == this);
  1740. this->constantAddressRegOpnd.Iterate([bv](IR::RegOpnd * regOpnd)
  1741. {
  1742. bv->Set(regOpnd->m_sym->m_id);
  1743. });
  1744. }
  1745. IR::Instr *
  1746. Func::GetFunctionEntryInsertionPoint()
  1747. {
  1748. Assert(this->GetTopFunc() == this);
  1749. IR::Instr * insertInsert = this->lastConstantAddressRegLoadInstr;
  1750. if (insertInsert != nullptr)
  1751. {
  1752. return insertInsert->m_next;
  1753. }
  1754. insertInsert = this->m_headInstr;
  1755. if (this->HasTry())
  1756. {
  1757. // Insert it inside the root region
  1758. insertInsert = insertInsert->m_next;
  1759. Assert(insertInsert->IsLabelInstr() && insertInsert->AsLabelInstr()->GetRegion()->GetType() == RegionTypeRoot);
  1760. }
  1761. return insertInsert->m_next;
  1762. }
// Boxes a double into a Js::Var for the generated code. With FLOATVAR the
// number representation needs no allocator; otherwise the number is allocated
// via the in-proc codegen number allocator, or the cross-process allocator
// when jitting out-of-process.
Js::Var
Func::AllocateNumber(double value)
{
    Js::Var number = nullptr;
#if FLOATVAR
    number = Js::JavascriptNumber::NewCodeGenInstance((double)value, nullptr);
#else
    if (!IsOOPJIT()) // in-proc jit
    {
        number = Js::JavascriptNumber::NewCodeGenInstance(GetNumberAllocator(), (double)value, GetScriptContext());
    }
    else // OOP JIT
    {
        number = GetXProcNumberAllocator()->AllocateNumber(this, value);
    }
#endif
    return number;
}
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// Prints "Function <display name> (<debug number set>)" to the dump output.
void
Func::DumpFullFunctionName()
{
    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
    Output::Print(_u("Function %s (%s)"), GetJITFunctionBody()->GetDisplayName(), GetDebugNumberSet(debugStringBuffer));
}
#endif
  1789. void
  1790. Func::UpdateForInLoopMaxDepth(uint forInLoopMaxDepth)
  1791. {
  1792. Assert(this->IsTopFunc());
  1793. this->m_forInLoopMaxDepth = max(this->m_forInLoopMaxDepth, forInLoopMaxDepth);
  1794. }
  1795. int
  1796. Func::GetForInEnumeratorArrayOffset() const
  1797. {
  1798. Func const* topFunc = this->GetTopFunc();
  1799. Assert(this->m_forInLoopBaseDepth + this->GetJITFunctionBody()->GetForInLoopDepth() <= topFunc->m_forInLoopMaxDepth);
  1800. return topFunc->m_forInEnumeratorArrayOffset
  1801. + this->m_forInLoopBaseDepth * sizeof(Js::ForInObjectEnumerator);
  1802. }
#if DBG_DUMP
///----------------------------------------------------------------------------
///
/// Func::DumpHeader
///
///     Prints the dump banner: full function name, instruction count, and the
///     encoded code size when it is known (i.e. after encoding).
///
///----------------------------------------------------------------------------
void
Func::DumpHeader()
{
    Output::Print(_u("-----------------------------------------------------------------------------\n"));
    DumpFullFunctionName();
    Output::SkipToColumn(50);
    Output::Print(_u("Instr Count:%d"), GetInstrCount());
    // Code size is only available once the function has been encoded.
    if(m_codeSize > 0)
    {
        Output::Print(_u("\t\tSize:%d\n\n"), m_codeSize);
    }
    else
    {
        Output::Print(_u("\n\n"));
    }
}
///----------------------------------------------------------------------------
///
/// Func::Dump
///
///     Dumps the header followed by every instruction in the function,
///     including any recorded glob-opt instruction strings, then flushes
///     the output stream.
///
///----------------------------------------------------------------------------
void
Func::Dump(IRDumpFlags flags)
{
    this->DumpHeader();
    FOREACH_INSTR_IN_FUNC(instr, this)
    {
        instr->DumpGlobOptInstrString();
        instr->Dump(flags);
    }
    NEXT_INSTR_IN_FUNC;
    Output::Flush();
}
  1841. void
  1842. Func::Dump()
  1843. {
  1844. this->Dump(IRDumpFlags_None);
  1845. }
  1846. #endif
#if DBG_DUMP || defined(ENABLE_IR_VIEWER)
// Best-effort reverse lookup of a vtable address to a class name for dumps.
// The vtable map is built lazily and only exists in DBG builds; non-DBG
// builds always return "". Strips the "class " prefix that the registry's
// names carry. NOTE(review): in DBG builds this can return nullptr when the
// address is not in the map — callers are expected to handle that.
LPCSTR
Func::GetVtableName(INT_PTR address)
{
#if DBG
    if (vtableMap == nullptr)
    {
        vtableMap = VirtualTableRegistry::CreateVtableHashMap(this->m_alloc);
    };
    LPCSTR name = vtableMap->Lookup(address, nullptr);
    if (name)
    {
        // Skip over a leading "class " (length computed minus the NUL).
        if (strncmp(name, "class ", _countof("class ") - 1) == 0)
        {
            name += _countof("class ") - 1;
        }
    }
    return name;
#else
    return "";
#endif
}
#endif
  1870. #if DBG_DUMP | defined(VTUNE_PROFILING)
  1871. bool Func::DoRecordNativeMap() const
  1872. {
  1873. #if defined(VTUNE_PROFILING)
  1874. if (VTuneChakraProfile::isJitProfilingActive)
  1875. {
  1876. return true;
  1877. }
  1878. #endif
  1879. #if DBG_DUMP
  1880. return PHASE_DUMP(Js::EncoderPhase, this) && Js::Configuration::Global.flags.Verbose;
  1881. #else
  1882. return false;
  1883. #endif
  1884. }
  1885. #endif
  1886. #ifdef PERF_HINT
  1887. void WritePerfHint(PerfHints hint, Func* func, uint byteCodeOffset /*= Js::Constants::NoByteCodeOffset*/)
  1888. {
  1889. if (!func->IsOOPJIT())
  1890. {
  1891. WritePerfHint(hint, (Js::FunctionBody*)func->GetJITFunctionBody()->GetAddr(), byteCodeOffset);
  1892. }
  1893. }
  1894. #endif