ByteCodeWriter.cpp 127 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
7327832793280328132823283328432853286328732883289329032913292329332943295329632973298329933003301330233033304330533063307330833093310331133123313331433153316331733183319332033213322332333243325332633273328332933303331333233333334333533363337333833393340334133423343334433453346334733483349335033513352335333543355335633573358335933603361336233633364336533663367336833693370337133723373337433753376337733783379338033813382338333843385338633873388338933903391339233933394339533963397339833993400340134023403340434053406340734083409341034113412341334143415341634173418341934203421342234233424342534263427342834293430343134323433343434353436343734383439344034413442344334443445344634473448344934503451345234533454345534563457345834593460346134623463346434653466346734683469347034713472347334743475347634773478347934803481348234833484348534863487348834893490349134923493349434953496349734983499350035013502350335043505350635073508350935103511351235133514351535163517351835193520
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Copyright (c) ChakraCore Project Contributors. All rights reserved.
  4. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  5. //-------------------------------------------------------------------------------------------------------
  6. #include "RuntimeByteCodePch.h"
  7. namespace Js
  8. {
// Br and BrLong must use a fixed-size (non-multi-size) layout: the branch
// sizes computed below are compile-time constants that branch-island and
// jump-around emission rely on.
CompileAssert(!OpCodeInfo<Js::OpCode::Br>::HasMultiSizeLayout);
CompileAssert(!OpCodeInfo<Js::OpCode::BrLong>::HasMultiSizeLayout);
// Total encoded size (opcode bytes + operand layout) of an unconditional
// short branch and of a long branch, respectively.
const uint ByteCodeWriter::JumpAroundSize = OpCodeUtil::EncodedSize(Js::OpCode::Br, SmallLayout) + sizeof(OpLayoutBr);
const uint ByteCodeWriter::LongBranchSize = OpCodeUtil::EncodedSize(Js::OpCode::BrLong, SmallLayout) + sizeof(OpLayoutBrLong);
  13. void ByteCodeWriter::Create()
  14. {
  15. m_loopNest = 0;
  16. m_byteCodeCount = 0;
  17. m_byteCodeWithoutLDACount = 0;
  18. m_byteCodeInLoopCount = 0;
  19. m_functionWrite = nullptr;
  20. m_pMatchingNode = nullptr;
  21. m_matchingNodeRefCount = 0;
  22. m_tmpRegCount = 0;
  23. DebugOnly(isInitialized = false);
  24. DebugOnly(isInUse = false);
  25. }
// Allocates all arena-backed side structures and the byte-code buffer.
// Must be called exactly once after Create(); enforced by the debug flags.
void ByteCodeWriter::InitData(ArenaAllocator* alloc, int32 initCodeBufferSize)
{
    Assert(!isInUse);
    Assert(!isInitialized);
    DebugOnly(isInitialized = true);
    // Label table (label id -> byte offset), pending jump fixups, and
    // per-loop bookkeeping collected while writing.
    m_labelOffsets = JsUtil::List<uint, ArenaAllocator>::New(alloc);
    m_jumpOffsets = JsUtil::List<JumpInfo, ArenaAllocator>::New(alloc);
    m_loopHeaders = JsUtil::List<LoopHeaderData, ArenaAllocator>::New(alloc);
    m_byteCodeData.Create(initCodeBufferSize, alloc);
    m_subexpressionNodesStack = Anew(alloc, JsUtil::Stack<SubexpressionNode>, alloc);
    // These data units have exponential growth strategy - let's start small and grow them
    m_auxiliaryData.Create(256, alloc);
    m_auxContextData.Create(256, alloc);
    // 17 is the initial bucket count for the call-reg -> LdFld-cache map.
    callRegToLdFldCacheIndexMap = Anew(alloc, CallRegToLdFldCacheIndexMap,
        alloc,
        17);
#ifdef BYTECODE_BRANCH_ISLAND
    // Branch islands let short branches reach distant targets; start with
    // no opcode written yet and the first island offset at 0.
    useBranchIsland = true;
    inEnsureLongBranch = false;
    lastOpcode = Js::OpCode::FunctionEntry;
    this->UpdateNextBranchIslandOffset(0, 0);
    m_longJumpOffsets = JsUtil::List<JumpInfo, ArenaAllocator>::New(alloc);
#endif
}
  50. ///----------------------------------------------------------------------------
  51. ///
  52. /// Begin() configures this instance to generate byte-code for a specific
  53. /// JavascriptFunction:
  54. ///
  55. /// - Byte-code will be written until the caller uses End() to close and commit
  56. /// the stream to the given function, or Reset() to discard and reset to an
  57. /// empty state.
  58. ///
  59. /// - Each ByteCodeWriter may be used multiple times, but may only generate a
  60. /// single byte-code stream for a single function at a time.
  61. ///
  62. ///----------------------------------------------------------------------------
// Attaches the writer to a FunctionBody and records per-function emission
// options (loop-body JIT, interrupt probes, debug mode). Paired with End()
// (commit) or Reset() (discard).
void ByteCodeWriter::Begin(FunctionBody* functionWrite, ArenaAllocator* alloc, bool doJitLoopBodies, bool hasLoop, bool inDebugMode)
{
    Assert(!isInUse);
    AssertMsg(m_functionWrite == nullptr, "Cannot nest Begin() calls");
    AssertMsg(functionWrite != nullptr, "Must have valid function to write");
    AssertMsg(functionWrite->GetByteCode() == nullptr, "Function should not already have a byte-code body");
    AssertMsg(functionWrite->GetLocalsCount() > 0, "Must always have R0 for return-value");
    DebugOnly(isInUse = true);
    m_functionWrite = functionWrite;
    m_doJitLoopBodies = doJitLoopBodies;
    // Whether the thread context wants interrupt probes emitted for this function.
    m_doInterruptProbe = functionWrite->GetScriptContext()->GetThreadContext()->DoInterruptProbe(functionWrite);
    m_hasLoop = hasLoop;
    m_isInDebugMode = inDebugMode;
}
// Back-patches every recorded jump site in the finished byte-code buffer.
// T is the encoded offset width (JumpOffset or LongJumpOffset). At each
// patch site the buffer temporarily holds the byte distance from the patch
// offset to the end of the instruction's layout; it is replaced here with
// the final relative jump offset to the label's marked position.
template <typename T>
void ByteCodeWriter::PatchJumpOffset(JsUtil::List<JumpInfo, ArenaAllocator> * jumpOffset, byte * byteBuffer, uint byteCount)
{
    jumpOffset->Map([=](int index, JumpInfo& jumpInfo)
    {
        //
        // Read "labelID" stored at the offset within the byte-code.
        //
        uint jumpByteOffset = jumpInfo.patchOffset;
        // NOTE(review): unsigned arithmetic — assumes byteCount >= sizeof(T),
        // otherwise byteCount - sizeof(T) wraps; presumably guaranteed by the
        // EndOfBlock opcode written before patching. TODO confirm.
        AssertMsg(jumpByteOffset < byteCount - sizeof(T),
            "Must have valid jump site within byte-code to back-patch");
        unaligned T * pnBackPatch = reinterpret_cast<unaligned T *>(&byteBuffer[jumpByteOffset]);
        ByteCodeLabel labelID = jumpInfo.labelId;
        CheckLabel(labelID);
        // The placeholder value is the distance to the end of this layout;
        // layouts are small, hence the < 0x20 sanity check.
        uint offsetToEndOfLayoutByteSize = *pnBackPatch;
        Assert(offsetToEndOfLayoutByteSize < 0x20);
        //
        // Use "labelID" to lookup the destination offset, replacing the temporary data in the
        // byte-code.
        //
        uint labelByteOffset = m_labelOffsets->Item(labelID);
        AssertMsg(labelByteOffset != UINT_MAX, "ERROR: Destination labels must be marked before closing");
        // Relative to the end of the branch instruction, which sits at
        // jumpByteOffset + offsetToEndOfLayoutByteSize.
        int relativeJumpOffset = labelByteOffset - jumpByteOffset - offsetToEndOfLayoutByteSize;
#ifdef BYTECODE_BRANCH_ISLAND
        // Short branches must stay within the branch limit when islands are in use.
        Assert(!useBranchIsland || (jumpOffset != m_jumpOffsets || (relativeJumpOffset < GetBranchLimit() && relativeJumpOffset >= -GetBranchLimit())));
#endif
        // The offset must fit in the encoded width without truncation.
        Assert((T)relativeJumpOffset == relativeJumpOffset);
        *pnBackPatch = (T)relativeJumpOffset;
    });
}
  107. ///----------------------------------------------------------------------------
  108. ///
  109. /// End() completes generating byte-code for the given JavascriptFunction and
  110. /// commits it to the function's body.
  111. ///
  112. ///----------------------------------------------------------------------------
#ifdef LOG_BYTECODE_AST_RATIO
void ByteCodeWriter::End(int32 currentAstSize, int32 maxAstSize)
#else
void ByteCodeWriter::End()
#endif
{
    Assert(isInUse);
    CheckOpen();
    // Terminate the stream so the interpreter never runs off the end.
    Empty(OpCode::EndOfBlock);
    ByteBlock* finalByteCodeBlock = nullptr;
    ScriptContext* scriptContext = m_functionWrite->GetScriptContext();
    // Copy the arena-backed byte code into recycler memory for the function.
    m_byteCodeData.Copy(scriptContext->GetRecycler(), &finalByteCodeBlock);
    byte * byteBuffer = finalByteCodeBlock->GetBuffer();
    uint byteCount = m_byteCodeData.GetCurrentOffset();
    //
    // Update all branch targets with their actual label destinations.
    //
#ifdef BYTECODE_BRANCH_ISLAND
    if (useBranchIsland)
    {
        // Short branches and long (island) branches use different offset widths.
        PatchJumpOffset<JumpOffset>(m_jumpOffsets, byteBuffer, byteCount);
        PatchJumpOffset<LongJumpOffset>(m_longJumpOffsets, byteBuffer, byteCount);
    }
    else
    {
        // Without islands, all branches are encoded with the long width.
        PatchJumpOffset<LongJumpOffset>(m_jumpOffsets, byteBuffer, byteCount);
    }
#else
    PatchJumpOffset<JumpOffset>(m_jumpOffsets, byteBuffer, byteCount);
#endif
    // Patch up the root object load inline cache with the start index
    uint rootObjectLoadInlineCacheStart = this->m_functionWrite->GetRootObjectLoadInlineCacheStart();
    rootObjectLoadInlineCacheOffsets.Map([=](size_t offset)
    {
        Assert(offset < byteCount - sizeof(int));
        unaligned uint * pnBackPatch = reinterpret_cast<unaligned uint *>(&byteBuffer[offset]);
        // Cache indices were written relative to 0; rebase them now that the
        // function's cache layout is known.
        *pnBackPatch += rootObjectLoadInlineCacheStart;
    });
    // Patch up the root object load method inline cache with the start index
    uint rootObjectLoadMethodInlineCacheStart = this->m_functionWrite->GetRootObjectLoadMethodInlineCacheStart();
    rootObjectLoadMethodInlineCacheOffsets.Map([=](size_t offset)
    {
        Assert(offset < byteCount - sizeof(int));
        unaligned uint * pnBackPatch = reinterpret_cast<unaligned uint *>(&byteBuffer[offset]);
        *pnBackPatch += rootObjectLoadMethodInlineCacheStart;
    });
    // Patch up the root object store inline cache with the start index
    uint rootObjectStoreInlineCacheStart = this->m_functionWrite->GetRootObjectStoreInlineCacheStart();
    rootObjectStoreInlineCacheOffsets.Map([=](size_t offset)
    {
        Assert(offset < byteCount - sizeof(int));
        unaligned uint * pnBackPatch = reinterpret_cast<unaligned uint *>(&byteBuffer[offset]);
        *pnBackPatch += rootObjectStoreInlineCacheStart;
    });
    //
    // Store the final trimmed byte-code on the function.
    //
    ByteBlock* finalAuxiliaryBlock = nullptr;
    ByteBlock* finalAuxiliaryContextBlock = nullptr;
    m_auxiliaryData.Copy(m_functionWrite->GetScriptContext()->GetRecycler(), &finalAuxiliaryBlock);
    m_auxContextData.Copy(m_functionWrite->GetScriptContext()->GetRecycler(), &finalAuxiliaryContextBlock);
    m_functionWrite->AllocateInlineCache();
    m_functionWrite->AllocateObjectLiteralTypeArray();
    m_functionWrite->AllocateForInCache();
    if (!PHASE_OFF(Js::ScriptFunctionWithInlineCachePhase, m_functionWrite) && !PHASE_OFF(Js::InlineApplyTargetPhase, m_functionWrite))
    {
        if (m_functionWrite->CanFunctionObjectHaveInlineCaches())
        {
            m_functionWrite->SetInlineCachesOnFunctionObject(true);
        }
    }
    // Loop headers are only needed when loop bodies may be jitted; phase
    // switches can suppress this for functions with try/finally.
    if (this->DoJitLoopBodies() && this->HasLoopWithoutYield() &&
        !(this->m_functionWrite->GetFunctionBody()->GetHasTry() && PHASE_OFF(Js::JITLoopBodyInTryCatchPhase, this->m_functionWrite)) &&
        !(this->m_functionWrite->GetFunctionBody()->GetHasFinally() && PHASE_OFF(Js::JITLoopBodyInTryFinallyPhase, this->m_functionWrite)))
    {
        AllocateLoopHeaders();
    }
    // Commit the byte code and counters to the function body.
    m_functionWrite->MarkScript(finalByteCodeBlock, finalAuxiliaryBlock, finalAuxiliaryContextBlock,
        m_byteCodeCount, m_byteCodeInLoopCount, m_byteCodeWithoutLDACount);
#if ENABLE_PROFILE_INFO
    m_functionWrite->LoadDynamicProfileInfo();
#endif
    JS_ETW(EventWriteJSCRIPT_BYTECODEGEN_METHOD(m_functionWrite->GetHostSourceContext(), m_functionWrite->GetScriptContext(), m_functionWrite->GetLocalFunctionId(), m_functionWrite->GetByteCodeCount(), this->GetTotalSize(), m_functionWrite->GetExternalDisplayName()));
#ifdef LOG_BYTECODE_AST_RATIO
    // log the bytecode AST ratio
    if (currentAstSize == maxAstSize)
    {
        float astBytecodeRatio = (float)currentAstSize / (float)byteCount;
        Output::Print(_u("\tAST Bytecode ratio: %f\n"), astBytecodeRatio);
    }
#endif
    // TODO: add validation for source mapping under #dbg
    //
    // Reset the writer to prepare for the next user.
    //
    Reset();
}
  210. void ByteCodeWriter::AllocateLoopHeaders()
  211. {
  212. m_functionWrite->AllocateLoopHeaders();
  213. m_loopHeaders->Map([this](int index, ByteCodeWriter::LoopHeaderData& data)
  214. {
  215. LoopHeader *loopHeader = m_functionWrite->GetLoopHeader(index);
  216. loopHeader->startOffset = data.startOffset;
  217. loopHeader->endOffset = data.endOffset;
  218. loopHeader->isNested = data.isNested;
  219. loopHeader->hasYield = data.hasYield;
  220. });
  221. }
  222. ///----------------------------------------------------------------------------
  223. ///
  224. /// Reset() discards any current byte-code and resets to a known "empty" state:
  225. /// - This method may be called at any time between Create() and Dispose().
  226. ///
  227. ///----------------------------------------------------------------------------
// Discards any in-progress byte code and returns the writer to the empty
// state, keeping the arena-backed structures allocated for reuse.
void ByteCodeWriter::Reset()
{
    DebugOnly(isInUse = false);
    Assert(isInitialized);
    m_byteCodeData.Reset();
    m_auxiliaryData.Reset();
    m_auxContextData.Reset();
#ifdef BYTECODE_BRANCH_ISLAND
    lastOpcode = Js::OpCode::FunctionEntry;
    this->UpdateNextBranchIslandOffset(0, 0);
    m_longJumpOffsets->Clear();
#endif
    m_labelOffsets->Clear();
    m_jumpOffsets->Clear();
    m_loopHeaders->Clear();
    // The SList-style offset lists need the allocator to release their nodes.
    rootObjectLoadInlineCacheOffsets.Clear(m_labelOffsets->GetAllocator());
    rootObjectStoreInlineCacheOffsets.Clear(m_labelOffsets->GetAllocator());
    rootObjectLoadMethodInlineCacheOffsets.Clear(m_labelOffsets->GetAllocator());
    // Keep the map's storage but drop its entries.
    callRegToLdFldCacheIndexMap->ResetNoDelete();
    m_pMatchingNode = nullptr;
    m_matchingNodeRefCount = 0;
    m_functionWrite = nullptr;
    m_byteCodeCount = 0;
    m_byteCodeWithoutLDACount = 0;
    m_byteCodeInLoopCount = 0;
    m_loopNest = 0;
    m_currentDebuggerScope = nullptr;
}
// Validates a register id and maps it into the function's final register
// slot numbering; all emitted operands go through this.
inline Js::RegSlot ByteCodeWriter::ConsumeReg(Js::RegSlot reg)
{
    CheckReg(reg);
    Assert(this->m_functionWrite);
    return this->m_functionWrite->MapRegSlot(reg);
}
// Debug check: a function must be attached via Begin() before writing.
void ByteCodeWriter::CheckOpen()
{
    AssertMsg(m_functionWrite != nullptr, "Must Begin() a function to write byte-code into");
}
// Debug check: the opcode is a writable byte-code op and its declared
// layout matches the emit helper being used.
inline void ByteCodeWriter::CheckOp(OpCode op, OpLayoutType layoutType)
{
    AssertMsg(OpCodeUtil::IsValidByteCodeOpcode(op), "Ensure valid OpCode");
#if ENABLE_NATIVE_CODEGEN
    AssertMsg(!OpCodeAttr::BackEndOnly(op), "Can't write back end only OpCode");
#endif
    AssertMsg(OpCodeUtil::GetOpCodeLayout(op) == layoutType, "Ensure correct layout for OpCode");
    AssertMsg(!CONFIG_FLAG(LdChakraLib) || !OpCodeAttr::LoadRoot(op), "JsBuiltIn code shouldn't touch the global");
}
// Debug check: the label id was previously created (it indexes m_labelOffsets).
// NOTE(review): if ByteCodeLabel is an unsigned type, the `labelID >= 0`
// half of this assert is always true — verify the typedef.
void ByteCodeWriter::CheckLabel(ByteCodeLabel labelID)
{
    AssertMsg(labelID >= 0 && labelID < m_labelOffsets->Count(),
        "Label must be previously defined before being marked in the byte-code");
}
  280. inline void ByteCodeWriter::CheckReg(RegSlot registerID)
  281. {
  282. AssertMsg(registerID != Js::Constants::NoRegister, "bad register");
  283. if (registerID == Js::Constants::NoRegister)
  284. Js::Throw::InternalError();
  285. }
// Emits an opcode with no operands (Empty layout).
void ByteCodeWriter::Empty(OpCode op)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Empty);
    m_byteCodeData.Encode(op, this);
}
// Tries to encode the given layout with the small, then medium, then large
// size policy, using the first whose operand fields can hold the values.
// The large policy must always succeed.
// NOTE(review): `success` is only consumed by Assert; in builds where Assert
// compiles away this may warn as unused — confirm build flags tolerate it.
#define MULTISIZE_LAYOUT_WRITE(layout, ...) \
    if (!TryWrite##layout<SmallLayoutSizePolicy>(__VA_ARGS__) && !TryWrite##layout<MediumLayoutSizePolicy>(__VA_ARGS__)) \
    { \
        bool success = TryWrite##layout<LargeLayoutSizePolicy>(__VA_ARGS__); \
        Assert(success); \
    }
  298. template <typename SizePolicy>
  299. bool ByteCodeWriter::TryWriteReg1(OpCode op, RegSlot R0)
  300. {
  301. OpLayoutT_Reg1<SizePolicy> layout;
  302. if (SizePolicy::Assign(layout.R0, R0))
  303. {
  304. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  305. return true;
  306. }
  307. return false;
  308. }
  309. void ByteCodeWriter::Reg1(OpCode op, RegSlot R0)
  310. {
  311. CheckOpen();
  312. CheckOp(op, OpLayoutType::Reg1);
  313. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  314. R0 = ConsumeReg(R0);
  315. MULTISIZE_LAYOUT_WRITE(Reg1, op, R0);
  316. }
  317. template <typename SizePolicy>
  318. bool ByteCodeWriter::TryWriteReg2(OpCode op, RegSlot R0, RegSlot R1)
  319. {
  320. OpLayoutT_Reg2<SizePolicy> layout;
  321. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1))
  322. {
  323. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  324. return true;
  325. }
  326. return false;
  327. }
// Emits a two-register instruction, converting BeginSwitch to its profiled
// form (and appending a profile id) when switch-opt profiling is enabled.
void ByteCodeWriter::Reg2(OpCode op, RegSlot R0, RegSlot R1)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Reg2);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    R0 = ConsumeReg(R0);
    R1 = ConsumeReg(R1);
    bool isProfiled = false;
    // NOTE(review): isProfiled2 is never set true in this function, so the
    // second profile id below is never emitted — looks like a placeholder
    // kept for symmetry with other emitters; confirm before removing.
    bool isProfiled2 = false;
    Js::ProfileId profileId = Js::Constants::NoProfileId;
    Js::ProfileId profileId2 = Js::Constants::NoProfileId;
    if (op == Js::OpCode::BeginSwitch && DoDynamicProfileOpcode(SwitchOptPhase) &&
        this->m_functionWrite->AllocProfiledSwitch(&profileId))
    {
        OpCodeUtil::ConvertNonCallOpToProfiled(op);
        isProfiled = true;
    }
    // No Reg2 opcodes participate in these profiling categories.
    Assert(DoProfileNewScObjArrayOp(op) == false);
    Assert(DoProfileNewScObjectOp(op) == false);
    MULTISIZE_LAYOUT_WRITE(Reg2, op, R0, R1);
    if (isProfiled)
    {
        // Profile ids are appended immediately after the instruction.
        m_byteCodeData.Encode(&profileId, sizeof(Js::ProfileId));
        if (isProfiled2)
        {
            m_byteCodeData.Encode(&profileId2, sizeof(Js::ProfileId));
        }
    }
}
  357. template <typename SizePolicy>
  358. bool ByteCodeWriter::TryWriteReg3(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2)
  359. {
  360. OpLayoutT_Reg3<SizePolicy> layout;
  361. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.R2, R2))
  362. {
  363. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  364. return true;
  365. }
  366. return false;
  367. }
// Emits a three-register instruction, converting Div/Rem (float-type-spec
// profiling) or IsIn (element-id profiling) to the profiled form and
// appending the allocated profile id when applicable.
void ByteCodeWriter::Reg3(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Reg3);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    R0 = ConsumeReg(R0);
    R1 = ConsumeReg(R1);
    R2 = ConsumeReg(R2);
    ProfileId profileId = 0;
    bool isProfiled = false;
    if ((DoDynamicProfileOpcode(FloatTypeSpecPhase) && (op == Js::OpCode::Div_A || op == Js::OpCode::Rem_A)) &&
        this->m_functionWrite->AllocProfiledDivOrRem(&profileId))
    {
        isProfiled = true;
        OpCodeUtil::ConvertNonCallOpToProfiled(op);
    }
    else if (op == Js::OpCode::IsIn && this->m_functionWrite->AllocProfiledLdElemId(&profileId))
    {
        isProfiled = true;
        OpCodeUtil::ConvertNonCallOpToProfiled(op);
    }
    MULTISIZE_LAYOUT_WRITE(Reg3, op, R0, R1, R2);
    if (isProfiled)
    {
        // Profile id is appended immediately after the instruction.
        m_byteCodeData.Encode(&profileId, sizeof(Js::ProfileId));
    }
}
  395. template <typename SizePolicy>
  396. bool ByteCodeWriter::TryWriteReg3C(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, CacheId cacheId)
  397. {
  398. OpLayoutT_Reg3C<SizePolicy> layout;
  399. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.R2, R2)
  400. && SizePolicy::Assign(layout.inlineCacheIndex, cacheId))
  401. {
  402. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  403. return true;
  404. }
  405. return false;
  406. }
  407. void ByteCodeWriter::Reg3C(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, uint cacheId)
  408. {
  409. CheckOpen();
  410. CheckOp(op, OpLayoutType::Reg3C);
  411. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  412. R0 = ConsumeReg(R0);
  413. R1 = ConsumeReg(R1);
  414. R2 = ConsumeReg(R2);
  415. MULTISIZE_LAYOUT_WRITE(Reg3C, op, R0, R1, R2, cacheId);
  416. }
  417. template <typename SizePolicy>
  418. bool ByteCodeWriter::TryWriteReg2U(OpCode op, RegSlot R0, RegSlot R1, uint index)
  419. {
  420. OpLayoutT_Reg2U<SizePolicy> layout;
  421. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.SlotIndex, index))
  422. {
  423. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  424. return true;
  425. }
  426. return false;
  427. }
  428. template <typename SizePolicy>
  429. bool ByteCodeWriter::TryWriteReg4(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, RegSlot R3)
  430. {
  431. OpLayoutT_Reg4<SizePolicy> layout;
  432. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.R2, R2)
  433. && SizePolicy::Assign(layout.R3, R3))
  434. {
  435. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  436. return true;
  437. }
  438. return false;
  439. }
// Emits a Reg4 instruction: four register operands, no immediates.
// Registers are remapped through ConsumeReg before encoding.
void ByteCodeWriter::Reg4(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, RegSlot R3)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Reg4);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    R0 = ConsumeReg(R0);
    R1 = ConsumeReg(R1);
    R2 = ConsumeReg(R2);
    R3 = ConsumeReg(R3);
    // Picks the smallest size policy whose fields can hold all operands.
    MULTISIZE_LAYOUT_WRITE(Reg4, op, R0, R1, R2, R3);
}
  451. template <typename SizePolicy>
  452. bool ByteCodeWriter::TryWriteReg4U(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, RegSlot R3, uint index)
  453. {
  454. OpLayoutT_Reg4U<SizePolicy> layout;
  455. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.R2, R2)
  456. && SizePolicy::Assign(layout.R3, R3) && SizePolicy::Assign(layout.SlotIndex, index))
  457. {
  458. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  459. return true;
  460. }
  461. return false;
  462. }
// Emits a Reg4U instruction: four register operands plus an unsigned slot index.
// Registers are remapped through ConsumeReg before encoding.
void ByteCodeWriter::Reg4U(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, RegSlot R3, uint slotIndex)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Reg4U);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    R0 = ConsumeReg(R0);
    R1 = ConsumeReg(R1);
    R2 = ConsumeReg(R2);
    R3 = ConsumeReg(R3);
    // Picks the smallest size policy whose fields can hold all operands.
    MULTISIZE_LAYOUT_WRITE(Reg4U, op, R0, R1, R2, R3, slotIndex);
}
  474. template <typename SizePolicy>
  475. bool ByteCodeWriter::TryWriteReg5U(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, RegSlot R3, RegSlot R4, uint index)
  476. {
  477. OpLayoutT_Reg5U<SizePolicy> layout;
  478. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.R2, R2)
  479. && SizePolicy::Assign(layout.R3, R3) && SizePolicy::Assign(layout.R4, R4) && SizePolicy::Assign(layout.SlotIndex, index))
  480. {
  481. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  482. return true;
  483. }
  484. return false;
  485. }
// Emits a Reg5U instruction: five register operands plus an unsigned slot index.
// Registers are remapped through ConsumeReg before encoding.
void ByteCodeWriter::Reg5U(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, RegSlot R3, RegSlot R4, uint slotIndex)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Reg5U);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    R0 = ConsumeReg(R0);
    R1 = ConsumeReg(R1);
    R2 = ConsumeReg(R2);
    R3 = ConsumeReg(R3);
    R4 = ConsumeReg(R4);
    // Picks the smallest size policy whose fields can hold all operands.
    MULTISIZE_LAYOUT_WRITE(Reg5U, op, R0, R1, R2, R3, R4, slotIndex);
}
  498. template <typename SizePolicy>
  499. bool ByteCodeWriter::TryWriteReg2B1(OpCode op, RegSlot R0, RegSlot R1, uint8 B2)
  500. {
  501. OpLayoutT_Reg2B1<SizePolicy> layout;
  502. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.B2, B2))
  503. {
  504. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  505. return true;
  506. }
  507. return false;
  508. }
// Emits a Reg2B1 instruction: two register operands plus one byte immediate.
// Registers are remapped through ConsumeReg before encoding.
void ByteCodeWriter::Reg2B1(OpCode op, RegSlot R0, RegSlot R1, uint8 B2)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Reg2B1);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    R0 = ConsumeReg(R0);
    R1 = ConsumeReg(R1);
    // Picks the smallest size policy whose fields can hold all operands.
    MULTISIZE_LAYOUT_WRITE(Reg2B1, op, R0, R1, B2);
}
  518. template <typename SizePolicy>
  519. bool ByteCodeWriter::TryWriteReg3B1(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, uint8 B3)
  520. {
  521. OpLayoutT_Reg3B1<SizePolicy> layout;
  522. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.R2, R2)
  523. && SizePolicy::Assign(layout.B3, B3))
  524. {
  525. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  526. return true;
  527. }
  528. return false;
  529. }
// Emits a Reg3B1 instruction: three register operands plus one byte immediate.
// Registers are remapped through ConsumeReg before encoding.
void ByteCodeWriter::Reg3B1(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, uint8 B3)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Reg3B1);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    R0 = ConsumeReg(R0);
    R1 = ConsumeReg(R1);
    R2 = ConsumeReg(R2);
    // Picks the smallest size policy whose fields can hold all operands.
    MULTISIZE_LAYOUT_WRITE(Reg3B1, op, R0, R1, R2, B3);
}
  540. template <typename SizePolicy>
  541. bool ByteCodeWriter::TryWriteReg5(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, RegSlot R3, RegSlot R4)
  542. {
  543. OpLayoutT_Reg5<SizePolicy> layout;
  544. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.R2, R2)
  545. && SizePolicy::Assign(layout.R3, R3) && SizePolicy::Assign(layout.R4, R4))
  546. {
  547. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  548. return true;
  549. }
  550. return false;
  551. }
// Emits a Reg5 instruction: five register operands, no immediates.
// Registers are remapped through ConsumeReg before encoding.
void ByteCodeWriter::Reg5(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, RegSlot R3, RegSlot R4)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Reg5);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    R0 = ConsumeReg(R0);
    R1 = ConsumeReg(R1);
    R2 = ConsumeReg(R2);
    R3 = ConsumeReg(R3);
    R4 = ConsumeReg(R4);
    // Picks the smallest size policy whose fields can hold all operands.
    MULTISIZE_LAYOUT_WRITE(Reg5, op, R0, R1, R2, R3, R4);
}
  564. template <typename SizePolicy>
  565. bool ByteCodeWriter::TryWriteUnsigned1(OpCode op, uint C1)
  566. {
  567. OpLayoutT_Unsigned1<SizePolicy> layout;
  568. if (SizePolicy::Assign(layout.C1, C1))
  569. {
  570. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  571. return true;
  572. }
  573. return false;
  574. }
// Emits an Unsigned1 instruction: a single unsigned immediate operand.
void ByteCodeWriter::Unsigned1(OpCode op, uint C1)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Unsigned1);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    // Picks the smallest size policy whose field can hold the constant.
    MULTISIZE_LAYOUT_WRITE(Unsigned1, op, C1);
}
// Emits an ArgIn0 instruction that reads implicit argument slot 0 into 'reg'.
// The function being written must declare at least one in-param.
void ByteCodeWriter::ArgIn0(RegSlot reg)
{
    AssertMsg(0 < m_functionWrite->GetInParamsCount(),
        "Ensure source arg was declared in prologue");
    Reg1(OpCode::ArgIn0, reg);
}
// Explicit instantiations of ArgOut for both flavors (isVar == true emits
// ArgOut_A, isVar == false emits ArgOut_ANonVar) so the template definition
// can stay in this translation unit.
template void ByteCodeWriter::ArgOut<true>(ArgSlot arg, RegSlot reg, ProfileId callSiteId, bool emitProfiledArgout);
template void ByteCodeWriter::ArgOut<false>(ArgSlot arg, RegSlot reg, ProfileId callSiteId, bool emitProfiledArgout);
  590. template <typename SizePolicy>
  591. bool ByteCodeWriter::TryWriteArg(OpCode op, ArgSlot arg, RegSlot reg)
  592. {
  593. OpLayoutT_Arg<SizePolicy> layout;
  594. if (SizePolicy::Assign(layout.Arg, arg) && SizePolicy::Assign(layout.Reg, reg))
  595. {
  596. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  597. return true;
  598. }
  599. return false;
  600. }
  601. template <bool isVar>
  602. void ByteCodeWriter::ArgOut(ArgSlot arg, RegSlot reg, ProfileId callSiteId, bool emitProfiledArgout)
  603. {
  604. CheckOpen();
  605. Assert(OpCodeAttr::HasMultiSizeLayout(OpCode::ArgOut_A) && OpCodeAttr::HasMultiSizeLayout(OpCode::ArgOut_ANonVar));
  606. // Note: don't "consume" the arg slot, as the passed-in value is the final one.
  607. reg = ConsumeReg(reg);
  608. OpCode op;
  609. if (isVar)
  610. {
  611. op = OpCode::ArgOut_A;
  612. }
  613. else
  614. {
  615. op = OpCode::ArgOut_ANonVar;
  616. MULTISIZE_LAYOUT_WRITE(Arg, op, arg, reg);
  617. return;
  618. }
  619. if (emitProfiledArgout
  620. && DoDynamicProfileOpcode(InlinePhase)
  621. && arg > 0 && arg < Js::Constants::MaximumArgumentCountForConstantArgumentInlining
  622. && reg > FunctionBody::FirstRegSlot
  623. && callSiteId != Js::Constants::NoProfileId
  624. )
  625. {
  626. MULTISIZE_LAYOUT_WRITE(Arg, Js::OpCode::ProfiledArgOut_A, arg, reg);
  627. m_byteCodeData.Encode(&callSiteId, sizeof(Js::ProfileId));
  628. }
  629. else
  630. {
  631. MULTISIZE_LAYOUT_WRITE(Arg, op, arg, reg);
  632. return;
  633. }
  634. }
  635. template <typename SizePolicy>
  636. bool ByteCodeWriter::TryWriteArgNoSrc(OpCode op, ArgSlot arg)
  637. {
  638. OpLayoutT_ArgNoSrc<SizePolicy> layout;
  639. if (SizePolicy::Assign(layout.Arg, arg))
  640. {
  641. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  642. return true;
  643. }
  644. return false;
  645. }
// Emits ArgOut_Env: an out-argument with no source register (the environment
// is supplied implicitly at dispatch time).
void ByteCodeWriter::ArgOutEnv(ArgSlot arg)
{
    CheckOpen();
    Assert(OpCodeAttr::HasMultiSizeLayout(OpCode::ArgOut_Env));
    MULTISIZE_LAYOUT_WRITE(ArgNoSrc, OpCode::ArgOut_Env, arg);
}
// Emits an unconditional branch to the given label.
void ByteCodeWriter::Br(ByteCodeLabel labelID)
{
    Br(OpCode::Br, labelID);
}
// For switch case - default branching
// Emits a Br-layout branch with the given opcode. The RelativeJumpOffset is
// initially written as the distance from that field to the end of the
// instruction; AddJumpOffset records it so the real target is back-patched
// when the label's offset is known.
void ByteCodeWriter::Br(OpCode op, ByteCodeLabel labelID)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Br);
    CheckLabel(labelID);
    Assert(!OpCodeAttr::HasMultiSizeLayout(op));
    size_t const offsetOfRelativeJumpOffsetFromEnd = sizeof(OpLayoutBr) - offsetof(OpLayoutBr, RelativeJumpOffset);
    OpLayoutBr data;
    data.RelativeJumpOffset = offsetOfRelativeJumpOffsetFromEnd;
    m_byteCodeData.Encode(op, &data, sizeof(data), this);
    AddJumpOffset(op, labelID, offsetOfRelativeJumpOffsetFromEnd);
}
// Emits a BrS-layout branch: a branch carrying one byte payload 'val'.
// The jump offset is recorded for back-patching via AddJumpOffset.
void ByteCodeWriter::BrS(OpCode op, ByteCodeLabel labelID, byte val)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::BrS);
    CheckLabel(labelID);
    Assert(!OpCodeAttr::HasMultiSizeLayout(op));
    size_t const offsetOfRelativeJumpOffsetFromEnd = sizeof(OpLayoutBrS) - offsetof(OpLayoutBrS, RelativeJumpOffset);
    OpLayoutBrS data;
    data.RelativeJumpOffset = offsetOfRelativeJumpOffsetFromEnd;
    data.val = val;
    m_byteCodeData.Encode(op, &data, sizeof(data), this);
    AddJumpOffset(op, labelID, offsetOfRelativeJumpOffsetFromEnd);
}
  682. template <typename SizePolicy>
  683. bool ByteCodeWriter::TryWriteBrReg1(OpCode op, ByteCodeLabel labelID, RegSlot R1)
  684. {
  685. OpLayoutT_BrReg1<SizePolicy> layout;
  686. if (SizePolicy::Assign(layout.R1, R1))
  687. {
  688. size_t const offsetOfRelativeJumpOffsetFromEnd = sizeof(OpLayoutT_BrReg1<SizePolicy>) - offsetof(OpLayoutT_BrReg1<SizePolicy>, RelativeJumpOffset);
  689. layout.RelativeJumpOffset = offsetOfRelativeJumpOffsetFromEnd;
  690. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  691. AddJumpOffset(op, labelID, offsetOfRelativeJumpOffsetFromEnd);
  692. return true;
  693. }
  694. return false;
  695. }
// Emits a BrReg1 branch: a conditional branch testing one register.
void ByteCodeWriter::BrReg1(OpCode op, ByteCodeLabel labelID, RegSlot R1)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::BrReg1);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    CheckLabel(labelID);
    R1 = ConsumeReg(R1);
    MULTISIZE_LAYOUT_WRITE(BrReg1, op, labelID, R1);
}
  705. template <typename SizePolicy>
  706. bool ByteCodeWriter::TryWriteBrReg1Unsigned1(OpCode op, ByteCodeLabel labelID, RegSlot R1, uint C2)
  707. {
  708. OpLayoutT_BrReg1Unsigned1<SizePolicy> layout;
  709. if (SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.C2, C2))
  710. {
  711. size_t const offsetOfRelativeJumpOffsetFromEnd = sizeof(OpLayoutT_BrReg2<SizePolicy>) - offsetof(OpLayoutT_BrReg2<SizePolicy>, RelativeJumpOffset);
  712. layout.RelativeJumpOffset = offsetOfRelativeJumpOffsetFromEnd;
  713. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  714. AddJumpOffset(op, labelID, offsetOfRelativeJumpOffsetFromEnd);
  715. return true;
  716. }
  717. return false;
  718. }
// Emits a BrReg1Unsigned1 branch: a conditional branch testing one register
// against an unsigned constant.
void ByteCodeWriter::BrReg1Unsigned1(OpCode op, ByteCodeLabel labelID, RegSlot R1, uint C2)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::BrReg1Unsigned1);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    CheckLabel(labelID);
    R1 = ConsumeReg(R1);
    MULTISIZE_LAYOUT_WRITE(BrReg1Unsigned1, op, labelID, R1, C2);
}
  728. template <typename SizePolicy>
  729. bool ByteCodeWriter::TryWriteBrReg2(OpCode op, ByteCodeLabel labelID, RegSlot R1, RegSlot R2)
  730. {
  731. OpLayoutT_BrReg2<SizePolicy> layout;
  732. if (SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.R2, R2))
  733. {
  734. size_t const offsetOfRelativeJumpOffsetFromEnd = sizeof(OpLayoutT_BrReg2<SizePolicy>) - offsetof(OpLayoutT_BrReg2<SizePolicy>, RelativeJumpOffset);
  735. layout.RelativeJumpOffset = offsetOfRelativeJumpOffsetFromEnd;
  736. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  737. AddJumpOffset(op, labelID, offsetOfRelativeJumpOffsetFromEnd);
  738. return true;
  739. }
  740. return false;
  741. }
// Emits a BrReg2 branch: a conditional branch comparing two registers.
void ByteCodeWriter::BrReg2(OpCode op, ByteCodeLabel labelID, RegSlot R1, RegSlot R2)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::BrReg2);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    CheckLabel(labelID);
    R1 = ConsumeReg(R1);
    R2 = ConsumeReg(R2);
    MULTISIZE_LAYOUT_WRITE(BrReg2, op, labelID, R1, R2);
}
// Emits a BrProperty branch: branches based on a property of 'instance'
// identified by a property-id index. Fixed-size layout (no size policy);
// the jump offset is recorded for back-patching.
void ByteCodeWriter::BrProperty(OpCode op, ByteCodeLabel labelID, RegSlot instance, PropertyIdIndexType index)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::BrProperty);
    Assert(!OpCodeAttr::HasMultiSizeLayout(op));
    CheckLabel(labelID);
    instance = ConsumeReg(instance);
    size_t const offsetOfRelativeJumpOffsetFromEnd = sizeof(OpLayoutBrProperty) - offsetof(OpLayoutBrProperty, RelativeJumpOffset);
    OpLayoutBrProperty data;
    data.RelativeJumpOffset = offsetOfRelativeJumpOffsetFromEnd;
    data.Instance = instance;
    data.PropertyIdIndex = index;
    m_byteCodeData.Encode(op, &data, sizeof(data), this);
    AddJumpOffset(op, labelID, offsetOfRelativeJumpOffsetFromEnd);
}
// Emits a BrLocalProperty branch: like BrProperty but with no explicit
// instance register (operates on the local scope). Fixed-size layout;
// the jump offset is recorded for back-patching.
void ByteCodeWriter::BrLocalProperty(OpCode op, ByteCodeLabel labelID, PropertyIdIndexType index)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::BrLocalProperty);
    Assert(!OpCodeAttr::HasMultiSizeLayout(op));
    CheckLabel(labelID);
    size_t const offsetOfRelativeJumpOffsetFromEnd = sizeof(OpLayoutBrLocalProperty) - offsetof(OpLayoutBrLocalProperty, RelativeJumpOffset);
    OpLayoutBrLocalProperty data;
    data.RelativeJumpOffset = offsetOfRelativeJumpOffsetFromEnd;
    data.PropertyIdIndex = index;
    m_byteCodeData.Encode(op, &data, sizeof(data), this);
    AddJumpOffset(op, labelID, offsetOfRelativeJumpOffsetFromEnd);
}
// Emits a BrEnvProperty branch: like BrLocalProperty but addressing a slot
// in the environment via 'slotIndex'. Fixed-size layout; the jump offset is
// recorded for back-patching.
void ByteCodeWriter::BrEnvProperty(OpCode op, ByteCodeLabel labelID, PropertyIdIndexType index, int32 slotIndex)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::BrEnvProperty);
    Assert(!OpCodeAttr::HasMultiSizeLayout(op));
    CheckLabel(labelID);
    size_t const offsetOfRelativeJumpOffsetFromEnd = sizeof(OpLayoutBrEnvProperty) - offsetof(OpLayoutBrEnvProperty, RelativeJumpOffset);
    OpLayoutBrEnvProperty data;
    data.RelativeJumpOffset = offsetOfRelativeJumpOffsetFromEnd;
    data.SlotIndex = slotIndex;
    data.PropertyIdIndex = index;
    m_byteCodeData.Encode(op, &data, sizeof(data), this);
    AddJumpOffset(op, labelID, offsetOfRelativeJumpOffsetFromEnd);
}
// Decides whether the profiled variant of an opcode should be emitted for the
// given phase. Always false when profile info is compiled out or disabled for
// this function. For the inline phase, applies a loop heuristic unless
// 'noHeuristics' is set; all other phases profile unconditionally once
// profile info is enabled.
bool ByteCodeWriter::DoDynamicProfileOpcode(Phase tag, bool noHeuristics) const
{
#if ENABLE_PROFILE_INFO
    if (!DynamicProfileInfo::IsEnabled(tag, this->m_functionWrite))
    {
        return false;
    }
    // Other heuristics
    switch (tag)
    {
    case Phase::InlinePhase:
        // Do profile opcode everywhere if we are an inline candidate
        // Otherwise, only in loops if the function has loop
#pragma prefast(suppress:6236, "DevDiv bug 830883. False positive when PHASE_OFF is #defined as '(false)'.")
        return PHASE_FORCE(Phase::InlinePhase, this->m_functionWrite) ||
            ((noHeuristics || !this->m_hasLoop || (this->m_loopNest != 0) ||
                !(PHASE_OFF(InlineOutsideLoopsPhase, this->m_functionWrite))));
    default:
        return true;
    }
#else
    return false;
#endif
}
  818. bool ByteCodeWriter::ShouldIncrementCallSiteId(OpCode op)
  819. {
  820. if ((DoProfileCallOp(op) && DoDynamicProfileOpcode(InlinePhase)) ||
  821. (DoProfileNewScObjArrayOp(op) && (DoDynamicProfileOpcode(NativeArrayPhase, true) || DoDynamicProfileOpcode(InlinePhase, true))) ||
  822. (DoProfileNewScObjectOp(op) && (DoDynamicProfileOpcode(InlinePhase, true) || DoDynamicProfileOpcode(FixedNewObjPhase, true))))
  823. {
  824. return true;
  825. }
  826. return false;
  827. }
// Emits a StartCall instruction announcing the argument count of an upcoming
// call, so the stack frame for out-args can be prepared.
void ByteCodeWriter::StartCall(OpCode op, ArgSlot ArgCount)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::StartCall);
    OpLayoutStartCall data;
    data.ArgCount = ArgCount;
    m_byteCodeData.Encode(op, &data, sizeof(data), this);
}
  836. template <typename SizePolicy>
  837. bool ByteCodeWriter::TryWriteCallIExtended(OpCode op, RegSlot returnValueRegister, RegSlot functionRegister, ArgSlot givenArgCount, CallIExtendedOptions options, uint32 spreadArgsOffset)
  838. {
  839. OpLayoutT_CallIExtended<SizePolicy> layout;
  840. if (SizePolicy::Assign(layout.Return, returnValueRegister) && SizePolicy::Assign(layout.Function, functionRegister)
  841. && SizePolicy::Assign(layout.ArgCount, givenArgCount) && SizePolicy::Assign(layout.Options, options)
  842. && SizePolicy::Assign(layout.SpreadAuxOffset, spreadArgsOffset))
  843. {
  844. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  845. return true;
  846. }
  847. return false;
  848. }
// Attempts to emit a CallIExtendedWithICIndex-layout instruction under the
// given size policy; returns false if any operand does not fit. When the
// inline cache belongs to the root object, records the byte offset of the
// encoded inlineCacheIndex field so it can be located later.
template <typename SizePolicy>
bool ByteCodeWriter::TryWriteCallIExtendedWithICIndex(OpCode op, RegSlot returnValueRegister, RegSlot functionRegister, ArgSlot givenArgCount, InlineCacheIndex inlineCacheIndex, bool isRootLoad, CallIExtendedOptions options, uint32 spreadArgsOffset)
{
    OpLayoutT_CallIExtendedWithICIndex<SizePolicy> layout;
    if (SizePolicy::Assign(layout.Return, returnValueRegister) && SizePolicy::Assign(layout.Function, functionRegister)
        && SizePolicy::Assign(layout.ArgCount, givenArgCount) && SizePolicy::Assign(layout.inlineCacheIndex, inlineCacheIndex)
        && SizePolicy::Assign(layout.Options, options) && SizePolicy::Assign(layout.SpreadAuxOffset, spreadArgsOffset))
    {
        // 'offset' is the start of the emitted instruction (opcode included).
        size_t offset = m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
        if (isRootLoad)
        {
            Assert(m_byteCodeData.GetCurrentOffset() == offset + OpCodeUtil::EncodedSize(op, SizePolicy::LayoutEnum) + sizeof(OpLayoutT_CallIExtendedWithICIndex<SizePolicy>));
            // Byte offset of the inlineCacheIndex field inside the encoded stream.
            size_t inlineCacheOffset = offset + OpCodeUtil::EncodedSize(op, SizePolicy::LayoutEnum)
                + offsetof(OpLayoutT_CallIExtendedWithICIndex<SizePolicy>, inlineCacheIndex);
            rootObjectLoadMethodInlineCacheOffsets.Prepend(m_labelOffsets->GetAllocator(), inlineCacheOffset);
        }
        return true;
    }
    return false;
}
  869. template <typename SizePolicy>
  870. bool ByteCodeWriter::TryWriteCallIExtendedFlags(OpCode op, RegSlot returnValueRegister, RegSlot functionRegister, ArgSlot givenArgCount, CallIExtendedOptions options, uint32 spreadArgsOffset, CallFlags callFlags)
  871. {
  872. OpLayoutT_CallIExtendedFlags<SizePolicy> layout;
  873. if (SizePolicy::Assign(layout.Return, returnValueRegister) && SizePolicy::Assign(layout.Function, functionRegister)
  874. && SizePolicy::Assign(layout.ArgCount, givenArgCount) && SizePolicy::Assign(layout.Options, options)
  875. && SizePolicy::Assign(layout.SpreadAuxOffset, spreadArgsOffset) && SizePolicy::Assign(layout.callFlags, callFlags))
  876. {
  877. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  878. return true;
  879. }
  880. return false;
  881. }
// Attempts to emit a CallIExtendedFlagsWithICIndex-layout instruction under
// the given size policy; returns false if any operand does not fit. When the
// inline cache belongs to the root object, records the byte offset of the
// encoded inlineCacheIndex field so it can be located later.
template <typename SizePolicy>
bool ByteCodeWriter::TryWriteCallIExtendedFlagsWithICIndex(OpCode op, RegSlot returnValueRegister, RegSlot functionRegister, ArgSlot givenArgCount, InlineCacheIndex inlineCacheIndex, bool isRootLoad, CallIExtendedOptions options, uint32 spreadArgsOffset, CallFlags callFlags)
{
    OpLayoutT_CallIExtendedFlagsWithICIndex<SizePolicy> layout;
    if (SizePolicy::Assign(layout.Return, returnValueRegister) && SizePolicy::Assign(layout.Function, functionRegister)
        && SizePolicy::Assign(layout.ArgCount, givenArgCount) && SizePolicy::Assign(layout.inlineCacheIndex, inlineCacheIndex)
        && SizePolicy::Assign(layout.Options, options) && SizePolicy::Assign(layout.SpreadAuxOffset, spreadArgsOffset)
        && SizePolicy::Assign(layout.callFlags, callFlags))
    {
        // 'offset' is the start of the emitted instruction (opcode included).
        size_t offset = m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
        if (isRootLoad)
        {
            Assert(m_byteCodeData.GetCurrentOffset() == offset + OpCodeUtil::EncodedSize(op, SizePolicy::LayoutEnum) + sizeof(OpLayoutT_CallIExtendedFlagsWithICIndex<SizePolicy>));
            // Byte offset of the inlineCacheIndex field inside the encoded stream.
            size_t inlineCacheOffset = offset + OpCodeUtil::EncodedSize(op, SizePolicy::LayoutEnum)
                + offsetof(OpLayoutT_CallIExtendedFlagsWithICIndex<SizePolicy>, inlineCacheIndex);
            rootObjectLoadMethodInlineCacheOffsets.Prepend(m_labelOffsets->GetAllocator(), inlineCacheOffset);
        }
        return true;
    }
    return false;
}
// Emits a CallIExtended (or CallIExtendedFlags when callFlags != None) call:
// a call carrying extra data such as a spread-args descriptor stored in the
// auxiliary section. Converts the opcode to a profiled variant when dynamic
// profile heuristics allow, and appends the profile id(s) right after the
// encoded instruction.
void ByteCodeWriter::CallIExtended(OpCode op, RegSlot returnValueRegister, RegSlot functionRegister, ArgSlot givenArgCount, CallIExtendedOptions options, const void *buffer, uint byteCount, ProfileId callSiteId, CallFlags callFlags)
{
    CheckOpen();
    bool hasCallFlags = !(callFlags == CallFlags_None);
    if (hasCallFlags)
    {
        CheckOp(op, OpLayoutType::CallIExtendedFlags);
    }
    else
    {
        CheckOp(op, OpLayoutType::CallIExtended);
    }
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    // givenArgCount could be <, ==, or > than Function's "InParams" count
    if (returnValueRegister != Js::Constants::NoRegister)
    {
        returnValueRegister = ConsumeReg(returnValueRegister);
    }
    functionRegister = ConsumeReg(functionRegister);
    // CallISpread is not going to use the ldFld cache index, but still remove it from the map as we expect
    // the entry for a cache index to be removed once we have seen the corresponding call.
    CacheIdUnit unit;
    unit.cacheId = Js::Constants::NoInlineCacheIndex;
    callRegToLdFldCacheIndexMap->TryGetValueAndRemove(functionRegister, &unit);
    bool isProfiled = false, isProfiled2 = false;
    ProfileId profileId = callSiteId, profileId2 = Constants::NoProfileId;
    // NOTE(review): unlike CallI, isCallWithICIndex is never set to true here,
    // so the *WithICIndex branches below appear unreachable — confirm whether
    // that is intentional.
    bool isCallWithICIndex = false;
    if (DoProfileCallOp(op))
    {
        // Prefer inline-phase profiling keyed by the call-site id; otherwise
        // fall back to return-type profiling with a freshly allocated id.
        if (DoDynamicProfileOpcode(InlinePhase) &&
            callSiteId != Js::Constants::NoProfileId)
        {
            op = Js::OpCodeUtil::ConvertCallOpToProfiled(op);
            isProfiled = true;
        }
        else if ((DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) || DoDynamicProfileOpcode(FloatTypeSpecPhase)) &&
            this->m_functionWrite->AllocProfiledReturnTypeId(&profileId))
        {
            op = Js::OpCodeUtil::ConvertCallOpToProfiledReturnType(op);
            isProfiled = true;
        }
    }
    else if (DoProfileNewScObjArrayOp(op) &&
        (DoDynamicProfileOpcode(NativeArrayPhase, true) || DoDynamicProfileOpcode(InlinePhase, true)) &&
        callSiteId != Js::Constants::NoProfileId &&
        this->m_functionWrite->AllocProfiledArrayCallSiteId(&profileId2))
    {
        // Array construction: profiles both the call site and the array call site.
        OpCodeUtil::ConvertNonCallOpToProfiled(op);
        isProfiled = true;
        isProfiled2 = true;
    }
    else if (DoProfileNewScObjectOp(op) && (DoDynamicProfileOpcode(InlinePhase, true) || DoDynamicProfileOpcode(FixedNewObjPhase, true)) &&
        callSiteId != Js::Constants::NoProfileId)
    {
        OpCodeUtil::ConvertNonCallOpToProfiled(op);
        isProfiled = true;
    }
    uint spreadArgsOffset = 0;
    if (options & CallIExtended_SpreadArgs)
    {
        // Spread descriptors live in the auxiliary data section; the layout
        // stores only the offset.
        Assert(buffer != nullptr && byteCount > 0);
        spreadArgsOffset = InsertAuxiliaryData(buffer, byteCount);
    }
    if (isCallWithICIndex)
    {
        if (hasCallFlags == true)
        {
            MULTISIZE_LAYOUT_WRITE(CallIExtendedFlagsWithICIndex, op, returnValueRegister, functionRegister, givenArgCount, unit.cacheId, unit.isRootObjectCache, options, spreadArgsOffset, callFlags);
        }
        else
        {
            MULTISIZE_LAYOUT_WRITE(CallIExtendedWithICIndex, op, returnValueRegister, functionRegister, givenArgCount, unit.cacheId, unit.isRootObjectCache, options, spreadArgsOffset);
        }
    }
    else
    {
        if (hasCallFlags == true)
        {
            MULTISIZE_LAYOUT_WRITE(CallIExtendedFlags, op, returnValueRegister, functionRegister, givenArgCount, options, spreadArgsOffset, callFlags);
        }
        else
        {
            MULTISIZE_LAYOUT_WRITE(CallIExtended, op, returnValueRegister, functionRegister, givenArgCount, options, spreadArgsOffset);
        }
    }
    // Profile ids trail the instruction in the byte stream.
    if (isProfiled)
    {
        m_byteCodeData.Encode(&profileId, sizeof(Js::ProfileId));
        if (isProfiled2)
        {
            m_byteCodeData.Encode(&profileId2, sizeof(Js::ProfileId));
        }
    }
}
// Attempts to emit a CallIWithICIndex-layout instruction under the given size
// policy; returns false if any operand does not fit. When the inline cache
// belongs to the root object, records the byte offset of the encoded
// inlineCacheIndex field so it can be located later.
template <typename SizePolicy>
bool ByteCodeWriter::TryWriteCallIWithICIndex(OpCode op, RegSlot returnValueRegister, RegSlot functionRegister, ArgSlot givenArgCount, InlineCacheIndex inlineCacheIndex, bool isRootLoad)
{
    OpLayoutT_CallIWithICIndex<SizePolicy> layout;
    if (SizePolicy::Assign(layout.Return, returnValueRegister) && SizePolicy::Assign(layout.Function, functionRegister)
        && SizePolicy::Assign(layout.ArgCount, givenArgCount) && SizePolicy::Assign(layout.inlineCacheIndex, inlineCacheIndex))
    {
        // 'offset' is the start of the emitted instruction (opcode included).
        size_t offset = m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
        if (isRootLoad)
        {
            Assert(m_byteCodeData.GetCurrentOffset() == offset + OpCodeUtil::EncodedSize(op, SizePolicy::LayoutEnum) + sizeof(OpLayoutT_CallIWithICIndex<SizePolicy>));
            // Byte offset of the inlineCacheIndex field inside the encoded stream.
            size_t inlineCacheOffset = offset + OpCodeUtil::EncodedSize(op, SizePolicy::LayoutEnum)
                + offsetof(OpLayoutT_CallIWithICIndex<SizePolicy>, inlineCacheIndex);
            rootObjectLoadMethodInlineCacheOffsets.Prepend(m_labelOffsets->GetAllocator(), inlineCacheOffset);
        }
        return true;
    }
    return false;
}
// Attempts to emit a CallIFlagsWithICIndex-layout instruction under the given
// size policy; returns false if any operand does not fit. When the inline
// cache belongs to the root object, records the byte offset of the encoded
// inlineCacheIndex field so it can be located later.
template <typename SizePolicy>
bool ByteCodeWriter::TryWriteCallIFlagsWithICIndex(OpCode op, RegSlot returnValueRegister, RegSlot functionRegister, ArgSlot givenArgCount, InlineCacheIndex inlineCacheIndex, bool isRootLoad, CallFlags callFlags)
{
    OpLayoutT_CallIFlagsWithICIndex<SizePolicy> layout;
    if (SizePolicy::Assign(layout.Return, returnValueRegister) && SizePolicy::Assign(layout.Function, functionRegister)
        && SizePolicy::Assign(layout.ArgCount, givenArgCount) && SizePolicy::Assign(layout.inlineCacheIndex, inlineCacheIndex)
        && SizePolicy::Assign(layout.callFlags, callFlags))
    {
        // 'offset' is the start of the emitted instruction (opcode included).
        size_t offset = m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
        if (isRootLoad)
        {
            Assert(m_byteCodeData.GetCurrentOffset() == offset + OpCodeUtil::EncodedSize(op, SizePolicy::LayoutEnum) + sizeof(OpLayoutT_CallIFlagsWithICIndex<SizePolicy>));
            // Byte offset of the inlineCacheIndex field inside the encoded stream.
            size_t inlineCacheOffset = offset + OpCodeUtil::EncodedSize(op, SizePolicy::LayoutEnum)
                + offsetof(OpLayoutT_CallIFlagsWithICIndex<SizePolicy>, inlineCacheIndex);
            rootObjectLoadMethodInlineCacheOffsets.Prepend(m_labelOffsets->GetAllocator(), inlineCacheOffset);
        }
        return true;
    }
    return false;
}
  1036. template <typename SizePolicy>
  1037. bool ByteCodeWriter::TryWriteCallI(OpCode op, RegSlot returnValueRegister, RegSlot functionRegister, ArgSlot givenArgCount)
  1038. {
  1039. OpLayoutT_CallI<SizePolicy> layout;
  1040. if (SizePolicy::Assign(layout.Return, returnValueRegister) && SizePolicy::Assign(layout.Function, functionRegister)
  1041. && SizePolicy::Assign(layout.ArgCount, givenArgCount))
  1042. {
  1043. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1044. return true;
  1045. }
  1046. return false;
  1047. }
  1048. template <typename SizePolicy>
  1049. bool ByteCodeWriter::TryWriteCallIFlags(OpCode op, RegSlot returnValueRegister, RegSlot functionRegister, ArgSlot givenArgCount, CallFlags callFlags)
  1050. {
  1051. OpLayoutT_CallIFlags<SizePolicy> layout;
  1052. if (SizePolicy::Assign(layout.Return, returnValueRegister) && SizePolicy::Assign(layout.Function, functionRegister)
  1053. && SizePolicy::Assign(layout.ArgCount, givenArgCount) && SizePolicy::Assign(layout.callFlags, callFlags))
  1054. {
  1055. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1056. return true;
  1057. }
  1058. return false;
  1059. }
// Drops any pending ldFld cache-index entry recorded for 'regSlot' from the
// call-register-to-cache-index map (result of the removal is ignored).
void ByteCodeWriter::RemoveEntryForRegSlotFromCacheIdMap(RegSlot regSlot)
{
    regSlot = ConsumeReg(regSlot);
    CacheIdUnit unit;
    unit.cacheId = Js::Constants::NoInlineCacheIndex;
    callRegToLdFldCacheIndexMap->TryGetValueAndRemove(regSlot, &unit);
}
// Emits a CallI (or CallIFlags when callFlags != None) call instruction.
// Converts the opcode to a profiled variant when dynamic profile heuristics
// allow; when a pending ldFld inline-cache index exists for the callee
// register, switches to the *WithICIndex form that carries that index.
// Profile id(s) are appended right after the encoded instruction.
void ByteCodeWriter::CallI(OpCode op, RegSlot returnValueRegister, RegSlot functionRegister, ArgSlot givenArgCount, ProfileId callSiteId, CallFlags callFlags)
{
    CheckOpen();
    bool hasCallFlags = !(callFlags == CallFlags_None);
    if (hasCallFlags == true)
    {
        CheckOp(op, OpLayoutType::CallIFlags);
    }
    else
    {
        CheckOp(op, OpLayoutType::CallI);
    }
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    // givenArgCount could be <, ==, or > than Function's "InParams" count
    if (returnValueRegister != Js::Constants::NoRegister)
    {
        returnValueRegister = ConsumeReg(returnValueRegister);
    }
    functionRegister = ConsumeReg(functionRegister);
    bool isProfiled = false;
    bool isProfiled2 = false;
    bool isCallWithICIndex = false;
    ProfileId profileId = callSiteId;
    ProfileId profileId2 = Constants::NoProfileId;
    // Consume (and remove) any ldFld cache index recorded for the callee
    // register; a valid cacheId upgrades the call to a *WithICIndex form.
    CacheIdUnit unit;
    unit.cacheId = Js::Constants::NoInlineCacheIndex;
    callRegToLdFldCacheIndexMap->TryGetValueAndRemove(functionRegister, &unit);
    if (DoProfileCallOp(op))
    {
        // Prefer inline-phase profiling keyed by the call-site id; otherwise
        // fall back to return-type profiling with a freshly allocated id.
        if (DoDynamicProfileOpcode(InlinePhase) &&
            callSiteId != Js::Constants::NoProfileId)
        {
            if (unit.cacheId == Js::Constants::NoInlineCacheIndex)
            {
                op = Js::OpCodeUtil::ConvertCallOpToProfiled(op);
            }
            else
            {
                isCallWithICIndex = true;
                op = Js::OpCodeUtil::ConvertCallOpToProfiled(op, true);
            }
            isProfiled = true;
        }
        else if ((DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) || DoDynamicProfileOpcode(FloatTypeSpecPhase)) &&
            this->m_functionWrite->AllocProfiledReturnTypeId(&profileId))
        {
            op = Js::OpCodeUtil::ConvertCallOpToProfiledReturnType(op);
            isProfiled = true;
        }
    }
    else if (DoProfileNewScObjArrayOp(op) &&
        (DoDynamicProfileOpcode(NativeArrayPhase, true) || DoDynamicProfileOpcode(InlinePhase, true)) &&
        callSiteId != Js::Constants::NoProfileId &&
        this->m_functionWrite->AllocProfiledArrayCallSiteId(&profileId2))
    {
        // Array construction: profiles both the call site and the array call site.
        OpCodeUtil::ConvertNonCallOpToProfiled(op);
        isProfiled = true;
        isProfiled2 = true;
    }
    else if (DoProfileNewScObjectOp(op) &&
        (DoDynamicProfileOpcode(InlinePhase, true) || DoDynamicProfileOpcode(FixedNewObjPhase, true)) &&
        callSiteId != Js::Constants::NoProfileId)
    {
        if (unit.cacheId == Js::Constants::NoInlineCacheIndex)
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        else
        {
            isCallWithICIndex = true;
            OpCodeUtil::ConvertNonCallOpToProfiledWithICIndex(op);
        }
        isProfiled = true;
    }
    // Select the layout matching the IC-index / call-flags combination.
    if (isCallWithICIndex)
    {
        if (hasCallFlags == true)
        {
            MULTISIZE_LAYOUT_WRITE(CallIFlagsWithICIndex, op, returnValueRegister, functionRegister, givenArgCount, unit.cacheId, unit.isRootObjectCache, callFlags);
        }
        else
        {
            MULTISIZE_LAYOUT_WRITE(CallIWithICIndex, op, returnValueRegister, functionRegister, givenArgCount, unit.cacheId, unit.isRootObjectCache);
        }
    }
    else
    {
        if (hasCallFlags == true)
        {
            MULTISIZE_LAYOUT_WRITE(CallIFlags, op, returnValueRegister, functionRegister, givenArgCount, callFlags);
        }
        else
        {
            MULTISIZE_LAYOUT_WRITE(CallI, op, returnValueRegister, functionRegister, givenArgCount);
        }
    }
    // Profile ids trail the instruction in the byte stream.
    if (isProfiled)
    {
        m_byteCodeData.Encode(&profileId, sizeof(Js::ProfileId));
        if (isProfiled2)
        {
            m_byteCodeData.Encode(&profileId2, sizeof(Js::ProfileId));
        }
    }
}
  1172. template <typename SizePolicy>
  1173. bool ByteCodeWriter::TryWriteElementI(OpCode op, RegSlot Value, RegSlot Instance, RegSlot Element)
  1174. {
  1175. OpLayoutT_ElementI<SizePolicy> layout;
  1176. if (SizePolicy::Assign(layout.Value, Value) && SizePolicy::Assign(layout.Instance, Instance)
  1177. && SizePolicy::Assign(layout.Element, Element))
  1178. {
  1179. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1180. return true;
  1181. }
  1182. return false;
  1183. }
// Writes an indexed element access (ElementI layout: value, instance, element
// registers). In strict mode, delete is remapped to its strict opcode. When
// dynamic profiling is on, LdElemI_A/StElemI_A(_Strict) are converted to their
// profiled forms and a ProfileId is appended after the instruction.
void ByteCodeWriter::Element(OpCode op, RegSlot Value, RegSlot Instance, RegSlot Element, bool instanceAtReturnRegOK, bool forceStrictMode)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::ElementI);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    // Map temporaries to their final register slots.
    Value = ConsumeReg(Value);
    Instance = ConsumeReg(Instance);
    Element = ConsumeReg(Element);
    if (this->m_functionWrite->GetIsStrictMode() || forceStrictMode)
    {
        if (op == OpCode::DeleteElemI_A)
        {
            op = OpCode::DeleteElemIStrict_A;
        }
    }
    bool isProfiledLayout = false;
    Js::ProfileId profileId = Js::Constants::NoProfileId;
    Assert(instanceAtReturnRegOK || Instance != 0);
    if (DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) ||
        DoDynamicProfileOpcode(FloatTypeSpecPhase) ||
        DoDynamicProfileOpcode(TypedArrayTypeSpecPhase) ||
        DoDynamicProfileOpcode(ArrayCheckHoistPhase))
    {
        OpCode newop;
        switch (op)
        {
        case OpCode::LdElemI_A:
            newop = OpCode::ProfiledLdElemI_A;
            // Only switch to the profiled form if a profile slot is available.
            if (this->m_functionWrite->AllocProfiledLdElemId(&profileId))
            {
                isProfiledLayout = true;
                op = newop;
            }
            break;
        case Js::OpCode::StElemI_A:
            newop = OpCode::ProfiledStElemI_A;
            goto StoreCommon;
        case Js::OpCode::StElemI_A_Strict:
            newop = OpCode::ProfiledStElemI_A_Strict;
            // Both store variants share one StElem profile-id allocation path.
        StoreCommon:
            if (this->m_functionWrite->AllocProfiledStElemId(&profileId))
            {
                isProfiledLayout = true;
                op = newop;
            }
            break;
        }
    }
    MULTISIZE_LAYOUT_WRITE(ElementI, op, Value, Instance, Element);
    if (isProfiledLayout)
    {
        // Profiled layouts carry their ProfileId immediately after the opcode.
        Assert(profileId != Js::Constants::NoProfileId);
        m_byteCodeData.Encode(&profileId, sizeof(Js::ProfileId));
    }
}
  1239. template <typename SizePolicy>
  1240. bool ByteCodeWriter::TryWriteElementUnsigned1(OpCode op, RegSlot Value, RegSlot Instance, uint32 Element)
  1241. {
  1242. OpLayoutT_ElementUnsigned1<SizePolicy> layout;
  1243. if (SizePolicy::Assign(layout.Value, Value) && SizePolicy::Assign(layout.Instance, Instance)
  1244. && SizePolicy::Assign(layout.Element, Element))
  1245. {
  1246. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1247. return true;
  1248. }
  1249. return false;
  1250. }
  1251. void ByteCodeWriter::ElementUnsigned1(OpCode op, RegSlot Value, RegSlot Instance, uint32 Element)
  1252. {
  1253. CheckOpen();
  1254. CheckOp(op, OpLayoutType::ElementUnsigned1);
  1255. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1256. Value = ConsumeReg(Value);
  1257. Instance = ConsumeReg(Instance);
  1258. MULTISIZE_LAYOUT_WRITE(ElementUnsigned1, op, Value, Instance, Element);
  1259. }
  1260. template <typename SizePolicy>
  1261. bool ByteCodeWriter::TryWriteElementScopedC(OpCode op, RegSlot value, PropertyIdIndexType propertyIdIndex)
  1262. {
  1263. OpLayoutT_ElementScopedC<SizePolicy> layout;
  1264. if (SizePolicy::Assign(layout.Value, value)
  1265. && SizePolicy::Assign(layout.PropertyIdIndex, propertyIdIndex))
  1266. {
  1267. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1268. return true;
  1269. }
  1270. return false;
  1271. }
  1272. void ByteCodeWriter::ScopedProperty(OpCode op, RegSlot value, PropertyIdIndexType propertyIdIndex, bool forceStrictMode)
  1273. {
  1274. CheckOpen();
  1275. CheckOp(op, OpLayoutType::ElementScopedC);
  1276. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1277. value = ConsumeReg(value);
  1278. #if DBG
  1279. switch (op)
  1280. {
  1281. case OpCode::ScopedDeleteFld:
  1282. case OpCode::ScopedEnsureNoRedeclFld:
  1283. case OpCode::ScopedInitFunc:
  1284. break;
  1285. default:
  1286. AssertMsg(false, "The specified OpCode is not intended for scoped field-access");
  1287. break;
  1288. }
  1289. #endif
  1290. if (this->m_functionWrite->GetIsStrictMode() || forceStrictMode)
  1291. {
  1292. if (op == OpCode::ScopedDeleteFld)
  1293. {
  1294. op = OpCode::ScopedDeleteFldStrict;
  1295. }
  1296. }
  1297. MULTISIZE_LAYOUT_WRITE(ElementScopedC, op, value, propertyIdIndex);
  1298. }
  1299. template <typename SizePolicy>
  1300. bool ByteCodeWriter::TryWriteElementC(OpCode op, RegSlot value, RegSlot instance, PropertyIdIndexType propertyIdIndex)
  1301. {
  1302. OpLayoutT_ElementC<SizePolicy> layout;
  1303. if (SizePolicy::Assign(layout.Value, value) && SizePolicy::Assign(layout.Instance, instance)
  1304. && SizePolicy::Assign(layout.PropertyIdIndex, propertyIdIndex))
  1305. {
  1306. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1307. return true;
  1308. }
  1309. return false;
  1310. }
  1311. void ByteCodeWriter::Property(OpCode op, RegSlot value, RegSlot instance, PropertyIdIndexType propertyIdIndex, bool forceStrictMode)
  1312. {
  1313. CheckOpen();
  1314. CheckOp(op, OpLayoutType::ElementC);
  1315. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1316. value = ConsumeReg(value);
  1317. instance = ConsumeReg(instance);
  1318. #if DBG
  1319. switch (op)
  1320. {
  1321. case OpCode::InitSetFld:
  1322. case OpCode::InitGetFld:
  1323. case OpCode::InitClassMemberGet:
  1324. case OpCode::InitClassMemberSet:
  1325. case OpCode::InitProto:
  1326. case OpCode::DeleteFld:
  1327. case OpCode::DeleteFld_ReuseLoc:
  1328. case OpCode::DeleteRootFld:
  1329. case OpCode::LdElemUndefScoped:
  1330. case OpCode::StFuncExpr:
  1331. break;
  1332. default:
  1333. AssertMsg(false, "The specified OpCode is not intended for field-access");
  1334. break;
  1335. }
  1336. #endif
  1337. if (this->m_functionWrite->GetIsStrictMode() || forceStrictMode)
  1338. {
  1339. if (op == OpCode::DeleteFld)
  1340. {
  1341. op = OpCode::DeleteFldStrict;
  1342. }
  1343. else if (op == OpCode::DeleteRootFld)
  1344. {
  1345. // We will reach here when in the language service mode, since in that mode we have skipped that error.
  1346. op = OpCode::DeleteRootFldStrict;
  1347. }
  1348. }
  1349. MULTISIZE_LAYOUT_WRITE(ElementC, op, value, instance, propertyIdIndex);
  1350. }
  1351. template <typename SizePolicy>
  1352. bool ByteCodeWriter::TryWriteElementSlot(OpCode op, RegSlot value, RegSlot instance, uint32 slotId)
  1353. {
  1354. OpLayoutT_ElementSlot<SizePolicy> layout;
  1355. if (SizePolicy::Assign(layout.Value, value) && SizePolicy::Assign(layout.Instance, instance)
  1356. && SizePolicy::Assign(layout.SlotIndex, slotId))
  1357. {
  1358. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1359. return true;
  1360. }
  1361. return false;
  1362. }
  1363. void ByteCodeWriter::Slot(OpCode op, RegSlot value, RegSlot instance, uint32 slotId)
  1364. {
  1365. CheckOpen();
  1366. CheckOp(op, OpLayoutType::ElementSlot);
  1367. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1368. value = ConsumeReg(value);
  1369. instance = ConsumeReg(instance);
  1370. #if DBG
  1371. switch (op)
  1372. {
  1373. #if ENABLE_NATIVE_CODEGEN
  1374. case OpCode::LdSlotArr:
  1375. case OpCode::StSlot:
  1376. case OpCode::StSlotChkUndecl:
  1377. #endif
  1378. case OpCode::StObjSlot:
  1379. case OpCode::StObjSlotChkUndecl:
  1380. case OpCode::StPropIdArrFromVar:
  1381. break;
  1382. default:
  1383. AssertMsg(false, "The specified OpCode is not intended for slot access");
  1384. break;
  1385. }
  1386. #endif
  1387. MULTISIZE_LAYOUT_WRITE(ElementSlot, op, value, instance, slotId);
  1388. }
// Writes a slot load (ElementSlot layout) that may be profiled: when the
// relevant type-spec phases are enabled and a valid profileId was allocated,
// LdSlot/LdObjSlot are converted to their profiled forms and the ProfileId is
// appended after the instruction.
void ByteCodeWriter::Slot(OpCode op, RegSlot value, RegSlot instance, uint32 slotId, ProfileId profileId)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::ElementSlot);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    value = ConsumeReg(value);
    instance = ConsumeReg(instance);
    switch (op)
    {
    case OpCode::LdSlot:
    case OpCode::LdObjSlot:
        if ((DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) || DoDynamicProfileOpcode(FloatTypeSpecPhase)) &&
            profileId != Constants::NoProfileId)
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    default:
        AssertMsg(false, "The specified OpCode is not intended for slot access");
        break;
    }
    MULTISIZE_LAYOUT_WRITE(ElementSlot, op, value, instance, slotId);
    // Only profiled forms carry a trailing ProfileId; the interpreter's decode
    // must agree with this layout.
    if (OpCodeAttr::IsProfiledOp(op))
    {
        m_byteCodeData.Encode(&profileId, sizeof(Js::ProfileId));
    }
}
  1416. template <typename SizePolicy>
  1417. bool ByteCodeWriter::TryWriteElementSlotI1(OpCode op, RegSlot value, uint32 slotId)
  1418. {
  1419. OpLayoutT_ElementSlotI1<SizePolicy> layout;
  1420. if (SizePolicy::Assign(layout.Value, value)
  1421. && SizePolicy::Assign(layout.SlotIndex, slotId))
  1422. {
  1423. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1424. return true;
  1425. }
  1426. return false;
  1427. }
  1428. void ByteCodeWriter::SlotI1(OpCode op, RegSlot value, uint32 slotId)
  1429. {
  1430. CheckOpen();
  1431. CheckOp(op, OpLayoutType::ElementSlotI1);
  1432. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1433. value = ConsumeReg(value);
  1434. #if DBG
  1435. switch (op)
  1436. {
  1437. case OpCode::LdEnvObj:
  1438. case OpCode::LdEnvObj_ReuseLoc:
  1439. case OpCode::StLocalSlot:
  1440. case OpCode::StParamSlot:
  1441. case OpCode::StLocalObjSlot:
  1442. case OpCode::StParamObjSlot:
  1443. case OpCode::StLocalSlotChkUndecl:
  1444. case OpCode::StParamSlotChkUndecl:
  1445. case OpCode::StLocalObjSlotChkUndecl:
  1446. case OpCode::StParamObjSlotChkUndecl:
  1447. {
  1448. break;
  1449. }
  1450. default:
  1451. {
  1452. AssertMsg(false, "The specified OpCode is not intended for slot access");
  1453. break;
  1454. }
  1455. }
  1456. #endif
  1457. MULTISIZE_LAYOUT_WRITE(ElementSlotI1, op, value, slotId);
  1458. }
// Writes a single-index slot load (ElementSlotI1 layout) that may be
// profiled: when the type-spec phases are enabled and a valid profileId was
// allocated, the load opcodes are converted to their profiled forms and the
// ProfileId is appended after the instruction.
void ByteCodeWriter::SlotI1(OpCode op, RegSlot value, uint32 slotId, ProfileId profileId)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::ElementSlotI1);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    value = ConsumeReg(value);
    switch (op)
    {
    case OpCode::LdLocalSlot:
    case OpCode::LdParamSlot:
    case OpCode::LdLocalObjSlot:
    case OpCode::LdParamObjSlot:
        if ((DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) || DoDynamicProfileOpcode(FloatTypeSpecPhase)) &&
            profileId != Constants::NoProfileId)
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    default:
    {
        AssertMsg(false, "The specified OpCode is not intended for slot access");
        break;
    }
    }
    MULTISIZE_LAYOUT_WRITE(ElementSlotI1, op, value, slotId);
    // Only profiled forms carry a trailing ProfileId.
    if (OpCodeAttr::IsProfiledOp(op))
    {
        m_byteCodeData.Encode(&profileId, sizeof(Js::ProfileId));
    }
}
  1489. template <typename SizePolicy>
  1490. bool ByteCodeWriter::TryWriteElementSlotI2(OpCode op, RegSlot value, uint32 slotId1, uint32 slotId2)
  1491. {
  1492. OpLayoutT_ElementSlotI2<SizePolicy> layout;
  1493. if (SizePolicy::Assign(layout.Value, value)
  1494. && SizePolicy::Assign(layout.SlotIndex1, slotId1)
  1495. && SizePolicy::Assign(layout.SlotIndex2, slotId2))
  1496. {
  1497. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1498. return true;
  1499. }
  1500. return false;
  1501. }
  1502. void ByteCodeWriter::SlotI2(OpCode op, RegSlot value, uint32 slotId1, uint32 slotId2)
  1503. {
  1504. CheckOpen();
  1505. CheckOp(op, OpLayoutType::ElementSlotI2);
  1506. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1507. value = ConsumeReg(value);
  1508. #if DBG
  1509. switch (op)
  1510. {
  1511. case OpCode::StInnerSlot:
  1512. case OpCode::StInnerSlotChkUndecl:
  1513. case OpCode::StInnerObjSlot:
  1514. case OpCode::StInnerObjSlotChkUndecl:
  1515. case OpCode::StEnvSlot:
  1516. case OpCode::StEnvSlotChkUndecl:
  1517. case OpCode::StEnvObjSlot:
  1518. case OpCode::StEnvObjSlotChkUndecl:
  1519. case OpCode::StModuleSlot:
  1520. case OpCode::LdModuleSlot:
  1521. {
  1522. break;
  1523. }
  1524. default:
  1525. {
  1526. AssertMsg(false, "The specified OpCode is not intended for slot access");
  1527. break;
  1528. }
  1529. }
  1530. #endif
  1531. MULTISIZE_LAYOUT_WRITE(ElementSlotI2, op, value, slotId1, slotId2);
  1532. }
// Writes a two-index slot load (ElementSlotI2 layout) that may be profiled:
// when the type-spec phases are enabled and a valid profileId was allocated,
// the load opcodes are converted to their profiled forms and the ProfileId is
// appended after the instruction.
void ByteCodeWriter::SlotI2(OpCode op, RegSlot value, uint32 slotId1, uint32 slotId2, ProfileId profileId)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::ElementSlotI2);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    value = ConsumeReg(value);
    switch (op)
    {
    case OpCode::LdInnerSlot:
    case OpCode::LdInnerObjSlot:
    case OpCode::LdEnvSlot:
    case OpCode::LdEnvObjSlot:
    case OpCode::LdModuleSlot:
        if ((DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) || DoDynamicProfileOpcode(FloatTypeSpecPhase)) &&
            profileId != Constants::NoProfileId)
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    default:
    {
        AssertMsg(false, "The specified OpCode is not intended for slot access");
        break;
    }
    }
    MULTISIZE_LAYOUT_WRITE(ElementSlotI2, op, value, slotId1, slotId2);
    // Only profiled forms carry a trailing ProfileId.
    if (OpCodeAttr::IsProfiledOp(op))
    {
        m_byteCodeData.Encode(&profileId, sizeof(Js::ProfileId));
    }
}
  1564. template <typename SizePolicy>
  1565. bool ByteCodeWriter::TryWriteElementSlotI3(OpCode op, RegSlot value, RegSlot instance, uint32 slotId, RegSlot homeObj)
  1566. {
  1567. OpLayoutT_ElementSlotI3<SizePolicy> layout;
  1568. if (SizePolicy::Assign(layout.Value, value)
  1569. && SizePolicy::Assign(layout.Instance, instance)
  1570. && SizePolicy::Assign(layout.SlotIndex, slotId)
  1571. && SizePolicy::Assign(layout.HomeObj, homeObj))
  1572. {
  1573. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1574. return true;
  1575. }
  1576. return false;
  1577. }
  1578. template <typename SizePolicy>
  1579. bool ByteCodeWriter::TryWriteElementU(OpCode op, RegSlot instance, PropertyIdIndexType index)
  1580. {
  1581. OpLayoutT_ElementU<SizePolicy> layout;
  1582. if (SizePolicy::Assign(layout.Instance, instance) && SizePolicy::Assign(layout.PropertyIdIndex, index))
  1583. {
  1584. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1585. return true;
  1586. }
  1587. return false;
  1588. }
  1589. void ByteCodeWriter::ElementU(OpCode op, RegSlot instance, PropertyIdIndexType index)
  1590. {
  1591. CheckOpen();
  1592. CheckOp(op, OpLayoutType::ElementU);
  1593. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1594. instance = ConsumeReg(instance);
  1595. MULTISIZE_LAYOUT_WRITE(ElementU, op, instance, index);
  1596. }
  1597. template <typename SizePolicy>
  1598. bool ByteCodeWriter::TryWriteElementScopedU(OpCode op, PropertyIdIndexType index)
  1599. {
  1600. OpLayoutT_ElementScopedU<SizePolicy> layout;
  1601. if (SizePolicy::Assign(layout.PropertyIdIndex, index))
  1602. {
  1603. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1604. return true;
  1605. }
  1606. return false;
  1607. }
  1608. void ByteCodeWriter::ElementScopedU(OpCode op, PropertyIdIndexType index)
  1609. {
  1610. CheckOpen();
  1611. CheckOp(op, OpLayoutType::ElementScopedU);
  1612. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1613. MULTISIZE_LAYOUT_WRITE(ElementScopedU, op, index);
  1614. }
  1615. template <typename SizePolicy>
  1616. bool ByteCodeWriter::TryWriteElementRootU(OpCode op, PropertyIdIndexType index)
  1617. {
  1618. OpLayoutT_ElementRootU<SizePolicy> layout;
  1619. if (SizePolicy::Assign(layout.PropertyIdIndex, index))
  1620. {
  1621. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1622. return true;
  1623. }
  1624. return false;
  1625. }
  1626. void ByteCodeWriter::ElementRootU(OpCode op, PropertyIdIndexType index)
  1627. {
  1628. CheckOpen();
  1629. CheckOp(op, OpLayoutType::ElementRootU);
  1630. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1631. MULTISIZE_LAYOUT_WRITE(ElementRootU, op, index);
  1632. }
// Attempts to encode an ElementRootCP layout (value register + inline cache
// index) under the given size policy. On success it also records the byte
// offset of the just-written inlineCacheIndex field so the index can be
// patched later (see comment below). Returns false, emitting nothing, when a
// field overflows the policy.
template <typename SizePolicy>
bool ByteCodeWriter::TryWriteElementRootCP(OpCode op, RegSlot value, uint cacheId, bool isLoadMethod, bool isStore)
{
    // A cache is used either for a method load or a store, never both.
    Assert(!isLoadMethod || !isStore);
    OpLayoutT_ElementRootCP<SizePolicy> layout;
    if (SizePolicy::Assign(layout.Value, value) && SizePolicy::Assign(layout.inlineCacheIndex, cacheId))
    {
        size_t offset = m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
        // Sanity check: EncodeT wrote exactly opcode + layout bytes.
        Assert(m_byteCodeData.GetCurrentOffset() == offset + OpCodeUtil::EncodedSize(op, SizePolicy::LayoutEnum) + sizeof(OpLayoutT_ElementRootCP<SizePolicy>));
        // Byte offset of the inlineCacheIndex field inside the emitted stream.
        size_t inlineCacheOffset = offset + OpCodeUtil::EncodedSize(op, SizePolicy::LayoutEnum)
            + offsetof(OpLayoutT_ElementRootCP<SizePolicy>, inlineCacheIndex);
        // Root object inline cache indices are given out from 0, but they end
        // up placed after all the plain inline caches. Store the offset of the
        // inline cache index so it can be patched up later.
        SListBase<size_t> * rootObjectInlineCacheOffsets = isStore ?
            &rootObjectStoreInlineCacheOffsets : isLoadMethod ? &rootObjectLoadMethodInlineCacheOffsets : &rootObjectLoadInlineCacheOffsets;
        rootObjectInlineCacheOffsets->Prepend(this->m_labelOffsets->GetAllocator(), inlineCacheOffset);
        return true;
    }
    return false;
}
// Writes a patchable (inline-cached) root-object field access (ElementRootCP
// layout). Depending on the enabled profiling phases, load/store opcodes are
// converted to their profiled forms. For LdRootMethodFld, the cache id can be
// registered against the value register so a later call through that register
// can be emitted as a call-with-IC-index.
void ByteCodeWriter::PatchableRootProperty(OpCode op, RegSlot value, uint cacheId, bool isLoadMethod, bool isStore, bool registerCacheIdForCall)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::ElementRootCP);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    // A cache serves either a method load or a store, never both.
    Assert(!isLoadMethod || !isStore);
    value = ConsumeReg(value);
    switch (op)
    {
    case OpCode::LdRootFld:
    case OpCode::LdRootFldForTypeOf:
        if (DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) ||
            DoDynamicProfileOpcode(FloatTypeSpecPhase) ||
            DoDynamicProfileOpcode(ObjTypeSpecPhase) ||
            DoDynamicProfileOpcode(InlinePhase) ||
            DoDynamicProfileOpcode(ProfileBasedFldFastPathPhase))
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    case OpCode::LdRootMethodFld:
        if (registerCacheIdForCall)
        {
            // Remember this register's cache id (as a root-object cache) for a
            // subsequent CallI emitted against the same register.
            CacheIdUnit unit(cacheId, true);
            Assert(!callRegToLdFldCacheIndexMap->TryGetValue(value, &unit));
            callRegToLdFldCacheIndexMap->Add(value, unit);
        }
        // fall-through
    case OpCode::StRootFld:
    case OpCode::StRootFldStrict:
    case OpCode::InitRootFld:
        if (DoDynamicProfileOpcode(ProfileBasedFldFastPathPhase) ||
            DoDynamicProfileOpcode(InlinePhase) ||
            DoDynamicProfileOpcode(ObjTypeSpecPhase))
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    case OpCode::InitRootLetFld:
    case OpCode::InitRootConstFld:
        break;
    default:
        AssertMsg(false, "The specified OpCode is not intended for patchable root field-access");
        break;
    }
    MULTISIZE_LAYOUT_WRITE(ElementRootCP, op, value, cacheId, isLoadMethod, isStore);
}
  1699. template <typename SizePolicy>
  1700. bool ByteCodeWriter::TryWriteElementP(OpCode op, RegSlot value, CacheId cacheId)
  1701. {
  1702. OpLayoutT_ElementP<SizePolicy> layout;
  1703. if (SizePolicy::Assign(layout.Value, value)
  1704. && SizePolicy::Assign(layout.inlineCacheIndex, cacheId))
  1705. {
  1706. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1707. return true;
  1708. }
  1709. return false;
  1710. }
// Writes a base-less patchable (inline-cached) field access (ElementP layout)
// against an implicit instance (scope chain / local scope). Depending on the
// enabled profiling phases, load/store opcodes are converted to their
// profiled forms. For constructor/method loads, the cache id can be
// registered against the value register for a later call-with-IC-index.
void ByteCodeWriter::ElementP(OpCode op, RegSlot value, uint cacheId, bool isCtor, bool registerCacheIdForCall)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::ElementP);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    value = ConsumeReg(value);
    switch (op)
    {
    case OpCode::ScopedLdFld:
    case OpCode::ScopedLdFldForTypeOf:
    case OpCode::ScopedStFld:
    case OpCode::ConsoleScopedStFld:
    case OpCode::ScopedStFldStrict:
    case OpCode::ConsoleScopedStFldStrict:
        break;
    case OpCode::LdLocalFld:
    case OpCode::LdLocalFld_ReuseLoc:
        if (isCtor) // The symbol loaded by this LdFld will be used as a constructor
        {
            if (registerCacheIdForCall)
            {
                // Remember this register's cache id for a subsequent CallI
                // emitted against the same register.
                CacheIdUnit unit(cacheId);
                Assert(!callRegToLdFldCacheIndexMap->TryGetValue(value, &unit));
                callRegToLdFldCacheIndexMap->Add(value, unit);
            }
        }
        if (DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) ||
            DoDynamicProfileOpcode(FloatTypeSpecPhase) ||
            DoDynamicProfileOpcode(ObjTypeSpecPhase) ||
            DoDynamicProfileOpcode(InlinePhase) ||
            DoDynamicProfileOpcode(ProfileBasedFldFastPathPhase))
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    case OpCode::LdLocalMethodFld:
        if (registerCacheIdForCall)
        {
            CacheIdUnit unit(cacheId);
            Assert(!callRegToLdFldCacheIndexMap->TryGetValue(value, &unit));
            callRegToLdFldCacheIndexMap->Add(value, unit);
        }
        // fall-through
    case OpCode::StLocalFld:
    case OpCode::InitLocalFld:
        if (DoDynamicProfileOpcode(ProfileBasedFldFastPathPhase) ||
            DoDynamicProfileOpcode(InlinePhase) ||
            DoDynamicProfileOpcode(ObjTypeSpecPhase))
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    case OpCode::InitLocalLetFld:
    case OpCode::InitUndeclLocalLetFld:
    case OpCode::InitUndeclLocalConstFld:
        break;
    default:
        AssertMsg(false, "The specified OpCode not intended for base-less patchable field access");
        break;
    }
    MULTISIZE_LAYOUT_WRITE(ElementP, op, value, cacheId);
}
  1773. template <typename SizePolicy>
  1774. bool ByteCodeWriter::TryWriteElementPIndexed(OpCode op, RegSlot value, uint32 scopeIndex, CacheId cacheId)
  1775. {
  1776. OpLayoutT_ElementPIndexed<SizePolicy> layout;
  1777. if (SizePolicy::Assign(layout.Value, value)
  1778. && SizePolicy::Assign(layout.inlineCacheIndex, cacheId)
  1779. && SizePolicy::Assign(layout.scopeIndex, scopeIndex))
  1780. {
  1781. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1782. return true;
  1783. }
  1784. return false;
  1785. }
  1786. void ByteCodeWriter::ElementPIndexed(OpCode op, RegSlot value, uint32 scopeIndex, uint cacheId)
  1787. {
  1788. CheckOpen();
  1789. CheckOp(op, OpLayoutType::ElementPIndexed);
  1790. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1791. value = ConsumeReg(value);
  1792. switch (op)
  1793. {
  1794. case OpCode::InitInnerFld:
  1795. case OpCode::InitInnerLetFld:
  1796. case OpCode::InitUndeclLetFld:
  1797. case OpCode::InitUndeclConstFld:
  1798. break;
  1799. break;
  1800. default:
  1801. AssertMsg(false, "The specified OpCode not intended for base-less patchable inner field access");
  1802. break;
  1803. }
  1804. MULTISIZE_LAYOUT_WRITE(ElementPIndexed, op, value, scopeIndex, cacheId);
  1805. }
  1806. template <typename SizePolicy>
  1807. bool ByteCodeWriter::TryWriteElementCP(OpCode op, RegSlot value, RegSlot instance, CacheId cacheId)
  1808. {
  1809. OpLayoutT_ElementCP<SizePolicy> layout;
  1810. if (SizePolicy::Assign(layout.Value, value) && SizePolicy::Assign(layout.Instance, instance)
  1811. && SizePolicy::Assign(layout.inlineCacheIndex, cacheId))
  1812. {
  1813. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1814. return true;
  1815. }
  1816. return false;
  1817. }
// Writes a patchable (inline-cached) field access (ElementCP layout).
// Depending on the enabled profiling phases, opcodes are converted to their
// profiled forms; LdLen_A additionally allocates its own ProfileId, which is
// appended after the instruction. For constructor/method loads, the cache id
// can be registered against the value register for a later
// call-with-IC-index.
void ByteCodeWriter::PatchableProperty(OpCode op, RegSlot value, RegSlot instance, uint cacheId, bool isCtor, bool registerCacheIdForCall)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::ElementCP);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    value = ConsumeReg(value);
    instance = ConsumeReg(instance);
    bool isProfiled = false;
    Js::ProfileId profileId = Js::Constants::NoProfileId;
    switch (op)
    {
    case OpCode::LdLen_A:
    {
        // LdLen carries its own ProfileId (unlike the field ops below, whose
        // profiled forms reuse the inline cache index).
        if ((DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) ||
            DoDynamicProfileOpcode(FloatTypeSpecPhase) ||
            DoDynamicProfileOpcode(TypedArrayTypeSpecPhase) ||
            DoDynamicProfileOpcode(ArrayCheckHoistPhase) ||
            DoDynamicProfileOpcode(ObjTypeSpecPhase) ||
            DoDynamicProfileOpcode(InlinePhase) ||
            DoDynamicProfileOpcode(ProfileBasedFldFastPathPhase))
            && this->m_functionWrite->AllocProfiledLdLenId(&profileId))
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
            isProfiled = true;
        }
        break;
    }
    case OpCode::LdFldForTypeOf:
    case OpCode::LdFld:
    case OpCode::LdFld_ReuseLoc:
        if (isCtor) // The symbol loaded by this LdFld will be used as a constructor
        {
            if (registerCacheIdForCall)
            {
                // Remember this register's cache id for a subsequent CallI
                // emitted against the same register.
                CacheIdUnit unit(cacheId);
                Assert(!callRegToLdFldCacheIndexMap->TryGetValue(value, &unit));
                callRegToLdFldCacheIndexMap->Add(value, unit);
            }
        }
        // fall-through
    case OpCode::LdFldForCallApplyTarget:
        if (DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) ||
            DoDynamicProfileOpcode(FloatTypeSpecPhase) ||
            DoDynamicProfileOpcode(ObjTypeSpecPhase) ||
            DoDynamicProfileOpcode(InlinePhase) ||
            DoDynamicProfileOpcode(ProfileBasedFldFastPathPhase))
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    case OpCode::LdMethodFld:
        if (registerCacheIdForCall)
        {
            CacheIdUnit unit(cacheId);
            Assert(!callRegToLdFldCacheIndexMap->TryGetValue(value, &unit));
            callRegToLdFldCacheIndexMap->Add(value, unit);
        }
        // fall-through
    case OpCode::StFld:
    case OpCode::StFldStrict:
    case OpCode::InitFld:
        if (DoDynamicProfileOpcode(ProfileBasedFldFastPathPhase) ||
            DoDynamicProfileOpcode(InlinePhase) ||
            DoDynamicProfileOpcode(ObjTypeSpecPhase))
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    case OpCode::InitLetFld:
    case OpCode::InitConstFld:
    case OpCode::InitClassMember:
    case OpCode::ScopedLdMethodFld:
        break;
    default:
        AssertMsg(false, "The specified OpCode is not intended for patchable field-access");
        break;
    }
    MULTISIZE_LAYOUT_WRITE(ElementCP, op, value, instance, cacheId);
    if (isProfiled)
    {
        // Trailing ProfileId for the profiled LdLen form.
        m_byteCodeData.Encode(&profileId, sizeof(Js::ProfileId));
    }
}
  1900. template <typename SizePolicy>
  1901. bool ByteCodeWriter::TryWriteElementC2(OpCode op, RegSlot value, RegSlot instance, PropertyIdIndexType propertyIdIndex, RegSlot value2)
  1902. {
  1903. OpLayoutT_ElementC2<SizePolicy> layout;
  1904. if (SizePolicy::Assign(layout.Value, value) && SizePolicy::Assign(layout.Instance, instance)
  1905. && SizePolicy::Assign(layout.PropertyIdIndex, propertyIdIndex) && SizePolicy::Assign(layout.Value2, value2))
  1906. {
  1907. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1908. return true;
  1909. }
  1910. return false;
  1911. }
// Writes a patchable super-property access (ElementC2 layout), which carries
// the `this` instance alongside the super instance. Depending on the enabled
// profiling phases, opcodes are converted to their profiled forms. For a
// constructor load, the cache id can be registered against the value register
// for a later call-with-IC-index.
void ByteCodeWriter::PatchablePropertyWithThisPtr(OpCode op, RegSlot value, RegSlot instance, RegSlot thisInstance, uint cacheId, bool isCtor, bool registerCacheIdForCall)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::ElementC2);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    value = ConsumeReg(value);
    instance = ConsumeReg(instance);
    thisInstance = ConsumeReg(thisInstance);
    switch (op)
    {
    case OpCode::LdSuperFld:
        if (isCtor) // The symbol loaded by this LdSuperFld will be used as a constructor
        {
            if (registerCacheIdForCall)
            {
                // Remember this register's cache id for a subsequent CallI
                // emitted against the same register.
                CacheIdUnit unit(cacheId);
                Assert(!callRegToLdFldCacheIndexMap->TryGetValue(value, &unit));
                callRegToLdFldCacheIndexMap->Add(value, unit);
            }
        }
        if (DoDynamicProfileOpcode(AggressiveIntTypeSpecPhase) ||
            DoDynamicProfileOpcode(FloatTypeSpecPhase) ||
            DoDynamicProfileOpcode(ObjTypeSpecPhase) ||
            DoDynamicProfileOpcode(InlinePhase) ||
            DoDynamicProfileOpcode(ProfileBasedFldFastPathPhase))
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    case OpCode::StSuperFld:
    case OpCode::StSuperFldStrict:
        if (DoDynamicProfileOpcode(ProfileBasedFldFastPathPhase) ||
            DoDynamicProfileOpcode(InlinePhase) ||
            DoDynamicProfileOpcode(ObjTypeSpecPhase))
        {
            OpCodeUtil::ConvertNonCallOpToProfiled(op);
        }
        break;
    default:
        AssertMsg(false, "The specified OpCode is not intended for patchable super field-access");
        break;
    }
    MULTISIZE_LAYOUT_WRITE(ElementC2, op, value, instance, cacheId, thisInstance);
}
  1956. template <typename SizePolicy>
  1957. bool ByteCodeWriter::TryWriteElementScopedC2(OpCode op, RegSlot value, PropertyIdIndexType propertyIdIndex, RegSlot value2)
  1958. {
  1959. OpLayoutT_ElementScopedC2<SizePolicy> layout;
  1960. if (SizePolicy::Assign(layout.Value, value)
  1961. && SizePolicy::Assign(layout.PropertyIdIndex, propertyIdIndex) && SizePolicy::Assign(layout.Value2, value2))
  1962. {
  1963. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  1964. return true;
  1965. }
  1966. return false;
  1967. }
  1968. void ByteCodeWriter::ScopedProperty2(OpCode op, RegSlot value, PropertyIdIndexType propertyIdIndex, RegSlot value2)
  1969. {
  1970. CheckOpen();
  1971. CheckOp(op, OpLayoutType::ElementScopedC2);
  1972. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  1973. value = ConsumeReg(value);
  1974. value2 = ConsumeReg(value2);
  1975. switch (op)
  1976. {
  1977. case OpCode::ScopedLdInst:
  1978. break;
  1979. default:
  1980. AssertMsg(false, "The specified OpCode is not intended for field-access with a second instance");
  1981. break;
  1982. }
  1983. MULTISIZE_LAYOUT_WRITE(ElementScopedC2, op, value, propertyIdIndex, value2);
  1984. }
// Emits an opcode with two register operands and one unsigned immediate (Reg2U layout).
void ByteCodeWriter::Reg2U(OpCode op, RegSlot R0, RegSlot R1, uint index)
{
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    CheckOp(op, OpLayoutType::Reg2U);
    CheckOpen();
    // Map writer-temp registers to their final function-body slots.
    R0 = ConsumeReg(R0);
    R1 = ConsumeReg(R1);
    MULTISIZE_LAYOUT_WRITE(Reg2U, op, R0, R1, index);
}
  1994. template <typename SizePolicy>
  1995. bool ByteCodeWriter::TryWriteReg3U(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, uint index)
  1996. {
  1997. OpLayoutT_Reg3U<SizePolicy> layout;
  1998. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.R2, R2) && SizePolicy::Assign(layout.SlotIndex, index))
  1999. {
  2000. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  2001. return true;
  2002. }
  2003. return false;
  2004. }
// Emits an opcode with three register operands and one unsigned immediate (Reg3U layout).
void ByteCodeWriter::Reg3U(OpCode op, RegSlot R0, RegSlot R1, RegSlot R2, uint index)
{
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    CheckOp(op, OpLayoutType::Reg3U);
    CheckOpen();
    // Map writer-temp registers to their final function-body slots.
    R0 = ConsumeReg(R0);
    R1 = ConsumeReg(R1);
    R2 = ConsumeReg(R2);
    MULTISIZE_LAYOUT_WRITE(Reg3U, op, R0, R1, R2, index);
}
  2015. void ByteCodeWriter::NewFunction(RegSlot destinationRegister, uint index, bool isGenerator, RegSlot homeObjLocation)
  2016. {
  2017. CheckOpen();
  2018. bool hasHomeObj = homeObjLocation != Js::Constants::NoRegister;
  2019. destinationRegister = ConsumeReg(destinationRegister);
  2020. OpCode opcode = OpCode::NewScFunc;
  2021. if (isGenerator)
  2022. {
  2023. opcode = hasHomeObj ? OpCode::NewScGenFuncHomeObj : OpCode::NewScGenFunc;
  2024. }
  2025. else if (this->m_functionWrite->DoStackNestedFunc())
  2026. {
  2027. Assert(!hasHomeObj);
  2028. opcode = OpCode::NewStackScFunc;
  2029. }
  2030. else if (hasHomeObj)
  2031. {
  2032. opcode = OpCode::NewScFuncHomeObj;
  2033. }
  2034. Assert(OpCodeAttr::HasMultiSizeLayout(opcode));
  2035. if (hasHomeObj)
  2036. {
  2037. homeObjLocation = ConsumeReg(homeObjLocation);
  2038. MULTISIZE_LAYOUT_WRITE(ElementSlot, opcode, destinationRegister, homeObjLocation, index);
  2039. }
  2040. else
  2041. {
  2042. MULTISIZE_LAYOUT_WRITE(ElementSlotI1, opcode, destinationRegister, index);
  2043. }
  2044. }
  2045. void ByteCodeWriter::NewInnerFunction(RegSlot destinationRegister, uint index, RegSlot environmentRegister, bool isGenerator, RegSlot homeObjLocation)
  2046. {
  2047. CheckOpen();
  2048. bool hasHomeObj = homeObjLocation != Js::Constants::NoRegister;
  2049. destinationRegister = ConsumeReg(destinationRegister);
  2050. environmentRegister = ConsumeReg(environmentRegister);
  2051. OpCode opcode = OpCode::NewInnerScFunc;
  2052. if (isGenerator)
  2053. {
  2054. opcode = hasHomeObj ? OpCode::NewInnerScGenFuncHomeObj : OpCode::NewInnerScGenFunc;
  2055. }
  2056. else if (this->m_functionWrite->DoStackNestedFunc())
  2057. {
  2058. Assert(!hasHomeObj);
  2059. opcode = OpCode::NewInnerStackScFunc;
  2060. }
  2061. else if (hasHomeObj)
  2062. {
  2063. opcode = OpCode::NewInnerScFuncHomeObj;
  2064. }
  2065. Assert(OpCodeAttr::HasMultiSizeLayout(opcode));
  2066. if (hasHomeObj)
  2067. {
  2068. homeObjLocation = ConsumeReg(homeObjLocation);
  2069. MULTISIZE_LAYOUT_WRITE(ElementSlotI3, opcode, destinationRegister, environmentRegister, index, homeObjLocation);
  2070. }
  2071. else
  2072. {
  2073. MULTISIZE_LAYOUT_WRITE(ElementSlot, opcode, destinationRegister, environmentRegister, index);
  2074. }
  2075. }
  2076. template <typename SizePolicy>
  2077. bool ByteCodeWriter::TryWriteReg1Unsigned1(OpCode op, RegSlot R0, uint C1)
  2078. {
  2079. OpLayoutT_Reg1Unsigned1<SizePolicy> layout;
  2080. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.C1, C1))
  2081. {
  2082. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  2083. return true;
  2084. }
  2085. return false;
  2086. }
// Emits an opcode with one register and one unsigned immediate (Reg1Unsigned1 layout).
// Two opcode families additionally get a trailing ProfileId appended to the stream:
// array-literal creation ops (when native-array profiling is enabled) and
// InitForInEnumerator (for-in loop-count profiling).
void ByteCodeWriter::Reg1Unsigned1(OpCode op, RegSlot R0, uint C1)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Reg1Unsigned1);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    R0 = ConsumeReg(R0);
    ProfileId profileId = Constants::NoProfileId;
    // The &&/|| chains short-circuit, so at most one of the two profile-id
    // allocators runs, and only when its conditions hold.
    bool isProfiled = (DoProfileNewScArrayOp(op) &&
        DoDynamicProfileOpcode(NativeArrayPhase, true) &&
        this->m_functionWrite->AllocProfiledArrayCallSiteId(&profileId))
        || (op == OpCode::InitForInEnumerator &&
            this->m_functionWrite->AllocProfiledForInLoopCount(&profileId));
    if (isProfiled)
    {
        OpCodeUtil::ConvertNonCallOpToProfiled(op);
    }
    MULTISIZE_LAYOUT_WRITE(Reg1Unsigned1, op, R0, C1);
    // The profile id rides immediately after the instruction in the byte stream.
    if (isProfiled)
    {
        m_byteCodeData.Encode(&profileId, sizeof(Js::ProfileId));
    }
}
  2109. void ByteCodeWriter::W1(OpCode op, ushort C1)
  2110. {
  2111. CheckOpen();
  2112. CheckOp(op, OpLayoutType::W1);
  2113. Assert(!OpCodeAttr::HasMultiSizeLayout(op));
  2114. OpLayoutW1 data;
  2115. data.C1 = C1;
  2116. m_byteCodeData.Encode(op, &data, sizeof(data), this);
  2117. }
  2118. template <typename SizePolicy>
  2119. bool ByteCodeWriter::TryWriteReg2Int1(OpCode op, RegSlot R0, RegSlot R1, int C1)
  2120. {
  2121. OpLayoutT_Reg2Int1<SizePolicy> layout;
  2122. if (SizePolicy::Assign(layout.R0, R0) && SizePolicy::Assign(layout.R1, R1) && SizePolicy::Assign(layout.C1, C1))
  2123. {
  2124. m_byteCodeData.EncodeT<SizePolicy::LayoutEnum>(op, &layout, sizeof(layout), this);
  2125. return true;
  2126. }
  2127. return false;
  2128. }
  2129. void ByteCodeWriter::Reg2Int1(OpCode op, RegSlot R0, RegSlot R1, int C1)
  2130. {
  2131. CheckOpen();
  2132. CheckOp(op, OpLayoutType::Reg2Int1);
  2133. Assert(OpCodeAttr::HasMultiSizeLayout(op));
  2134. if (DoDynamicProfileOpcode(CheckThisPhase) ||
  2135. DoDynamicProfileOpcode(TypedArrayTypeSpecPhase) ||
  2136. DoDynamicProfileOpcode(ArrayCheckHoistPhase))
  2137. {
  2138. if (op == OpCode::LdThis)
  2139. {
  2140. op = OpCode::ProfiledLdThis;
  2141. }
  2142. }
  2143. R0 = ConsumeReg(R0);
  2144. R1 = ConsumeReg(R1);
  2145. MULTISIZE_LAYOUT_WRITE(Reg2Int1, op, R0, R1, C1);
  2146. }
// Emits a three-operand Reg3-layout instruction.
// NOTE(review): unlike the other Reg3-style emitters in this file, the operands
// are NOT passed through ConsumeReg here — presumably callers supply values that
// need no translation; confirm with call sites before reusing for ordinary registers.
void ByteCodeWriter::Num3(OpCode op, RegSlot C0, RegSlot C1, RegSlot C2)
{
    CheckOpen();
    CheckOp(op, OpLayoutType::Reg3);
    Assert(OpCodeAttr::HasMultiSizeLayout(op));
    MULTISIZE_LAYOUT_WRITE(Reg3, op, C0, C1, C2);
}
  2154. int ByteCodeWriter::AuxNoReg(OpCode op, const void* buffer, int byteCount, int C1)
  2155. {
  2156. CheckOpen();
  2157. //
  2158. // Write the buffer's contents
  2159. //
  2160. int currentOffset = InsertAuxiliaryData(buffer, byteCount);
  2161. //
  2162. // Write OpCode to create new auxiliary data
  2163. //
  2164. OpLayoutAuxNoReg data;
  2165. data.Offset = currentOffset;
  2166. data.C1 = C1;
  2167. m_byteCodeData.Encode(op, &data, sizeof(data), this);
  2168. return currentOffset;
  2169. }
  2170. void ByteCodeWriter::AuxNoReg(OpCode op, uint byteOffset, int C1)
  2171. {
  2172. CheckOpen();
  2173. //
  2174. // Write the buffer's contents
  2175. //
  2176. Assert(byteOffset < m_auxiliaryData.GetCurrentOffset());
  2177. OpLayoutAuxNoReg data;
  2178. data.Offset = byteOffset;
  2179. data.C1 = C1;
  2180. m_byteCodeData.Encode(op, &data, sizeof(data), this);
  2181. }
// Copies 'buffer' into the auxiliary-data section and emits 'op' (with one
// destination register) referencing it. Returns the payload's offset within the
// auxiliary section so callers can re-emit against the same data later.
int ByteCodeWriter::Auxiliary(OpCode op, RegSlot destinationRegister, const void* buffer, int byteCount, int C1)
{
    CheckOpen();
    destinationRegister = ConsumeReg(destinationRegister);
    //
    // Write the buffer's contents
    //
    int currentOffset = InsertAuxiliaryData(buffer, byteCount);
    //
    // Write OpCode to create new auxiliary data
    //
    ProfileId profileId = Constants::NoProfileId;
    // Array-literal creation ops are upgraded to their profiled form, whose
    // layout carries an inline ProfileId; all other ops use the plain layout.
    if (DoProfileNewScArrayOp(op) &&
        DoDynamicProfileOpcode(NativeArrayPhase, true) &&
        this->m_functionWrite->AllocProfiledArrayCallSiteId(&profileId))
    {
        OpCodeUtil::ConvertNonCallOpToProfiled(op);
        OpLayoutDynamicProfile<OpLayoutAuxiliary> data;
        data.R0 = destinationRegister;
        data.Offset = currentOffset;
        data.C1 = C1;
        data.profileId = profileId;
        m_byteCodeData.Encode(op, &data, sizeof(data), this);
    }
    else
    {
        OpLayoutAuxiliary data;
        data.R0 = destinationRegister;
        data.Offset = currentOffset;
        data.C1 = C1;
        m_byteCodeData.Encode(op, &data, sizeof(data), this);
    }
    return currentOffset;
}
  2216. void ByteCodeWriter::Auxiliary(OpCode op, RegSlot destinationRegister, uint byteOffset, int C1)
  2217. {
  2218. CheckOpen();
  2219. destinationRegister = ConsumeReg(destinationRegister);
  2220. //
  2221. // Write the buffer's contents
  2222. //
  2223. Assert(byteOffset < m_auxiliaryData.GetCurrentOffset());
  2224. OpLayoutAuxiliary data;
  2225. data.R0 = destinationRegister;
  2226. data.Offset = byteOffset;
  2227. data.C1 = C1;
  2228. m_byteCodeData.Encode(op, &data, sizeof(data), this);
  2229. }
  2230. int ByteCodeWriter::Reg2Aux(OpCode op, RegSlot R0, RegSlot R1, const void* buffer, int byteCount, int C1)
  2231. {
  2232. CheckOpen();
  2233. R0 = ConsumeReg(R0);
  2234. R1 = ConsumeReg(R1);
  2235. //
  2236. // Write the buffer's contents
  2237. //
  2238. int currentOffset = InsertAuxiliaryData(buffer, byteCount);
  2239. //
  2240. // Write OpCode to create new auxiliary data
  2241. //
  2242. OpLayoutReg2Aux data;
  2243. data.R0 = R0;
  2244. data.R1 = R1;
  2245. data.Offset = currentOffset;
  2246. data.C1 = C1;
  2247. m_byteCodeData.Encode(op, &data, sizeof(data), this);
  2248. return currentOffset;
  2249. }
  2250. void ByteCodeWriter::Reg2Aux(OpCode op, RegSlot R0, RegSlot R1, uint byteOffset, int C1)
  2251. {
  2252. CheckOpen();
  2253. R0 = ConsumeReg(R0);
  2254. R1 = ConsumeReg(R1);
  2255. //
  2256. // Write the buffer's contents
  2257. //
  2258. Assert(byteOffset < m_auxiliaryData.GetCurrentOffset());
  2259. OpLayoutReg2Aux data;
  2260. data.R0 = R0;
  2261. data.R1 = R1;
  2262. data.Offset = byteOffset;
  2263. data.C1 = C1;
  2264. m_byteCodeData.Encode(op, &data, sizeof(data), this);
  2265. }
  2266. void ByteCodeWriter::AuxiliaryContext(OpCode op, RegSlot destinationRegister, const void* buffer, int byteCount, Js::RegSlot C1)
  2267. {
  2268. CheckOpen();
  2269. destinationRegister = ConsumeReg(destinationRegister);
  2270. C1 = ConsumeReg(C1);
  2271. //
  2272. // Write the buffer's contents
  2273. //
  2274. int currentOffset = m_auxContextData.GetCurrentOffset();
  2275. if (byteCount > 0)
  2276. {
  2277. m_auxContextData.Encode(buffer, byteCount);
  2278. }
  2279. //
  2280. // Write OpCode to create new auxiliary data
  2281. //
  2282. OpLayoutAuxiliary data;
  2283. data.R0 = destinationRegister;
  2284. data.Offset = currentOffset;
  2285. data.C1 = C1;
  2286. m_byteCodeData.Encode(op, &data, sizeof(data), this);
  2287. }
  2288. uint ByteCodeWriter::InsertAuxiliaryData(const void* buffer, uint byteCount)
  2289. {
  2290. uint offset = m_auxiliaryData.GetCurrentOffset();
  2291. if (byteCount > 0)
  2292. {
  2293. m_auxiliaryData.Encode(buffer, byteCount);
  2294. }
  2295. return offset;
  2296. }
// Allocates a new, not-yet-marked label and returns its id.
ByteCodeLabel ByteCodeWriter::DefineLabel()
{
#if defined(TARGET_64)
    if (m_labelOffsets->Count() == INT_MAX)
    {
        // Reached the label-count ceiling.
        Js::Throw::OutOfMemory();
    }
#else
    // 32-bit machine don't have enough address space to get to INT_MAX
    Assert(m_labelOffsets->Count() < INT_MAX);
#endif
    //
    // Allocate a new label:
    // - All label locations start as "undefined" (UINT_MAX). Once the label's location is
    //   marked in the byte-code (see MarkLabel), this will be updated.
    //
    return (ByteCodeLabel)m_labelOffsets->Add(UINT_MAX);
}
// Binds 'labelID' to the current byte-code offset. A label may be marked exactly once.
void ByteCodeWriter::MarkLabel(ByteCodeLabel labelID)
{
    CheckOpen();
    CheckLabel(labelID);
#ifdef BYTECODE_BRANCH_ISLAND
    if (useBranchIsland)
    {
        // If we are going to emit a branch island, it should be before the label,
        // so jumps to this label land after the island.
        EnsureLongBranch(Js::OpCode::Label);
    }
#endif
    //
    // Define the label as the current offset within the byte-code.
    //
    AssertMsg(m_labelOffsets->Item(labelID) == UINT_MAX, "A label may only be defined at one location");
    m_labelOffsets->SetExistingItem(labelID, m_byteCodeData.GetCurrentOffset());
}
// Records a jump patch site for the instruction just emitted. 'fieldByteOffsetFromEnd'
// is the distance from the end of the instruction back to its "Offset" field.
// With branch islands enabled, a backward branch that exceeds the short-branch
// limit is rewritten here into a short branch to a freshly emitted BrLong
// (with a jump-around when 'op' can fall through).
void ByteCodeWriter::AddJumpOffset(Js::OpCode op, ByteCodeLabel labelId, uint fieldByteOffsetFromEnd) // Offset of "Offset" field in OpLayout, in bytes
{
    AssertMsg(fieldByteOffsetFromEnd < 100, "Ensure valid field offset");
    CheckOpen();
    CheckLabel(labelId);
    uint jumpByteOffset = m_byteCodeData.GetCurrentOffset() - fieldByteOffsetFromEnd;
#ifdef BYTECODE_BRANCH_ISLAND
    if (useBranchIsland)
    {
        // Any Jump might need a long jump, account for that emit the branch island earlier.
        // Even if it is a back edge and we are going to emit a long jump, we will still
        // emit a branch around any way.
        this->nextBranchIslandOffset -= LongBranchSize;
        uint labelOffset = m_labelOffsets->Item(labelId);
        if (labelOffset != UINT_MAX)
        {
            // Back branch, see if it needs to be long
            Assert(labelOffset < m_byteCodeData.GetCurrentOffset());
            LongJumpOffset jumpOffset = labelOffset - m_byteCodeData.GetCurrentOffset();
            if (jumpOffset < -GetBranchLimit())
            {
                // Create the long jump label and add the original jump offset to the list first
                ByteCodeLabel longJumpLabel = this->DefineLabel();
                JumpInfo jumpInfo = { longJumpLabel, jumpByteOffset };
                m_jumpOffsets->Add(jumpInfo);
                // Emit the jump around (if necessary)
                ByteCodeLabel jumpAroundLabel = (ByteCodeLabel)-1;
                if (OpCodeAttr::HasFallThrough(op))
                {
                    // emit jump around.
                    jumpAroundLabel = this->DefineLabel();
                    this->Br(jumpAroundLabel);
                }
                // emit the long jump
                this->MarkLabel(longJumpLabel);
                this->BrLong(Js::OpCode::BrLong, labelId);
                if (jumpAroundLabel != (ByteCodeLabel)-1)
                {
                    this->MarkLabel(jumpAroundLabel);
                }
                return;
            }
        }
    }
#endif
    //
    // Branch targets are created in two passes:
    // - In the instruction stream, write "labelID" into "OpLayoutBrC.Offset". Record this
    //   location in "m_jumpOffsets" to be patched later.
    // - When the byte-code is closed, update all "OpLayoutBrC.Offset"'s with their actual
    //   destinations.
    //
    JumpInfo jumpInfo = { labelId, jumpByteOffset };
    m_jumpOffsets->Add(jumpInfo);
}
  2388. #ifdef BYTECODE_BRANCH_ISLAND
  2389. int32 ByteCodeWriter::GetBranchLimit()
  2390. {
  2391. #ifdef BYTECODE_TESTING
  2392. if (Js::Configuration::Global.flags.IsEnabled(Js::ByteCodeBranchLimitFlag))
  2393. {
  2394. // minimum 64
  2395. return min(max(Js::Configuration::Global.flags.ByteCodeBranchLimit, 64), SHRT_MAX + 1);
  2396. }
  2397. #endif
  2398. return SHRT_MAX + 1;
  2399. }
  2400. void ByteCodeWriter::AddLongJumpOffset(ByteCodeLabel labelId, uint fieldByteOffsetFromEnd) // Offset of "Offset" field in OpLayout, in bytes
  2401. {
  2402. Assert(useBranchIsland);
  2403. AssertMsg(fieldByteOffsetFromEnd < 100, "Ensure valid field offset");
  2404. //
  2405. // Branch targets are created in two passes:
  2406. // - In the instruction stream, write "labelID" into "OpLayoutBrC.Offset". Record this
  2407. // location in "m_jumpOffsets" to be patched later.
  2408. // - When the byte-code is closed, update all "OpLayoutBrC.Offset"'s with their actual
  2409. // destinations.
  2410. //
  2411. uint jumpByteOffset = m_byteCodeData.GetCurrentOffset() - fieldByteOffsetFromEnd;
  2412. JumpInfo jumpInfo = { labelId, jumpByteOffset };
  2413. m_longJumpOffsets->Add(jumpInfo);
  2414. }
  2415. void ByteCodeWriter::BrLong(OpCode op, ByteCodeLabel labelID)
  2416. {
  2417. Assert(useBranchIsland);
  2418. CheckOpen();
  2419. CheckOp(op, OpLayoutType::BrLong);
  2420. CheckLabel(labelID);
  2421. Assert(!OpCodeAttr::HasMultiSizeLayout(op));
  2422. size_t const offsetOfRelativeJumpOffsetFromEnd = sizeof(OpLayoutBrLong) - offsetof(OpLayoutBrLong, RelativeJumpOffset);
  2423. OpLayoutBrLong data;
  2424. data.RelativeJumpOffset = offsetOfRelativeJumpOffsetFromEnd;
  2425. m_byteCodeData.Encode(op, &data, sizeof(data), this);
  2426. AddLongJumpOffset(labelID, offsetOfRelativeJumpOffsetFromEnd);
  2427. }
// Recomputes the byte-code offset at which the next branch island must be emitted.
//   firstUnknownJumpInfo   - index in m_jumpOffsets of the first jump whose target
//                            label is still unmarked.
//   firstUnknownJumpOffset - byte-code offset of that jump's patch site.
void ByteCodeWriter::UpdateNextBranchIslandOffset(uint firstUnknownJumpInfo, uint firstUnknownJumpOffset)
{
    this->firstUnknownJumpInfo = firstUnknownJumpInfo;
    // We will need to emit the next branch from the first branch + branch limit.
    // But leave room for the jump around and one extra byte code instruction.
    // Also account for all the long branches we may have to emit as well.
    this->nextBranchIslandOffset = firstUnknownJumpOffset + GetBranchLimit()
        - JumpAroundSize - MaxLayoutSize - MaxOpCodeSize - LongBranchSize * (m_jumpOffsets->Count() - firstUnknownJumpInfo);
}
// Emits a branch island (a run of BrLong instructions) once the write position
// nears the short-branch limit, so every recorded jump with a still-unmarked
// target can reach a long branch within short range. 'op' is the opcode about to
// be written; it decides whether we can piggyback on a no-fall-through
// instruction instead of emitting a jump around the island.
void ByteCodeWriter::EnsureLongBranch(Js::OpCode op)
{
    Assert(useBranchIsland);
    int currentOffset = this->m_byteCodeData.GetCurrentOffset();
    // See if we need to emit branch island yet, and avoid recursion.
    if (currentOffset < this->nextBranchIslandOffset || this->inEnsureLongBranch)
    {
        lastOpcode = op;
        return;
    }
    // Leave actually may continue right after, it is only no fall through in the JIT.
    bool needBranchAround = OpCodeAttr::HasFallThrough(lastOpcode) || lastOpcode == Js::OpCode::Leave;
    lastOpcode = op;
    // If we are about to emit a no-fall-through op and the last one has fall through,
    // then just emit the no-fall-through op first, and then we can skip the branch around.
    // Except at Label or StatementBoundary: we always want to emit the island before them.
    if ((needBranchAround && !OpCodeAttr::HasFallThrough(op))
        && op != Js::OpCode::StatementBoundary && op != Js::OpCode::Label)
    {
        return;
    }
    ByteCodeLabel branchAroundLabel = (Js::ByteCodeLabel)-1;
    bool foundUnknown = m_jumpOffsets->MapUntilFrom(firstUnknownJumpInfo,
        [=, &branchAroundLabel, &currentOffset](int index, JumpInfo& jumpInfo)
    {
        //
        // Read "labelID" stored at the offset within the byte-code.
        //
        uint jumpByteOffset = jumpInfo.patchOffset;
        AssertMsg(jumpByteOffset <= this->m_byteCodeData.GetCurrentOffset() - sizeof(JumpOffset),
            "Must have valid jump site within byte-code to back-patch");
        ByteCodeLabel labelID = jumpInfo.labelId;
        CheckLabel(labelID);
        // See if the label has been marked yet.
        uint const labelByteOffset = m_labelOffsets->Item(labelID);
        if (labelByteOffset != UINT_MAX)
        {
            // If a label is already defined, then it should be short
            // (otherwise we should have emitted a branch island for it already).
            Assert((int)labelByteOffset - (int)jumpByteOffset < GetBranchLimit()
                && (int)labelByteOffset - (int)jumpByteOffset >= -GetBranchLimit());
            return false;
        }
        this->UpdateNextBranchIslandOffset(index, jumpByteOffset);
        // Flush all the jumps that are half of the way to the limit as well so we don't have
        // as many jump-arounds of branch islands.
        int flushNextBranchIslandOffset = this->nextBranchIslandOffset - GetBranchLimit() / 2;
        if (currentOffset < flushNextBranchIslandOffset)
        {
            // No need for a long branch yet. Terminate the loop.
            return true;
        }
        if (labelID == branchAroundLabel)
        {
            // Let's not flush the branchAroundLabel.
            // Should happen very rarely and mostly when the branch limit is very small.
            // This should be the last short jump we have just emitted (below).
            Assert(index == m_jumpOffsets->Count() - 1);
            Assert(currentOffset < this->nextBranchIslandOffset);
            return true;
        }
        // Emit long branch
        // Prevent recursion when we emit byte code here
        this->inEnsureLongBranch = true;
        // Create the branch label and update the jumpInfo.
        // Need to update the jumpInfo before we add the branch island as that might resize the m_jumpOffsets list.
        ByteCodeLabel longBranchLabel = this->DefineLabel();
        jumpInfo.labelId = longBranchLabel;
        // Emit the branch around if it hasn't been emitted already
        if (branchAroundLabel == (Js::ByteCodeLabel)-1 && needBranchAround)
        {
            branchAroundLabel = this->DefineLabel();
            this->Br(Js::OpCode::Br, branchAroundLabel);
            Assert(this->m_byteCodeData.GetCurrentOffset() - currentOffset == JumpAroundSize);
            currentOffset += JumpAroundSize;
            // Continue to count the jumpAroundSize, because we may have to emit
            // yet another branch island right after if the jumpAroundSize is included.
        }
        // Emit the long branch
        this->MarkLabel(longBranchLabel);
        this->BrLong(Js::OpCode::BrLong, labelID);
        this->inEnsureLongBranch = false;
        Assert(this->m_byteCodeData.GetCurrentOffset() - currentOffset == LongBranchSize);
        currentOffset += LongBranchSize;
        return false;
    });
    if (!foundUnknown)
    {
        // Nothing is found, just set the next branch island from the current offset
        this->UpdateNextBranchIslandOffset(this->m_jumpOffsets->Count(), currentOffset);
    }
    if (branchAroundLabel != (Js::ByteCodeLabel)-1)
    {
        // Mark the branch-around label if we needed one
        this->MarkLabel(branchAroundLabel);
    }
}
  2534. #endif
  2535. void ByteCodeWriter::StartStatement(ParseNode* node, uint32 tmpRegCount)
  2536. {
  2537. if (m_pMatchingNode)
  2538. {
  2539. if (m_pMatchingNode == node)
  2540. {
  2541. m_matchingNodeRefCount++;
  2542. }
  2543. return;
  2544. }
  2545. #ifdef BYTECODE_BRANCH_ISLAND
  2546. if (useBranchIsland)
  2547. {
  2548. // If we are going to emit a branch island, it should be before the statement start
  2549. this->EnsureLongBranch(Js::OpCode::StatementBoundary);
  2550. }
  2551. #endif
  2552. m_pMatchingNode = node;
  2553. m_beginCodeSpan = m_byteCodeData.GetCurrentOffset();
  2554. if (m_isInDebugMode && m_tmpRegCount != tmpRegCount)
  2555. {
  2556. Unsigned1(OpCode::EmitTmpRegCount, tmpRegCount);
  2557. m_tmpRegCount = tmpRegCount;
  2558. }
  2559. }
  2560. void ByteCodeWriter::EndStatement(ParseNode* node)
  2561. {
  2562. AssertMsg(m_pMatchingNode, "EndStatement unmatched to StartStatement");
  2563. if (m_pMatchingNode != node)
  2564. {
  2565. return;
  2566. }
  2567. else if (m_matchingNodeRefCount > 0)
  2568. {
  2569. m_matchingNodeRefCount--;
  2570. return;
  2571. }
  2572. if (m_byteCodeData.GetCurrentOffset() != m_beginCodeSpan)
  2573. {
  2574. if (m_isInDebugMode)
  2575. {
  2576. FunctionBody::StatementMap* pCurrentStatement = FunctionBody::StatementMap::New(this->m_functionWrite->GetScriptContext()->GetRecycler());
  2577. if (pCurrentStatement)
  2578. {
  2579. pCurrentStatement->sourceSpan.begin = node->ichMin;
  2580. pCurrentStatement->sourceSpan.end = node->ichLim;
  2581. pCurrentStatement->byteCodeSpan.begin = m_beginCodeSpan;
  2582. pCurrentStatement->byteCodeSpan.end = m_byteCodeData.GetCurrentOffset() - 1;
  2583. m_functionWrite->RecordStatementMap(pCurrentStatement);
  2584. }
  2585. }
  2586. else
  2587. {
  2588. StatementData currentStatement;
  2589. currentStatement.sourceBegin = node->ichMin;
  2590. currentStatement.bytecodeBegin = m_beginCodeSpan;
  2591. m_functionWrite->RecordStatementMap(spanIter, &currentStatement);
  2592. }
  2593. }
  2594. m_pMatchingNode = nullptr;
  2595. }
  2596. void ByteCodeWriter::StartSubexpression(ParseNode* node)
  2597. {
  2598. if (!m_isInDebugMode || !m_pMatchingNode) // Subexpression not in debug mode or not enclosed in regular statement
  2599. {
  2600. return;
  2601. }
  2602. #ifdef BYTECODE_BRANCH_ISLAND
  2603. // If we are going to emit a branch island, it should be before the statement start
  2604. this->EnsureLongBranch(Js::OpCode::StatementBoundary);
  2605. #endif
  2606. m_subexpressionNodesStack->Push(SubexpressionNode(node, m_byteCodeData.GetCurrentOffset()));
  2607. }
// Closes the subexpression opened by StartSubexpression for 'node' and records a
// StatementMap entry flagged as a subexpression (debug mode only).
void ByteCodeWriter::EndSubexpression(ParseNode* node)
{
    if (!m_isInDebugMode || m_subexpressionNodesStack->Empty() || m_subexpressionNodesStack->Peek().node != node)
    {
        return;
    }
    // NOTE(review): the guard compares against m_beginCodeSpan (the enclosing
    // statement's start, not the subexpression's), and the stack entry is only
    // popped inside this branch — if no byte code was emitted the node stays on
    // the stack. Confirm both are intentional.
    if (m_byteCodeData.GetCurrentOffset() != m_beginCodeSpan)
    {
        FunctionBody::StatementMap* pCurrentStatement = FunctionBody::StatementMap::New(this->m_functionWrite->GetScriptContext()->GetRecycler());
        if (pCurrentStatement)
        {
            pCurrentStatement->sourceSpan.begin = node->ichMin;
            pCurrentStatement->sourceSpan.end = node->ichLim;
            SubexpressionNode subexpressionNode = m_subexpressionNodesStack->Pop();
            pCurrentStatement->byteCodeSpan.begin = subexpressionNode.beginCodeSpan;
            pCurrentStatement->byteCodeSpan.end = m_byteCodeData.GetCurrentOffset() - 1;
            pCurrentStatement->isSubexpression = true;
            m_functionWrite->RecordStatementMap(pCurrentStatement);
        }
    }
}
// Pushes a new debugger scope onto the stack. This information is used when determining
// what the current scope is for tracking of let/const initialization offsets (for detecting
// dead zones).
//   debuggerScope - the scope to make current; must not be null. Its parent is
//                   set to the previously current scope, forming a chain.
void ByteCodeWriter::PushDebuggerScope(Js::DebuggerScope* debuggerScope)
{
    Assert(debuggerScope);
    debuggerScope->SetParentScope(m_currentDebuggerScope);
    m_currentDebuggerScope = debuggerScope;
    OUTPUT_VERBOSE_TRACE(Js::DebuggerPhase, _u("PushDebuggerScope() - Pushed scope 0x%p of type %d.\n"), m_currentDebuggerScope, m_currentDebuggerScope->scopeType);
}
  2639. // Pops the current debugger scope from the stack.
  2640. void ByteCodeWriter::PopDebuggerScope()
  2641. {
  2642. Assert(m_currentDebuggerScope);
  2643. OUTPUT_VERBOSE_TRACE(Js::DebuggerPhase, _u("PopDebuggerScope() - Popped scope 0x%p of type %d.\n"), m_currentDebuggerScope, m_currentDebuggerScope->scopeType);
  2644. if (m_currentDebuggerScope != nullptr)
  2645. {
  2646. m_currentDebuggerScope = m_currentDebuggerScope->GetParentScope();
  2647. }
  2648. }
  2649. DebuggerScope* ByteCodeWriter::RecordStartScopeObject(DiagExtraScopesType scopeType, RegSlot scopeLocation, int* index)
  2650. {
  2651. if (scopeLocation != Js::Constants::NoRegister)
  2652. {
  2653. scopeLocation = ConsumeReg(scopeLocation);
  2654. }
  2655. DebuggerScope* debuggerScope = m_functionWrite->RecordStartScopeObject(scopeType, m_byteCodeData.GetCurrentOffset(), scopeLocation, index);
  2656. PushDebuggerScope(debuggerScope);
  2657. return debuggerScope;
  2658. }
// Adds a property to 'debuggerScope' for debugger tracking.
//   location - register slot, or (for activation objects / slot arrays) an index;
//              consumed only when shouldConsumeRegister is true.
//   isFunctionDeclaration - function declarations have no dead zone, so in debug
//              mode their initialization offset is set to the scope's start.
void ByteCodeWriter::AddPropertyToDebuggerScope(
    DebuggerScope* debuggerScope,
    RegSlot location,
    Js::PropertyId propertyId,
    bool shouldConsumeRegister /*= true*/,
    DebuggerScopePropertyFlags flags /*= DebuggerScopePropertyFlags_None*/,
    bool isFunctionDeclaration /*= false*/)
{
    Assert(debuggerScope);
    // Activation object doesn't use register and slot array location represents the
    // index in the array. Only need to consume for register slots.
    if (shouldConsumeRegister)
    {
        Assert(location != Js::Constants::NoRegister);
        location = ConsumeReg(location);
    }
    debuggerScope->AddProperty(location, propertyId, flags);
    // Only need to update properties in debug mode (even for slot array, which is tracked in non-debug mode,
    // since the offset is only used for debugging).
    if (this->m_isInDebugMode && isFunctionDeclaration)
    {
        AssertMsg(this->m_currentDebuggerScope, "Function declarations can only be added in a block scope.");
        AssertMsg(debuggerScope == this->m_currentDebuggerScope
            || debuggerScope == this->m_currentDebuggerScope->siblingScope,
            "Function declarations should always be added to the current scope.");
        // If this is a function declaration, it doesn't have a dead zone region so
        // we just update its byte code initialization offset to the start of the block.
        this->UpdateDebuggerPropertyInitializationOffset(
            debuggerScope,
            location,
            propertyId,
            false /*shouldConsumeRegister*/, // Register would have already been consumed above, if needed.
            debuggerScope->GetStart(),
            isFunctionDeclaration);
    }
}
// Closes the current debugger scope at the last byte already written
// (current offset - 1) and pops it from the scope stack.
void ByteCodeWriter::RecordEndScopeObject()
{
    Assert(this->m_currentDebuggerScope);
    m_functionWrite->RecordEndScopeObject(this->m_currentDebuggerScope, m_byteCodeData.GetCurrentOffset() - 1);
    PopDebuggerScope();
}
// Records the byte-code offset at which a let/const/function property becomes
// initialized (used for dead-zone detection). Walks the scope chain from
// 'currentDebuggerScope' upward until some scope knows the property.
//   byteCodeOffset - InvalidOffset means "use the current write position".
void ByteCodeWriter::UpdateDebuggerPropertyInitializationOffset(
    Js::DebuggerScope* currentDebuggerScope,
    Js::RegSlot location,
    Js::PropertyId propertyId,
    bool shouldConsumeRegister/* = true*/,
    int byteCodeOffset/* = Constants::InvalidOffset*/,
    bool isFunctionDeclaration /*= false*/)
{
#if DBG
    bool isInDebugMode = m_isInDebugMode
#if DBG_DUMP
        || Js::Configuration::Global.flags.Debug
#endif // DBG_DUMP
        ;
    AssertMsg(isInDebugMode, "Property offsets should only ever be updated in debug mode (not used in non-debug).");
#endif // DBG
    Assert(currentDebuggerScope);
    if (shouldConsumeRegister)
    {
        Assert(location != Js::Constants::NoRegister);
        location = ConsumeReg(location);
    }
    if (byteCodeOffset == Constants::InvalidOffset)
    {
        // Use the current offset if no offset is passed in.
        byteCodeOffset = this->m_byteCodeData.GetCurrentOffset();
    }
    // Search through the scope chain starting with the current up through the parents to see if the
    // property can be found and updated.
    while (currentDebuggerScope != nullptr)
    {
        if (currentDebuggerScope->UpdatePropertyInitializationOffset(location, propertyId, byteCodeOffset, isFunctionDeclaration))
        {
            break;
        }
        currentDebuggerScope = currentDebuggerScope->GetParentScope();
    }
}
  2739. void ByteCodeWriter::RecordFrameDisplayRegister(RegSlot slot)
  2740. {
  2741. slot = ConsumeReg(slot);
  2742. m_functionWrite->RecordFrameDisplayRegister(slot);
  2743. }
  2744. void ByteCodeWriter::RecordObjectRegister(RegSlot slot)
  2745. {
  2746. slot = ConsumeReg(slot);
  2747. m_functionWrite->RecordObjectRegister(slot);
  2748. }
  2749. void ByteCodeWriter::RecordStatementAdjustment(FunctionBody::StatementAdjustmentType type)
  2750. {
  2751. if (m_isInDebugMode)
  2752. {
  2753. m_functionWrite->RecordStatementAdjustment(m_byteCodeData.GetCurrentOffset(), type);
  2754. }
  2755. }
  2756. void ByteCodeWriter::RecordCrossFrameEntryExitRecord(bool isEnterBlock)
  2757. {
  2758. if (m_isInDebugMode)
  2759. {
  2760. m_functionWrite->RecordCrossFrameEntryExitRecord(m_byteCodeData.GetCurrentOffset(), isEnterBlock);
  2761. }
  2762. }
  2763. void ByteCodeWriter::RecordForInOrOfCollectionScope()
  2764. {
  2765. if (m_isInDebugMode && this->m_currentDebuggerScope != nullptr)
  2766. {
  2767. this->m_currentDebuggerScope->UpdatePropertiesInForInOrOfCollectionScope();
  2768. }
  2769. }
  2770. uint ByteCodeWriter::EnterLoop(Js::ByteCodeLabel loopEntrance)
  2771. {
  2772. #ifdef BYTECODE_BRANCH_ISLAND
  2773. if (useBranchIsland)
  2774. {
  2775. // If we are going to emit a branch island, it should be before the loop header
  2776. this->EnsureLongBranch(Js::OpCode::StatementBoundary);
  2777. }
  2778. #endif
  2779. uint loopId = m_functionWrite->IncrLoopCount();
  2780. Assert((uint)m_loopHeaders->Count() == loopId);
  2781. m_loopHeaders->Add(LoopHeaderData(m_byteCodeData.GetCurrentOffset(), 0, m_loopNest > 0, false));
  2782. m_loopNest++;
  2783. m_functionWrite->SetHasNestedLoop(m_loopNest > 1);
  2784. Js::OpCode loopBodyOpcode = Js::OpCode::LoopBodyStart;
  2785. #if ENABLE_PROFILE_INFO
  2786. if (Js::DynamicProfileInfo::EnableImplicitCallFlags(GetFunctionWrite()))
  2787. {
  2788. this->Unsigned1(Js::OpCode::ProfiledLoopStart, loopId);
  2789. loopBodyOpcode = Js::OpCode::ProfiledLoopBodyStart;
  2790. }
  2791. #endif
  2792. this->MarkLabel(loopEntrance);
  2793. if (this->DoJitLoopBodies() || this->DoInterruptProbes())
  2794. {
  2795. this->Unsigned1(loopBodyOpcode, loopId);
  2796. }
  2797. return loopId;
  2798. }
  2799. void ByteCodeWriter::ExitLoop(uint loopId)
  2800. {
  2801. #if ENABLE_PROFILE_INFO
  2802. if (Js::DynamicProfileInfo::EnableImplicitCallFlags(GetFunctionWrite()))
  2803. {
  2804. this->Unsigned1(Js::OpCode::ProfiledLoopEnd, loopId);
  2805. }
  2806. #endif
  2807. Assert(m_loopNest > 0);
  2808. m_loopNest--;
  2809. m_loopHeaders->Item(loopId).endOffset = m_byteCodeData.GetCurrentOffset();
  2810. }
  2811. void ByteCodeWriter::SetCurrentLoopHasYield()
  2812. {
  2813. if (m_loopNest > 0)
  2814. {
  2815. for (int i = 0; i < m_loopHeaders->Count(); ++i)
  2816. {
  2817. if (m_loopHeaders->Item(i).endOffset == 0) // check for loops we're currently inside
  2818. {
  2819. m_loopHeaders->Item(i).hasYield = true;
  2820. }
  2821. }
  2822. }
  2823. }
  2824. void ByteCodeWriter::IncreaseByteCodeCount()
  2825. {
  2826. m_byteCodeCount++;
  2827. if (m_loopNest > 0)
  2828. {
  2829. m_byteCodeInLoopCount++;
  2830. }
  2831. }
  2832. void ByteCodeWriter::Data::Create(uint initSize, ArenaAllocator* tmpAlloc)
  2833. {
  2834. //
  2835. // Allocate the initial byte-code block to write into.
  2836. //
  2837. tempAllocator = tmpAlloc;
  2838. AssertMsg(head == nullptr, "Missing dispose?");
  2839. currentOffset = 0;
  2840. head = Anew(tempAllocator, DataChunk, tempAllocator, initSize);
  2841. current = head;
  2842. }
  2843. void ByteCodeWriter::Data::Reset()
  2844. {
  2845. currentOffset = 0;
  2846. DataChunk* currentChunk = head;
  2847. while (currentChunk)
  2848. {
  2849. // reset to the starting point
  2850. currentChunk->Reset();
  2851. currentChunk = currentChunk->nextChunk;
  2852. }
  2853. current = head;
  2854. }
  2855. void ByteCodeWriter::Data::SetCurrent(uint offset, DataChunk* currChunk)
  2856. {
  2857. this->current = currChunk;
  2858. this->currentOffset = offset;
  2859. }
  2860. /// Copies its contents to a final contiguous section of memory.
  2861. void ByteCodeWriter::Data::Copy(Recycler* alloc, ByteBlock ** finalBlock)
  2862. {
  2863. AssertMsg(finalBlock != nullptr, "Must have valid storage");
  2864. uint cbFinalData = GetCurrentOffset();
  2865. if (cbFinalData == 0)
  2866. {
  2867. *finalBlock = nullptr;
  2868. }
  2869. else
  2870. {
  2871. ByteBlock* finalByteCodeBlock = ByteBlock::New(alloc, /*initialContent*/nullptr, cbFinalData);
  2872. DataChunk* currentChunk = head;
  2873. size_t bytesLeftToCopy = cbFinalData;
  2874. byte* currentDest = finalByteCodeBlock->GetBuffer();
  2875. while (true)
  2876. {
  2877. if (bytesLeftToCopy <= currentChunk->GetSize())
  2878. {
  2879. js_memcpy_s(currentDest, bytesLeftToCopy, currentChunk->GetBuffer(), bytesLeftToCopy);
  2880. break;
  2881. }
  2882. js_memcpy_s(currentDest, bytesLeftToCopy, currentChunk->GetBuffer(), currentChunk->GetSize());
  2883. bytesLeftToCopy -= currentChunk->GetSize();
  2884. currentDest += currentChunk->GetSize();
  2885. currentChunk = currentChunk->nextChunk;
  2886. AssertMsg(currentChunk, "We are copying more data than we have!");
  2887. }
  2888. *finalBlock = finalByteCodeBlock;
  2889. }
  2890. }
  2891. template <>
  2892. void ByteCodeWriter::Data::EncodeOpCode<SmallLayout>(
  2893. uint16 op,
  2894. ByteCodeWriter* writer)
  2895. {
  2896. DebugOnly(const uint offset = currentOffset);
  2897. if (op <= (uint16)Js::OpCode::MaxByteSizedOpcodes)
  2898. {
  2899. byte byteop = (byte)op;
  2900. Write(&byteop, sizeof(byte));
  2901. }
  2902. else
  2903. {
  2904. byte byteop = (byte)Js::OpCode::ExtendedOpcodePrefix;
  2905. Write(&byteop, sizeof(byte));
  2906. Write(&op, sizeof(uint16));
  2907. }
  2908. Assert(OpCodeUtil::EncodedSize((Js::OpCode)op, SmallLayout)
  2909. == (currentOffset - offset));
  2910. }
  2911. template <LayoutSize layoutSize>
  2912. void ByteCodeWriter::Data::EncodeOpCode(uint16 op, ByteCodeWriter* writer)
  2913. {
  2914. CompileAssert(layoutSize != SmallLayout);
  2915. DebugOnly(const uint offset = currentOffset);
  2916. if (op <= (uint16)Js::OpCode::MaxByteSizedOpcodes)
  2917. {
  2918. const byte exop = (byte)(layoutSize == LargeLayout ? Js::OpCode::LargeLayoutPrefix : Js::OpCode::MediumLayoutPrefix);
  2919. Write(&exop, sizeof(byte));
  2920. byte byteop = (byte)op;
  2921. Write(&byteop, sizeof(byte));
  2922. }
  2923. else
  2924. {
  2925. const byte exop = (byte)(layoutSize == LargeLayout ? Js::OpCode::ExtendedLargeLayoutPrefix : Js::OpCode::ExtendedMediumLayoutPrefix);
  2926. Write(&exop, sizeof(byte));
  2927. Write(&op, sizeof(uint16));
  2928. }
  2929. Assert(OpCodeUtil::EncodedSize((Js::OpCode)op, layoutSize) == (currentOffset - offset));
  2930. }
  2931. template <LayoutSize layoutSize>
  2932. uint ByteCodeWriter::Data::EncodeT(OpCode op, ByteCodeWriter* writer)
  2933. {
  2934. #ifdef BYTECODE_BRANCH_ISLAND
  2935. if (writer->useBranchIsland)
  2936. {
  2937. writer->EnsureLongBranch(op);
  2938. }
  2939. #endif
  2940. Assert(op < Js::OpCode::ByteCodeLast);
  2941. Assert(!OpCodeAttr::BackEndOnly(op));
  2942. Assert(layoutSize == SmallLayout || OpCodeAttr::HasMultiSizeLayout(op));
  2943. // Capture offset before encoding the opcode
  2944. uint offset = GetCurrentOffset();
  2945. EncodeOpCode<layoutSize>((uint16)op, writer);
  2946. if (op != Js::OpCode::Ld_A)
  2947. {
  2948. writer->m_byteCodeWithoutLDACount++;
  2949. }
  2950. writer->IncreaseByteCodeCount();
  2951. return offset;
  2952. }
  2953. template <LayoutSize layoutSize>
  2954. uint ByteCodeWriter::Data::EncodeT(OpCode op, const void* rawData, int byteSize, ByteCodeWriter* writer)
  2955. {
  2956. AssertMsg((rawData != nullptr) && (byteSize < 100), "Ensure valid data for opcode");
  2957. uint offset = EncodeT<layoutSize>(op, writer);
  2958. Write(rawData, byteSize);
  2959. return offset;
  2960. }
  2961. void ByteCodeWriter::Data::Encode(const void* rawData, int byteSize)
  2962. {
  2963. AssertMsg(rawData != nullptr, "Ensure valid data for opcode");
  2964. Write(rawData, byteSize);
  2965. }
  2966. void ByteCodeWriter::Data::Write(__in_bcount(byteSize) const void* data, __in uint byteSize)
  2967. {
  2968. // Simple case where the current chunk has enough space.
  2969. uint bytesFree = current->RemainingBytes();
  2970. if (bytesFree >= byteSize)
  2971. {
  2972. current->WriteUnsafe(data, byteSize);
  2973. }
  2974. else
  2975. {
  2976. SlowWrite(data, byteSize);
  2977. }
  2978. currentOffset += byteSize;
  2979. }
  2980. /// Requires buffer extension.
  2981. _NOINLINE void ByteCodeWriter::Data::SlowWrite(__in_bcount(byteSize) const void* data, __in uint byteSize)
  2982. {
  2983. AssertMsg(byteSize > current->RemainingBytes(), "We should not need an extension if there is enough space in the current chunk");
  2984. uint bytesLeftToWrite = byteSize;
  2985. byte* dataToBeWritten = (byte*)data;
  2986. // the next chunk may already be created in the case that we are patching bytecode.
  2987. // If so, we want to move the pointer to the beginning of the buffer
  2988. if (current->nextChunk)
  2989. {
  2990. current->nextChunk->SetCurrentOffset(0);
  2991. }
  2992. while (true)
  2993. {
  2994. uint bytesFree = current->RemainingBytes();
  2995. if (bytesFree >= bytesLeftToWrite)
  2996. {
  2997. current->WriteUnsafe(dataToBeWritten, bytesLeftToWrite);
  2998. break;
  2999. }
  3000. current->WriteUnsafe(dataToBeWritten, bytesFree);
  3001. bytesLeftToWrite -= bytesFree;
  3002. dataToBeWritten += bytesFree;
  3003. // Create a new chunk when needed
  3004. if (!current->nextChunk)
  3005. {
  3006. AddChunk(bytesLeftToWrite);
  3007. }
  3008. current = current->nextChunk;
  3009. }
  3010. }
  3011. void ByteCodeWriter::Data::AddChunk(uint byteSize)
  3012. {
  3013. AssertMsg(current->nextChunk == nullptr, "Do we really need to grow?");
  3014. // For some data elements i.e. bytecode we have a good initial size and
  3015. // therefore, we use a conservative growth strategy - and grow by a fixed size.
  3016. uint newSize = fixedGrowthPolicy ? max(byteSize, static_cast<uint>(3 * AutoSystemInfo::PageSize)) : max(byteSize, static_cast<uint>(current->GetSize() * 2));
  3017. DataChunk* newChunk = Anew(tempAllocator, DataChunk, tempAllocator, newSize);
  3018. current->nextChunk = newChunk;
  3019. }
  3020. #if DBG_DUMP
  3021. uint ByteCodeWriter::ByteCodeDataSize()
  3022. {
  3023. return m_byteCodeData.GetCurrentOffset();
  3024. }
  3025. uint ByteCodeWriter::AuxiliaryDataSize()
  3026. {
  3027. return m_auxiliaryData.GetCurrentOffset();
  3028. }
  3029. uint ByteCodeWriter::AuxiliaryContextDataSize()
  3030. {
  3031. return m_auxContextData.GetCurrentOffset();
  3032. }
  3033. #endif
  3034. } // namespace Js