// FlowGraph.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. FlowGraph *
  7. FlowGraph::New(Func * func, JitArenaAllocator * alloc)
  8. {
  9. FlowGraph * graph;
  10. graph = JitAnew(alloc, FlowGraph, func, alloc);
  11. return graph;
  12. }
  13. ///----------------------------------------------------------------------------
  14. ///
  15. /// FlowGraph::Build
  16. ///
  17. /// Construct flow graph and loop structures for the current state of the function.
  18. ///
  19. ///----------------------------------------------------------------------------
  20. void
  21. FlowGraph::Build(void)
  22. {
  23. Func * func = this->func;
  24. BEGIN_CODEGEN_PHASE(func, Js::FGPeepsPhase);
  25. this->RunPeeps();
  26. END_CODEGEN_PHASE(func, Js::FGPeepsPhase);
  27. // We don't optimize fully with SimpleJit. But, when JIT loop body is enabled, we do support
  28. // bailing out from a simple jitted function to do a full jit of a loop body in the function
  29. // (BailOnSimpleJitToFullJitLoopBody). For that purpose, we need the flow from try to catch.
  30. if (this->func->HasTry() &&
  31. (this->func->DoOptimizeTryCatch() ||
  32. this->func->IsSimpleJit() && this->func->GetJnFunction()->DoJITLoopBody()
  33. )
  34. )
  35. {
  36. this->catchLabelStack = JitAnew(this->alloc, SList<IR::LabelInstr*>, this->alloc);
  37. }
  38. IR::Instr * currLastInstr = nullptr;
  39. BasicBlock * block = nullptr;
  40. BasicBlock * nextBlock = nullptr;
  41. bool hasCall = false;
  42. FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)
  43. {
  44. if (currLastInstr == nullptr || instr->EndsBasicBlock())
  45. {
  46. // Start working on a new block.
  47. // If we're currently processing a block, then wrap it up before beginning a new one.
  48. if (currLastInstr != nullptr)
  49. {
  50. nextBlock = block;
  51. block = this->AddBlock(instr->m_next, currLastInstr, nextBlock);
  52. block->hasCall = hasCall;
  53. hasCall = false;
  54. }
  55. currLastInstr = instr;
  56. }
  57. if (instr->StartsBasicBlock())
  58. {
  59. // Insert a BrOnException after the loop top if we are in a try-catch. This is required to
  60. // model flow from the loop to the catch block for loops that don't have a break condition.
  61. if (instr->IsLabelInstr() && instr->AsLabelInstr()->m_isLoopTop &&
  62. this->catchLabelStack && !this->catchLabelStack->Empty() &&
  63. instr->m_next->m_opcode != Js::OpCode::BrOnException)
  64. {
  65. IR::BranchInstr * brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->catchLabelStack->Top(), instr->m_func);
  66. instr->InsertAfter(brOnException);
  67. instrPrev = brOnException; // process BrOnException before adding a new block for loop top label.
  68. continue;
  69. }
  70. // Wrap up the current block and get ready to process a new one.
  71. nextBlock = block;
  72. block = this->AddBlock(instr, currLastInstr, nextBlock);
  73. block->hasCall = hasCall;
  74. hasCall = false;
  75. currLastInstr = nullptr;
  76. }
  77. switch (instr->m_opcode)
  78. {
  79. case Js::OpCode::Catch:
  80. Assert(instr->m_prev->IsLabelInstr());
  81. if (this->catchLabelStack)
  82. {
  83. this->catchLabelStack->Push(instr->m_prev->AsLabelInstr());
  84. }
  85. break;
  86. case Js::OpCode::TryCatch:
  87. if (this->catchLabelStack)
  88. {
  89. this->catchLabelStack->Pop();
  90. }
  91. break;
  92. case Js::OpCode::CloneBlockScope:
  93. case Js::OpCode::CloneInnerScopeSlots:
  94. // It would be nice to do this in IRBuilder, but doing so gives us
  95. // trouble when doing the DoSlotArrayCheck since it assume single def
  96. // of the sym to do its check properly. So instead we assign the dst
  97. // here in FlowGraph.
  98. instr->SetDst(instr->GetSrc1());
  99. break;
  100. }
  101. if (OpCodeAttr::UseAllFields(instr->m_opcode))
  102. {
  103. // UseAllFields opcode are call instruction or opcode that would call.
  104. hasCall = true;
  105. if (OpCodeAttr::CallInstr(instr->m_opcode))
  106. {
  107. if (!instr->isCallInstrProtectedByNoProfileBailout)
  108. {
  109. instr->m_func->SetHasCallsOnSelfAndParents();
  110. }
  111. // For ARM & X64 because of their register calling convention
  112. // the ArgOuts need to be moved next to the call.
  113. #if defined(_M_ARM) || defined(_M_X64)
  114. IR::Instr* argInsertInstr = instr;
  115. instr->IterateArgInstrs([&](IR::Instr* argInstr)
  116. {
  117. if (argInstr->m_opcode != Js::OpCode::LdSpreadIndices &&
  118. argInstr->m_opcode != Js::OpCode::ArgOut_A_Dynamic &&
  119. argInstr->m_opcode != Js::OpCode::ArgOut_A_FromStackArgs &&
  120. argInstr->m_opcode != Js::OpCode::ArgOut_A_SpreadArg)
  121. {
  122. // don't have bailout in asm.js so we don't need BytecodeArgOutCapture
  123. if (!argInstr->m_func->GetJnFunction()->GetIsAsmjsMode())
  124. {
  125. // Need to always generate byte code arg out capture,
  126. // because bailout can't restore from the arg out as it is
  127. // replaced by new sym for register calling convention in lower
  128. argInstr->GenerateBytecodeArgOutCapture();
  129. }
  130. // Check if the instruction is already next
  131. if (argInstr != argInsertInstr->m_prev)
  132. {
  133. // It is not, move it.
  134. argInstr->Move(argInsertInstr);
  135. }
  136. argInsertInstr = argInstr;
  137. }
  138. return false;
  139. });
  140. #endif
  141. }
  142. }
  143. }
  144. NEXT_INSTR_IN_FUNC_BACKWARD_EDITING;
  145. this->func->isFlowGraphValid = true;
  146. Assert(!this->catchLabelStack || this->catchLabelStack->Empty());
  147. // We've been walking backward so that edge lists would be in the right order. Now walk the blocks
  148. // forward to number the blocks in lexical order.
  149. unsigned int blockNum = 0;
  150. FOREACH_BLOCK(block, this)
  151. {
  152. block->SetBlockNum(blockNum++);
  153. }NEXT_BLOCK;
  154. AssertMsg(blockNum == this->blockCount, "Block count is out of whack");
  155. this->RemoveUnreachableBlocks();
  156. this->FindLoops();
  157. bool breakBlocksRelocated = this->CanonicalizeLoops();
  158. #if DBG
  159. this->VerifyLoopGraph();
  160. #endif
  161. // Renumber the blocks. Break block remove code has likely inserted new basic blocks.
  162. blockNum = 0;
  163. // Regions need to be assigned before Globopt because:
  164. // 1. FullJit: The Backward Pass will set the write-through symbols on the regions and the forward pass will
  165. // use this information to insert ToVars for those symbols. Also, for a symbol determined as write-through
  166. // in the try region to be restored correctly by the bailout, it should not be removed from the
  167. // byteCodeUpwardExposedUsed upon a def in the try region (the def might be preempted by an exception).
  168. //
  169. // 2. SimpleJit: Same case of correct restoration as above applies in SimpleJit too. However, the only bailout
  170. // we have in Simple Jitted code right now is BailOnSimpleJitToFullJitLoopBody, installed in IRBuilder. So,
  171. // for now, we can just check if the func has a bailout to assign regions pre globopt while running SimpleJit.
  172. bool assignRegionsBeforeGlobopt = this->func->HasTry() &&
  173. (this->func->DoOptimizeTryCatch() || (this->func->IsSimpleJit() && this->func->hasBailout));
  174. Region ** blockToRegion = nullptr;
  175. if (assignRegionsBeforeGlobopt)
  176. {
  177. blockToRegion = JitAnewArrayZ(this->alloc, Region*, this->blockCount);
  178. }
  179. FOREACH_BLOCK_ALL(block, this)
  180. {
  181. block->SetBlockNum(blockNum++);
  182. if (assignRegionsBeforeGlobopt)
  183. {
  184. if (block->isDeleted && !block->isDead)
  185. {
  186. continue;
  187. }
  188. this->UpdateRegionForBlock(block, blockToRegion);
  189. }
  190. } NEXT_BLOCK_ALL;
  191. AssertMsg (blockNum == this->blockCount, "Block count is out of whack");
  192. if (breakBlocksRelocated)
  193. {
  194. // Sort loop lists only if there is break block removal.
  195. SortLoopLists();
  196. }
  197. #if DBG_DUMP
  198. this->Dump(false, nullptr);
  199. #endif
  200. }
// Restore ascending block-number order in every loop's block list.
// Called from Build() only after break-block relocation has disturbed the
// lists; uses insertion sort since the lists are expected to be nearly sorted.
void
FlowGraph::SortLoopLists()
{
    // Sort the blocks in loopList
    for (Loop *loop = this->loopList; loop; loop = loop->next)
    {
        unsigned int lastBlockNumber = loop->GetHeadBlock()->GetBlockNum();
        // Insertion sort as the blockList is almost sorted in the loop.
        FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)
        {
            if (lastBlockNumber <= block->GetBlockNum())
            {
                // Still in ascending order: just advance the high-water mark.
                lastBlockNumber = block->GetBlockNum();
            }
            else
            {
                // Out-of-place block: unlink it, then scan from the start of the
                // loop's list for the first block numbered above it.
                iter.UnlinkCurrent();
                FOREACH_BLOCK_IN_LOOP_EDITING(insertBlock, loop, newIter)
                {
                    if (insertBlock->GetBlockNum() > block->GetBlockNum())
                    {
                        break;
                    }
                } NEXT_BLOCK_IN_LOOP_EDITING;
                // newIter intentionally outlives the macro loop above: it is still
                // positioned at the insertion point found by the scan.
                newIter.InsertBefore(block);
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
    }
}
// Pre-flow-graph peephole pass: simplify branches, fuse Cm+BrTrue/BrFalse
// pairs, drop unreferenced labels, and detect the ShrU-by-0 pattern that
// enables unsigned-compare peeps. Skipped entirely for functions with try
// (EH flow must be preserved) or when the FGPeeps phase is disabled.
void
FlowGraph::RunPeeps()
{
    if (this->func->HasTry())
    {
        return;
    }

    if (PHASE_OFF(Js::FGPeepsPhase, this->func))
    {
        return;
    }

    IR::Instr * instrCm = nullptr;       // last compare seen, pending its consuming branch
    bool tryUnsignedCmpPeep = false;     // set once a ShrU-by-0 single-def pattern is seen

    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->func)
    {
        switch(instr->m_opcode)
        {
        // All conditional/unconditional branches: run the generic branch peep.
        case Js::OpCode::Br:
        case Js::OpCode::BrEq_I4:
        case Js::OpCode::BrGe_I4:
        case Js::OpCode::BrGt_I4:
        case Js::OpCode::BrLt_I4:
        case Js::OpCode::BrLe_I4:
        case Js::OpCode::BrUnGe_I4:
        case Js::OpCode::BrUnGt_I4:
        case Js::OpCode::BrUnLt_I4:
        case Js::OpCode::BrUnLe_I4:
        case Js::OpCode::BrNeq_I4:
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrGe_A:
        case Js::OpCode::BrGt_A:
        case Js::OpCode::BrLt_A:
        case Js::OpCode::BrLe_A:
        case Js::OpCode::BrUnGe_A:
        case Js::OpCode::BrUnGt_A:
        case Js::OpCode::BrUnLt_A:
        case Js::OpCode::BrUnLe_A:
        case Js::OpCode::BrNotEq_A:
        case Js::OpCode::BrNotNeq_A:
        case Js::OpCode::BrSrNotEq_A:
        case Js::OpCode::BrSrNotNeq_A:
        case Js::OpCode::BrNotGe_A:
        case Js::OpCode::BrNotGt_A:
        case Js::OpCode::BrNotLt_A:
        case Js::OpCode::BrNotLe_A:
        case Js::OpCode::BrNeq_A:
        case Js::OpCode::BrNotNull_A:
        case Js::OpCode::BrNotAddr_A:
        case Js::OpCode::BrAddr_A:
        case Js::OpCode::BrSrEq_A:
        case Js::OpCode::BrSrNeq_A:
        case Js::OpCode::BrOnHasProperty:
        case Js::OpCode::BrOnNoProperty:
        case Js::OpCode::BrHasSideEffects:
        case Js::OpCode::BrNotHasSideEffects:
        case Js::OpCode::BrFncEqApply:
        case Js::OpCode::BrFncNeqApply:
        case Js::OpCode::BrOnEmpty:
        case Js::OpCode::BrOnNotEmpty:
        case Js::OpCode::BrFncCachedScopeEq:
        case Js::OpCode::BrFncCachedScopeNeq:
        case Js::OpCode::BrOnObject_A:
        case Js::OpCode::BrOnClassConstructor:
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            break;

        case Js::OpCode::MultiBr:
            // TODO: Run peeps on these as well...
            break;

        // Boolean-test branches: if a compare is pending, fuse compare+branch.
        case Js::OpCode::BrTrue_I4:
        case Js::OpCode::BrFalse_I4:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instrCm)
            {
                if (instrCm->GetDst()->IsInt32())
                {
                    Assert(instr->m_opcode == Js::OpCode::BrTrue_I4 || instr->m_opcode == Js::OpCode::BrFalse_I4);
                    instrNext = this->PeepTypedCm(instrCm);
                }
                else
                {
                    instrNext = this->PeepCm(instrCm);
                }
                instrCm = nullptr;

                if (instrNext == nullptr)
                {
                    // Set instrNext back to the current instr.
                    instrNext = instr;
                }
            }
            else
            {
                instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            }
            break;

        // Signed/strict compares: optionally convert to unsigned form first...
        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmEq_A:
        case Js::OpCode::CmGe_A:
        case Js::OpCode::CmGt_A:
        case Js::OpCode::CmLt_A:
        case Js::OpCode::CmLe_A:
        case Js::OpCode::CmNeq_A:
        case Js::OpCode::CmSrEq_A:
        case Js::OpCode::CmSrNeq_A:
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            // Intentional fall through: every compare records itself as instrCm.
        case Js::OpCode::CmUnGe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGe_A:
        case Js::OpCode::CmUnGt_A:
        case Js::OpCode::CmUnLt_A:
        case Js::OpCode::CmUnLe_A:
            // There may be useless branches between the Cm instr and the branch that uses the result.
            // So save the last Cm instr seen, and trigger the peep on the next BrTrue/BrFalse.
            instrCm = instr;
            break;

        case Js::OpCode::Label:
            if (instr->AsLabelInstr()->IsUnreferenced())
            {
                instrNext = Peeps::PeepUnreachableLabel(instr->AsLabelInstr(), false);
            }
            break;

        case Js::OpCode::StatementBoundary:
            // Re-anchor the boundary's byte code offset on the next real instruction.
            instr->ClearByteCodeOffset();
            instr->SetByteCodeOffset(instr->GetNextRealInstrOrLabel());
            break;

        case Js::OpCode::ShrU_I4:
        case Js::OpCode::ShrU_A:
            if (tryUnsignedCmpPeep)
            {
                break;
            }
            // ShrU by constant 0 with a single-def dst marks the value as an
            // unsigned reinterpretation — enables unsigned compare peeps.
            if (instr->GetDst()->AsRegOpnd()->m_sym->IsSingleDef()
                && instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->IsTaggableIntConst()
                && instr->GetSrc2()->AsRegOpnd()->m_sym->GetIntConstValue() == 0)
            {
                tryUnsignedCmpPeep = true;
            }
            break;

        default:
            // Every branch opcode must be handled in a case above.
            Assert(!instr->IsBranchInstr());
        }
    } NEXT_INSTR_IN_FUNC_EDITING;
}
// Create a landing-pad block immediately before this loop's head block and
// redirect all non-back-edge predecessors of the head through it. The pad
// becomes the single forward entry to the loop, giving globopt a place to
// hoist loop-invariant instructions.
void
Loop::InsertLandingPad(FlowGraph *fg)
{
    BasicBlock *headBlock = this->GetHeadBlock();

    // Always create a landing pad. This allows globopt to easily hoist instructions
    // and re-optimize the block if needed.
    BasicBlock *landingPad = BasicBlock::New(fg);
    this->landingPad = landingPad;

    // Give the pad its own label, placed just before the head's first instruction.
    IR::Instr * headInstr = headBlock->GetFirstInstr();
    IR::LabelInstr *landingPadLabel = IR::LabelInstr::New(Js::OpCode::Label, headInstr->m_func);
    landingPadLabel->SetByteCodeOffset(headInstr);
    headInstr->InsertBefore(landingPadLabel);

    landingPadLabel->SetBasicBlock(landingPad);

    // The pad consists of exactly that one label instruction.
    landingPad->SetBlockNum(fg->blockCount++);
    landingPad->SetFirstInstr(landingPadLabel);
    landingPad->SetLastInstr(landingPadLabel);

    // Splice the pad into the lexical block chain right before the head.
    landingPad->prev = headBlock->prev;
    landingPad->prev->next = landingPad;
    landingPad->next = headBlock;
    headBlock->prev = landingPad;

    // The pad belongs to the enclosing loop (if any), not to this loop.
    Loop *parentLoop = this->parent;
    landingPad->loop = parentLoop;

    // We need to add this block to the block list of the parent loops
    while (parentLoop)
    {
        // Find the head block in the block list of the parent loop
        FOREACH_BLOCK_IN_LOOP_EDITING(block, parentLoop, iter)
        {
            if (block == headBlock)
            {
                // Add the landing pad to the block list
                iter.InsertBefore(landingPad);
                break;
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
        parentLoop = parentLoop->parent;
    }

    // Fix predecessor flow edges
    FOREACH_PREDECESSOR_EDGE_EDITING(edge, headBlock, iter)
    {
        // Make sure it isn't a back-edge
        if (edge->GetPred()->loop != this && !this->IsDescendentOrSelf(edge->GetPred()->loop))
        {
            // Retarget the predecessor's branch from the head's label to the pad's label.
            if (edge->GetPred()->GetLastInstr()->IsBranchInstr() && headBlock->GetFirstInstr()->IsLabelInstr())
            {
                IR::BranchInstr *branch = edge->GetPred()->GetLastInstr()->AsBranchInstr();
                branch->ReplaceTarget(headBlock->GetFirstInstr()->AsLabelInstr(), landingPadLabel);
            }
            // Move the edge itself: pred -> landingPad instead of pred -> head.
            headBlock->UnlinkPred(edge->GetPred(), false);
            landingPad->AddPred(edge, fg);
            edge->SetSucc(landingPad);
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;

    // Control falls through from the pad into the loop head.
    fg->AddEdge(landingPad, headBlock);
}
// Canonicalize this loop by moving "break blocks" — lexically inside the
// loop's block range but not belonging to the loop (or a nested loop) — to
// after the loop's tail. Returns true if any block was relocated, so the
// caller knows loop block lists need re-sorting.
bool
Loop::RemoveBreakBlocks(FlowGraph *fg)
{
    bool breakBlockRelocated = false;
    if (PHASE_OFF(Js::RemoveBreakBlockPhase, fg->GetFunc()))
    {
        return false;
    }

    // Find the lexically last block of the loop.
    BasicBlock *loopTailBlock = nullptr;
    FOREACH_BLOCK_IN_LOOP(block, this)
    {
        loopTailBlock = block;
    } NEXT_BLOCK_IN_LOOP;
    AnalysisAssert(loopTailBlock);

    FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(breakBlockEnd, loopTailBlock, this->GetHeadBlock(), blockPrev)
    {
        // A block in the loop's lexical range whose loop is not this loop (or a
        // descendant) is a break block sequence's last block.
        while (!this->IsDescendentOrSelf(breakBlockEnd->loop))
        {
            // Found at least one break block;
            breakBlockRelocated = true;
#if DBG
            breakBlockEnd->isBreakBlock = true;
#endif
            // Find the first block in this break block sequence.
            BasicBlock *breakBlockStart = breakBlockEnd;
            BasicBlock *breakBlockStartPrev = breakBlockEnd->GetPrev();

            // Walk back the blocks until we find a block which belongs to that block.
            // Note: We don't really care if there are break blocks corresponding to different loops. We move the blocks conservatively to the end of the loop.
            // Algorithm works on one loop at a time.
            while((breakBlockStartPrev->loop == breakBlockEnd->loop) || !this->IsDescendentOrSelf(breakBlockStartPrev->loop))
            {
                breakBlockStart = breakBlockStartPrev;
                breakBlockStartPrev = breakBlockStartPrev->GetPrev();
            }
#if DBG
            breakBlockStart->isBreakBlock = true; // Mark the first block as well.
#endif

            BasicBlock *exitLoopTail = loopTailBlock;
            // Move these break blocks to the tail of the loop.
            fg->MoveBlocksBefore(breakBlockStart, breakBlockEnd, exitLoopTail->next);

#if DBG_DUMP
            fg->Dump(true /*needs verbose flag*/, L"\n After Each iteration of canonicalization \n");
#endif
            // Again be conservative, there are edits to the loop graph. Start fresh for this loop.
            breakBlockEnd = loopTailBlock;
            blockPrev = breakBlockEnd->prev;
        }
    } NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING;

    return breakBlockRelocated;
}
// Move the contiguous block range [blockStart, blockEnd] so it sits
// immediately before insertBlock, fixing block linkage, instruction linkage,
// and control flow. Fall-through paths broken by the move are repaired by
// inverting the source branch and/or inserting compensation blocks.
void
FlowGraph::MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock)
{
    BasicBlock *srcPredBlock = blockStart->prev;   // block before the moved range (source side)
    BasicBlock *srcNextBlock = blockEnd->next;     // block after the moved range (source side)
    BasicBlock *dstPredBlock = insertBlock->prev;  // block the range is inserted after (dest side)
    IR::Instr* dstPredBlockLastInstr = dstPredBlock->GetLastInstr();
    IR::Instr* blockEndLastInstr = blockEnd->GetLastInstr();

    // Fix block linkage
    srcPredBlock->next = srcNextBlock;
    srcNextBlock->prev = srcPredBlock;

    dstPredBlock->next = blockStart;
    insertBlock->prev = blockEnd;

    blockStart->prev = dstPredBlock;
    blockEnd->next = insertBlock;

    // Fix instruction linkage
    IR::Instr::MoveRangeAfter(blockStart->GetFirstInstr(), blockEndLastInstr, dstPredBlockLastInstr);

    // Fix instruction flow
    IR::Instr *srcLastInstr = srcPredBlock->GetLastInstr();
    if (srcLastInstr->IsBranchInstr() && srcLastInstr->AsBranchInstr()->HasFallThrough())
    {
        // There was a fallthrough in the break blocks original position.
        IR::BranchInstr *srcBranch = srcLastInstr->AsBranchInstr();
        IR::Instr *srcBranchNextInstr = srcBranch->GetNextRealInstrOrLabel();

        // Save the target and invert the branch.
        IR::LabelInstr *srcBranchTarget = srcBranch->GetTarget();
        srcPredBlock->InvertBranch(srcBranch);
        IR::LabelInstr *srcLabel = blockStart->GetFirstInstr()->AsLabelInstr();

        // Point the inverted branch to break block.
        srcBranch->SetTarget(srcLabel);

        if (srcBranchNextInstr != srcBranchTarget)
        {
            // The inverted branch's fall-through no longer reaches the original
            // target; add a compensation block carrying an explicit jump.
            FlowEdge *srcEdge = this->FindEdge(srcPredBlock, srcBranchTarget->GetBasicBlock());
            Assert(srcEdge);

            BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(srcEdge, true /*insert compensation block to loop list*/, false /*At source*/);
            Assert(compensationBlock);
        }
    }

    IR::Instr *dstLastInstr = dstPredBlockLastInstr;
    if (dstLastInstr->IsBranchInstr() && dstLastInstr->AsBranchInstr()->HasFallThrough())
    {
        //There is a fallthrough in the block after which break block is inserted.
        FlowEdge *dstEdge = this->FindEdge(dstPredBlock, blockEnd->GetNext());
        Assert(dstEdge);

        BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert compensation block to loop list*/, true /*At sink*/);
        Assert(compensationBlock);
    }
}
  540. FlowEdge *
  541. FlowGraph::FindEdge(BasicBlock *predBlock, BasicBlock *succBlock)
  542. {
  543. FlowEdge *srcEdge = nullptr;
  544. FOREACH_SUCCESSOR_EDGE(edge, predBlock)
  545. {
  546. if (edge->GetSucc() == succBlock)
  547. {
  548. srcEdge = edge;
  549. break;
  550. }
  551. } NEXT_SUCCESSOR_EDGE;
  552. return srcEdge;
  553. }
// Invert this block's terminating conditional branch in place. The successor
// edge list is reversed as well so that the edge order stays consistent with
// the (now swapped) taken / fall-through targets.
void
BasicBlock::InvertBranch(IR::BranchInstr *branch)
{
    Assert(this->GetLastInstr() == branch);   // branch must terminate this block
    Assert(this->GetSuccList()->HasTwo());    // conditional branch: exactly two successors
    branch->Invert();
    this->GetSuccList()->Reverse();
}
  562. bool
  563. FlowGraph::CanonicalizeLoops()
  564. {
  565. if (this->func->HasProfileInfo())
  566. {
  567. this->implicitCallFlags = this->func->GetProfileInfo()->GetImplicitCallFlags();
  568. for (Loop *loop = this->loopList; loop; loop = loop->next)
  569. {
  570. this->implicitCallFlags = (Js::ImplicitCallFlags)(this->implicitCallFlags | loop->GetImplicitCallFlags());
  571. }
  572. }
  573. #if DBG_DUMP
  574. this->Dump(true, L"\n Before canonicalizeLoops \n");
  575. #endif
  576. bool breakBlockRelocated = false;
  577. for (Loop *loop = this->loopList; loop; loop = loop->next)
  578. {
  579. loop->InsertLandingPad(this);
  580. if (!this->func->HasTry() || this->func->DoOptimizeTryCatch())
  581. {
  582. bool relocated = loop->RemoveBreakBlocks(this);
  583. if (!breakBlockRelocated && relocated)
  584. {
  585. breakBlockRelocated = true;
  586. }
  587. }
  588. }
  589. #if DBG_DUMP
  590. this->Dump(true, L"\n After canonicalizeLoops \n");
  591. #endif
  592. return breakBlockRelocated;
  593. }
  594. // Find the loops in this function, build the loop structure, and build a linked
  595. // list of the basic blocks in this loop (including blocks of inner loops). The
  596. // list preserves the reverse post-order of the blocks in the flowgraph block list.
  597. void
  598. FlowGraph::FindLoops()
  599. {
  600. if (!this->hasLoop)
  601. {
  602. return;
  603. }
  604. Func * func = this->func;
  605. FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func)
  606. {
  607. if (block->loop != nullptr)
  608. {
  609. // Block already visited
  610. continue;
  611. }
  612. FOREACH_SUCCESSOR_BLOCK(succ, block)
  613. {
  614. if (succ->isLoopHeader && succ->loop == nullptr)
  615. {
  616. // Found a loop back-edge
  617. BuildLoop(succ, block);
  618. }
  619. } NEXT_SUCCESSOR_BLOCK;
  620. if (block->isLoopHeader && block->loop == nullptr)
  621. {
  622. // We would have built a loop for it if it was a loop...
  623. block->isLoopHeader = false;
  624. block->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
  625. }
  626. } NEXT_BLOCK_BACKWARD_IN_FUNC;
  627. }
// Build the Loop structure for the loop headed at headBlock whose back-edge
// source is tailBlock, then walk its blocks (recursing into inner loops).
// parentLoop, when provided, is a candidate enclosing loop.
void
FlowGraph::BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop)
{
    // This function is recursive, so when jitting in the foreground, probe the stack
    if(!func->IsBackgroundJIT())
    {
        PROBE_STACK(func->GetScriptContext(), Js::Constants::MinStackDefault);
    }
    if (tailBlock->number < headBlock->number)
    {
        // Not a loop. We didn't see any back-edge.
        headBlock->isLoopHeader = false;
        headBlock->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
        return;
    }
    Assert(headBlock->isLoopHeader);
    // Allocate the loop and push it on the flow graph's loop list.
    Loop *loop = JitAnewZ(this->GetFunc()->m_alloc, Loop, this->GetFunc()->m_alloc, this->GetFunc());
    loop->next = this->loopList;
    this->loopList = loop;
    headBlock->loop = loop;
    loop->headBlock = headBlock;
    loop->int32SymsOnEntry = nullptr;
    loop->lossyInt32SymsOnEntry = nullptr;
    // If parentLoop is a parent of loop, it's headBlock better appear first.
    if (parentLoop && loop->headBlock->number > parentLoop->headBlock->number)
    {
        loop->parent = parentLoop;
        parentLoop->isLeaf = false;
    }
    loop->hasDeadStoreCollectionPass = false;
    loop->hasDeadStorePrepass = false;
    loop->memOpInfo = nullptr;
    // Temporary allocator used only for the block walk's bit vector.
    NoRecoverMemoryJitArenaAllocator tempAlloc(L"BE-LoopBuilder", this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    WalkLoopBlocks(tailBlock, loop, &tempAlloc);
    Assert(loop->GetHeadBlock() == headBlock);
    // Attach the loop to its loop-top label and pick up profile data if present.
    IR::LabelInstr * firstInstr = loop->GetLoopTopInstr();
    firstInstr->SetLoop(loop);
    if (firstInstr->IsProfiledLabelInstr())
    {
        loop->SetImplicitCallFlags(firstInstr->AsProfiledLabelInstr()->loopImplicitCallFlags);
        loop->SetLoopFlags(firstInstr->AsProfiledLabelInstr()->loopFlags);
    }
    else
    {
        // Didn't collect profile information, don't do optimizations
        loop->SetImplicitCallFlags(Js::ImplicitCall_All);
    }
}
  676. Loop::MemCopyCandidate* Loop::MemOpCandidate::AsMemCopy()
  677. {
  678. Assert(this->IsMemCopy());
  679. return (Loop::MemCopyCandidate*)this;
  680. }
  681. Loop::MemSetCandidate* Loop::MemOpCandidate::AsMemSet()
  682. {
  683. Assert(this->IsMemSet());
  684. return (Loop::MemSetCandidate*)this;
  685. }
  686. bool Loop::EnsureMemOpVariablesInitialized()
  687. {
  688. if (this->memOpInfo == nullptr)
  689. {
  690. JitArenaAllocator *allocator = this->GetFunc()->GetTopFunc()->m_fg->alloc;
  691. this->memOpInfo = JitAnewStruct(allocator, Loop::MemOpInfo);
  692. this->memOpInfo->inductionVariablesUsedAfterLoop = nullptr;
  693. this->memOpInfo->startIndexOpndCache[0] = nullptr;
  694. this->memOpInfo->startIndexOpndCache[1] = nullptr;
  695. this->memOpInfo->startIndexOpndCache[2] = nullptr;
  696. this->memOpInfo->startIndexOpndCache[3] = nullptr;
  697. if (this->GetLoopFlags().isInterpreted && !this->GetLoopFlags().memopMinCountReached)
  698. {
  699. #if DBG_DUMP
  700. Func* func = this->GetFunc();
  701. if (Js::Configuration::Global.flags.Verbose && PHASE_TRACE(Js::MemOpPhase, func))
  702. {
  703. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  704. Output::Print(L"MemOp skipped: minimum loop count not reached: Function: %s %s, Loop: %d\n",
  705. func->GetJnFunction()->GetDisplayName(),
  706. func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  707. this->GetLoopNumber()
  708. );
  709. }
  710. #endif
  711. this->memOpInfo->doMemOp = false;
  712. this->memOpInfo->inductionVariableChangeInfoMap = nullptr;
  713. this->memOpInfo->inductionVariableOpndPerUnrollMap = nullptr;
  714. this->memOpInfo->candidates = nullptr;
  715. return false;
  716. }
  717. this->memOpInfo->doMemOp = true;
  718. this->memOpInfo->inductionVariableChangeInfoMap = JitAnew(allocator, Loop::InductionVariableChangeInfoMap, allocator);
  719. this->memOpInfo->inductionVariableOpndPerUnrollMap = JitAnew(allocator, Loop::InductionVariableOpndPerUnrollMap, allocator);
  720. this->memOpInfo->candidates = JitAnew(allocator, Loop::MemOpList, allocator);
  721. }
  722. return true;
  723. }
// Walk the basic blocks backwards until we find the loop header.
// Mark basic blocks in the loop by looking at the predecessors
// of blocks known to be in the loop.
// Recurse on inner loops.
void
FlowGraph::WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc)
{
    AnalysisAssert(loop);
    // Bit vector of block numbers known to be in the loop; seeded with the
    // back-edge source and grown via predecessors of in-loop blocks.
    BVSparse<JitArenaAllocator> *loopBlocksBv = JitAnew(tempAlloc, BVSparse<JitArenaAllocator>, tempAlloc);
    BasicBlock *tailBlock = block;
    BasicBlock *lastBlock;
    loopBlocksBv->Set(block->GetBlockNum());
    this->AddBlockToLoop(block, loop);
    if (block == loop->headBlock)
    {
        // Single block loop, we're done
        return;
    }
    do
    {
        BOOL isInLoop = loopBlocksBv->Test(block->GetBlockNum());
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (succ->isLoopHeader)
            {
                // Found a loop back-edge
                if (loop->headBlock == succ)
                {
                    // Another back-edge of the loop being walked.
                    isInLoop = true;
                }
                else if (succ->loop == nullptr || succ->loop->headBlock != succ)
                {
                    // Recurse on inner loop
                    BuildLoop(succ, block, isInLoop ? loop : nullptr);
                }
            }
        } NEXT_SUCCESSOR_BLOCK;
        if (isInLoop)
        {
            // This block is in the loop. All of it's predecessors should be contained in the loop as well.
            FOREACH_PREDECESSOR_BLOCK(pred, block)
            {
                // Fix up loop parent if it isn't set already.
                // If pred->loop != loop, we're looking at an inner loop, which was already visited.
                // If pred->loop->parent == nullptr, this is the first time we see this loop from an outer
                // loop, so this must be an immediate child.
                if (pred->loop && pred->loop != loop && loop->headBlock->number < pred->loop->headBlock->number
                    && (pred->loop->parent == nullptr || pred->loop->parent->headBlock->number < loop->headBlock->number))
                {
                    pred->loop->parent = loop;
                    loop->isLeaf = false;
                    // Propagate the inner loop's call/implicit-call facts outward.
                    if (pred->loop->hasCall)
                    {
                        loop->SetHasCall();
                    }
                    loop->SetImplicitCallFlags(pred->loop->GetImplicitCallFlags());
                }
                // Add pred to loop bit vector
                loopBlocksBv->Set(pred->GetBlockNum());
            } NEXT_PREDECESSOR_BLOCK;
            // Claim the block for this loop unless it already belongs to a
            // loop that is not a descendant of (or equal to) this one.
            if (block->loop == nullptr || block->loop->IsDescendentOrSelf(loop))
            {
                block->loop = loop;
            }
            if (block != tailBlock)
            {
                // tailBlock was added before the walk started; avoid re-adding it.
                this->AddBlockToLoop(block, loop);
            }
        }
        lastBlock = block;
        block = block->GetPrev();
    } while (lastBlock != loop->headBlock);
}
  797. // Add block to this loop, and it's parent loops.
  798. void
  799. FlowGraph::AddBlockToLoop(BasicBlock *block, Loop *loop)
  800. {
  801. loop->blockList.Prepend(block);
  802. if (block->hasCall)
  803. {
  804. loop->SetHasCall();
  805. }
  806. }
///----------------------------------------------------------------------------
///
/// FlowGraph::AddBlock
///
/// Finish processing of a new block: hook up successor arcs, note loops, etc.
///
///----------------------------------------------------------------------------
BasicBlock *
FlowGraph::AddBlock(
    IR::Instr * firstInstr,
    IR::Instr * lastInstr,
    BasicBlock * nextBlock)
{
    BasicBlock * block;
    IR::LabelInstr * labelInstr;
    // Every block must start with a label; synthesize one if needed.
    if (firstInstr->IsLabelInstr())
    {
        labelInstr = firstInstr->AsLabelInstr();
    }
    else
    {
        labelInstr = IR::LabelInstr::New(Js::OpCode::Label, firstInstr->m_func);
        labelInstr->SetByteCodeOffset(firstInstr);
        // An entry instruction must stay first, so the label goes after it;
        // otherwise the label precedes the first instruction.
        if (firstInstr->IsEntryInstr())
        {
            firstInstr->InsertAfter(labelInstr);
        }
        else
        {
            firstInstr->InsertBefore(labelInstr);
        }
        firstInstr = labelInstr;
    }
    // Reuse the block already attached to the label (e.g. created as a branch
    // target earlier), or make a fresh one.
    block = labelInstr->GetBasicBlock();
    if (block == nullptr)
    {
        block = BasicBlock::New(this);
        labelInstr->SetBasicBlock(block);
        // Remember last block in function to target successor of RETs.
        if (!this->tailBlock)
        {
            this->tailBlock = block;
        }
    }
    // Hook up the successor edges
    if (lastInstr->EndsBasicBlock())
    {
        BasicBlock * blockTarget = nullptr;
        if (lastInstr->IsBranchInstr())
        {
            // Hook up a successor edge to the branch target.
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();
            if(branchInstr->IsMultiBranch())
            {
                // One edge per unique target label of the multi-branch.
                BasicBlock * blockMultiBrTarget;
                IR::MultiBranchInstr * multiBranchInstr = branchInstr->AsMultiBrInstr();
                multiBranchInstr->MapUniqueMultiBrLabels([&](IR::LabelInstr * labelInstr) -> void
                {
                    blockMultiBrTarget = SetBlockTargetAndLoopFlag(labelInstr);
                    this->AddEdge(block, blockMultiBrTarget);
                });
            }
            else
            {
                IR::LabelInstr * labelInstr = branchInstr->GetTarget();
                blockTarget = SetBlockTargetAndLoopFlag(labelInstr);
                if (branchInstr->IsConditional())
                {
                    // Also make sure the fall-through label (if any) has a
                    // block, since the conditional branch can fall into it.
                    IR::Instr *instrNext = branchInstr->GetNextRealInstrOrLabel();
                    if (instrNext->IsLabelInstr())
                    {
                        SetBlockTargetAndLoopFlag(instrNext->AsLabelInstr());
                    }
                }
            }
        }
        else if (lastInstr->m_opcode == Js::OpCode::Ret && block != this->tailBlock)
        {
            // RETs flow to the function's tail block.
            blockTarget = this->tailBlock;
        }
        if (blockTarget)
        {
            this->AddEdge(block, blockTarget);
        }
    }
    if (lastInstr->HasFallThrough())
    {
        // Add a branch to next instruction so that we don't have to update the flow graph
        // when the glob opt tries to insert instructions.
        // We don't run the globopt with try/catch, don't need to insert branch to next for fall through blocks.
        if (!this->func->HasTry() && !lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * instr = IR::BranchInstr::New(Js::OpCode::Br,
                lastInstr->m_next->AsLabelInstr(), lastInstr->m_func);
            instr->SetByteCodeOffset(lastInstr->m_next);
            lastInstr->InsertAfter(instr);
            lastInstr = instr;
        }
        this->AddEdge(block, nextBlock);
    }
    // Assign the block number and bounds, then push the block on the front of
    // the graph's block list (blocks are added in reverse).
    block->SetBlockNum(this->blockCount++);
    block->SetFirstInstr(firstInstr);
    block->SetLastInstr(lastInstr);
    if (this->blockList)
    {
        this->blockList->prev = block;
    }
    block->next = this->blockList;
    this->blockList = block;
    return block;
}
  918. BasicBlock *
  919. FlowGraph::SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr)
  920. {
  921. BasicBlock * blockTarget = nullptr;
  922. blockTarget = labelInstr->GetBasicBlock();
  923. if (blockTarget == nullptr)
  924. {
  925. blockTarget = BasicBlock::New(this);
  926. labelInstr->SetBasicBlock(blockTarget);
  927. }
  928. if (labelInstr->m_isLoopTop)
  929. {
  930. blockTarget->isLoopHeader = true;
  931. this->hasLoop = true;
  932. }
  933. return blockTarget;
  934. }
  935. ///----------------------------------------------------------------------------
  936. ///
  937. /// FlowGraph::AddEdge
  938. ///
  939. /// Add an edge connecting the two given blocks.
  940. ///
  941. ///----------------------------------------------------------------------------
  942. FlowEdge *
  943. FlowGraph::AddEdge(BasicBlock * blockPred, BasicBlock * blockSucc)
  944. {
  945. FlowEdge * edge = FlowEdge::New(this);
  946. edge->SetPred(blockPred);
  947. edge->SetSucc(blockSucc);
  948. blockPred->AddSucc(edge, this);
  949. blockSucc->AddPred(edge, this);
  950. return edge;
  951. }
///----------------------------------------------------------------------------
///
/// FlowGraph::Destroy
///
/// Remove all references to FG structures from the IR in preparation for freeing
/// the FG.
///
///----------------------------------------------------------------------------
void
FlowGraph::Destroy(void)
{
    BOOL fHasTry = this->func->HasTry();
    // Per-block-number EH region map; only needed for functions with try.
    Region ** blockToRegion = nullptr;
    if (fHasTry)
    {
        blockToRegion = JitAnewArrayZ(this->alloc, Region*, this->blockCount);
        // Do unreachable code removal up front to avoid problems
        // with unreachable back edges, etc.
        this->RemoveUnreachableBlocks();
    }
    FOREACH_BLOCK_ALL(block, this)
    {
        IR::Instr * firstInstr = block->GetFirstInstr();
        if (block->isDeleted && !block->isDead)
        {
            // Deleted (but not dead) block: detach its label and, for non-try
            // functions, remove the label instruction entirely.
            if (firstInstr->IsLabelInstr())
            {
                IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
                labelInstr->UnlinkBasicBlock();
                // Removing the label for non try blocks as we have a deleted block which has the label instruction
                // still not removed; this prevents the assert for cases where the deleted blocks fall through to a helper block,
                // i.e. helper introduced by polymorphic inlining bailout.
                // Skipping Try blocks as we have dependency on blocks to get the last instr(see below in this function)
                if (!fHasTry)
                {
                    if (this->func->GetJnFunction()->IsGenerator())
                    {
                        // the label could be a yield resume label, in which case we also need to remove it from the YieldOffsetResumeLabels list
                        this->func->MapUntilYieldOffsetResumeLabels([this, &labelInstr](int i, const YieldOffsetResumeLabel& yorl)
                        {
                            if (labelInstr == yorl.Second())
                            {
                                labelInstr->m_hasNonBranchRef = false;
                                this->func->RemoveYieldOffsetResumeLabel(yorl);
                                return true;
                            }
                            return false;
                        });
                    }
                    Assert(labelInstr->IsUnreferenced());
                    labelInstr->Remove();
                }
            }
            continue;
        }
        if (block->isLoopHeader && !block->isDead)
        {
            // Mark the tail block of this loop (the last back-edge). The register allocator
            // uses this to lexically find loops.
            BasicBlock *loopTail = nullptr;
            AssertMsg(firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->m_isLoopTop,
                "Label not marked as loop top...");
            // Search the loop's blocks for one whose successor is the header,
            // i.e. a back-edge source.
            FOREACH_BLOCK_IN_LOOP(loopBlock, block->loop)
            {
                FOREACH_SUCCESSOR_BLOCK(succ, loopBlock)
                {
                    if (succ == block)
                    {
                        loopTail = loopBlock;
                        break;
                    }
                } NEXT_SUCCESSOR_BLOCK;
            } NEXT_BLOCK_IN_LOOP;
            if (loopTail)
            {
                AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
                block->loop->SetLoopTopInstr(block->GetFirstInstr()->AsLabelInstr());
            }
            else
            {
                // This loop doesn't have a back-edge: that is, it is not a loop
                // anymore...
                firstInstr->AsLabelInstr()->m_isLoopTop = FALSE;
            }
        }
        if (fHasTry)
        {
            // Assign an EH region to this block (recorded in blockToRegion).
            this->UpdateRegionForBlock(block, blockToRegion);
        }
        if (firstInstr->IsLabelInstr())
        {
            IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
            labelInstr->UnlinkBasicBlock();
            if (labelInstr->IsUnreferenced() && !fHasTry)
            {
                // This is an unreferenced label, probably added by FG building.
                // Delete it now to make extended basic blocks visible.
                if (firstInstr == block->GetLastInstr())
                {
                    // Label was the block's only instruction; nothing else to
                    // process for this block.
                    labelInstr->Remove();
                    continue;
                }
                else
                {
                    labelInstr->Remove();
                }
            }
        }
        // We don't run the globopt with try/catch, don't need to remove branch to next for fall through blocks
        IR::Instr * lastInstr = block->GetLastInstr();
        if (!fHasTry && lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();
            if (!branchInstr->IsConditional() && branchInstr->GetTarget() == branchInstr->m_next)
            {
                // Remove branch to next
                branchInstr->Remove();
            }
        }
    }
    NEXT_BLOCK;
#if DBG
    if (fHasTry)
    {
        // Now that all blocks have regions, we should see consistently propagated regions at all
        // block boundaries.
        FOREACH_BLOCK(block, this)
        {
            Region * region = blockToRegion[block->GetBlockNum()];
            Region * predRegion = nullptr;
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                predRegion = blockToRegion[predBlock->GetBlockNum()];
                if (predBlock->GetLastInstr() == nullptr)
                {
                    AssertMsg(region == predRegion, "Bad region propagation through empty block");
                }
                else
                {
                    // The predecessor's terminating opcode determines the
                    // legal region transition into this block.
                    switch (predBlock->GetLastInstr()->m_opcode)
                    {
                    case Js::OpCode::TryCatch:
                    case Js::OpCode::TryFinally:
                        AssertMsg(region->GetParent() == predRegion, "Bad region prop on entry to try-catch/finally");
                        if (block->GetFirstInstr() == predBlock->GetLastInstr()->AsBranchInstr()->GetTarget())
                        {
                            // Branch target of TryCatch/TryFinally is the handler.
                            if (predBlock->GetLastInstr()->m_opcode == Js::OpCode::TryCatch)
                            {
                                AssertMsg(region->GetType() == RegionTypeCatch, "Bad region type on entry to catch");
                            }
                            else
                            {
                                AssertMsg(region->GetType() == RegionTypeFinally, "Bad region type on entry to finally");
                            }
                        }
                        else
                        {
                            AssertMsg(region->GetType() == RegionTypeTry, "Bad region type on entry to try");
                        }
                        break;
                    case Js::OpCode::Leave:
                    case Js::OpCode::LeaveNull:
                        AssertMsg(region == predRegion->GetParent() || (region == predRegion && this->func->IsLoopBodyInTry()), "Bad region prop on leaving try-catch/finally");
                        break;
                    // If the try region has a branch out of the loop,
                    // - the branch is moved out of the loop as part of break block removal, and
                    // - BrOnException is inverted to BrOnNoException and a Br is inserted after it.
                    // Otherwise,
                    // - FullJit: BrOnException is removed in the forward pass.
                    case Js::OpCode::BrOnException:
                        Assert(!this->func->DoGlobOpt());
                        // Intentional fall-through: BrOnException shares the
                        // remaining checks with BrOnNoException.
                    case Js::OpCode::BrOnNoException:
                        Assert(this->func->HasTry() &&
                            ((!this->func->HasFinally() && !this->func->IsLoopBody() && !PHASE_OFF(Js::OptimizeTryCatchPhase, this->func)) ||
                            (this->func->IsSimpleJit() && this->func->GetJnFunction()->DoJITLoopBody()))); // should be relaxed as more bailouts are added in Simple Jit
                        Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch);
                        if (region->GetType() == RegionTypeCatch)
                        {
                            Assert((predRegion->GetType() == RegionTypeTry) || (predRegion->GetType() == RegionTypeCatch));
                        }
                        else if (region->GetType() == RegionTypeTry)
                        {
                            Assert(region == predRegion);
                        }
                        break;
                    case Js::OpCode::Br:
                        if (region->GetType() == RegionTypeCatch && region != predRegion)
                        {
                            AssertMsg(predRegion->GetType() == RegionTypeTry, "Bad region type for the try");
                        }
                        else
                        {
                            AssertMsg(region == predRegion, "Bad region propagation through interior block");
                        }
                        break;
                    default:
                        AssertMsg(region == predRegion, "Bad region propagation through interior block");
                        break;
                    }
                }
            }
            NEXT_PREDECESSOR_BLOCK;
            // Sanity-check the region's own matching try/catch/finally links.
            switch (region->GetType())
            {
            case RegionTypeRoot:
                Assert(!region->GetMatchingTryRegion() && !region->GetMatchingCatchRegion() && !region->GetMatchingFinallyRegion());
                break;
            case RegionTypeTry:
                Assert(!(region->GetMatchingCatchRegion() && region->GetMatchingFinallyRegion()));
                break;
            case RegionTypeCatch:
            case RegionTypeFinally:
                Assert(region->GetMatchingTryRegion());
                break;
            }
        }
        NEXT_BLOCK;
        FOREACH_BLOCK_DEAD_OR_ALIVE(block, this)
        {
            if (block->GetFirstInstr()->IsLabelInstr())
            {
                IR::LabelInstr *labelInstr = block->GetFirstInstr()->AsLabelInstr();
                if (labelInstr->IsUnreferenced())
                {
                    // This is an unreferenced label, probably added by FG building.
                    // Delete it now to make extended basic blocks visible.
                    labelInstr->Remove();
                }
            }
        } NEXT_BLOCK_DEAD_OR_ALIVE;
    }
#endif
    this->func->isFlowGraphValid = false;
}
// Propagate the region forward from the block's predecessor(s), tracking the effect
// of the flow transition. Record the region in the block-to-region map provided
// and on the label at the entry to the block (if any).
void
FlowGraph::UpdateRegionForBlock(BasicBlock * block, Region ** blockToRegion)
{
    Region *region;
    Region * predRegion = nullptr;
    IR::Instr * tryInstr = nullptr;
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->GetRegion())
    {
        // Region already recorded on the label (earlier pass); just copy it
        // into the map and return.
        Assert(this->func->HasTry() && (this->func->DoOptimizeTryCatch() || (this->func->IsSimpleJit() && this->func->hasBailout)));
        blockToRegion[block->GetBlockNum()] = firstInstr->AsLabelInstr()->GetRegion();
        return;
    }
    if (block == this->blockList)
    {
        // Head of the graph: create the root region.
        region = Region::New(RegionTypeRoot, nullptr, this->func);
    }
    else
    {
        // Propagate the region forward by finding a predecessor we've already processed.
        // We require that there be one, since we've already removed unreachable blocks.
        region = nullptr;
        FOREACH_PREDECESSOR_BLOCK(predBlock, block)
        {
            AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
            predRegion = blockToRegion[predBlock->GetBlockNum()];
            if (predRegion != nullptr)
            {
                region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
                break;
            }
        }
        NEXT_PREDECESSOR_BLOCK;
    }
    AnalysisAssertMsg(region != nullptr, "Failed to find region for block");
    if (!region->ehBailoutData)
    {
        region->AllocateEHBailoutData(this->func, tryInstr);
    }
    // Record the region in the block-to-region map.
    blockToRegion[block->GetBlockNum()] = region;
    if (firstInstr->IsLabelInstr())
    {
        // Record the region on the label and make sure it stays around as a region
        // marker if we're entering a region at this point.
        IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
        labelInstr->SetRegion(region);
        if (region != predRegion)
        {
            labelInstr->m_hasNonBranchRef = true;
        }
    }
}
// Compute the EH region for block given one already-processed predecessor and
// its region. The predecessor's terminating opcode determines the transition:
// TryCatch/TryFinally enter a new child region, Leave/LeaveNull pop back to
// the parent, anything else propagates unchanged. tryInstr is set (out) when
// a try region is entered.
Region *
FlowGraph::PropagateRegionFromPred(BasicBlock * block, BasicBlock * predBlock, Region * predRegion, IR::Instr * &tryInstr)
{
    // Propagate predRegion to region, looking at the flow transition for an opcode
    // that affects the region.
    Region * region = nullptr;
    IR::Instr * predLastInstr = predBlock->GetLastInstr();
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (predLastInstr == nullptr)
    {
        // Empty block: trivially propagate the region.
        region = predRegion;
    }
    else
    {
        Region * tryRegion = nullptr;
        IR::LabelInstr * tryInstrNext = nullptr;
        switch (predLastInstr->m_opcode)
        {
        case Js::OpCode::TryCatch:
            // Entry to a try-catch. See whether we're entering the try or the catch
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();
            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // Branch target of TryCatch is the catch handler; link the new
                // catch region with its matching try region.
                region = Region::New(RegionTypeCatch, predRegion, this->func);
                Assert(tryRegion);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingCatchRegion(region);
            }
            else
            {
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;
        case Js::OpCode::TryFinally:
            // Entry to a try-finally. See whether we're entering the try or the finally
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();
            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // Branch target of TryFinally is the finally handler; link the
                // new finally region with its matching try region.
                region = Region::New(RegionTypeFinally, predRegion, this->func);
                Assert(tryRegion);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region);
            }
            else
            {
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;
        case Js::OpCode::Leave:
        case Js::OpCode::LeaveNull:
            // Exiting a try or handler. Retrieve the current region's parent.
            region = predRegion->GetParent();
            if (region == nullptr)
            {
                // We found a Leave in the root region- this can only happen when a jitted loop body
                // in a try block has a return statement.
                Assert(this->func->IsLoopBodyInTry());
                predLastInstr->AsBranchInstr()->m_isOrphanedLeave = true;
                region = predRegion;
            }
            break;
        default:
            // Normal (non-EH) transition: just propagate the region.
            region = predRegion;
            break;
        }
    }
    return region;
}
  1321. void
  1322. FlowGraph::InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget)
  1323. {
  1324. if (loop)
  1325. {
  1326. bool found = false;
  1327. FOREACH_BLOCK_IN_LOOP_EDITING(loopBlock, loop, iter)
  1328. {
  1329. if (loopBlock == targetBlock)
  1330. {
  1331. found = true;
  1332. break;
  1333. }
  1334. } NEXT_BLOCK_IN_LOOP_EDITING;
  1335. if (found)
  1336. {
  1337. if (postTarget)
  1338. {
  1339. iter.Next();
  1340. }
  1341. iter.InsertBefore(compBlock);
  1342. }
  1343. InsertCompBlockToLoopList(loop->parent, compBlock, targetBlock, postTarget);
  1344. }
  1345. }
// Insert a block on the given edge.
// Splits edge (sourceBlock -> sinkBlock) by inserting a new "airlock" block:
// the airlock is linked lexically right before sinkBlock, ends with an
// unconditional Br to sinkBlock's label, and sourceBlock's branch is
// retargeted at the airlock's new label. If sinkBlock's old lexical
// predecessor fell through into it, a compensation block is inserted to
// preserve that edge. Returns the new airlock block.
BasicBlock *
FlowGraph::InsertAirlockBlock(FlowEdge * edge)
{
    BasicBlock * airlockBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * sinkPrevBlock = sinkBlock->prev;
    IR::Instr * sinkPrevBlockLastInstr = sinkPrevBlock->GetLastInstr();
    IR::Instr * sourceLastInstr = sourceBlock->GetLastInstr();

    airlockBlock->loop = sinkBlock->loop;
    airlockBlock->SetBlockNum(this->blockCount++);
#ifdef DBG
    airlockBlock->isAirLockBlock = true;
#endif
    //
    // Fixup block linkage
    //

    // The airlock block is inserted lexically right before sinkBlock
    // (between sinkBlock's previous block and sinkBlock).
    airlockBlock->prev = sinkBlock->prev;
    sinkBlock->prev = airlockBlock;

    airlockBlock->next = sinkBlock;
    airlockBlock->prev->next = airlockBlock;

    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);

    // Add sourceBlock -> airlockBlock
    this->AddEdge(sourceBlock, airlockBlock);

    // Reuse the original edge object as airlockBlock -> sinkBlock.
    edge->SetPred(airlockBlock);
    airlockBlock->AddSucc(edge, this);

    // Fixup data use count
    airlockBlock->SetDataUseCount(1);
    sourceBlock->DecrementDataUseCount();

    //
    // Fixup IR
    //

    // Maintain the instruction region for inlining: the airlock's label gets
    // the same func and byte-code offset as the sink's label.
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;

    IR::LabelInstr *airlockLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);

    sinkPrevBlockLastInstr->InsertAfter(airlockLabel);

    airlockBlock->SetFirstInstr(airlockLabel);
    airlockLabel->SetBasicBlock(airlockBlock);

    // Add br to sinkBlock from airlock block
    IR::BranchInstr *airlockBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    airlockBr->SetByteCodeOffset(sinkLabel);
    airlockLabel->InsertAfter(airlockBr);
    airlockBlock->SetLastInstr(airlockBr);

    airlockLabel->SetByteCodeOffset(sinkLabel);

    // Fixup flow out of sourceBlock: point its branch at the airlock instead
    // of the sink. (A conditional branch that reached the sink by fall-through
    // needs no retargeting, hence the GetTarget() check.)
    IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
    if (sourceBr->IsMultiBranch())
    {
        const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, airlockLabel);
        Assert(replaced);
    }
    else if (sourceBr->GetTarget() == sinkLabel)
    {
        sourceBr->SetTarget(airlockLabel);
    }

    // If sinkPrevBlock fell through into sinkBlock, that fall-through now
    // lands in the airlock; insert a compensation block on the old edge.
    if (!sinkPrevBlockLastInstr->IsBranchInstr() || sinkPrevBlockLastInstr->AsBranchInstr()->HasFallThrough())
    {
        if (!sinkPrevBlock->isDeleted)
        {
            FlowEdge *dstEdge = this->FindEdge(sinkPrevBlock, sinkBlock);
            if (dstEdge) // Possibility that sourceblock may be same as sinkPrevBlock
            {
                BasicBlock* compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert comp block to loop list*/, true);
                compensationBlock->IncrementDataUseCount();
                // We need to skip airlock compensation block in globopt as its inserted while globopt is iteration over the blocks.
                compensationBlock->isAirLockCompensationBlock = true;
            }
        }
    }

#if DBG_DUMP
    this->Dump(true, L"\n After insertion of airlock block \n");
#endif

    return airlockBlock;
}
// Insert a block on the given edge.
// Splits edge (sourceBlock -> sinkBlock) with a compensation block that is
// linked lexically right after sourceBlock and ends with a Br to sinkBlock.
// insertToLoopList controls whether the new block is also registered in loop
// block lists; sinkBlockLoop picks whether it joins the sink's loop (true)
// or the source's loop (false). Returns the new compensation block.
BasicBlock *
FlowGraph::InsertCompensationCodeForBlockMove(FlowEdge * edge, bool insertToLoopList, bool sinkBlockLoop)
{
    BasicBlock * compBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * fallthroughBlock = sourceBlock->next;
    IR::Instr * sourceLastInstr = sourceBlock->GetLastInstr();

    compBlock->SetBlockNum(this->blockCount++);

    if (insertToLoopList)
    {
        // For flow graph edits in
        if (sinkBlockLoop)
        {
            if (sinkBlock->loop && sinkBlock->loop->GetHeadBlock() == sinkBlock)
            {
                // BLUE 531255: sinkblock may be the head block of new loop, we shouldn't insert compensation block to that loop
                // Insert it to all the parent loop lists.
                compBlock->loop = sinkBlock->loop->parent;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false);
            }
            else
            {
                compBlock->loop = sinkBlock->loop;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false); // sinkBlock or fallthroughBlock?
            }
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSink = true;
#endif
        }
        else
        {
            compBlock->loop = sourceBlock->loop;
            InsertCompBlockToLoopList(compBlock->loop, compBlock, sourceBlock, true);
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSource = true;
#endif
        }
    }

    //
    // Fixup block linkage
    //

    // compensation block is inserted right after sourceBlock
    compBlock->next = fallthroughBlock;
    fallthroughBlock->prev = compBlock;

    compBlock->prev = sourceBlock;
    sourceBlock->next = compBlock;

    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);

    // Add sourceBlock -> compBlock
    this->AddEdge(sourceBlock, compBlock);

    // Reuse the original edge object as compBlock -> sinkBlock.
    edge->SetPred(compBlock);
    compBlock->AddSucc(edge, this);

    //
    // Fixup IR
    //

    // Maintain the instruction region for inlining: the new label gets the
    // same func and byte-code offset as the sink's label.
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;

    IR::LabelInstr *compLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);

    sourceLastInstr->InsertAfter(compLabel);
    compBlock->SetFirstInstr(compLabel);
    compLabel->SetBasicBlock(compBlock);

    // Add br to sinkBlock from compensation block
    IR::BranchInstr *compBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    compBr->SetByteCodeOffset(sinkLabel);
    compLabel->InsertAfter(compBr);
    compBlock->SetLastInstr(compBr);

    compLabel->SetByteCodeOffset(sinkLabel);

    // Fixup flow out of sourceBlock. Only a multi-branch needs explicit
    // retargeting here; a conditional branch reached sinkBlock by
    // fall-through, which now lands in the compensation block.
    if (sourceLastInstr->IsBranchInstr())
    {
        IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
        Assert(sourceBr->IsMultiBranch() || sourceBr->IsConditional());
        if (sourceBr->IsMultiBranch())
        {
            const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, compLabel);
            Assert(replaced);
        }
    }

    return compBlock;
}
// Remove every block not reachable from the entry block.
// Reachability is propagated in a single forward pass over the lexical block
// order: a block is kept if some already-visited block has it as a successor.
// NOTE(review): this assumes reachability only flows forward in layout order
// (back-edges target loop headers that are already marked) — confirm against
// how the block list is laid out.
void
FlowGraph::RemoveUnreachableBlocks()
{
    AnalysisAssert(this->blockList);

    // Clear visitation marks from any earlier traversal.
    FOREACH_BLOCK(block, this)
    {
        block->isVisited = false;
    }
    NEXT_BLOCK;

    // The entry block is reachable by definition.
    this->blockList->isVisited = true;

    FOREACH_BLOCK_EDITING(block, this)
    {
        if (block->isVisited)
        {
            // Propagate reachability to everything this block can branch to.
            FOREACH_SUCCESSOR_BLOCK(succ, block)
            {
                succ->isVisited = true;
            } NEXT_SUCCESSOR_BLOCK;
        }
        else
        {
            this->RemoveBlock(block);
        }
    }
    NEXT_BLOCK_EDITING;
}
// If block has no predecessor, remove it.
// A loop header with remaining predecessors is still dead when every one of
// those predecessors comes from inside the loop itself (i.e. only back-edges
// are left). The entry block is never removed. Returns true if the block was
// removed.
bool
FlowGraph::RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt)
{
    bool isDead = false;

    if ((block->GetPredList() == nullptr || block->GetPredList()->Empty()) && block != this->func->m_fg->blockList)
    {
        isDead = true;
    }
    else if (block->isLoopHeader)
    {
        // A dead loop still has back-edges pointing to it...
        isDead = true;
        FOREACH_PREDECESSOR_BLOCK(pred, block)
        {
            // A predecessor outside the loop means the header is reachable.
            if (!block->loop->IsDescendentOrSelf(pred->loop))
            {
                isDead = false;
            }
        } NEXT_PREDECESSOR_BLOCK;
    }

    if (isDead)
    {
        this->RemoveBlock(block, globOpt);
        return true;
    }
    return false;
}
// Peephole a typed (I4 / machine-typed) compare feeding a BrTrue_I4/BrFalse_I4
// into a direct conditional branch on the compare's operands.
//
// Basic pattern, peep:
//     t1 = CmEq a, b
//     BrTrue_I4 $L1, t1
// Into:
//     t1 = 1
//     BrEq $L1, a, b
//     t1 = 0
// Up to two intervening Ld_I4 copies of the compare result are tolerated and
// get the same constant-splitting treatment. Returns the rewritten branch on
// success, nullptr if the pattern does not match (instr is left untouched).
IR::Instr *
FlowGraph::PeepTypedCm(IR::Instr *instr)
{
    IR::Instr * instrNext = instr->GetNextRealInstrOrLabel();

    // find intermediate Lds
    IR::Instr * instrLd = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    IR::Instr * instrLd2 = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd2 = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    // Find BrTrue/BrFalse
    IR::Instr *instrBr;
    bool brIsTrue;
    if (instrNext->m_opcode == Js::OpCode::BrTrue_I4)
    {
        instrBr = instrNext;
        brIsTrue = true;
    }
    else if (instrNext->m_opcode == Js::OpCode::BrFalse_I4)
    {
        instrBr = instrNext;
        brIsTrue = false;
    }
    else
    {
        return nullptr;
    }

    AssertMsg(instrLd || (!instrLd && !instrLd2), "Either instrLd is non-null or both null");

    // if we have intermediate Lds, then make sure pattern is:
    //     t1 = CmEq a, b
    //     t2 = Ld_A t1
    //     BrTrue $L1, t2
    if (instrLd && !instrLd->GetSrc1()->IsEqual(instr->GetDst()))
    {
        return nullptr;
    }

    if (instrLd2 && !instrLd2->GetSrc1()->IsEqual(instrLd->GetDst()))
    {
        return nullptr;
    }

    // Make sure we have:
    //     t1 = CmEq a, b
    //          BrTrue/BrFalse t1
    // i.e. the branch tests the compare result (directly or via the Lds).
    if (!(instr->GetDst()->IsEqual(instrBr->GetSrc1()) || (instrLd && instrLd->GetDst()->IsEqual(instrBr->GetSrc1())) || (instrLd2 && instrLd2->GetDst()->IsEqual(instrBr->GetSrc1()))))
    {
        return nullptr;
    }

    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();

    IR::Instr * instrNew;
    IR::Opnd * tmpOpnd;
    // If a compare source aliases one of the destinations being rewritten to
    // a constant, copy it to a fresh temp first so the branch still sees the
    // original value.
    if (instr->GetDst()->IsEqual(src1) || (instrLd && instrLd->GetDst()->IsEqual(src1)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src1)))
    {
        Assert(src1->IsInt32());

        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src1, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src1 = tmpOpnd;
    }

    if (instr->GetDst()->IsEqual(src2) || (instrLd && instrLd->GetDst()->IsEqual(src2)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src2)))
    {
        Assert(src2->IsInt32());

        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src2, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src2 = tmpOpnd;
    }

    // The branch now compares the original operands directly.
    instrBr->ReplaceSrc1(src1);
    instrBr->SetSrc2(src2);

    // Map the compare opcode to the corresponding branch opcode.
    Js::OpCode newOpcode;
    switch (instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
        newOpcode = Js::OpCode::BrEq_I4;
        break;
    case Js::OpCode::CmGe_I4:
        newOpcode = Js::OpCode::BrGe_I4;
        break;
    case Js::OpCode::CmGt_I4:
        newOpcode = Js::OpCode::BrGt_I4;
        break;
    case Js::OpCode::CmLt_I4:
        newOpcode = Js::OpCode::BrLt_I4;
        break;
    case Js::OpCode::CmLe_I4:
        newOpcode = Js::OpCode::BrLe_I4;
        break;
    case Js::OpCode::CmUnGe_I4:
        newOpcode = Js::OpCode::BrUnGe_I4;
        break;
    case Js::OpCode::CmUnGt_I4:
        newOpcode = Js::OpCode::BrUnGt_I4;
        break;
    case Js::OpCode::CmUnLt_I4:
        newOpcode = Js::OpCode::BrUnLt_I4;
        break;
    case Js::OpCode::CmUnLe_I4:
        newOpcode = Js::OpCode::BrUnLe_I4;
        break;
    case Js::OpCode::CmNeq_I4:
        newOpcode = Js::OpCode::BrNeq_I4;
        break;
    case Js::OpCode::CmEq_A:
        newOpcode = Js::OpCode::BrEq_A;
        break;
    case Js::OpCode::CmGe_A:
        newOpcode = Js::OpCode::BrGe_A;
        break;
    case Js::OpCode::CmGt_A:
        newOpcode = Js::OpCode::BrGt_A;
        break;
    case Js::OpCode::CmLt_A:
        newOpcode = Js::OpCode::BrLt_A;
        break;
    case Js::OpCode::CmLe_A:
        newOpcode = Js::OpCode::BrLe_A;
        break;
    case Js::OpCode::CmUnGe_A:
        newOpcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::CmUnGt_A:
        newOpcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmUnLt_A:
        newOpcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::CmUnLe_A:
        newOpcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::CmNeq_A:
        newOpcode = Js::OpCode::BrNeq_A;
        break;
    case Js::OpCode::CmSrEq_A:
        newOpcode = Js::OpCode::BrSrEq_A;
        break;
    case Js::OpCode::CmSrNeq_A:
        newOpcode = Js::OpCode::BrSrNeq_A;
        break;
    default:
        newOpcode = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }

    instrBr->m_opcode = newOpcode;

    // Split each result def into a 1-store on the branch-taken path and a
    // 0-store on the fall-through path (values swapped when the original
    // branch was BrFalse, in which case the branch is inverted).
    if (brIsTrue)
    {
        instr->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);
            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }
    else
    {
        instrBr->AsBranchInstr()->Invert();

        instr->SetSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);
            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }

    return instrBr;
}
  1773. IR::Instr *
  1774. FlowGraph::PeepCm(IR::Instr *instr)
  1775. {
  1776. // Basic pattern, peep:
  1777. // t1 = CmEq a, b
  1778. // t2 = Ld_A t1
  1779. // BrTrue $L1, t2
  1780. // Into:
  1781. // t1 = True
  1782. // t2 = True
  1783. // BrEq $L1, a, b
  1784. // t1 = False
  1785. // t2 = False
  1786. //
  1787. // The true/false Ld_A's will most likely end up being dead-stores...
  1788. // Alternate Pattern
  1789. // t1= CmEq a, b
  1790. // BrTrue $L1, t1
  1791. // Into:
  1792. // BrEq $L1, a, b
  1793. Func *func = instr->m_func;
  1794. // Find Ld_A
  1795. IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();
  1796. IR::Instr *inlineeEndInstr = nullptr;
  1797. IR::Instr *instrNew;
  1798. IR::Instr *instrLd = nullptr, *instrLd2 = nullptr;
  1799. IR::Instr *instrByteCode = instrNext;
  1800. bool ldFound = false;
  1801. IR::Opnd *brSrc = instr->GetDst();
  1802. if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(instr->GetDst()))
  1803. {
  1804. ldFound = true;
  1805. instrLd = instrNext;
  1806. brSrc = instrNext->GetDst();
  1807. if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
  1808. {
  1809. return nullptr;
  1810. }
  1811. instrNext = instrLd->GetNextRealInstrOrLabel();
  1812. // Is there a second Ld_A?
  1813. if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(brSrc))
  1814. {
  1815. // We have:
  1816. // t1 = Cm
  1817. // t2 = t1 // ldSrc = t1
  1818. // t3 = t2 // ldDst = t3
  1819. // BrTrue/BrFalse t3
  1820. instrLd2 = instrNext;
  1821. brSrc = instrLd2->GetDst();
  1822. instrNext = instrLd2->GetNextRealInstrOrLabel();
  1823. if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
  1824. {
  1825. return nullptr;
  1826. }
  1827. }
  1828. }
  1829. // Skip over InlineeEnd
  1830. if (instrNext->m_opcode == Js::OpCode::InlineeEnd)
  1831. {
  1832. inlineeEndInstr = instrNext;
  1833. instrNext = inlineeEndInstr->GetNextRealInstrOrLabel();
  1834. }
  1835. // Find BrTrue/BrFalse
  1836. bool brIsTrue;
  1837. if (instrNext->m_opcode == Js::OpCode::BrTrue_A)
  1838. {
  1839. brIsTrue = true;
  1840. }
  1841. else if (instrNext->m_opcode == Js::OpCode::BrFalse_A)
  1842. {
  1843. brIsTrue = false;
  1844. }
  1845. else
  1846. {
  1847. return nullptr;
  1848. }
  1849. IR::Instr *instrBr = instrNext;
  1850. // Make sure we have:
  1851. // t1 = Ld_A
  1852. // BrTrue/BrFalse t1
  1853. if (!instr->GetDst()->IsEqual(instrBr->GetSrc1()) && !brSrc->IsEqual(instrBr->GetSrc1()))
  1854. {
  1855. return nullptr;
  1856. }
  1857. //
  1858. // We have a match. Generate the new branch
  1859. //
  1860. // BrTrue/BrFalse t1
  1861. // Keep a copy of the inliner func and the bytecode offset of the original BrTrue/BrFalse if we end up inserting a new branch out of the inlinee,
  1862. // and sym id of t1 for proper restoration on a bailout before the branch.
  1863. Func* origBrFunc = instrBr->m_func;
  1864. uint32 origBrByteCodeOffset = instrBr->GetByteCodeOffset();
  1865. uint32 origBranchSrcSymId = instrBr->GetSrc1()->GetStackSym()->m_id;
  1866. instrBr->Unlink();
  1867. instr->InsertBefore(instrBr);
  1868. instrBr->ClearByteCodeOffset();
  1869. instrBr->SetByteCodeOffset(instr);
  1870. instrBr->FreeSrc1();
  1871. instrBr->SetSrc1(instr->UnlinkSrc1());
  1872. instrBr->SetSrc2(instr->UnlinkSrc2());
  1873. instrBr->m_func = instr->m_func;
  1874. Js::OpCode newOpcode;
  1875. switch(instr->m_opcode)
  1876. {
  1877. case Js::OpCode::CmEq_A:
  1878. newOpcode = Js::OpCode::BrEq_A;
  1879. break;
  1880. case Js::OpCode::CmGe_A:
  1881. newOpcode = Js::OpCode::BrGe_A;
  1882. break;
  1883. case Js::OpCode::CmGt_A:
  1884. newOpcode = Js::OpCode::BrGt_A;
  1885. break;
  1886. case Js::OpCode::CmLt_A:
  1887. newOpcode = Js::OpCode::BrLt_A;
  1888. break;
  1889. case Js::OpCode::CmLe_A:
  1890. newOpcode = Js::OpCode::BrLe_A;
  1891. break;
  1892. case Js::OpCode::CmUnGe_A:
  1893. newOpcode = Js::OpCode::BrUnGe_A;
  1894. break;
  1895. case Js::OpCode::CmUnGt_A:
  1896. newOpcode = Js::OpCode::BrUnGt_A;
  1897. break;
  1898. case Js::OpCode::CmUnLt_A:
  1899. newOpcode = Js::OpCode::BrUnLt_A;
  1900. break;
  1901. case Js::OpCode::CmUnLe_A:
  1902. newOpcode = Js::OpCode::BrUnLe_A;
  1903. break;
  1904. case Js::OpCode::CmNeq_A:
  1905. newOpcode = Js::OpCode::BrNeq_A;
  1906. break;
  1907. case Js::OpCode::CmSrEq_A:
  1908. newOpcode = Js::OpCode::BrSrEq_A;
  1909. break;
  1910. case Js::OpCode::CmSrNeq_A:
  1911. newOpcode = Js::OpCode::BrSrNeq_A;
  1912. break;
  1913. default:
  1914. Assert(UNREACHED);
  1915. __assume(UNREACHED);
  1916. }
  1917. instrBr->m_opcode = newOpcode;
  1918. IR::AddrOpnd* trueOpnd = IR::AddrOpnd::New(func->GetScriptContext()->GetLibrary()->GetTrue(), IR::AddrOpndKindDynamicVar, func, true);
  1919. IR::AddrOpnd* falseOpnd = IR::AddrOpnd::New(func->GetScriptContext()->GetLibrary()->GetFalse(), IR::AddrOpndKindDynamicVar, func, true);
  1920. trueOpnd->SetValueType(ValueType::Boolean);
  1921. falseOpnd->SetValueType(ValueType::Boolean);
  1922. if (ldFound)
  1923. {
  1924. // Split Ld_A into "Ld_A TRUE"/"Ld_A FALSE"
  1925. if (brIsTrue)
  1926. {
  1927. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
  1928. instrNew->SetByteCodeOffset(instrBr);
  1929. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1930. instrBr->InsertBefore(instrNew);
  1931. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), trueOpnd, instrBr->m_func);
  1932. instrNew->SetByteCodeOffset(instrBr);
  1933. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1934. instrBr->InsertBefore(instrNew);
  1935. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrLd->m_func);
  1936. instrLd->InsertBefore(instrNew);
  1937. instrNew->SetByteCodeOffset(instrLd);
  1938. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1939. instrLd->ReplaceSrc1(falseOpnd);
  1940. if (instrLd2)
  1941. {
  1942. instrLd2->ReplaceSrc1(falseOpnd);
  1943. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd2->GetDst(), trueOpnd, instrBr->m_func);
  1944. instrBr->InsertBefore(instrNew);
  1945. instrNew->SetByteCodeOffset(instrBr);
  1946. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1947. }
  1948. }
  1949. else
  1950. {
  1951. instrBr->AsBranchInstr()->Invert();
  1952. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrBr->m_func);
  1953. instrBr->InsertBefore(instrNew);
  1954. instrNew->SetByteCodeOffset(instrBr);
  1955. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1956. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), falseOpnd, instrBr->m_func);
  1957. instrBr->InsertBefore(instrNew);
  1958. instrNew->SetByteCodeOffset(instrBr);
  1959. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1960. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrLd->m_func);
  1961. instrLd->InsertBefore(instrNew);
  1962. instrNew->SetByteCodeOffset(instrLd);
  1963. instrLd->ReplaceSrc1(trueOpnd);
  1964. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1965. if (instrLd2)
  1966. {
  1967. instrLd2->ReplaceSrc1(trueOpnd);
  1968. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
  1969. instrBr->InsertBefore(instrNew);
  1970. instrNew->SetByteCodeOffset(instrBr);
  1971. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1972. }
  1973. }
  1974. }
  1975. // Fix InlineeEnd
  1976. if (inlineeEndInstr)
  1977. {
  1978. this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr, instrByteCode , origBrFunc, origBrByteCodeOffset, origBranchSrcSymId);
  1979. }
  1980. if (instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
  1981. {
  1982. Assert(!instrBr->AsBranchInstr()->HasByteCodeReg());
  1983. StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;
  1984. instrBr->AsBranchInstr()->SetByteCodeReg(dstSym->GetByteCodeRegSlot());
  1985. }
  1986. instr->Remove();
  1987. //
  1988. // Try optimizing through a second branch.
  1989. // Peep:
  1990. //
  1991. // t2 = True;
  1992. // BrTrue $L1
  1993. // ...
  1994. // L1:
  1995. // t1 = Ld_A t2
  1996. // BrTrue $L2
  1997. //
  1998. // Into:
  1999. // t2 = True;
  2000. // t1 = True;
  2001. // BrTrue $L2 <---
  2002. // ...
  2003. // L1:
  2004. // t1 = Ld_A t2
  2005. // BrTrue $L2
  2006. //
  2007. // This cleanup helps expose second level Cm peeps.
  2008. IR::Instr *instrLd3 = instrBr->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
  2009. // Skip over branch to branch
  2010. while (instrLd3->m_opcode == Js::OpCode::Br)
  2011. {
  2012. instrLd3 = instrLd3->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
  2013. }
  2014. // Find Ld_A
  2015. if (instrLd3->m_opcode != Js::OpCode::Ld_A)
  2016. {
  2017. return instrBr;
  2018. }
  2019. IR::Instr *instrBr2 = instrLd3->GetNextRealInstrOrLabel();
  2020. IR::Instr *inlineeEndInstr2 = nullptr;
  2021. // InlineeEnd?
  2022. // REVIEW: Can we handle 2 inlineeEnds?
  2023. if (instrBr2->m_opcode == Js::OpCode::InlineeEnd && !inlineeEndInstr)
  2024. {
  2025. inlineeEndInstr2 = instrBr2;
  2026. instrBr2 = instrBr2->GetNextRealInstrOrLabel();
  2027. }
  2028. // Find branch
  2029. bool brIsTrue2;
  2030. if (instrBr2->m_opcode == Js::OpCode::BrTrue_A)
  2031. {
  2032. brIsTrue2 = true;
  2033. }
  2034. else if (instrBr2->m_opcode == Js::OpCode::BrFalse_A)
  2035. {
  2036. brIsTrue2 = false;
  2037. }
  2038. else
  2039. {
  2040. return nullptr;
  2041. }
  2042. // Make sure Ld_A operates on the right tmps.
  2043. if (!instrLd3->GetDst()->IsEqual(instrBr2->GetSrc1()) || !brSrc->IsEqual(instrLd3->GetSrc1()))
  2044. {
  2045. return nullptr;
  2046. }
  2047. if (instrLd3->GetDst()->IsEqual(instrBr->GetSrc1()) || instrLd3->GetDst()->IsEqual(instrBr->GetSrc2()))
  2048. {
  2049. return nullptr;
  2050. }
  2051. // Make sure that the reg we're assigning to is not live in the intervening instructions (if this is a forward branch).
  2052. if (instrLd3->GetByteCodeOffset() > instrBr->GetByteCodeOffset())
  2053. {
  2054. StackSym *symLd3 = instrLd3->GetDst()->AsRegOpnd()->m_sym;
  2055. if (IR::Instr::FindRegUseInRange(symLd3, instrBr->m_next, instrLd3))
  2056. {
  2057. return nullptr;
  2058. }
  2059. }
  2060. //
  2061. // We have a match!
  2062. //
  2063. if(inlineeEndInstr2)
  2064. {
  2065. origBrFunc = instrBr2->m_func;
  2066. origBrByteCodeOffset = instrBr2->GetByteCodeOffset();
  2067. origBranchSrcSymId = instrBr2->GetSrc1()->GetStackSym()->m_id;
  2068. }
  2069. // Fix Ld_A
  2070. if (brIsTrue)
  2071. {
  2072. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), trueOpnd, instrBr->m_func);
  2073. instrBr->InsertBefore(instrNew);
  2074. instrNew->SetByteCodeOffset(instrBr);
  2075. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2076. }
  2077. else
  2078. {
  2079. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), falseOpnd, instrBr->m_func);
  2080. instrBr->InsertBefore(instrNew);
  2081. instrNew->SetByteCodeOffset(instrBr);
  2082. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2083. }
  2084. IR::LabelInstr *brTarget2;
  2085. // Retarget branch
  2086. if (brIsTrue2 == brIsTrue)
  2087. {
  2088. brTarget2 = instrBr2->AsBranchInstr()->GetTarget();
  2089. }
  2090. else
  2091. {
  2092. brTarget2 = IR::LabelInstr::New(Js::OpCode::Label, instrBr2->m_func);
  2093. brTarget2->SetByteCodeOffset(instrBr2->m_next);
  2094. instrBr2->InsertAfter(brTarget2);
  2095. }
  2096. instrBr->AsBranchInstr()->SetTarget(brTarget2);
  2097. // InlineeEnd?
  2098. if (inlineeEndInstr2)
  2099. {
  2100. this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr2, instrByteCode, origBrFunc, origBrByteCodeOffset, origBranchSrcSymId);
  2101. }
  2102. return instrBr;
  2103. }
// Helper for PeepsCm code.
//
// We've skipped some InlineeEnd. Globopt expects to see these on all flow
// paths out of the inlinee. Insert an InlineeEnd on the new path:
//     BrEq $L1, a, b
// Becomes:
//     BrNeq $L2, a, b
//     InlineeEnd
//     Br $L1
// L2:
// The cloned InlineeEnd is preceded by a ByteCodeUses of the original branch
// source sym so a bailout before the branch restores it.
void
FlowGraph::InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, uint32 origBranchSrcSymId)
{
    // Invert the branch; its old target is reached through the new Br below.
    instrBr->AsBranchInstr()->Invert();

    IR::BranchInstr *newBr = IR::BranchInstr::New(Js::OpCode::Br, instrBr->AsBranchInstr()->GetTarget(), origBrFunc);
    newBr->SetByteCodeOffset(origByteCodeOffset);
    instrBr->InsertAfter(newBr);

    IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, instrBr->m_func);
    newLabel->SetByteCodeOffset(instrBytecode);
    newBr->InsertAfter(newLabel);
    instrBr->AsBranchInstr()->SetTarget(newLabel);

    // Clone the InlineeEnd (same srcs) onto the new path.
    IR::Instr *newInlineeEnd = IR::Instr::New(Js::OpCode::InlineeEnd, inlineeEndInstr->m_func);
    newInlineeEnd->SetSrc1(inlineeEndInstr->GetSrc1());
    newInlineeEnd->SetSrc2(inlineeEndInstr->GetSrc2());
    newInlineeEnd->SetByteCodeOffset(instrBytecode);
    newInlineeEnd->SetIsCloned(true);  // Mark it as cloned - this is used later by the inlinee args optimization
    newBr->InsertBefore(newInlineeEnd);

    // Keep the original branch source sym upward-exposed for bailout.
    IR::ByteCodeUsesInstr * useOrigBranchSrcInstr = IR::ByteCodeUsesInstr::New(origBrFunc);
    useOrigBranchSrcInstr->SetByteCodeOffset(origByteCodeOffset);
    useOrigBranchSrcInstr->byteCodeUpwardExposedUsed = JitAnew(origBrFunc->m_alloc, BVSparse<JitArenaAllocator>,origBrFunc->m_alloc);
    useOrigBranchSrcInstr->byteCodeUpwardExposedUsed->Set(origBranchSrcSymId);
    newBr->InsertBefore(useOrigBranchSrcInstr);

    uint newBrFnNumber = newBr->m_func->m_workItem->GetFunctionNumber();
    Assert(newBrFnNumber == origBrFunc->m_workItem->GetFunctionNumber());

    // The function numbers of the new branch and the inlineeEnd instruction should be different (ensuring that the new branch is not added in the inlinee but in the inliner).
    // Only case when they can be same is recursive calls - inlinee and inliner are the same function
    Assert(newBrFnNumber != inlineeEndInstr->m_func->m_workItem->GetFunctionNumber() ||
        newBrFnNumber == inlineeEndInstr->m_func->GetParentFunc()->m_workItem->GetFunctionNumber());
}
  2144. BasicBlock *
  2145. BasicBlock::New(FlowGraph * graph)
  2146. {
  2147. BasicBlock * block;
  2148. block = JitAnew(graph->alloc, BasicBlock, graph->alloc, graph->GetFunc());
  2149. return block;
  2150. }
// Record an incoming edge on this block's predecessor list.
void
BasicBlock::AddPred(FlowEdge * edge, FlowGraph * graph)
{
    this->predList.Prepend(graph->alloc, edge);
}
// Record an outgoing edge on this block's successor list.
void
BasicBlock::AddSucc(FlowEdge * edge, FlowGraph * graph)
{
    this->succList.Prepend(graph->alloc, edge);
}
// Remove the edge from 'block' into this block, also cleaning the matching
// successor entry; the edge is freed (not moved to a dead list).
void
BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph)
{
    this->RemovePred(block, graph, true, false);
}
// Remove the edge from this block to 'block', also cleaning the matching
// predecessor entry; the edge is freed (not moved to a dead list).
void
BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph)
{
    this->RemoveSucc(block, graph, true, false);
}
// Like RemovePred, but the edge is moved to this block's dead-predecessor
// list instead of being freed.
void
BasicBlock::RemoveDeadPred(BasicBlock *block, FlowGraph * graph)
{
    this->RemovePred(block, graph, true, true);
}
// Like RemoveSucc, but the edge is moved to this block's dead-successor
// list instead of being freed.
void
BasicBlock::RemoveDeadSucc(BasicBlock *block, FlowGraph * graph)
{
    this->RemoveSucc(block, graph, true, true);
}
// Remove (or move to the dead list, when moveToDead) the predecessor edge
// coming from 'block'. When doCleanSucc, also removes the matching entry
// from 'block''s successor list. If removing this edge leaves a loop header
// with a single remaining predecessor, its loop is marked dead (only the
// back-edge is left). Asserts if no such edge exists.
void
BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            // The edge lives in this block's pred list, so its succ end is
            // this block.
            BasicBlock *blockSucc = edge->GetSucc();
            if (moveToDead)
            {
                iter.MoveCurrentTo(this->GetDeadPredList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanSucc)
            {
                block->RemoveSucc(this, graph, false, moveToDead);
            }
            if (blockSucc->isLoopHeader && blockSucc->loop && blockSucc->GetPredList()->HasOne())
            {
                Loop *loop = blockSucc->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
// Remove (or move to the dead list, when moveToDead) the successor edge
// going to 'block'. When doCleanPred, also removes the matching entry from
// 'block''s predecessor list. If removing this edge leaves 'block' (a loop
// header) with a single remaining predecessor, its loop is marked dead
// (only the back-edge is left). Asserts if no such edge exists.
void
BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            if (moveToDead)
            {
                iter.MoveCurrentTo(this->GetDeadSuccList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanPred)
            {
                block->RemovePred(this, graph, false, moveToDead);
            }
            if (block->isLoopHeader && block->loop && block->GetPredList()->HasOne())
            {
                Loop *loop = block->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
  2240. void
  2241. BasicBlock::UnlinkPred(BasicBlock *block)
  2242. {
  2243. this->UnlinkPred(block, true);
  2244. }
  2245. void
  2246. BasicBlock::UnlinkSucc(BasicBlock *block)
  2247. {
  2248. this->UnlinkSucc(block, true);
  2249. }
// Unlink the edge coming from 'block' out of this block's pred list without
// freeing it (the edge memory is left untouched, unlike RemovePred).
//   doCleanSucc - also unlink the matching successor entry on 'block'.
// Asserts if no such edge exists.
void
BasicBlock::UnlinkPred(BasicBlock *block, bool doCleanSucc)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            iter.UnlinkCurrent();
            if (doCleanSucc)
            {
                // false: don't recurse back into UnlinkPred for this edge.
                block->UnlinkSucc(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
// Unlink the edge going to 'block' out of this block's succ list without
// freeing it (the edge memory is left untouched, unlike RemoveSucc).
//   doCleanPred - also unlink the matching predecessor entry on 'block'.
// Asserts if no such edge exists.
void
BasicBlock::UnlinkSucc(BasicBlock *block, bool doCleanPred)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            iter.UnlinkCurrent();
            if (doCleanPred)
            {
                // false: don't recurse back into UnlinkSucc for this edge.
                block->UnlinkPred(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
  2284. bool
  2285. BasicBlock::IsLandingPad()
  2286. {
  2287. BasicBlock * nextBlock = this->GetNext();
  2288. return nextBlock && nextBlock->loop && nextBlock->isLoopHeader && nextBlock->loop->landingPad == this;
  2289. }
// Remove 'instr' from the IR as part of deleting a dead block.
// Returns the instruction the caller should treat as the new "last processed"
// instruction (the ByteCodeUses replacement, the kept instruction itself, or
// the previous instruction when 'instr' was removed outright).
IR::Instr *
FlowGraph::RemoveInstr(IR::Instr *instr, GlobOpt * globOpt)
{
    IR::Instr *instrPrev = instr->m_prev;
    if (globOpt)
    {
        // Removing block during glob opt. Need to maintain the graph so that
        // bailout will record the byte code use in case the dead code is exposed
        // by dyno-pogo optimization (where bailout need the byte code uses from
        // the dead blocks where it may not be dead after bailing out)
        if (instr->IsLabelInstr())
        {
            // Keep the label (branches may still reference it), but it is no
            // longer a loop top.
            instr->AsLabelInstr()->m_isLoopTop = false;
            return instr;
        }
        else if (instr->IsByteCodeUsesInstr())
        {
            // Already reduced to byte code uses; nothing more to strip.
            return instr;
        }
        Js::OpCode opcode = instr->m_opcode;
        // Replace the instruction with a ByteCodeUses that records its byte
        // code level uses for bailout; may return nullptr if there are none.
        IR::ByteCodeUsesInstr * newByteCodeUseInstr = globOpt->ConvertToByteCodeUses(instr);
        if (newByteCodeUseInstr != nullptr)
        {
            // We don't care about property used in these instruction
            // It is only necessary for field copy prop so that we will keep the implicit call
            // up to the copy prop location.
            newByteCodeUseInstr->propertySymUse = nullptr;
            if (opcode == Js::OpCode::Yield)
            {
                // A removed Yield leaves behind a resume label; find it and
                // drop it from the dead yield-offset resume label list.
                IR::Instr *instrLabel = newByteCodeUseInstr->m_next;
                while (instrLabel->m_opcode != Js::OpCode::Label)
                {
                    instrLabel = instrLabel->m_next;
                }
                func->RemoveDeadYieldOffsetResumeLabel(instrLabel->AsLabelInstr());
                instrLabel->AsLabelInstr()->m_hasNonBranchRef = false;
            }
            // Save the last instruction to update the block with
            return newByteCodeUseInstr;
        }
        else
        {
            return instrPrev;
        }
    }
    else
    {
        // Not in glob opt: just delete the instruction.
        instr->Remove();
        return instrPrev;
    }
}
// Remove 'block' from the flow graph: strip its instructions (or convert them
// to byte code uses under glob opt), detach its edges, and unlink its loop
// from the loop list if it was a loop header.
//   globOpt    - non-null when called during glob opt; edges and the block are
//                moved to dead lists instead of being freed.
//   tailDuping - mark the block dead when removing it as part of tail
//                duplication.
void
FlowGraph::RemoveBlock(BasicBlock *block, GlobOpt * globOpt, bool tailDuping)
{
    Assert(!block->isDead && !block->isDeleted);
    IR::Instr * lastInstr = nullptr;
    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        if (instr->m_opcode == Js::OpCode::FunctionExit)
        {
            // Removing FunctionExit causes problems downstream...
            // We could change the opcode, or have FunctionEpilog/FunctionExit to get
            // rid of the epilog.
            break;
        }
        if (instr == block->GetFirstInstr())
        {
            // The block label is kept; it just stops being a loop top.
            Assert(instr->IsLabelInstr());
            instr->AsLabelInstr()->m_isLoopTop = false;
        }
        else
        {
            lastInstr = this->RemoveInstr(instr, globOpt);
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;
    if (lastInstr)
    {
        block->SetLastInstr(lastInstr);
    }
    // Detach this block from all neighbors. The edges themselves are removed
    // from the neighbors' lists; this block's own lists are handled below.
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetPredList())
    {
        edge->GetPred()->RemoveSucc(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetSuccList())
    {
        edge->GetSucc()->RemovePred(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;
    if (block->isLoopHeader && this->loopList)
    {
        // If loop graph is built, remove loop from loopList
        Loop **pPrevLoop = &this->loopList;
        while (*pPrevLoop != block->loop)
        {
            pPrevLoop = &((*pPrevLoop)->next);
        }
        *pPrevLoop = (*pPrevLoop)->next;
        this->hasLoop = (this->loopList != nullptr);
    }
    if (globOpt != nullptr)
    {
        // Keep the (now dead) edges reachable for bailout bookkeeping.
        block->isDead = true;
        block->GetPredList()->MoveTo(block->GetDeadPredList());
        block->GetSuccList()->MoveTo(block->GetDeadSuccList());
    }
    if (tailDuping)
    {
        block->isDead = true;
    }
    block->isDeleted = true;
    block->SetDataUseCount(0);
}
  2401. void
  2402. BasicBlock::UnlinkInstr(IR::Instr * instr)
  2403. {
  2404. Assert(this->Contains(instr));
  2405. Assert(this->GetFirstInstr() != this->GetLastInstr());
  2406. if (instr == this->GetFirstInstr())
  2407. {
  2408. Assert(!this->GetFirstInstr()->IsLabelInstr());
  2409. this->SetFirstInstr(instr->m_next);
  2410. }
  2411. else if (instr == this->GetLastInstr())
  2412. {
  2413. this->SetLastInstr(instr->m_prev);
  2414. }
  2415. instr->Unlink();
  2416. }
  2417. void
  2418. BasicBlock::RemoveInstr(IR::Instr * instr)
  2419. {
  2420. Assert(this->Contains(instr));
  2421. if (instr == this->GetFirstInstr())
  2422. {
  2423. this->SetFirstInstr(instr->m_next);
  2424. }
  2425. else if (instr == this->GetLastInstr())
  2426. {
  2427. this->SetLastInstr(instr->m_prev);
  2428. }
  2429. instr->Remove();
  2430. }
  2431. void
  2432. BasicBlock::InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr)
  2433. {
  2434. Assert(this->Contains(beforeThisInstr));
  2435. beforeThisInstr->InsertBefore(newInstr);
  2436. if(this->GetFirstInstr() == beforeThisInstr)
  2437. {
  2438. Assert(!beforeThisInstr->IsLabelInstr());
  2439. this->SetFirstInstr(newInstr);
  2440. }
  2441. }
  2442. void
  2443. BasicBlock::InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr)
  2444. {
  2445. Assert(this->Contains(afterThisInstr));
  2446. afterThisInstr->InsertAfter(newInstr);
  2447. if (this->GetLastInstr() == afterThisInstr)
  2448. {
  2449. Assert(afterThisInstr->HasFallThrough());
  2450. this->SetLastInstr(newInstr);
  2451. }
  2452. }
  2453. void
  2454. BasicBlock::InsertAfter(IR::Instr *newInstr)
  2455. {
  2456. Assert(this->GetLastInstr()->HasFallThrough());
  2457. this->GetLastInstr()->InsertAfter(newInstr);
  2458. this->SetLastInstr(newInstr);
  2459. }
// Mark this loop, and every enclosing loop, as containing a call.
// Invariant maintained: if a loop has hasCall set, so do all its ancestors,
// which is why the walk can stop at the first loop already marked (the DBG
// block verifies the remaining ancestors).
void
Loop::SetHasCall()
{
    Loop * current = this;
    do
    {
        if (current->hasCall)
        {
#if DBG
            // Check the invariant on the rest of the parent chain.
            current = current->parent;
            while (current)
            {
                Assert(current->hasCall);
                current = current->parent;
            }
#endif
            break;
        }
        current->hasCall = true;
        current = current->parent;
    }
    while (current != nullptr);
}
  2483. void
  2484. Loop::SetImplicitCallFlags(Js::ImplicitCallFlags newFlags)
  2485. {
  2486. Loop * current = this;
  2487. do
  2488. {
  2489. if ((current->implicitCallFlags & newFlags) == newFlags)
  2490. {
  2491. #if DBG
  2492. current = current->parent;
  2493. while (current)
  2494. {
  2495. Assert((current->implicitCallFlags & newFlags) == newFlags);
  2496. current = current->parent;
  2497. }
  2498. #endif
  2499. break;
  2500. }
  2501. newFlags = (Js::ImplicitCallFlags)(implicitCallFlags | newFlags);
  2502. current->implicitCallFlags = newFlags;
  2503. current = current->parent;
  2504. }
  2505. while (current != nullptr);
  2506. }
  2507. Js::ImplicitCallFlags
  2508. Loop::GetImplicitCallFlags()
  2509. {
  2510. if (this->implicitCallFlags == Js::ImplicitCall_HasNoInfo)
  2511. {
  2512. if (this->parent == nullptr)
  2513. {
  2514. // We don't have any information, and we don't have any parent, so just assume that there aren't any implicit calls
  2515. this->implicitCallFlags = Js::ImplicitCall_None;
  2516. }
  2517. else
  2518. {
  2519. // We don't have any information, get it from the parent and hope for the best
  2520. this->implicitCallFlags = this->parent->GetImplicitCallFlags();
  2521. }
  2522. }
  2523. return this->implicitCallFlags;
  2524. }
// Whether field copy prop may run in this loop, based on the loop's implicit
// call flags.
bool
Loop::CanDoFieldCopyProp()
{
#if DBG_DUMP
    // Trace the case where the only implicit calls recorded are external ones
    // (the actual gating decision is made by ImplicitCallFlagsAllowOpts below).
    if (((this->implicitCallFlags & ~(Js::ImplicitCall_External)) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(L"fieldcopyprop disabled because external: loop count: %d", GetLoopNumber());
        GetFunc()->GetJnFunction()->DumpFullFunctionName();
        Output::Print(L"\n");
        Output::Flush();
    }
#endif
    return GlobOpt::ImplicitCallFlagsAllowOpts(this);
}
  2540. bool
  2541. Loop::CanDoFieldHoist()
  2542. {
  2543. // We can do field hoist wherever we can do copy prop
  2544. return CanDoFieldCopyProp();
  2545. }
  2546. bool
  2547. Loop::CanHoistInvariants()
  2548. {
  2549. Func * func = this->GetHeadBlock()->firstInstr->m_func->GetTopFunc();
  2550. if (PHASE_OFF(Js::InvariantsPhase, func))
  2551. {
  2552. return false;
  2553. }
  2554. return true;
  2555. }
  2556. IR::LabelInstr *
  2557. Loop::GetLoopTopInstr() const
  2558. {
  2559. IR::LabelInstr * instr = nullptr;
  2560. if (this->topFunc->isFlowGraphValid)
  2561. {
  2562. instr = this->GetHeadBlock()->GetFirstInstr()->AsLabelInstr();
  2563. }
  2564. else
  2565. {
  2566. // Flowgraph gets torn down after the globopt, so can't get the loopTop from the head block.
  2567. instr = this->loopTopLabel;
  2568. }
  2569. if (instr)
  2570. {
  2571. Assert(instr->m_isLoopTop);
  2572. }
  2573. return instr;
  2574. }
  2575. void
  2576. Loop::SetLoopTopInstr(IR::LabelInstr * loopTop)
  2577. {
  2578. this->loopTopLabel = loopTop;
  2579. }
  2580. #if DBG_DUMP
  2581. uint
  2582. Loop::GetLoopNumber() const
  2583. {
  2584. IR::LabelInstr * loopTopInstr = this->GetLoopTopInstr();
  2585. if (loopTopInstr->IsProfiledLabelInstr())
  2586. {
  2587. return loopTopInstr->AsProfiledLabelInstr()->loopNum;
  2588. }
  2589. return Js::LoopHeader::NoLoop;
  2590. }
  2591. bool
  2592. BasicBlock::Contains(IR::Instr * instr)
  2593. {
  2594. FOREACH_INSTR_IN_BLOCK(blockInstr, this)
  2595. {
  2596. if (instr == blockInstr)
  2597. {
  2598. return true;
  2599. }
  2600. }
  2601. NEXT_INSTR_IN_BLOCK;
  2602. return false;
  2603. }
  2604. #endif
  2605. FlowEdge *
  2606. FlowEdge::New(FlowGraph * graph)
  2607. {
  2608. FlowEdge * edge;
  2609. edge = JitAnew(graph->alloc, FlowEdge);
  2610. return edge;
  2611. }
  2612. bool
  2613. Loop::IsDescendentOrSelf(Loop const * loop) const
  2614. {
  2615. Loop const * currentLoop = loop;
  2616. while (currentLoop != nullptr)
  2617. {
  2618. if (currentLoop == this)
  2619. {
  2620. return true;
  2621. }
  2622. currentLoop = currentLoop->parent;
  2623. }
  2624. return false;
  2625. }
// Remove 'instr' while keeping the flow graph's block bookkeeping consistent:
// instructions that start or end a block must be removed through their block
// so first/last pointers get fixed up.
void FlowGraph::SafeRemoveInstr(IR::Instr *instr)
{
    BasicBlock *block;
    if (instr->m_next->IsLabelInstr())
    {
        // 'instr' is the last instruction of its block (the next label starts
        // the following block); route through the owning block.
        block = instr->m_next->AsLabelInstr()->GetBasicBlock()->GetPrev();
        block->RemoveInstr(instr);
    }
    else if (instr->IsLabelInstr())
    {
        // 'instr' is a block's first instruction.
        block = instr->AsLabelInstr()->GetBasicBlock();
        block->RemoveInstr(instr);
    }
    else
    {
        // Mid-block instruction: no block pointer updates required.
        Assert(!instr->EndsBasicBlock() && !instr->StartsBasicBlock());
        instr->Remove();
    }
}
// Determine whether 'src' is known to hold an unsigned value: either a
// non-negative integer constant, or the result of an unsigned shift by zero
// (ShrU x, 0). On success, *pShrSrc1 receives the operand to use in its place
// ('src' itself for constants, the ShrU's first source otherwise).
bool FlowGraph::IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1)
{
    // Look for an unsigned constant, or the result of an unsigned shift by zero
    if (!src->IsRegOpnd())
    {
        return false;
    }
    // Need a single def to reason about the producing instruction.
    if (!src->AsRegOpnd()->m_sym->IsSingleDef())
    {
        return false;
    }
    if (src->AsRegOpnd()->m_sym->IsIntConst())
    {
        // A non-negative constant is unsigned as-is.
        int32 intConst = src->AsRegOpnd()->m_sym->GetIntConstValue();
        if (intConst >= 0)
        {
            *pShrSrc1 = src;
            return true;
        }
        else
        {
            return false;
        }
    }
    // Otherwise the single def must be ShrU x, 0, whose result is the
    // unsigned reinterpretation of x.
    IR::Instr * shrUInstr = src->AsRegOpnd()->m_sym->GetInstrDef();
    if (shrUInstr->m_opcode != Js::OpCode::ShrU_A)
    {
        return false;
    }
    IR::Opnd *shrCnt = shrUInstr->GetSrc2();
    // The shift count must be the constant zero.
    if (!shrCnt->IsRegOpnd() || !shrCnt->AsRegOpnd()->m_sym->IsTaggableIntConst() || shrCnt->AsRegOpnd()->m_sym->GetIntConstValue() != 0)
    {
        return false;
    }
    *pShrSrc1 = shrUInstr->GetSrc1();
    return true;
}
// Peephole: when both sources of a compare/branch are known unsigned (see
// IsUnsignedOpnd), rewrite the signed compare into its unsigned form and feed
// it the pre-shift operands, recording the bypassed symbols in a ByteCodeUses
// so bailout still sees them. Returns true if the rewrite was applied.
bool FlowGraph::UnsignedCmpPeep(IR::Instr *cmpInstr)
{
    IR::Opnd *cmpSrc1 = cmpInstr->GetSrc1();
    IR::Opnd *cmpSrc2 = cmpInstr->GetSrc2();
    IR::Opnd *newSrc1;
    IR::Opnd *newSrc2;
    // Look for something like:
    // t1 = ShrU_A x, 0
    // t2 = 10;
    // BrGt t1, t2, L
    //
    // Peep to:
    //
    // t1 = ShrU_A x, 0
    // t2 = 10;
    // ByteCodeUse t1
    // BrUnGt x, t2, L
    //
    // Hopefully dead-store can get rid of the ShrU
    // Only worthwhile when the globopt will run with lossy int type spec.
    if (!this->func->DoGlobOpt() || !GlobOpt::DoAggressiveIntTypeSpec(this->func) || !GlobOpt::DoLossyIntTypeSpec(this->func))
    {
        return false;
    }
    // Unconditional branches have no comparison to rewrite.
    if (cmpInstr->IsBranchInstr() && !cmpInstr->AsBranchInstr()->IsConditional())
    {
        return false;
    }
    if (!cmpInstr->GetSrc2())
    {
        return false;
    }
    // Both sides must be provably unsigned.
    if (!this->IsUnsignedOpnd(cmpSrc1, &newSrc1))
    {
        return false;
    }
    if (!this->IsUnsignedOpnd(cmpSrc2, &newSrc2))
    {
        return false;
    }
    // Map each signed opcode to its unsigned counterpart. Equality compares
    // are sign-agnostic and keep their opcode.
    switch(cmpInstr->m_opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNeq_A:
        break;
    case Js::OpCode::BrLe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::BrLt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::BrGe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::BrGt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmLe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLe_A;
        break;
    case Js::OpCode::CmLt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLt_A;
        break;
    case Js::OpCode::CmGe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGe_A;
        break;
    case Js::OpCode::CmGt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGt_A;
        break;
    default:
        return false;
    }
    // Record the replaced operands as byte code uses so bailout keeps them
    // alive even though the compare no longer references them.
    IR::ByteCodeUsesInstr * bytecodeInstr = IR::ByteCodeUsesInstr::New(cmpInstr->m_func);
    bytecodeInstr->SetByteCodeOffset(cmpInstr);
    bytecodeInstr->byteCodeUpwardExposedUsed = Anew(cmpInstr->m_func->m_alloc, BVSparse<JitArenaAllocator>,cmpInstr->m_func->m_alloc);
    cmpInstr->InsertBefore(bytecodeInstr);
    if (cmpSrc1 != newSrc1)
    {
        if (cmpSrc1->IsRegOpnd())
        {
            bytecodeInstr->byteCodeUpwardExposedUsed->Set(cmpSrc1->AsRegOpnd()->m_sym->m_id);
        }
        cmpInstr->ReplaceSrc1(newSrc1);
        if (newSrc1->IsRegOpnd())
        {
            // The substituted operand exists only because of this JIT rewrite.
            cmpInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);
        }
    }
    if (cmpSrc2 != newSrc2)
    {
        if (cmpSrc2->IsRegOpnd())
        {
            bytecodeInstr->byteCodeUpwardExposedUsed->Set(cmpSrc2->AsRegOpnd()->m_sym->m_id);
        }
        cmpInstr->ReplaceSrc2(newSrc2);
        if (newSrc2->IsRegOpnd())
        {
            cmpInstr->GetSrc2()->AsRegOpnd()->SetIsJITOptimizedReg(true);
        }
    }
    return true;
}
  2785. #if DBG
// Debug-only consistency check of the loop graph: verifies that edges between
// blocks respect loop nesting (a successor in a different loop must be a loop
// header entered from outside, or belong to an enclosing loop), and that break
// blocks have been removed when that phase ran.
void
FlowGraph::VerifyLoopGraph()
{
    FOREACH_BLOCK(block, this)
    {
        Loop *loop = block->loop;
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (loop == succ->loop)
            {
                // Same loop: the only loop header reachable this way is the
                // loop's own header (a back edge).
                Assert(succ->isLoopHeader == false || loop->GetHeadBlock() == succ);
                continue;
            }
            if (succ->isLoopHeader)
            {
                // Entering a loop: it must be an immediate child of the
                // current loop, or an unrelated (non-descendant) loop.
                Assert(succ->loop->parent == loop
                    || (!loop->IsDescendentOrSelf(succ->loop)));
                continue;
            }
            // Leaving a loop: the target's loop must enclose the current one.
            Assert(succ->loop == nullptr || succ->loop->IsDescendentOrSelf(loop));
        } NEXT_SUCCESSOR_BLOCK;
        if (!PHASE_OFF(Js::RemoveBreakBlockPhase, this->GetFunc()))
        {
            // Make sure all break blocks have been removed.
            if (loop && !block->isLoopHeader && !(this->func->HasTry() && !this->func->DoOptimizeTryCatch()))
            {
                Assert(loop->IsDescendentOrSelf(block->GetPrev()->loop));
            }
        }
    } NEXT_BLOCK;
}
  2817. #endif
  2818. #if DBG_DUMP
  2819. void
  2820. FlowGraph::Dump(bool onlyOnVerboseMode, const wchar_t *form)
  2821. {
  2822. if(PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  2823. {
  2824. if (!onlyOnVerboseMode || Js::Configuration::Global.flags.Verbose)
  2825. {
  2826. if (form)
  2827. {
  2828. Output::Print(form);
  2829. }
  2830. this->Dump();
  2831. }
  2832. }
  2833. }
  2834. void
  2835. FlowGraph::Dump()
  2836. {
  2837. Output::Print(L"\nFlowGraph\n");
  2838. FOREACH_BLOCK(block, this)
  2839. {
  2840. Loop * loop = block->loop;
  2841. while (loop)
  2842. {
  2843. Output::Print(L" ");
  2844. loop = loop->parent;
  2845. }
  2846. block->DumpHeader(false);
  2847. } NEXT_BLOCK;
  2848. Output::Print(L"\nLoopGraph\n");
  2849. for (Loop *loop = this->loopList; loop; loop = loop->next)
  2850. {
  2851. Output::Print(L"\nLoop\n");
  2852. FOREACH_BLOCK_IN_LOOP(block, loop)
  2853. {
  2854. block->DumpHeader(false);
  2855. }NEXT_BLOCK_IN_LOOP;
  2856. Output::Print(L"Loop Ends\n");
  2857. }
  2858. }
// Print a one-line summary of this block: number, status flags, live and dead
// predecessor/successor lists, and loop information (with call/implicit-call
// summary when this block is the loop header).
//   insertCR - surround the line with blank lines.
void
BasicBlock::DumpHeader(bool insertCR)
{
    if (insertCR)
    {
        Output::Print(L"\n");
    }
    Output::Print(L"BLOCK %d:", this->number);

    // Status flags.
    if (this->isDead)
    {
        Output::Print(L" **** DEAD ****");
    }
    if (this->isBreakBlock)
    {
        Output::Print(L" **** Break Block ****");
    }
    else if (this->isAirLockBlock)
    {
        Output::Print(L" **** Air lock Block ****");
    }
    else if (this->isBreakCompensationBlockAtSource)
    {
        Output::Print(L" **** Break Source Compensation Code ****");
    }
    else if (this->isBreakCompensationBlockAtSink)
    {
        Output::Print(L" **** Break Sink Compensation Code ****");
    }
    else if (this->isAirLockCompensationBlock)
    {
        Output::Print(L" **** Airlock block Compensation Code ****");
    }

    // Live predecessors.
    if (!this->predList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(L" In(");
        FOREACH_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(L", ");
            }
            Output::Print(L"%d", blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_PREDECESSOR_BLOCK;
        Output::Print(L")");
    }

    // Live successors.
    if (!this->succList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(L" Out(");
        FOREACH_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(L", ");
            }
            Output::Print(L"%d", blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_SUCCESSOR_BLOCK;
        Output::Print(L")");
    }

    // Dead predecessors (kept during glob opt).
    if (!this->deadPredList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(L" DeadIn(");
        FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(L", ");
            }
            Output::Print(L"%d", blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_PREDECESSOR_BLOCK;
        Output::Print(L")");
    }

    // Dead successors.
    if (!this->deadSuccList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(L" DeadOut(");
        FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(L", ");
            }
            Output::Print(L"%d", blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_SUCCESSOR_BLOCK;
        Output::Print(L")");
    }

    // Loop membership; extra call/implicit-call detail on the header block.
    if (this->loop)
    {
        Output::Print(L" Loop(%d) header: %d", this->loop->loopNumber, this->loop->GetHeadBlock()->GetBlockNum());
        if (this->loop->parent)
        {
            Output::Print(L" parent(%d): %d", this->loop->parent->loopNumber, this->loop->parent->GetHeadBlock()->GetBlockNum());
        }
        if (this->loop->GetHeadBlock() == this)
        {
            Output::SkipToColumn(50);
            Output::Print(L"Call Exp/Imp: ");
            if (this->loop->GetHasCall())
            {
                Output::Print(L"yes/");
            }
            else
            {
                Output::Print(L" no/");
            }
            Output::Print(Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->loop->GetImplicitCallFlags()));
        }
    }
    Output::Print(L"\n");
    if (insertCR)
    {
        Output::Print(L"\n");
    }
}
  2983. void
  2984. BasicBlock::Dump()
  2985. {
  2986. // Dumping the first instruction (label) will dump the block header as well.
  2987. FOREACH_INSTR_IN_BLOCK(instr, this)
  2988. {
  2989. instr->Dump();
  2990. }
  2991. NEXT_INSTR_IN_BLOCK;
  2992. }
  2993. void
  2994. AddPropertyCacheBucket::Dump() const
  2995. {
  2996. Assert(this->initialType != nullptr);
  2997. Assert(this->finalType != nullptr);
  2998. Output::Print(L" initial type: 0x%x, final type: 0x%x ", this->initialType, this->finalType);
  2999. }
  3000. void
  3001. ObjTypeGuardBucket::Dump() const
  3002. {
  3003. Assert(this->guardedPropertyOps != nullptr);
  3004. this->guardedPropertyOps->Dump();
  3005. }
  3006. void
  3007. ObjWriteGuardBucket::Dump() const
  3008. {
  3009. Assert(this->writeGuards != nullptr);
  3010. this->writeGuards->Dump();
  3011. }
  3012. #endif