LowererMDArch.cpp 124 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
7327832793280328132823283328432853286328732883289329032913292329332943295329632973298329933003301330233033304330533063307330833093310331133123313331433153316331733183319332033213322332333243325332633273328332933303331333233333334333533363337333833393340334133423343334433453346334733483349335033513352335333543355335633573358335933603361336233633364336533663367336833693370337133723373337433753376337733783379338033813382338333843385338633873388338933903391339233933394339533963397339833993400340134023403340434053406340734083409341034113412341334143415341634173418341934203421342234233424342534263427342834293430343134323433343434353436343734383439344034413442344334443445344634473448344934503451345234533454345534563457345834593460346134623463346434653466346734683469
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "LowererMDArch.h"
  7. #include "Library/JavascriptGeneratorFunction.h"
// Machine-dependent opcode used to sign-extend a 32-bit value to 64 bits on amd64.
const Js::OpCode LowererMD::MDExtend32Opcode = Js::OpCode::MOVSXD;

// Per-register IR types, defined elsewhere in the backend.
extern const IRType RegTypes[RegNumCount];
// Default scale factor for indirect operands on this target: scale-by-8
// (pointer-sized elements on amd64).
BYTE
LowererMDArch::GetDefaultIndirScale()
{
    return IndirScale8;
}
// Register that must hold a variable shift count.
// x86/x64 variable shift instructions take their count in CL, i.e. RCX.
RegNum
LowererMDArch::GetRegShiftCount()
{
    return RegRCX;
}
// Return-value register for the given type: XMM0 for floating-point and SIMD
// results, RAX for everything else (standard x64 convention).
RegNum
LowererMDArch::GetRegReturn(IRType type)
{
    return ( IRType_IsFloat(type) || IRType_IsSimd128(type) ) ? RegXMM0 : RegRAX;
}
  25. RegNum
  26. LowererMDArch::GetRegReturnAsmJs(IRType type)
  27. {
  28. if (IRType_IsFloat(type))
  29. {
  30. return RegXMM0;
  31. }
  32. else if (IRType_IsSimd128(type))
  33. {
  34. return RegXMM0;
  35. }
  36. else
  37. {
  38. return RegRAX;
  39. }
  40. }
// The stack pointer register (RSP on amd64).
RegNum
LowererMDArch::GetRegStackPointer()
{
    return RegRSP;
}
// The block pointer register; same as the frame pointer (RBP) on this target.
RegNum
LowererMDArch::GetRegBlockPointer()
{
    return RegRBP;
}
// The frame pointer register (RBP on amd64).
RegNum
LowererMDArch::GetRegFramePointer()
{
    return RegRBP;
}
// Register used to pass the allocation size to the stack-probe (__chkstk) helper.
RegNum
LowererMDArch::GetRegChkStkParam()
{
    return RegRAX;
}
// Register receiving the low half of a widening multiply (x64 MUL/IMUL writes it to RAX).
RegNum
LowererMDArch::GetRegIMulDestLower()
{
    return RegRAX;
}
// Register receiving the high half of a widening multiply (x64 MUL/IMUL writes it to RDX).
RegNum
LowererMDArch::GetRegIMulHighDestLower()
{
    return RegRDX;
}
// Dedicated register for the Nth integer argument.
// Not implemented on this target yet: always reports "no register".
RegNum
LowererMDArch::GetRegArgI4(int32 argNum)
{
    // TODO: decide on registers to use for int
    return RegNOREG;
}
// Dedicated register for the Nth double argument.
// Not implemented on this target yet: always reports "no register".
RegNum
LowererMDArch::GetRegArgR8(int32 argNum)
{
    // TODO: decide on registers to use for double
    return RegNOREG;
}
  83. Js::OpCode
  84. LowererMDArch::GetAssignOp(IRType type)
  85. {
  86. switch (type)
  87. {
  88. case TyFloat64:
  89. return Js::OpCode::MOVSD;
  90. case TyFloat32:
  91. return Js::OpCode::MOVSS;
  92. case TySimd128F4:
  93. case TySimd128I4:
  94. case TySimd128I8:
  95. case TySimd128I16:
  96. case TySimd128U4:
  97. case TySimd128U8:
  98. case TySimd128U16:
  99. case TySimd128B4:
  100. case TySimd128B8:
  101. case TySimd128B16:
  102. case TySimd128D2:
  103. case TySimd128I2:
  104. return Js::OpCode::MOVUPS;
  105. default:
  106. return Js::OpCode::MOV;
  107. }
  108. }
  109. void
  110. LowererMDArch::Init(LowererMD *lowererMD)
  111. {
  112. this->lowererMD = lowererMD;
  113. this->helperCallArgsCount = 0;
  114. }
  115. ///----------------------------------------------------------------------------
  116. ///
  117. /// LowererMD::LoadInputParamPtr
  118. ///
  119. /// Load the address of the start of the passed-in parameters not including
  120. /// the this parameter.
  121. ///
  122. ///----------------------------------------------------------------------------
  123. IR::Instr *
  124. LowererMDArch::LoadInputParamPtr(IR::Instr *instrInsert, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
  125. {
  126. if (this->m_func->GetJITFunctionBody()->IsCoroutine())
  127. {
  128. IR::RegOpnd * argPtrRegOpnd = Lowerer::LoadGeneratorArgsPtr(instrInsert);
  129. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(argPtrRegOpnd, 1 * MachPtr, TyMachPtr, this->m_func);
  130. IR::RegOpnd * dstOpnd = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachPtr, this->m_func);
  131. return Lowerer::InsertLea(dstOpnd, indirOpnd, instrInsert);
  132. }
  133. else
  134. {
  135. // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
  136. StackSym *paramSym = StackSym::New(TyMachReg, this->m_func);
  137. this->m_func->SetArgOffset(paramSym, 5 * MachPtr);
  138. return this->lowererMD->m_lowerer->InsertLoadStackAddress(paramSym, instrInsert, optionalDstOpnd);
  139. }
  140. }
  141. IR::Instr *
  142. LowererMDArch::LoadStackArgPtr(IR::Instr * instrArgPtr)
  143. {
  144. // Get the args pointer relative to the frame pointer.
  145. // NOTE: This code is sufficient for the apply-args optimization, but not for StackArguments,
  146. // if and when that is enabled.
  147. // dst = LEA &[rbp + "this" offset + sizeof(var)]
  148. IR::Instr * instr = LoadInputParamPtr(instrArgPtr, instrArgPtr->UnlinkDst()->AsRegOpnd());
  149. instrArgPtr->Remove();
  150. return instr->m_prev;
  151. }
// Lower an Ld(Let)HeapArgsCached instruction: either elide the heap arguments
// object entirely (stack-args optimization) or emit the helper call
// JavascriptOperators::LoadArguments(s1..s7), pushing its arguments right-to-left.
// Returns the instruction preceding instrArgs so lowering can resume from there.
IR::Instr *
LowererMDArch::LoadHeapArgsCached(IR::Instr *instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr *instrPrev = instrArgs->m_prev;

    if (instrArgs->m_func->IsStackArgsEnabled())
    {
        // Formals live on the stack; no heap arguments object is needed.
        // Degrade the instruction to "dst = MOV null".
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = local frame instance
        // s4 = address of first actual argument (after "this")
        // s3 = formal argument count
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadArguments(s1, s2, s3, s4, s5, s6, s7)

        // s7 = formals are let decls
        IR::Opnd * formalsAreLetDecls = IR::IntConstOpnd::New((IntConstType)(instrArgs->m_opcode == Js::OpCode::LdLetHeapArgsCached), TyUint8, func);
        this->LoadHelperArgument(instrArgs, formalsAreLetDecls);

        // s6 = memory context
        this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);

        // s5 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);

        if (func->IsInlinee())
        {
            // s4 = address of first actual argument (after "this").
            // Skip one machine pointer past the inlinee argv slot to step over "this".
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->m_lowerer->InsertLoadStackAddress(firstRealArgSlotSym, instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s3 = formal argument count (without counting "this").
            uint32 formalsCount = func->GetJITFunctionBody()->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyUint32, func));

            // s2 = actual argument count (without counting "this").
            // Known statically for an inlinee, so load it as a constant.
            instr = IR::Instr::New(Js::OpCode::MOV,
                IR::RegOpnd::New(TyMachReg, func),
                IR::IntConstOpnd::New(func->actualCount - 1, TyMachReg, func),
                func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());

            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                argObjSlotOpnd,
                instrArgs->GetDst(),
                func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s4 = address of first actual argument (after "this")
            // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s3 = formal argument count (without counting "this")
            // NOTE(review): sibling paths use func->GetJITFunctionBody()->GetInParamsCount();
            // confirm func->GetInParamsCount() is the intended equivalent here.
            uint32 formalsCount = func->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyInt32, func));

            // s2 = actual argument count (without counting "this"):
            // load the runtime count, then decrement to drop "this".
            instr = this->lowererMD->LoadInputParamCount(instrArgs);
            instr = IR::Instr::New(Js::OpCode::DEC, instr->GetDst(), instr->GetDst(), func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s1 = current function: the function object slot at frame offset 2 * MachPtr.
            StackSym *paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            this->LoadHelperArgument(instrArgs, srcOpnd);

            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }

        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArgsCached);
    }
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadHeapArguments
///
/// Load the arguments object
/// NOTE: The same caveat regarding arguments passed on the stack applies here
/// as in LoadInputParamCount above.
///
/// Either elides the heap arguments object (stack-args optimization) or emits
/// the helper call JavascriptOperators::LoadHeapArguments(s1..s7), pushing the
/// helper arguments right-to-left. Returns the instruction preceding instrArgs.
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadHeapArguments(IR::Instr *instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr *instrPrev = instrArgs->m_prev;

    if (func->IsStackArgsEnabled())
    {
        // Formals live on the stack; degrade the instruction to "dst = MOV null".
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = array of property ID's
        // s4 = local frame instance
        // s3 = address of first actual argument (after "this")
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArguments(s1, s2, s3, s4, s5, s6, s7)

        // s7 = formals are let decls
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(instrArgs->m_opcode == Js::OpCode::LdLetHeapArguments ? TRUE : FALSE, TyUint8, func));

        // s6 = memory context
        instrPrev = this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);

        // s5 = array of property ID's; fall back to the script context's null
        // address when the function body has no formals property-id array.
        intptr_t formalsPropIdArray = instrArgs->m_func->GetJITFunctionBody()->GetFormalsPropIdArrayAddr();
        if (!formalsPropIdArray)
        {
            formalsPropIdArray = instrArgs->m_func->GetScriptContextInfo()->GetNullAddr();
        }
        IR::Opnd * argArray = IR::AddrOpnd::New(formalsPropIdArray, IR::AddrOpndKindDynamicMisc, m_func);
        this->LoadHelperArgument(instrArgs, argArray);

        // s4 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);

        if (func->IsInlinee())
        {
            // s3 = address of first actual argument (after "this").
            // Skip one machine pointer past the inlinee argv slot to step over "this".
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->m_lowerer->InsertLoadStackAddress(firstRealArgSlotSym, instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s2 = actual argument count (without counting "this").
            // Known statically for an inlinee, so load it as a constant.
            instr = IR::Instr::New(Js::OpCode::MOV,
                IR::RegOpnd::New(TyUint32, func),
                IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
                func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());

            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                argObjSlotOpnd,
                instrArgs->GetDst(),
                func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s3 = address of first actual argument (after "this")
            // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s2 = actual argument count (without counting "this");
            // LoadInputParamCount applies the -1 adjustment here.
            instr = this->lowererMD->LoadInputParamCount(instrArgs, -1);
            IR::Opnd * opndInputParamCount = instr->GetDst();
            this->LoadHelperArgument(instrArgs, opndInputParamCount);

            // s1 = current function: the function object slot at frame offset 2 * MachPtr.
            StackSym * paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);

            if (this->m_func->GetJITFunctionBody()->IsCoroutine())
            {
                // the function object for generator calls is a GeneratorVirtualScriptFunction object
                // and we need to pass the real JavascriptGeneratorFunction object so grab it instead
                IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
                Lowerer::InsertMove(tmpOpnd, srcOpnd, instrArgs);
                srcOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
            }

            this->LoadHelperArgument(instrArgs, srcOpnd);

            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }

        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArguments);
    }
    return instrPrev;
}
  346. //
  347. // Load the parameter in the first argument slot
  348. //
  349. IR::Instr *
  350. LowererMDArch::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
  351. {
  352. // Spread moves down the argument slot by one.
  353. IR::Opnd * argOpnd = this->GetArgSlotOpnd(3 + extraArgs);
  354. IR::Instr * argInstr = Lowerer::InsertMove(argOpnd, dst, instr);
  355. return argInstr;
  356. }
// Map a 1-based argument position to its dedicated parameter register, or RegNOREG
// when the argument at that position is passed on the stack. Integer arguments use
// the REG_INT_ARG table and float arguments the REG_XMM_ARG table from RegList.h.
inline static RegNum GetRegFromArgPosition(const bool isFloatArg, const uint16 argPosition)
{
    RegNum reg = RegNOREG;
    if (!isFloatArg && argPosition <= IntArgRegsCount)
    {
        switch (argPosition)
        {
// X-macro: expands to "case (Index + 1): reg = Reg<Name>; break;" for each integer arg register.
#define REG_INT_ARG(Index, Name) \
case ((Index) + 1): \
reg = Reg ## Name; \
break;
#include "RegList.h"
        default:
            Assume(UNREACHED);
        }
    }
    else if (isFloatArg && argPosition <= XmmArgRegsCount)
    {
        switch (argPosition)
        {
// X-macro: expands to "case (Index + 1): reg = Reg<Name>; break;" for each XMM arg register.
#define REG_XMM_ARG(Index, Name) \
case ((Index) + 1): \
reg = Reg ## Name; \
break;
#include "RegList.h"
        default:
            Assume(UNREACHED);
        }
    }
    return reg;
}
// Lower the ArgOut chain feeding callInstr: walk the arg-link symbols hanging off
// src2, turn each ArgOut into a MOV into its stack argument slot, store the callInfo
// word, and lower the StartCall that roots the chain. Returns the total number of
// argument slots consumed (args + callInfo slot + extraParams). If callInfoOpndRef
// is non-null it receives an extra reference to the callInfo constant operand.
int32
LowererMDArch::LowerCallArgs(IR::Instr *callInstr, ushort callFlags, Js::ArgSlot extraParams, IR::IntConstOpnd **callInfoOpndRef /* = nullptr */)
{
    AssertMsg(this->helperCallArgsCount == 0, "We don't support nested helper calls yet");
    // Slot 1 holds the callInfo, so actual arguments start one slot further down.
    const Js::ArgSlot argOffset = 1;
    uint32 argCount = 0;
    // Lower args and look for StartCall
    IR::Instr * argInstr = callInstr;
    IR::Instr * cfgInsertLoc = callInstr->GetPrevRealInstr();
    IR::Opnd *src2 = argInstr->UnlinkSrc2();
    // Each link in the chain is a SymOpnd pointing at the defining ArgOut;
    // the chain terminates at a RegOpnd whose def is the StartCall.
    while (src2->IsSymOpnd())
    {
        IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(this->m_func);
        argInstr = argLinkSym->m_instrDef;
        src2 = argInstr->UnlinkSrc2();
        this->lowererMD->ChangeToAssign(argInstr);
        // Mov each arg to its argSlot
        Js::ArgSlot argPosition = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
        Js::ArgSlot index = argOffset + argPosition;
        // ArgSlot is narrow; a wrap-around on either addition means overflow.
        if(index < argPosition)
        {
            Js::Throw::OutOfMemory();
        }
        index += extraParams;
        if(index < extraParams)
        {
            Js::Throw::OutOfMemory();
        }
        IR::Opnd * dstOpnd = this->GetArgSlotOpnd(index, argLinkSym);
        argInstr->ReplaceDst(dstOpnd);
        cfgInsertLoc = argInstr->GetPrevRealInstr();
        // The arg sym isn't assigned a constant directly anymore
        // TODO: We can just move the instruction down next to the call if it is just a constant assignment
        // but AMD64 doesn't have the MOV mem,imm64 encoding, and we have no code to detect if the value can fit
        // into imm32 and hoist the src if it is not.
        argLinkSym->m_isConst = false;
        argLinkSym->m_isIntConst = false;
        argLinkSym->m_isTaggableIntConst = false;
        // Move the arg store directly before the call.
        argInstr->Unlink();
        callInstr->InsertBefore(argInstr);
        argCount++;
    }
    // End of the chain: the remaining link must be the StartCall's def.
    IR::RegOpnd * argLinkOpnd = src2->AsRegOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    if (callInstr->m_opcode == Js::OpCode::NewScObject ||
        callInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        callInstr->m_opcode == Js::OpCode::NewScObjectLiteral ||
        callInstr->m_opcode == Js::OpCode::NewScObjArray ||
        callInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
    {
        // These push an extra arg.
        argCount++;
    }
    AssertMsg(startCallInstr->m_opcode == Js::OpCode::StartCall ||
        startCallInstr->m_opcode == Js::OpCode::LoweredStartCall,
        "Problem with arg chain.");
    AssertMsg(startCallInstr->GetArgOutCount(/*getInterpreterArgOutCount*/ false) == argCount ||
        m_func->GetJITFunctionBody()->IsAsmJsMode(),
        "ArgCount doesn't match StartCall count");
    //
    // Machine dependent lowering
    //
    if (callInstr->m_opcode != Js::OpCode::AsmJsCallI)
    {
        // Push argCount (the callInfo word) into slot 1. asm.js calls carry no callInfo.
        IR::IntConstOpnd *argCountOpnd = Lowerer::MakeCallInfoConst(callFlags, argCount, m_func);
        if (callInfoOpndRef)
        {
            argCountOpnd->Use(m_func);
            *callInfoOpndRef = argCountOpnd;
        }
        Lowerer::InsertMove(this->GetArgSlotOpnd(1 + extraParams), argCountOpnd, callInstr);
    }
    startCallInstr = this->LowerStartCall(startCallInstr);
    const uint32 argSlots = argCount + 1 + extraParams; // + 1 for call flags
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled, argSlots);
    if (m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
        GeneratePreCall(callInstr, functionObjOpnd, cfgInsertLoc->GetNextRealInstr());
    }
    return argSlots;
}
  476. void
  477. LowererMDArch::SetMaxArgSlots(Js::ArgSlot actualCount /*including this*/)
  478. {
  479. Js::ArgSlot offset = 3;//For function object & callInfo & this
  480. if (this->m_func->m_argSlotsForFunctionsCalled < (uint32) (actualCount + offset))
  481. {
  482. this->m_func->m_argSlotsForFunctionsCalled = (uint32)(actualCount + offset);
  483. }
  484. return;
  485. }
  486. void
  487. LowererMDArch::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed)
  488. {
  489. IRType type = TyVar;
  490. if (isZeroed)
  491. {
  492. if (value == 0)
  493. {
  494. // Recycler memory are zero initialized
  495. return;
  496. }
  497. type = value <= UINT_MAX ?
  498. (value <= USHORT_MAX ?
  499. (value <= UCHAR_MAX ? TyUint8 : TyUint16) :
  500. TyUint32) :
  501. type;
  502. }
  503. Func * func = this->m_func;
  504. lowererMD->GetLowerer()->InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  505. }
// Lowers a dynamic call (argument count only known at runtime).
// Moves the saved `this` ArgOut next to the call, stores the runtime
// callInfo (args length) into arg slot 2, generates the pre-call sequence
// (entry point load / CFG check), and finally lowers the CALL itself.
// `argsLength` is a constant for inlinees, otherwise a runtime value that
// still needs +1 for `this`.
IR::Instr *
LowererMDArch::LowerCallIDynamic(IR::Instr *callInstr, IR::Instr*saveThisArgOutInstr, IR::Opnd *argsLength, ushort callFlags, IR::Instr * insertBeforeInstrForCFG)
{
    callInstr->InsertBefore(saveThisArgOutInstr); //Move this Argout next to call;
    this->LoadDynamicArgument(saveThisArgOutInstr, 3); //this pointer is the 3rd argument

    /*callInfo*/
    if (callInstr->m_func->IsInlinee())
    {
        // Inlinee: the arg count is a compile-time constant; just reserve slots.
        Assert(argsLength->AsIntConstOpnd()->GetValue() == callInstr->m_func->actualCount);
        this->SetMaxArgSlots((Js::ArgSlot)callInstr->m_func->actualCount);
    }
    else
    {
        // Runtime count excludes `this`; add 1 and reserve for the worst case.
        callInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, argsLength, argsLength, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), this->m_func));
        this->SetMaxArgSlots(Js::InlineeCallInfo::MaxInlineeArgoutCount);
    }
    // Arg slot 2 holds the callInfo (slot 1 is the function object, set by GeneratePreCall).
    callInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, this->GetArgSlotOpnd(2), argsLength, this->m_func));

    IR::Opnd *funcObjOpnd = callInstr->UnlinkSrc1();
    GeneratePreCall(callInstr, funcObjOpnd, insertBeforeInstrForCFG);

    // Normally for dynamic calls we move 4 args to registers and push remaining
    // args onto stack (Windows convention, and unchanged on xplat). We need to
    // manually home 4 args. Inlinees lower differently and follow platform ABI,
    // so we need to manually home actualArgsCount + 2 args (function, callInfo).
    const uint32 homeArgs = callInstr->m_func->IsInlinee() ?
        callInstr->m_func->actualCount + 2 : 4;
    LowerCall(callInstr, homeArgs);

    return callInstr;
}
// Emits a runtime check that the call target is an object (not a tagged
// value such as a tagged int). On failure, control falls to a helper label
// that raises JSERR_NeedFunction. When the JIT is in debug mode, also emits
// a branch to `continueAfterExLabel` so execution can safely resume after
// the (possibly ignored) exception.
void
LowererMDArch::GenerateFunctionObjectTest(IR::Instr * callInstr, IR::RegOpnd *functionObjOpnd, bool isHelper, IR::LabelInstr* continueAfterExLabel /* = nullptr */)
{
    AssertMsg(!m_func->IsJitInDebugMode() || continueAfterExLabel, "When jit is in debug mode, continueAfterExLabel must be provided otherwise continue after exception may cause AV.");

    IR::RegOpnd *functionObjRegOpnd = functionObjOpnd->AsRegOpnd();
    IR::Instr * insertBeforeInstr = callInstr;

    // Need check and error if we are calling a tagged int.
    if (!functionObjRegOpnd->IsNotTaggedValue())
    {
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if (this->lowererMD->GenerateObjectTest(functionObjRegOpnd, callInstr, helperLabel))
        {
            // Layout emitted before the call:
            //     JMP $callLabel      ; object test passed
            //   $helperLabel:         ; tagged value -> raise error
            //     <runtime error>
            //   $callLabel:
            IR::LabelInstr * callLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            IR::Instr* instr = IR::BranchInstr::New(Js::OpCode::JMP, callLabel, this->m_func);
            callInstr->InsertBefore(instr);

            callInstr->InsertBefore(helperLabel);
            callInstr->InsertBefore(callLabel);

            insertBeforeInstr = callLabel;
            lowererMD->m_lowerer->GenerateRuntimeError(insertBeforeInstr, JSERR_NeedFunction);

            if (continueAfterExLabel)
            {
                // Under debugger the RuntimeError (exception) can be ignored, generate branch to jmp to safe place
                // (which would normally be debugger bailout check).
                IR::BranchInstr* continueAfterEx = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueAfterExLabel, this->m_func);
                insertBeforeInstr->InsertBefore(continueAfterEx);
            }
        }
    }
}
// Emits the instruction sequence that loads the call target's entry point
// into a register, performs the CFG check on it (when enabled), makes it
// the CALL's src1, and stores the function object into arg slot 1.
// `insertBeforeInstrForCFGCheck` lets callers (e.g. NewScObject paths)
// position the entry-point load / CFG check earlier than the call itself.
void
LowererMDArch::GeneratePreCall(IR::Instr * callInstr, IR::Opnd *functionObjOpnd, IR::Instr * insertBeforeInstrForCFGCheck)
{
    if (insertBeforeInstrForCFGCheck == nullptr)
    {
        insertBeforeInstrForCFGCheck = callInstr;
    }

    IR::RegOpnd * functionTypeRegOpnd = nullptr;
    IR::IndirOpnd * entryPointIndirOpnd = nullptr;
    if (callInstr->m_opcode == Js::OpCode::AsmJsCallI)
    {
        // Asm.js path: entry point comes from
        // function->type->entryPointInfo->address. The same scratch register
        // is reused for each step of the pointer chain.
        functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);

        IR::IndirOpnd* functionInfoIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);

        IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);

        insertBeforeInstrForCFGCheck->InsertBefore(instr);

        functionInfoIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::ScriptFunctionType::GetEntryPointInfoOffset(), TyMachReg, m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
        insertBeforeInstrForCFGCheck->InsertBefore(instr);

        uint32 entryPointOffset = Js::ProxyEntryPointInfo::GetAddressOffset();

        entryPointIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachReg, m_func);
    }
    else
    {
        // For calls to fixed functions we load the function's type directly from the known (hard-coded) function object address.
        // For other calls, we need to load it from the function object stored in a register operand.
        if (functionObjOpnd->IsAddrOpnd() && functionObjOpnd->AsAddrOpnd()->m_isFunction)
        {
            functionTypeRegOpnd = this->lowererMD->m_lowerer->GenerateFunctionTypeFromFixedFunctionObject(insertBeforeInstrForCFGCheck, functionObjOpnd);
        }
        else if (functionObjOpnd->IsRegOpnd())
        {
            AssertMsg(functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call target to be a stack symbol.");

            functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);

            // functionTypeRegOpnd(RAX) = MOV function->type
            {
                IR::IndirOpnd * functionTypeIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
                    Js::DynamicObject::GetOffsetOfType(), TyMachReg, m_func);
                IR::Instr * mov = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionTypeIndirOpnd, m_func);
                insertBeforeInstrForCFGCheck->InsertBefore(mov);
            }
        }
        else
        {
            AnalysisAssertMsg(false, "Unexpected call target operand type.");
        }

        // entryPointRegOpnd(RAX) = MOV type->entryPoint
        entryPointIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::Type::GetOffsetOfEntryPoint(), TyMachPtr, m_func);
    }

    // Reuse the type register to hold the entry point; the type value is
    // dead once the entry point has been loaded.
    IR::RegOpnd *entryPointRegOpnd = functionTypeRegOpnd;
    entryPointRegOpnd->m_isCallArg = true;

    IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, entryPointRegOpnd, entryPointIndirOpnd, m_func);
    insertBeforeInstrForCFGCheck->InsertBefore(mov);

    // entryPointRegOpnd(RAX) = CALL entryPointRegOpnd(RAX)
    callInstr->SetSrc1(entryPointRegOpnd);

#if defined(_CONTROL_FLOW_GUARD)
    // verify that the call target is valid (CFG Check)
    if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
    {
        this->lowererMD->GenerateCFGCheck(entryPointRegOpnd, insertBeforeInstrForCFGCheck);
    }
#endif

    // Setup the first call argument - pointer to the function being called.
    IR::Instr * instrMovArg1 = IR::Instr::New(Js::OpCode::MOV, GetArgSlotOpnd(1), functionObjOpnd, m_func);
    callInstr->InsertBefore(instrMovArg1);
}
// Lowers a JS call instruction: verifies the target is a function (unless
// already known — fixed call or `new` path), generates the pre-call
// sequence, lays out the outgoing args, lowers the CALL, and, for SimpleJit
// profiling instructions, wraps the call with profiling bookkeeping.
// Returns the last instruction of the lowered sequence.
IR::Instr *
LowererMDArch::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    AssertMsg(this->helperCallArgsCount == 0, "We don't support nested helper calls yet");

    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
    IR::Instr * insertBeforeInstrForCFGCheck = callInstr;

    // If this is a call for new, we already pass the function operand through NewScObject,
    // which checks if the function operand is a real function or not, don't need to add a check again
    // If this is a call to a fixed function, we've already verified that the target is, indeed, a function.
    if (callInstr->m_opcode != Js::OpCode::CallIFixed && !(callFlags & Js::CallFlags_New))
    {
        Assert(functionObjOpnd->IsRegOpnd());
        IR::LabelInstr* continueAfterExLabel = Lowerer::InsertContinueAfterExceptionLabelForDebugger(m_func, callInstr, isHelper);
        GenerateFunctionObjectTest(callInstr, functionObjOpnd->AsRegOpnd(), isHelper, continueAfterExLabel);
    }
    else if (insertBeforeInstrForCFG != nullptr)
    {
        RegNum dstReg = insertBeforeInstrForCFG->GetDst()->AsRegOpnd()->GetReg();
        AssertMsg(dstReg == RegArg2 || dstReg == RegArg3, "NewScObject should insert the first Argument in RegArg2/RegArg3 only based on Spread call or not.");
        insertBeforeInstrForCFGCheck = insertBeforeInstrForCFG;
    }

    GeneratePreCall(callInstr, functionObjOpnd, insertBeforeInstrForCFGCheck);

    // We need to get the calculated CallInfo in SimpleJit because that doesn't include any changes for stack alignment
    IR::IntConstOpnd *callInfo = nullptr;
    int32 argCount = LowerCallArgs(callInstr, callFlags, 1, &callInfo);

    // Capture the dst before LowerCall sinks it into a register move.
    IR::Opnd *const finalDst = callInstr->GetDst();

    // x64 keeps track of argCount for us, so pass just an arbitrary value there
    IR::Instr* ret = this->LowerCall(callInstr, argCount);

    IR::AutoReuseOpnd autoReuseSavedFunctionObjOpnd;
    if (callInstr->IsJitProfilingInstr())
    {
        Assert(callInstr->m_func->IsSimpleJit());
        Assert(!CONFIG_FLAG(NewSimpleJit));

        if(finalDst &&
            finalDst->IsRegOpnd() &&
            functionObjOpnd->IsRegOpnd() &&
            finalDst->AsRegOpnd()->m_sym == functionObjOpnd->AsRegOpnd()->m_sym)
        {
            // The function object sym is going to be overwritten, so save it in a temp for profiling
            IR::RegOpnd *const savedFunctionObjOpnd = IR::RegOpnd::New(functionObjOpnd->GetType(), callInstr->m_func);
            autoReuseSavedFunctionObjOpnd.Initialize(savedFunctionObjOpnd, callInstr->m_func);
            Lowerer::InsertMove(savedFunctionObjOpnd, functionObjOpnd, callInstr->m_next);
            functionObjOpnd = savedFunctionObjOpnd;
        }

        auto instr = callInstr->AsJitProfilingInstr();
        ret = this->lowererMD->m_lowerer->GenerateCallProfiling(
            instr->profileId,
            instr->inlineCacheIndex,
            instr->GetDst(),
            functionObjOpnd,
            callInfo,
            instr->isProfiledReturnCall,
            callInstr,
            ret);
    }
    return ret;
}
  685. static inline IRType ExtendHelperArg(IRType type)
  686. {
  687. #ifdef __clang__
  688. // clang expects caller to extend arg size to int
  689. switch (type)
  690. {
  691. case TyInt8:
  692. case TyInt16:
  693. return TyInt32;
  694. case TyUint8:
  695. case TyUint16:
  696. return TyUint32;
  697. }
  698. #endif
  699. return type;
  700. }
// Machine-dependent lowering of a call: converts the instruction to CALL,
// sinks the dst into the ABI return register, assigns pending helper-call
// arguments to their registers/stack slots (handling the Sys V vs Windows
// register-assignment difference), homes register args on xplat, and loads
// 64-bit helper addresses into a register (no 64-bit immediate CALL).
// `argCount` is the number of variadic JS runtime args already laid out by
// LowerCallArgs; 0 means a plain helper call.
// Returns the last instruction of the lowered sequence (the dst move, if any).
IR::Instr *
LowererMDArch::LowerCall(IR::Instr * callInstr, uint32 argCount)
{
    UNREFERENCED_PARAMETER(argCount);
    IR::Instr *retInstr = callInstr;
    callInstr->m_opcode = Js::OpCode::CALL;

    // This is required here due to calls create during lowering
    callInstr->m_func->SetHasCallsOnSelfAndParents();

    if (callInstr->GetDst())
    {
        IR::Opnd * dstOpnd;

        this->lowererMD->ForceDstToReg(callInstr);
        dstOpnd = callInstr->GetDst();
        IRType dstType = dstOpnd->GetType();
        Js::OpCode assignOp = GetAssignOp(dstType);

        if (callInstr->GetSrc1()->IsHelperCallOpnd())
        {
            // Truncate the result of a conversion to 32-bit int, because the C++ code doesn't.
            IR::HelperCallOpnd *helperOpnd = callInstr->GetSrc1()->AsHelperCallOpnd();
            if (helperOpnd->m_fnHelper == IR::HelperConv_ToInt32 ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToInt32_Full ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToInt32Core ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32 ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32_Full ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32Core)
            {
                assignOp = Js::OpCode::MOV_TRUNC;
            }
        }

        // Sink the dst past the call into a move from the return register.
        IR::Instr * movInstr = callInstr->SinkDst(assignOp);
        RegNum reg = GetRegReturn(dstType);
        callInstr->GetDst()->AsRegOpnd()->SetReg(reg);
        movInstr->GetSrc1()->AsRegOpnd()->SetReg(reg);
        retInstr = movInstr;
    }

    //
    // assign the arguments to appropriate positions
    //

    AssertMsg(this->helperCallArgsCount >= 0, "Fatal. helper call arguments ought to be positive");
    AssertMsg(this->helperCallArgsCount < MaxArgumentsToHelper && MaxArgumentsToHelper < 255, "Too many helper call arguments");

    uint16 argsLeft = static_cast<uint16>(this->helperCallArgsCount);

    // Sys V x64 ABI assigns int and xmm arg registers separately.
    // e.g. args:   int, double, int,  double, int,  double
    //   Windows:   int0, xmm1,  int2, xmm3,   stack, stack
    //     Sys V:   int0, xmm0,  int1, xmm1,   int2,  xmm2
#ifdef _WIN32
#define _V_ARG_INDEX(index) index
#else
    // Precompute per-argument positions for Sys V: int and float args draw
    // from independent register pools before spilling to the stack.
    // Note: helperCallArgs[] is filled in reverse, hence the reversed walk.
    uint16 _vindex[MaxArgumentsToHelper];
    {
        uint16 intIndex = 1, doubleIndex = 1, stackIndex = IntArgRegsCount + 1;
        for (int i = 0; i < this->helperCallArgsCount; i++)
        {
            IR::Opnd * helperSrc = this->helperCallArgs[this->helperCallArgsCount - 1 - i];
            IRType type = helperSrc->GetType();
            if (IRType_IsFloat(type) || IRType_IsSimd128(type))
            {
                if (doubleIndex <= XmmArgRegsCount)
                {
                    _vindex[i] = doubleIndex++;
                }
                else
                {
                    _vindex[i] = stackIndex++;
                }
            }
            else
            {
                if (intIndex <= IntArgRegsCount)
                {
                    _vindex[i] = intIndex++;
                }
                else
                {
                    _vindex[i] = stackIndex++;
                }
            }
        }
    }
#define _V_ARG_INDEX(index) _vindex[(index) - 1]
#endif

    // xplat NOTE: Lower often loads "known args" with LoadHelperArgument() and
    // variadic JS runtime args with LowerCallArgs(). So the full args length is
    //      this->helperCallArgsCount + argCount
    // "argCount > 0" indicates we have variadic JS runtime args and needs to
    // manually home registers on xplat.
    const bool shouldHomeParams = argCount > 0;
    while (argsLeft > 0)
    {
        IR::Opnd * helperSrc = this->helperCallArgs[this->helperCallArgsCount - argsLeft];
        uint16 index = _V_ARG_INDEX(argsLeft);
        StackSym * helperSym = m_func->m_symTable->GetArgSlotSym(index);
        helperSym->m_type = ExtendHelperArg(helperSrc->GetType());
        Lowerer::InsertMove(
            this->GetArgSlotOpnd(index, helperSym, /*isHelper*/!shouldHomeParams),
            helperSrc,
            callInstr, false);
        --argsLeft;
    }

#ifndef _WIN32
    // Manually home args
    if (shouldHomeParams)
    {
        const int callArgCount = this->helperCallArgsCount + static_cast<int>(argCount);

        // Walk the register args back-to-front and spill each to its home
        // stack slot; int positions past IntArgRegsCount have no register
        // and are skipped.
        int argRegs = min(callArgCount, static_cast<int>(XmmArgRegsCount));
        for (int i = argRegs; i > 0; i--)
        {
            IRType type = this->xplatCallArgs.args[i];

            bool isFloatArg = this->xplatCallArgs.IsFloat(i);
            if ( i > IntArgRegsCount && !isFloatArg ) continue;

            StackSym * sym = this->m_func->m_symTable->GetArgSlotSym(static_cast<uint16>(i));
            RegNum reg = GetRegFromArgPosition(isFloatArg, i);

            IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
            regOpnd->m_isCallArg = true;

            Lowerer::InsertMove(
                IR::SymOpnd::New(sym, type, this->m_func),
                regOpnd,
                callInstr, false);
        }
    }
    this->xplatCallArgs.Reset();
#endif // !_WIN32

    //
    // load the address into a register because we cannot directly access 64 bit constants
    // in CALL instruction. Non helper call methods will already be accessed indirectly.
    //
    // Skip this for bailout calls. The register allocator will lower that as appropriate, without affecting spill choices.
    //
    // Also skip this for relocatable helper calls. These will be turned into indirect
    // calls in lower.

    if (callInstr->GetSrc1()->IsHelperCallOpnd() && !callInstr->HasBailOutInfo())
    {
        IR::RegOpnd *targetOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr,m_func), RegRAX, TyMachPtr, this->m_func);
        IR::Instr   *movInstr   = IR::Instr::New(Js::OpCode::MOV, targetOpnd, callInstr->GetSrc1(), this->m_func);
        targetOpnd->m_isCallArg = true;

        callInstr->UnlinkSrc1();
        callInstr->SetSrc1(targetOpnd);
        callInstr->InsertBefore(movInstr);
    }

    //
    // Reset the call
    //
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled , (uint32)this->helperCallArgsCount);
    this->helperCallArgsCount = 0;

    return retInstr;
}
  847. //
  848. // Returns the opnd where the corresponding argument would have been stored. On amd64,
  849. // the first 4 arguments go in registers and the rest are on stack.
  850. //
  851. IR::Opnd *
  852. LowererMDArch::GetArgSlotOpnd(uint16 index, StackSym * argSym, bool isHelper /*= false*/)
  853. {
  854. Assert(index != 0);
  855. uint16 argPosition = index;
  856. IR::Opnd *argSlotOpnd = nullptr;
  857. if (argSym != nullptr)
  858. {
  859. argSym->m_offset = (index - 1) * MachPtr;
  860. argSym->m_allocated = true;
  861. }
  862. IRType type = argSym ? argSym->GetType() : TyMachReg;
  863. const bool isFloatArg = IRType_IsFloat(type) || IRType_IsSimd128(type);
  864. RegNum reg = GetRegFromArgPosition(isFloatArg, argPosition);
  865. #ifndef _WIN32
  866. if (isFloatArg && argPosition <= XmmArgRegsCount)
  867. {
  868. this->xplatCallArgs.SetFloat(argPosition);
  869. }
  870. #endif
  871. if (reg != RegNOREG)
  872. {
  873. IR::RegOpnd *regOpnd = IR::RegOpnd::New(argSym, reg, type, m_func);
  874. regOpnd->m_isCallArg = true;
  875. argSlotOpnd = regOpnd;
  876. }
  877. else
  878. {
  879. if (argSym == nullptr)
  880. {
  881. argSym = this->m_func->m_symTable->GetArgSlotSym(index);
  882. }
  883. #ifndef _WIN32
  884. // helper does not home args, adjust stack offset
  885. if (isHelper)
  886. {
  887. const uint16 argIndex = index - IntArgRegsCount;
  888. argSym->m_offset = (argIndex - 1) * MachPtr;
  889. }
  890. #endif
  891. argSlotOpnd = IR::SymOpnd::New(argSym, type, this->m_func);
  892. }
  893. return argSlotOpnd;
  894. }
  895. IR::Instr *
  896. LowererMDArch::LowerAsmJsCallE(IR::Instr *callInstr)
  897. {
  898. IR::IntConstOpnd *callInfo = nullptr;
  899. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 1, &callInfo);
  900. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  901. return ret;
  902. }
  903. IR::Instr *
  904. LowererMDArch::LowerAsmJsCallI(IR::Instr * callInstr)
  905. {
  906. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 0);
  907. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  908. return ret;
  909. }
// Emits the bounds check for a Wasm array access: compares
// index + access size + offset against the array buffer length (src2) and
// raises WASMERR_ArrayIndexOutOfRange when out of range. No check is needed
// when the fast virtual buffer (guard pages) is in use.
// Returns the instruction after which lowering should continue.
IR::Instr *
LowererMDArch::LowerWasmArrayBoundsCheck(IR::Instr * instr, IR::Opnd *addrOpnd)
{
    IR::IndirOpnd * indirOpnd = addrOpnd->AsIndirOpnd();
    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    if (indexOpnd && !indexOpnd->IsIntegral32())
    {
        // We don't expect the index to be anything other than an int32 or uint32
        // Having an int32 index guaranties that int64 index add doesn't overflow
        // If we're wrong, just throw index out of range
        Assert(UNREACHED);
        lowererMD->m_lowerer->GenerateThrow(IR::IntConstOpnd::New(WASMERR_ArrayIndexOutOfRange, TyInt32, m_func), instr);
        return instr;
    }
    if (m_func->GetJITFunctionBody()->UsesWAsmJsFastVirtualBuffer())
    {
        // Out-of-bounds accesses fault via the virtual buffer; no explicit check.
        return instr;
    }

    Assert(instr->GetSrc2());
    // Layout: helperLabel (throw) / loadLabel (continue) / doneLabel.
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);

    // Find array buffer length
    uint32 offset = indirOpnd->GetOffset();
    IR::Opnd *arrayLenOpnd = instr->GetSrc2();
    // Constant part of the range end: access size + constant offset,
    // computed in 64 bits so it cannot overflow.
    IR::Int64ConstOpnd * constOffsetOpnd = IR::Int64ConstOpnd::New((int64)addrOpnd->GetSize() + (int64)offset, TyInt64, m_func);
    IR::Opnd *cmpOpnd;
    if (indexOpnd != nullptr)
    {
        // Compare index + memop access length and array buffer length, and generate RuntimeError if greater
        cmpOpnd = IR::RegOpnd::New(TyInt64, m_func);
        Lowerer::InsertAdd(true, cmpOpnd, indexOpnd, constOffsetOpnd, helperLabel);
    }
    else
    {
        cmpOpnd = constOffsetOpnd;
    }
    lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, arrayLenOpnd, Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    lowererMD->m_lowerer->GenerateThrow(IR::IntConstOpnd::New(WASMERR_ArrayIndexOutOfRange, TyInt32, m_func), loadLabel);
    Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);
    return doneLabel;
}
  952. void
  953. LowererMDArch::LowerAtomicStore(IR::Opnd * dst, IR::Opnd * src1, IR::Instr * insertBeforeInstr)
  954. {
  955. Assert(IRType_IsNativeInt(dst->GetType()));
  956. Assert(IRType_IsNativeInt(src1->GetType()));
  957. IR::RegOpnd* tmpSrc = IR::RegOpnd::New(dst->GetType(), m_func);
  958. Lowerer::InsertMove(tmpSrc, src1, insertBeforeInstr);
  959. // Put src1 as dst to make sure we know that register is modified
  960. IR::Instr* xchgInstr = IR::Instr::New(Js::OpCode::XCHG, tmpSrc, tmpSrc, dst, insertBeforeInstr->m_func);
  961. insertBeforeInstr->InsertBefore(xchgInstr);
  962. }
// Lowers an atomic integer load: a memory barrier (lock or [rsp], 0)
// followed by a plain MOV.
void
LowererMDArch::LowerAtomicLoad(IR::Opnd * dst, IR::Opnd * src1, IR::Instr * insertBeforeInstr)
{
    Assert(IRType_IsNativeInt(dst->GetType()));
    Assert(IRType_IsNativeInt(src1->GetType()));
    IR::Instr* newMove = Lowerer::InsertMove(dst, src1, insertBeforeInstr);
    if (m_func->GetJITFunctionBody()->UsesWAsmJsFastVirtualBuffer())
    {
        // We need to have an AV when accessing out of bounds memory even if the dst is not used
        // Make sure LinearScan doesn't dead store this instruction
        newMove->hasSideEffects = true;
    }

    // Need to add Memory Barrier before the load
    // MemoryBarrier is implemented with `lock or [rsp], 0` on x64
    IR::IndirOpnd* stackTop = IR::IndirOpnd::New(
        IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, m_func),
        0,
        TyMachReg,
        m_func
    );
    IR::IntConstOpnd* zero = IR::IntConstOpnd::New(0, TyMachReg, m_func);
    IR::Instr* memoryBarrier = IR::Instr::New(Js::OpCode::LOCKOR, stackTop, stackTop, zero, m_func);
    // Placed before newMove, i.e. the barrier executes ahead of the load.
    newMove->InsertBefore(memoryBarrier);
}
// Emits the bounds check for an asm.js/Wasm typed-array load. On Windows,
// only SIMD loads need an explicit check (scalar OOB loads are recovered
// via AV); on xplat every load is checked. Out-of-range SIMD loads raise a
// range error; out-of-range scalar loads produce NaN (float) or 0 (int).
// Returns the instruction after which lowering should continue (the done
// label when a check was emitted, otherwise the original instr).
IR::Instr*
LowererMDArch::LowerAsmJsLdElemHelper(IR::Instr * instr, bool isSimdLoad /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Instr* done;
    IR::Opnd * src1 = instr->UnlinkSrc1();
    IRType type = src1->GetType();
    IR::RegOpnd * indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;

    Assert(isSimdLoad == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);

#if ENABLE_FAST_ARRAYBUFFER
    // Fast (virtual) array buffer: OOB accesses fault; skip the check.
    if (CONFIG_FLAG(WasmFastArray) && m_func->GetJITFunctionBody()->IsWasmFunction())
    {
        return instr;
    }
#endif

#ifdef _WIN32
    // For x64, bound checks are required only for SIMD loads.
    if (isSimdLoad)
#else
    // xplat: Always do bound check. We don't support out-of-bound access violation recovery.
    if (true)
#endif
    {
        // Layout: helperLabel (OOB handling) / loadLabel (in-bounds load) / doneLabel.
        IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
        IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
        IR::Opnd *cmpOpnd;
        if (indexOpnd)
        {
            cmpOpnd = indexOpnd;
        }
        else
        {
            // Constant-index access: compare the offset itself.
            cmpOpnd = IR::IntConstOpnd::New(src1->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
        }

        // if dataWidth != byte per element, we need to check end offset
        if (checkEndOffset)
        {
            IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
            // MOV tmp, cmpOpnd
            Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
            // ADD tmp, dataWidth
            Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, tmp->GetType(), m_func, true), helperLabel);
            // JB helper -- the add overflowed (wrapped), so the access is OOB
            Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
            // CMP tmp, size
            // JG $helper
            lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
        }
        else
        {
#ifdef ENABLE_WASM_SIMD
            if (m_func->GetJITFunctionBody()->IsWasmFunction() && src1->AsIndirOpnd()->GetOffset()) //WASM
            {
                // Wasm with a non-zero constant offset: check index + offset.
                IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
                // MOV tmp, cmpOpnd
                Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
                // ADD tmp, offset
                Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)src1->AsIndirOpnd()->GetOffset(), tmp->GetType(), m_func), helperLabel);
                // JB helper -- overflow on the add means OOB
                Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
                lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
            }
            else
#endif
            {
                lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
            }
        }
        Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);

        if (isSimdLoad)
        {
            // OOB SIMD load throws a range error.
            lowererMD->m_lowerer->GenerateRuntimeError(loadLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
        }
        else
        {
            // OOB scalar load yields NaN for floats, 0 for ints.
            if (IRType_IsFloat(type))
            {
                Lowerer::InsertMove(instr->UnlinkDst(), IR::FloatConstOpnd::New(Js::NumberConstants::NaN, type, m_func), loadLabel);
            }
            else
            {
                Lowerer::InsertMove(instr->UnlinkDst(), IR::IntConstOpnd::New(0, TyInt8, m_func), loadLabel);
            }
        }
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, loadLabel);
        done = doneLabel;
    }
    else
    {
        Assert(!instr->GetSrc2());
        done = instr;
    }
    return done;
}
// Emits the bounds check for an asm.js/Wasm typed-array store; mirrors
// LowerAsmJsLdElemHelper. On Windows only SIMD stores are checked; on xplat
// every store is. OOB SIMD stores raise a range error; OOB scalar stores
// are simply skipped (branch past the store to the done label).
// Returns the instruction after which lowering should continue.
IR::Instr*
LowererMDArch::LowerAsmJsStElemHelper(IR::Instr * instr, bool isSimdStore /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Instr* done;
    IR::Opnd * dst = instr->UnlinkDst();
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;

    Assert(isSimdStore == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);

#ifdef _WIN32
    // For x64, bound checks are required only for SIMD loads.
    if (isSimdStore)
#else
    // xplat: Always do bound check. We don't support out-of-bound access violation recovery.
    if (true)
#endif
    {
        // Layout: helperLabel (OOB handling) / storeLabel (in-bounds store) / doneLabel.
        IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
        IR::LabelInstr * storeLabel = Lowerer::InsertLabel(false, instr);
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
        IR::Opnd * cmpOpnd;
        if (indexOpnd)
        {
            cmpOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
        }
        else
        {
            // Constant-index access: compare the offset itself.
            cmpOpnd = IR::IntConstOpnd::New(dst->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
        }

        // if dataWidth != byte per element, we need to check end offset
        if (checkEndOffset)
        {
            IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
            // MOV tmp, cmpOpnd
            Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
            // ADD tmp, dataWidth
            Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, tmp->GetType(), m_func, true), helperLabel);
            // JB helper -- the add overflowed (wrapped), so the access is OOB
            Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
            // CMP tmp, size
            // JG $helper
            lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
        }
        else
        {
#ifdef ENABLE_WASM_SIMD
            if (m_func->GetJITFunctionBody()->IsWasmFunction() && dst->AsIndirOpnd()->GetOffset()) //WASM
            {
                // Wasm with a non-zero constant offset: check index + offset.
                IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
                // MOV tmp, cmpOpnd
                Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
                // ADD tmp, offset
                Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dst->AsIndirOpnd()->GetOffset(), tmp->GetType(), m_func), helperLabel);
                // JB helper -- overflow on the add means OOB
                Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
                lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
            }
            else
#endif
            {
                lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
            }
        }
        Lowerer::InsertBranch(Js::OpCode::Br, storeLabel, helperLabel);

        if (isSimdStore)
        {
            // OOB SIMD store throws a range error; OOB scalar store is a no-op.
            lowererMD->m_lowerer->GenerateRuntimeError(storeLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
        }

        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, storeLabel);
        done = doneLabel;
    }
    else
    {
        Assert(!instr->GetSrc2());
        done = instr;
    }
    return done;
}
  1159. ///----------------------------------------------------------------------------
  1160. ///
  1161. /// LowererMDArch::LowerStartCall
  1162. ///
  1163. ///
  1164. ///----------------------------------------------------------------------------
  1165. IR::Instr *
  1166. LowererMDArch::LowerStartCall(IR::Instr * startCallInstr)
  1167. {
  1168. startCallInstr->m_opcode = Js::OpCode::LoweredStartCall;
  1169. return startCallInstr;
  1170. }
  1171. IR::Instr *
  1172. LowererMDArch::LoadInt64HelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1173. {
  1174. return LoadHelperArgument(instrInsert, opndArg);
  1175. }
  1176. ///----------------------------------------------------------------------------
  1177. ///
  1178. /// LowererMDArch::LoadHelperArgument
  1179. ///
  1180. /// Assign register or push on stack as per AMD64 calling convention
  1181. ///
  1182. ///----------------------------------------------------------------------------
  1183. IR::Instr *
  1184. LowererMDArch::LoadHelperArgument(IR::Instr *instr, IR::Opnd *opndArg)
  1185. {
  1186. IR::Opnd *destOpnd;
  1187. IR::Instr *instrToReturn;
  1188. if(opndArg->IsImmediateOpnd())
  1189. {
  1190. destOpnd = opndArg;
  1191. instrToReturn = instr;
  1192. }
  1193. else
  1194. {
  1195. destOpnd = IR::RegOpnd::New(opndArg->GetType(), this->m_func);
  1196. instrToReturn = instr->m_prev;
  1197. Lowerer::InsertMove(destOpnd, opndArg, instr, false);
  1198. instrToReturn = instrToReturn->m_next;
  1199. }
  1200. helperCallArgs[helperCallArgsCount++] = destOpnd;
  1201. AssertMsg(helperCallArgsCount < LowererMDArch::MaxArgumentsToHelper,
  1202. "We do not yet support any no. of arguments to the helper");
  1203. return instrToReturn;
  1204. }
  1205. IR::Instr *
  1206. LowererMDArch::LoadDynamicArgument(IR::Instr *instr, uint argNumber)
  1207. {
  1208. Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  1209. Assert(instr->GetSrc2() == nullptr);
  1210. instr->m_opcode = Js::OpCode::MOV;
  1211. IR::Opnd* dst = GetArgSlotOpnd((Js::ArgSlot) argNumber);
  1212. instr->SetDst(dst);
  1213. if (!dst->IsRegOpnd())
  1214. {
  1215. //TODO: Move it to legalizer.
  1216. IR::RegOpnd *tempOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);
  1217. instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, tempOpnd, instr->GetSrc1(), instr->m_func));
  1218. instr->ReplaceSrc1(tempOpnd);
  1219. }
  1220. return instr;
  1221. }
  1222. IR::Instr *
  1223. LowererMDArch::LoadDynamicArgumentUsingLength(IR::Instr *instr)
  1224. {
  1225. Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  1226. IR::RegOpnd* src2 = instr->UnlinkSrc2()->AsRegOpnd();
  1227. IR::Instr*mov = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyMachReg, this->m_func), src2, this->m_func);
  1228. instr->InsertBefore(mov);
  1229. //We need store nth actuals, so stack location is after function object, callinfo & this pointer
  1230. instr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, mov->GetDst(), mov->GetDst(), IR::IntConstOpnd::New(3, TyMachReg, this->m_func), this->m_func));
  1231. IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
  1232. IR::IndirOpnd *actualsLocation = IR::IndirOpnd::New(stackPointer, mov->GetDst()->AsRegOpnd(), GetDefaultIndirScale(), TyMachReg, this->m_func);
  1233. instr->SetDst(actualsLocation);
  1234. instr->m_opcode = Js::OpCode::MOV;
  1235. return instr;
  1236. }
  1237. IR::Instr *
  1238. LowererMDArch::LoadDoubleHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1239. {
  1240. IR::Opnd * float64Opnd;
  1241. if (opndArg->GetType() == TyFloat32)
  1242. {
  1243. float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
  1244. IR::Instr * instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, opndArg, this->m_func);
  1245. instrInsert->InsertBefore(instr);
  1246. }
  1247. else
  1248. {
  1249. float64Opnd = opndArg;
  1250. }
  1251. Assert(opndArg->IsFloat());
  1252. return LoadHelperArgument(instrInsert, opndArg);
  1253. }
  1254. IR::Instr *
  1255. LowererMDArch::LoadFloatHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1256. {
  1257. Assert(opndArg->IsFloat32());
  1258. return LoadHelperArgument(instrInsert, opndArg);
  1259. }
  1260. //
  1261. // Emits the code to allocate 'size' amount of space on stack. for values smaller than PAGE_SIZE
  1262. // this will just emit sub rsp,size otherwise calls _chkstk.
  1263. //
void
LowererMDArch::GenerateStackAllocation(IR::Instr *instr, uint32 size)
{
    // Emits stack allocation code AFTER 'instr'. For sizes within one guard
    // page a plain SUB RSP is safe; larger sizes must go through _chkstk so
    // every intervening page is touched and the guard page commits.
    Assert(size > 0);
    IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
    //review: size should fit in 32bits
    IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(size, TyMachReg, this->m_func);

    if (size <= PAGESIZE)
    {
        // Generate SUB RSP, stackSize
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB,
                                              rspOpnd, rspOpnd, stackSizeOpnd, this->m_func);
        instr->InsertAfter(subInstr);
    }
    else
    {
        // Generate _chkstk call
        //
        // REVIEW: Call to helper functions assume the address of the variable to be present in
        // RAX. But _chkstk method accepts argument in RAX. Hence handling this one manually.
        // fix this later when CALLHELPER leaved dependency on RAX.
        //
        // NOTE: everything below is inserted with InsertAfter(instr), so
        // later-built instructions land EARLIER in the stream. Intended final
        // order (before any adjustment LowerCall itself makes):
        //     MOV rax, size ; MOV rcx, &_chkstk ; CALL ; SUB rsp, size
        IR::RegOpnd *raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
        IR::RegOpnd *rcxOpnd = IR::RegOpnd::New(nullptr, RegRCX, TyMachReg, this->m_func);

        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, rspOpnd, rspOpnd, stackSizeOpnd, this->m_func);
        instr->InsertAfter(subInstr);

        // Leave off the src until we've calculated it below.
        IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, raxOpnd, rcxOpnd, this->m_func);
        instr->InsertAfter(callInstr);
        this->LowerCall(callInstr, 0);

        {
            // MOV rcx, &_chkstk (the call target).
            IR::Instr *movHelperAddrInstr = IR::Instr::New(
                Js::OpCode::MOV,
                rcxOpnd,
                IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func),
                this->m_func);
            instr->InsertAfter(movHelperAddrInstr);
        }

        // MOV rax, size — _chkstk takes the allocation size in RAX.
        Lowerer::InsertMove(raxOpnd, stackSizeOpnd, instr->m_next);
    }
}
  1305. void
  1306. LowererMDArch::MovArgFromReg2Stack(IR::Instr * instr, RegNum reg, uint16 slotNumber, IRType type)
  1307. {
  1308. StackSym * slotSym = this->m_func->m_symTable->GetArgSlotSym(slotNumber + 1);
  1309. slotSym->m_type = type;
  1310. IR::SymOpnd * dst = IR::SymOpnd::New(slotSym, type, this->m_func);
  1311. IR::RegOpnd * src = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
  1312. IR::Instr * movInstr = IR::Instr::New(GetAssignOp(type), dst, src, this->m_func);
  1313. instr->InsertAfter(movInstr);
  1314. }
  1315. ///----------------------------------------------------------------------------
  1316. ///
  1317. /// LowererMDArch::LowerEntryInstr
  1318. ///
  1319. /// Emit prolog.
  1320. ///
  1321. ///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LowerEntryInstr(IR::EntryInstr * entryInstr)
{
    /*
     * Emit the x64 prolog:
     *
     * push rbp
     * mov rbp, rsp
     * sub rsp, localVariablesHeight + floatCalleeSavedRegsSize
     * movsdx qword ptr [rsp + 0], xmm6 ------\
     * movsdx qword ptr [rsp + 8], xmm7       |
     * ...                                    |
     * movsdx qword ptr [rsp + (N * 8)], xmmN |- Callee saved registers.
     * push rsi                               |
     * ...                                    |
     * push rbx                         ------/
     * sub rsp, ArgumentsBacking
     *
     * NOTE: instructions are built with InsertAfter(entryInstr), so code built
     * LATER in this function appears EARLIER in the final instruction stream.
     */
    uint savedRegSize = 0;
    IR::Instr *firstPrologInstr = nullptr;
    IR::Instr *lastPrologInstr = nullptr;

    // PUSH used callee-saved registers.
    IR::Instr *secondInstr = entryInstr->m_next;
    AssertMsg(secondInstr, "Instruction chain broken.");

    IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
    unsigned xmmOffset = 0;

    // PDATA doesn't seem to like two consecutive "SUB RSP, size" instructions. Temporarily save and
    // restore RBX always so that the pattern doesn't occur in the prolog.
    for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg + 1))
    {
        // With a try region, conservatively save every callee-saved register.
        if (LinearScan::IsCalleeSaved(reg) && (this->m_func->HasTry() || this->m_func->m_regsUsed.Test(reg)))
        {
            IRType type = RegTypes[reg];
            IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
            if (type == TyFloat64)
            {
                // xmm registers are stored with MOVAPS into 16-byte slots below rsp
                // (not pushed); each save is recorded for the unwind info.
                IR::Instr *saveInstr = IR::Instr::New(Js::OpCode::MOVAPS,
                                                      IR::IndirOpnd::New(stackPointer,
                                                                         xmmOffset,
                                                                         type,
                                                                         this->m_func),
                                                      regOpnd,
                                                      this->m_func);
                xmmOffset += (MachDouble * 2);
                entryInstr->InsertAfter(saveInstr);
                m_func->m_prologEncoder.RecordXmmRegSave();
            }
            else
            {
                Assert(type == TyInt64);
                IR::Instr *pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
                pushInstr->SetSrc1(regOpnd);
                entryInstr->InsertAfter(pushInstr);
                m_func->m_prologEncoder.RecordNonVolRegSave();
                savedRegSize += MachPtr;
            }
        }
    }

    //
    // Now that we know the exact stack size, lets fix it for alignment
    // The stack on entry would be aligned. VC++ recommends that the stack
    // should always be 16 byte aligned.
    //
    uint32 argSlotsForFunctionsCalled = this->m_func->m_argSlotsForFunctionsCalled;
    if (Lowerer::IsArgSaveRequired(this->m_func))
    {
        // Reserve at least enough slots to home the integer argument registers.
        if (argSlotsForFunctionsCalled < IntArgRegsCount)
            argSlotsForFunctionsCalled = IntArgRegsCount;
    }
    else
    {
        argSlotsForFunctionsCalled = 0;
    }

    // NOTE(review): the purpose of the extra (+1) slot isn't visible here —
    // presumably alignment/scratch; confirm against the frame layout docs.
    uint32 stackArgsSize = MachPtr * (argSlotsForFunctionsCalled + 1);
    this->m_func->m_localStackHeight = Math::Align<int32>(this->m_func->m_localStackHeight, 8);

    // Allocate the inlined arg out stack in the locals. Allocate an additional slot so that
    // we can unconditionally clear the first slot past the current frame.
    this->m_func->m_localStackHeight += m_func->GetMaxInlineeArgOutSize() + MachPtr;

    uint32 stackLocalsSize = this->m_func->m_localStackHeight;
    if(xmmOffset != 0)
    {
        // Xmm registers need to be saved to 16-byte-aligned addresses. The stack locals size is aligned here and the total
        // size will be aligned below, which guarantees that the offset from rsp will be 16-byte-aligned.
        stackLocalsSize = ::Math::Align(stackLocalsSize + xmmOffset, static_cast<uint32>(MachDouble * 2));
    }

    uint32 totalStackSize = stackLocalsSize +
                            stackArgsSize +
                            savedRegSize;
    AssertMsg(0 == (totalStackSize % 8), "Stack should always be 8 byte aligned");
    uint32 alignmentPadding = (totalStackSize % 16) ? MachPtr : 0;
    stackArgsSize += alignmentPadding;
    Assert(
        xmmOffset == 0 ||
        ::Math::Align(stackArgsSize + savedRegSize, static_cast<uint32>(MachDouble * 2)) == stackArgsSize + savedRegSize);
    totalStackSize += alignmentPadding;
    if(totalStackSize > (1u << 20)) // 1 MB
    {
        // Total stack size is > 1 MB, let's just bail. There are things that need to be changed to allow using large stack
        // sizes, for instance in the unwind info, the offset to saved xmm registers can be (1 MB - 16) at most for the op-code
        // we're currently using (UWOP_SAVE_XMM128). To support larger offsets, we need to use a FAR version of the op-code.
        throw Js::OperationAbortedException();
    }

    if (m_func->HasInlinee())
    {
        this->m_func->GetJITOutput()->SetFrameHeight(this->m_func->m_localStackHeight);
    }

    //
    // This is the last instruction so should have been emitted before, register saves.
    // But we did not have 'savedRegSize' by then. So we saved secondInstr. We now insert w.r.t that
    // instruction.
    //
    this->m_func->SetArgsSize(stackArgsSize);
    this->m_func->SetSavedRegSize(savedRegSize);
    this->m_func->SetSpillSize(stackLocalsSize);

    if (secondInstr == entryInstr->m_next)
    {
        // There is no register save at all, just combine the stack allocation
        uint combineStackAllocationSize = stackArgsSize + stackLocalsSize;
        this->GenerateStackAllocation(secondInstr->m_prev, combineStackAllocationSize);
        m_func->m_prologEncoder.RecordAlloca(combineStackAllocationSize);
    }
    else
    {
        // Arguments area goes below the register saves; locals above them.
        this->GenerateStackAllocation(secondInstr->m_prev, stackArgsSize);
        m_func->m_prologEncoder.RecordAlloca(stackArgsSize);

        // Allocate frame.
        if (stackLocalsSize)
        {
            this->GenerateStackAllocation(entryInstr, stackLocalsSize);
            m_func->m_prologEncoder.RecordAlloca(stackLocalsSize);
        }
    }
    lastPrologInstr = secondInstr->m_prev;
    Assert(lastPrologInstr != entryInstr);

    // Zero-initialize dedicated arguments slot.
    IR::Instr *movRax0 = nullptr;
    IR::Opnd *raxOpnd = nullptr;
    if ((this->m_func->HasArgumentSlot() &&
         (this->m_func->IsStackArgsEnabled() ||
          this->m_func->IsJitInDebugMode() ||
          // disabling apply inlining leads to explicit load from the zero-inited slot
          this->m_func->GetJITFunctionBody()->IsInlineApplyDisabled()))
#ifdef BAILOUT_INJECTION
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutFlag)
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryLineFlag)
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryByteCodeFlag)
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutByteCodeFlag)
#endif
        )
    {
        // TODO: Support mov [rbp - n], IMM64
        // XOR eax, eax (32-bit xor zero-extends, clearing all of rax), then
        // store rax into the arguments slot.
        raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyUint32, this->m_func);
        movRax0 = IR::Instr::New(Js::OpCode::XOR, raxOpnd, raxOpnd, raxOpnd, this->m_func);
        secondInstr->m_prev->InsertAfter(movRax0);

        IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
        IR::Instr *movNullInstr = IR::Instr::New(Js::OpCode::MOV, opnd, raxOpnd->UseWithNewType(TyMachReg, this->m_func), this->m_func);
        secondInstr->m_prev->InsertAfter(movNullInstr);
    }

    // Zero initialize the first inlinee frames argc.
    if (m_func->HasInlinee())
    {
        if(!movRax0)
        {
            // Reuse the XOR-zeroed rax pattern if the block above didn't run.
            raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyUint32, this->m_func);
            movRax0 = IR::Instr::New(Js::OpCode::XOR, raxOpnd, raxOpnd, raxOpnd, this->m_func);
            secondInstr->m_prev->InsertAfter(movRax0);
        }

        StackSym *sym = this->m_func->m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
        sym->m_isInlinedArgSlot = true;
        sym->m_offset = 0;
        IR::Opnd *dst = IR::SymOpnd::New(sym, 0, TyMachReg, this->m_func);
        secondInstr->m_prev->InsertAfter(IR::Instr::New(Js::OpCode::MOV,
                                                        dst,
                                                        raxOpnd->UseWithNewType(TyMachReg, this->m_func),
                                                        this->m_func));
    }

    // Generate MOV RBP, RSP
    IR::RegOpnd * rbpOpnd = IR::RegOpnd::New(nullptr, RegRBP, TyMachReg, this->m_func);
    IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
    IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, rbpOpnd, rspOpnd, this->m_func);
    entryInstr->InsertAfter(movInstr);

    // Generate PUSH RBP — inserted last, so it becomes the FIRST prolog instruction.
    IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
    pushInstr->SetSrc1(rbpOpnd);
    entryInstr->InsertAfter(pushInstr);
    m_func->m_prologEncoder.RecordNonVolRegSave();
    firstPrologInstr = pushInstr;

    //
    // Insert pragmas that tell the prolog encoder the extent of the prolog.
    //
    firstPrologInstr->InsertBefore(IR::PragmaInstr::New(Js::OpCode::PrologStart, 0, m_func));
    lastPrologInstr->InsertAfter(IR::PragmaInstr::New(Js::OpCode::PrologEnd, 0, m_func));

#ifdef _WIN32 // home registers
    //
    // Now store all the arguments in the register in the stack slots
    //
    if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody())
    {
        uint16 offset = 2;
        // Slot 1 homes RCX (the first incoming register argument; the type
        // parameter is defaulted — declared elsewhere).
        this->MovArgFromReg2Stack(entryInstr, RegRCX, 1);
        // Only the first three asm.js arguments arrive in registers
        // (RDX/R8/R9 for integers, XMM1..XMM3 for floats/SIMD); SIMD values
        // occupy two stack slots, scalars one.
        for (uint16 i = 0; i < m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetArgCount() && i < 3; i++)
        {
            switch (m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetArgType(i))
            {
            case Js::AsmJsVarType::Int:
                this->MovArgFromReg2Stack(entryInstr, i == 0 ? RegRDX : i == 1 ? RegR8 : RegR9, offset, TyInt32);
                offset++;
                break;
            case Js::AsmJsVarType::Int64:
                this->MovArgFromReg2Stack(entryInstr, i == 0 ? RegRDX : i == 1 ? RegR8 : RegR9, offset, TyInt64);
                offset++;
                break;
            case Js::AsmJsVarType::Float:
                // registers we need are contiguous, so calculate it from XMM1
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TyFloat32);
                offset++;
                break;
            case Js::AsmJsVarType::Double:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TyFloat64);
                offset++;
                break;
            case Js::AsmJsVarType::Float32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128F4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Int32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Int16x8:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I8);
                offset += 2;
                break;
            case Js::AsmJsVarType::Int8x16:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I16);
                offset += 2;
                break;
            case Js::AsmJsVarType::Uint32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128U4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Uint16x8:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128U8);
                offset += 2;
                break;
            case Js::AsmJsVarType::Uint8x16:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128U16);
                offset += 2;
                break;
            case Js::AsmJsVarType::Bool32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128B4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Bool16x8:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128B8);
                offset += 2;
                break;
            case Js::AsmJsVarType::Bool8x16:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128B16);
                offset += 2;
                break;
            case Js::AsmJsVarType::Float64x2:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128D2);
                offset += 2;
                break;
            case Js::AsmJsVarType::Int64x2:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I2);
                offset += 2;
                break;
            default:
                Assume(UNREACHED);
            }
        }
    }
    else if (argSlotsForFunctionsCalled)
    {
        // Home the four integer argument registers into their shadow slots.
        this->MovArgFromReg2Stack(entryInstr, RegRCX, 1);
        this->MovArgFromReg2Stack(entryInstr, RegRDX, 2);
        this->MovArgFromReg2Stack(entryInstr, RegR8, 3);
        this->MovArgFromReg2Stack(entryInstr, RegR9, 4);
    }
#endif // _WIN32

    // Probe the stack so an overflow is detected eagerly at function entry.
    IntConstType frameSize = Js::Constants::MinStackJIT + stackArgsSize + stackLocalsSize + savedRegSize;
    this->GeneratePrologueStackProbe(entryInstr, frameSize);

    return entryInstr;
}
void
LowererMDArch::GeneratePrologueStackProbe(IR::Instr *entryInstr, IntConstType frameSize)
{
    //
    // Generate a stack overflow check. Since ProbeCurrentStack throws an exception it needs
    // an unwindable stack. Should we need to call ProbeCurrentStack, instead of creating a new frame here,
    // we make it appear like our caller directly called ProbeCurrentStack.
    //
    // For thread-bound thread context
    //     MOV rax, ThreadContext::scriptStackLimit + frameSize
    //     CMP rsp, rax
    //     JG  $done
    //     MOV rax, ThreadContext::ProbeCurrentStack
    //     MOV rcx, frameSize
    //     MOV rdx, scriptContext
    //     JMP rax
    // $done:
    //
    // For thread-agile thread context
    //     MOV rax, [ThreadContext::scriptStackLimit]
    //     ADD rax, frameSize
    //     CMP rsp, rax
    //     JG  $done
    //     MOV rax, ThreadContext::ProbeCurrentStack
    //     MOV rcx, frameSize
    //     MOV rdx, scriptContext
    //     JMP rax
    // $done:
    //
    // For thread context with script interrupt enabled
    //     MOV rax, [ThreadContext::scriptStackLimit]
    //     ADD rax, frameSize
    //     JO  $helper
    //     CMP rsp, rax
    //     JG  $done
    // $helper:
    //     MOV rax, ThreadContext::ProbeCurrentStack
    //     MOV rcx, frameSize
    //     MOV rdx, scriptContext
    //     JMP rax
    // $done:
    //

    // Do not insert stack probe for leaf functions which have low stack footprint
    if (this->m_func->IsTrueLeaf() &&
        frameSize - Js::Constants::MinStackJIT < Js::Constants::MaxStackSizeForNoProbe)
    {
        return;
    }

    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *insertInstr = entryInstr->m_next;
    IR::Instr *instr;
    IR::Opnd *stackLimitOpnd;
    bool doInterruptProbe = m_func->GetJITFunctionBody()->DoInterruptProbe();

    // MOV rax, ThreadContext::scriptStackLimit + frameSize
    stackLimitOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
    if (doInterruptProbe || !m_func->GetThreadContextInfo()->IsThreadBound())
    {
        // Load the current stack limit from the ThreadContext and add the current frame size.
        {
            intptr_t pLimit = m_func->GetThreadContextInfo()->GetThreadStackLimitAddr();
            IR::RegOpnd *baseOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
            Lowerer::InsertMove(baseOpnd, IR::AddrOpnd::New(pLimit, IR::AddrOpndKindDynamicMisc, this->m_func), insertInstr);
            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, 0, TyMachReg, this->m_func);
            Lowerer::InsertMove(stackLimitOpnd, indirOpnd, insertInstr);
        }

        instr = IR::Instr::New(Js::OpCode::ADD, stackLimitOpnd, stackLimitOpnd,
                               IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        if (doInterruptProbe)
        {
            // If the add overflows, call the probe helper.
            instr = IR::BranchInstr::New(Js::OpCode::JO, helperLabel, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }
    else
    {
        // Thread-bound context: the limit is a constant, so the addition can
        // be folded at compile time.
        // TODO: michhol, check this math
        size_t scriptStackLimit = m_func->GetThreadContextInfo()->GetScriptStackLimit();
        Lowerer::InsertMove(stackLimitOpnd, IR::IntConstOpnd::New((frameSize + scriptStackLimit), TyMachReg, this->m_func), insertInstr);
    }

    // CMP rsp, rax
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, m_func));
    instr->SetSrc2(stackLimitOpnd);
    insertInstr->InsertBefore(instr);

    IR::LabelInstr * doneLabel = nullptr;
    if (!PHASE_OFF(Js::LayoutPhase, this->m_func))
    {
        // Layout is enabled: emit the cold helper block at the end of the
        // function instead of inline.
        // JLE $helper
        instr = IR::BranchInstr::New(Js::OpCode::JLE, helperLabel, m_func);
        insertInstr->InsertBefore(instr);
        Security::InsertRandomFunctionPad(insertInstr);

        // This is generated after layout. Generate the block at the end of the function manually
        insertInstr = IR::PragmaInstr::New(Js::OpCode::StatementBoundary, Js::Constants::NoStatementIndex, m_func);
        this->m_func->m_tailInstr->InsertAfter(insertInstr);
        this->m_func->m_tailInstr = insertInstr;
    }
    else
    {
        doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        // JGT $done
        instr = IR::BranchInstr::New(Js::OpCode::JGT, doneLabel, m_func);
        insertInstr->InsertBefore(instr);
    }

    insertInstr->InsertBefore(helperLabel);

    IR::RegOpnd *target;
    {
        // Set up the helper call: arguments in RegArg0/RegArg1, target in rax.
        // MOV RegArg1, scriptContext
        Lowerer::InsertMove(
            IR::RegOpnd::New(nullptr, RegArg1, TyMachReg, m_func),
            this->lowererMD->m_lowerer->LoadScriptContextOpnd(insertInstr), insertInstr);

        // MOV RegArg0, frameSize
        Lowerer::InsertMove(
            IR::RegOpnd::New(nullptr, RegArg0, TyMachReg, this->m_func),
            IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), insertInstr);

        // MOV rax, ThreadContext::ProbeCurrentStack
        target = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, m_func);
        Lowerer::InsertMove(target, IR::HelperCallOpnd::New(IR::HelperProbeCurrentStack, m_func), insertInstr);
    }

    // JMP rax — a tail jump, so the probe helper appears to be invoked
    // directly by our caller (see the block comment above).
    instr = IR::MultiBranchInstr::New(Js::OpCode::JMP, target, m_func);
    insertInstr->InsertBefore(instr);

    if (doneLabel)
    {
        // $done:
        insertInstr->InsertBefore(doneLabel);
        Security::InsertRandomFunctionPad(doneLabel);
    }
}
  1736. ///----------------------------------------------------------------------------
  1737. ///
  1738. /// LowererMDArch::LowerExitInstr
  1739. ///
  1740. /// Emit epilog.
  1741. ///
  1742. ///----------------------------------------------------------------------------
  1743. IR::Instr *
  1744. LowererMDArch::LowerExitInstr(IR::ExitInstr * exitInstr)
  1745. {
  1746. uint32 savedRegSize = 0;
  1747. // POP used callee-saved registers
  1748. IR::Instr * exitPrevInstr = exitInstr->m_prev;
  1749. AssertMsg(exitPrevInstr, "Can a function have only 1 instr ? Or is the instr chain broken");
  1750. IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
  1751. unsigned xmmOffset = 0;
  1752. for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg+1))
  1753. {
  1754. if (LinearScan::IsCalleeSaved(reg) && (this->m_func->HasTry() || this->m_func->m_regsUsed.Test(reg)))
  1755. {
  1756. IRType type = RegTypes[reg];
  1757. IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
  1758. if (type == TyFloat64)
  1759. {
  1760. IR::Instr *restoreInstr = IR::Instr::New(Js::OpCode::MOVAPS,
  1761. regOpnd,
  1762. IR::IndirOpnd::New(stackPointer,
  1763. xmmOffset,
  1764. type,
  1765. this->m_func),
  1766. this->m_func);
  1767. xmmOffset += (MachDouble * 2);
  1768. exitInstr->InsertBefore(restoreInstr);
  1769. }
  1770. else
  1771. {
  1772. Assert(type == TyInt64);
  1773. IR::Instr *popInstr = IR::Instr::New(Js::OpCode::POP, regOpnd, this->m_func);
  1774. exitInstr->InsertBefore(popInstr);
  1775. savedRegSize += MachPtr;
  1776. }
  1777. }
  1778. }
  1779. Assert(savedRegSize == (uint)this->m_func->GetSavedRegSize());
  1780. // Generate ADD RSP, argsStackSize before the register restore (if there are any)
  1781. uint32 stackArgsSize = this->m_func->GetArgsSize();
  1782. Assert(stackArgsSize);
  1783. if (savedRegSize || xmmOffset)
  1784. {
  1785. IR::IntConstOpnd *stackSizeOpnd = IR::IntConstOpnd::New(stackArgsSize, TyMachReg, this->m_func);
  1786. IR::Instr *addInstr = IR::Instr::New(Js::OpCode::ADD, stackPointer, stackPointer, stackSizeOpnd, this->m_func);
  1787. exitPrevInstr->InsertAfter(addInstr);
  1788. }
  1789. //
  1790. // useful register operands
  1791. //
  1792. IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
  1793. IR::RegOpnd * rbpOpnd = IR::RegOpnd::New(nullptr, RegRBP, TyMachReg, this->m_func);
  1794. // Restore frame
  1795. // Generate MOV RSP, RBP
  1796. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, rspOpnd, rbpOpnd, this->m_func);
  1797. exitInstr->InsertBefore(movInstr);
  1798. // Generate POP RBP
  1799. IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::POP, rbpOpnd, this->m_func);
  1800. exitInstr->InsertBefore(pushInstr);
  1801. // Insert RET
  1802. IR::IntConstOpnd * intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  1803. IR::RegOpnd *retReg = nullptr;
  1804. if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody())
  1805. {
  1806. switch (m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetRetType())
  1807. {
  1808. case Js::AsmJsRetType::Double:
  1809. case Js::AsmJsRetType::Float:
  1810. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TyMachDouble), TyMachDouble, this->m_func);
  1811. break;
  1812. #ifdef ENABLE_WASM_SIMD
  1813. case Js::AsmJsRetType::Int32x4:
  1814. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I4), TySimd128I4, this->m_func);
  1815. break;
  1816. case Js::AsmJsRetType::Int16x8:
  1817. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I8), TySimd128I8, this->m_func);
  1818. break;
  1819. case Js::AsmJsRetType::Int8x16:
  1820. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I16), TySimd128U16, this->m_func);
  1821. break;
  1822. case Js::AsmJsRetType::Uint32x4:
  1823. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128U4), TySimd128U4, this->m_func);
  1824. break;
  1825. case Js::AsmJsRetType::Uint16x8:
  1826. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128U8), TySimd128U8, this->m_func);
  1827. break;
  1828. case Js::AsmJsRetType::Uint8x16:
  1829. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128U16), TySimd128U16, this->m_func);
  1830. break;
  1831. case Js::AsmJsRetType::Bool32x4:
  1832. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128B4), TySimd128B4, this->m_func);
  1833. break;
  1834. case Js::AsmJsRetType::Bool16x8:
  1835. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128B8), TySimd128B8, this->m_func);
  1836. break;
  1837. case Js::AsmJsRetType::Bool8x16:
  1838. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128B16), TySimd128B16, this->m_func);
  1839. break;
  1840. case Js::AsmJsRetType::Float32x4:
  1841. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128F4), TySimd128F4, this->m_func);
  1842. break;
  1843. case Js::AsmJsRetType::Float64x2:
  1844. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128D2), TySimd128D2, this->m_func);
  1845. break;
  1846. case Js::AsmJsRetType::Int64x2:
  1847. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I2), TySimd128I2, this->m_func);
  1848. break;
  1849. #endif
  1850. case Js::AsmJsRetType::Int64:
  1851. case Js::AsmJsRetType::Signed:
  1852. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1853. break;
  1854. case Js::AsmJsRetType::Void:
  1855. break;
  1856. default:
  1857. Assume(UNREACHED);
  1858. }
  1859. }
  1860. else
  1861. {
  1862. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1863. }
  1864. // Generate RET
  1865. IR::Instr * retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  1866. retInstr->SetSrc1(intSrc);
  1867. if (retReg)
  1868. {
  1869. retInstr->SetSrc2(retReg);
  1870. }
  1871. exitInstr->InsertBefore(retInstr);
  1872. retInstr->m_opcode = Js::OpCode::RET;
  1873. return exitInstr;
  1874. }
  1875. IR::Instr *
  1876. LowererMDArch::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
  1877. {
  1878. // epilogue is almost identical on x64, except for return register
  1879. return LowerExitInstr(exitInstr);
  1880. }
// Lower a machine-independent int32/int64 opcode (Neg_I4, Add_I4, Div_I4,
// Br*_I4, ...) on `instr` to the corresponding x64 machine opcode, inserting
// any required helper instructions (register hoists, CDQ/CQO, CMP/TEST).
// When `signExtend` is true, a MOVSXD of the 32-bit result into the full
// machine register is appended after the instruction.
void
LowererMDArch::EmitInt4Instr(IR::Instr *instr, bool signExtend /* = false */)
{
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();
    IR::Instr *newInstr = nullptr;
    IR::RegOpnd *regEDX;

    bool isInt64Instr = instr->AreAllOpndInt64();
    if (!isInt64Instr)
    {
        // Normalize 32-bit operands to TyInt32, but keep explicit TyUint32
        // operands unsigned (unsignedness matters for DIV vs IDIV below).
        if (dst && !dst->IsUInt32())
        {
            dst->SetType(TyInt32);
        }
        if (!src1->IsUInt32())
        {
            src1->SetType(TyInt32);
        }
        if (src2 && !src2->IsUInt32())
        {
            src2->SetType(TyInt32);
        }
    }

    switch (instr->m_opcode)
    {
    case Js::OpCode::Neg_I4:
        instr->m_opcode = Js::OpCode::NEG;
        break;

    case Js::OpCode::Not_I4:
        instr->m_opcode = Js::OpCode::NOT;
        break;

    case Js::OpCode::Add_I4:
        LowererMD::ChangeToAdd(instr, false /* needFlags */);
        break;

    case Js::OpCode::Sub_I4:
        LowererMD::ChangeToSub(instr, false /* needFlags */);
        break;

    case Js::OpCode::Mul_I4:
        instr->m_opcode = Js::OpCode::IMUL2;
        break;

    // x64 DIV/IDIV produce the quotient in RAX and the remainder in RDX,
    // so the destination is pinned to the appropriate register first.
    case Js::OpCode::DivU_I4:
    case Js::OpCode::Div_I4:
        instr->SinkDst(Js::OpCode::MOV, RegRAX);
        goto idiv_common;
    case Js::OpCode::RemU_I4:
    case Js::OpCode::Rem_I4:
        instr->SinkDst(Js::OpCode::MOV, RegRDX);
idiv_common:
    {
        bool isUnsigned = instr->GetSrc1()->IsUnsigned();
        if (isUnsigned)
        {
            Assert(instr->GetSrc2()->IsUnsigned());
            Assert(instr->m_opcode == Js::OpCode::RemU_I4 || instr->m_opcode == Js::OpCode::DivU_I4);
            instr->m_opcode = Js::OpCode::DIV;
        }
        else
        {
            instr->m_opcode = Js::OpCode::IDIV;
        }
        // Dividend must be in RAX; RDX holds the high half / sign extension.
        instr->HoistSrc1(Js::OpCode::MOV, RegRAX);

        regEDX = IR::RegOpnd::New(src1->GetType(), instr->m_func);
        regEDX->SetReg(RegRDX);
        if (isUnsigned)
        {
            // we need to ensure that register allocator doesn't muck about with rdx
            instr->HoistSrc2(Js::OpCode::MOV, RegRCX);

            // Unsigned division: zero RDX instead of sign-extending.
            Lowerer::InsertMove(regEDX, IR::IntConstOpnd::New(0, src1->GetType(), instr->m_func), instr);
            // NOP ensures that the EDX = Ld_I4 0 doesn't get deadstored, will be removed in peeps
            instr->InsertBefore(IR::Instr::New(Js::OpCode::NOP, regEDX, regEDX, instr->m_func));
        }
        else
        {
            // DIV/IDIV cannot take an immediate divisor; hoist it to a register.
            if (instr->GetSrc2()->IsImmediateOpnd())
            {
                instr->HoistSrc2(Js::OpCode::MOV);
            }
            // Sign-extend RAX into RDX (CQO for 64-bit, CDQ for 32-bit).
            instr->InsertBefore(IR::Instr::New(isInt64Instr ? Js::OpCode::CQO : Js::OpCode::CDQ, regEDX, instr->m_func));
        }
        return;
    }

    case Js::OpCode::Or_I4:
        instr->m_opcode = Js::OpCode::OR;
        break;

    case Js::OpCode::Xor_I4:
        instr->m_opcode = Js::OpCode::XOR;
        break;

    case Js::OpCode::And_I4:
        instr->m_opcode = Js::OpCode::AND;
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
    case Js::OpCode::Rol_I4:
    case Js::OpCode::Ror_I4:
        LowererMD::ChangeToShift(instr, false /* needFlags */);
        break;

    // Branch-on-truthiness: lower to TEST src1, src1 followed by JNE/JEQ.
    case Js::OpCode::BrTrue_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br1_Common;

    case Js::OpCode::BrFalse_I4:
        instr->m_opcode = Js::OpCode::JEQ;
br1_Common:
        src1 = instr->UnlinkSrc1();
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        return;

    // Relational branches: lower to CMP src1, src2 followed by the matching
    // signed (JGT/JGE/JLE/JLT) or unsigned (JA/JAE/JBE/JB) jump.
    case Js::OpCode::BrEq_I4:
        instr->m_opcode = Js::OpCode::JEQ;
        goto br2_Common;

    case Js::OpCode::BrNeq_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br2_Common;

    case Js::OpCode::BrUnGt_I4:
        instr->m_opcode = Js::OpCode::JA;
        goto br2_Common;

    case Js::OpCode::BrUnGe_I4:
        instr->m_opcode = Js::OpCode::JAE;
        goto br2_Common;

    case Js::OpCode::BrUnLe_I4:
        instr->m_opcode = Js::OpCode::JBE;
        goto br2_Common;

    case Js::OpCode::BrUnLt_I4:
        instr->m_opcode = Js::OpCode::JB;
        goto br2_Common;

    case Js::OpCode::BrGt_I4:
        instr->m_opcode = Js::OpCode::JGT;
        goto br2_Common;

    case Js::OpCode::BrGe_I4:
        instr->m_opcode = Js::OpCode::JGE;
        goto br2_Common;

    case Js::OpCode::BrLe_I4:
        instr->m_opcode = Js::OpCode::JLE;
        goto br2_Common;

    case Js::OpCode::BrLt_I4:
        instr->m_opcode = Js::OpCode::JLT;
br2_Common:
        src1 = instr->UnlinkSrc1();
        src2 = instr->UnlinkSrc2();
        newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src2);
        return;

    default:
        AssertMsg(UNREACHED, "Un-implemented int4 opcode");
    }

    if (signExtend)
    {
        // Widen the 32-bit result to the full machine register.
        Assert(instr->GetDst());
        IR::Opnd *dst64 = instr->GetDst()->Copy(instr->m_func);
        dst64->SetType(TyMachReg);
        instr->InsertAfter(IR::Instr::New(Js::OpCode::MOVSXD, dst64, instr->GetDst(), instr->m_func));
    }
    LowererMD::Legalize(instr);
}
  2040. #if !FLOATVAR
// Convert a 32-bit integer in instrLoad's src into a tagged Var in its dst
// (non-FLOATVAR build: shift-and-increment tagging). If the value does not
// fit a tagged int (overflow on SHL, or negative when isFromUint32), control
// falls to EmitLoadVarNoCheck to box the value. isHelper marks the emitted
// labels as helper blocks. instrLoad is removed after expansion.
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    //    e1 = MOV e_src1
    //    e1 = SHL e1, Js::VarTag_Shift
    //         JO $ToVar
    //         JB $ToVar   [isFromUint32]
    //    e1 = INC e1
    // r_dst = MOVSXD e1
    //         JMP $done
    // $ToVar:
    //         EmitLoadVarNoCheck
    // $Done:

    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetDst()->GetType() == TyVar);

    bool isInt = false;
    bool isNotInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *toVar = nullptr;
    IR::LabelInstr *done = nullptr;

    // TODO: Fix bad lowering. We shouldn't get TyVars here.
    // Assert(instrLoad->GetSrc1()->GetType() == TyInt32);
    src1->SetType(TyInt32);

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    if (!isNotInt)
    {
        // Fast path: tag the int32 in place.
        // e1 = MOV e_src1
        IR::RegOpnd *e1 = IR::RegOpnd::New(TyInt32, m_func);
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV, e1, instrLoad->GetSrc1(), m_func));

        // e1 = SHL e1, Js::VarTag_Shift
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::SHL,
            e1,
            e1,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, m_func), m_func));

        if (!isInt)
        {
            // Value not statically known to be taggable: check for overflow
            // of the shift (JO), and for a negative result when the source
            // was uint32 (JB).
            // JO $ToVar
            toVar = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, toVar, m_func));

            if (isFromUint32)
            {
                // JB $ToVar [isFromUint32]
                instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JB, toVar, this->m_func));
            }
        }

        // Set the atom tag bit.
        // e1 = INC e1
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::INC, e1, e1, m_func));

        // dst = MOVSXD e1
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOVSXD, dst, e1, m_func));

        if (!isInt)
        {
            // JMP $done
            done = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
        }
    }

    IR::Instr *insertInstr = instrLoad;
    if (!isInt)
    {
        // Slow path: box the value.
        // $toVar:
        if (toVar)
        {
            instrLoad->InsertBefore(toVar);
        }

        // ToVar()
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper || toVar != nullptr);
    }
    if (done)
    {
        instrLoad->InsertAfter(done);
    }
    instrLoad->Remove();
}
  2122. #else
// Convert a 32-bit integer in instrLoad's src into a tagged Var in its dst
// (FLOATVAR build: set the VarTag bit via BTS). A uint32 source that looks
// negative as int32 cannot be tagged directly and is routed to
// EmitLoadVarNoCheck. isHelper marks the emitted labels as helper blocks.
// instrLoad is removed after expansion.
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    //    MOV_TRUNC e1, e_src1
    //    CMP e1, 0             [uint32]
    //    JLT $Helper           [uint32]     -- overflows?
    //    BTS r1, VarTag_Shift
    //    MOV r_dst, r1
    //    JMP $done             [uint32]
    // $helper                  [uint32]
    //    EmitLoadVarNoCheck
    // $done                    [uint32]

    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetDst()->GetType() == TyVar);

    bool isInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelHelper = nullptr;

    // TODO: Fix bad lowering. We shouldn't get TyVars here.
    // Assert(instrLoad->GetSrc1()->GetType() == TyInt32);
    src1->SetType(TyInt32);

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        // Known not to be an int: go straight to boxing.
        // ToVar()
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper);
        return;
    }
    IR::RegOpnd *r1 = IR::RegOpnd::New(TyVar, m_func);

    // e1 = MOV_TRUNC e_src1
    // (Use MOV_TRUNC here as we rely on the register copy to clear the upper 32 bits.)
    IR::RegOpnd *e1 = r1->Copy(m_func)->AsRegOpnd();
    e1->SetType(TyInt32);
    instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC,
        e1,
        src1,
        m_func));

    if (!isInt && isFromUint32)
    {
        // A uint32 with the high bit set is not representable as a tagged
        // int32; detect it with a signed compare against 0.
        // CMP e1, 0
        IR::Instr *instr = IR::Instr::New(Js::OpCode::CMP, m_func);
        instr->SetSrc1(e1);
        instr->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, m_func));
        instrLoad->InsertBefore(instr);

        Assert(!labelHelper);
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);

        // JLT $helper
        instr = IR::BranchInstr::New(Js::OpCode::JLT, labelHelper, m_func);
        instrLoad->InsertBefore(instr);
    }

    // The previous operation clears the top 32 bits.
    // BTS r1, VarTag_Shift
    this->lowererMD->GenerateInt32ToVarConversion(r1, instrLoad);

    // REVIEW: We need r1 only if we could generate sn = Ld_A_I4 sn. i.e. the destination and
    // source are the same.
    // r_dst = MOV r1
    instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV,
        dst,
        r1,
        m_func));

    if (labelHelper)
    {
        Assert(isFromUint32);

        // JMP $done
        IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, labelDone, m_func));

        // $helper
        instrLoad->InsertBefore(labelHelper);

        // ToVar()
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, true);

        // $done
        instrLoad->InsertBefore(labelDone);
    }
    instrLoad->Remove();
}
  2201. #endif
  2202. void
  2203. LowererMDArch::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2204. {
  2205. Assert(dst->IsRegOpnd() && dst->IsFloat());
  2206. Assert(src->IsRegOpnd() && src->IsInt32());
  2207. if (dst->IsFloat64())
  2208. {
  2209. // Use MOVD to make sure we sign extended the 32-bit src
  2210. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOVD, dst, src, this->m_func));
  2211. // Convert to float
  2212. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTDQ2PD, dst, dst, this->m_func));
  2213. }
  2214. else
  2215. {
  2216. Assert(dst->IsFloat32());
  2217. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSI2SS, dst, src, this->m_func));
  2218. }
  2219. }
  2220. void
  2221. LowererMDArch::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2222. {
  2223. Assert(dst->IsRegOpnd() && dst->IsInt64());
  2224. Assert(src->IsInt32());
  2225. Lowerer::InsertMove(dst, src, instrInsert);
  2226. }
  2227. void
  2228. LowererMDArch::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2229. {
  2230. Assert(dst->IsRegOpnd() && dst->IsInt64());
  2231. Assert(src->IsUInt32());
  2232. Lowerer::InsertMove(dst, src, instrInsert);
  2233. }
  2234. void
  2235. LowererMDArch::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2236. {
  2237. Assert(dst->IsRegOpnd() && dst->IsInt32());
  2238. Assert(src->IsInt64());
  2239. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC, dst, src, instrInsert->m_func));
  2240. }
  2241. void
  2242. LowererMDArch::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2243. {
  2244. Assert(dst->IsRegOpnd() && dst->IsFloat());
  2245. Assert(src->IsRegOpnd() && (src->IsInt32() || src->IsUInt32()));
  2246. // MOV tempReg.i32, src - make sure the top bits are 0
  2247. IR::RegOpnd * tempReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2248. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC, tempReg, src, this->m_func));
  2249. // CVTSI2SD dst, tempReg.i64 (Use the tempreg as if it is 64 bit without sign extension)
  2250. instrInsert->InsertBefore(IR::Instr::New(dst->IsFloat64() ? Js::OpCode::CVTSI2SD : Js::OpCode::CVTSI2SS, dst,
  2251. tempReg->UseWithNewType(TyInt64, this->m_func), this->m_func));
  2252. }
// Lower a Var -> int32 conversion. Emits a tagged-int fast path, an optional
// float-to-int fast path (when the source is likely float/untagged-int and
// the bailout kind permits it), and a helper path that either bails out
// (bailOutOnHelper), calls Conv_ToInt32 (conversionFromObjectAllowed), or
// calls Conv_ToInt32_NoObjects. Returns true when the caller must generate
// the bailout itself (int-only bailout kinds); false otherwise.
bool
LowererMDArch::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    //
    //    r1 = MOV src1
    // rtest = MOV src1
    //         SHR rtest, AtomTag_Shift
    //         CMP rtest, 1
    //         JNE $helper or $float
    // r_dst = MOV_TRUNC e_src1
    //         JMP $done
    // $float:
    //     dst = ConvertToFloat(r1, $helper)
    // $helper:
    // r_dst = ToInt32()
    //

    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetSrc1()->GetType() == TyVar);

    // TODO: Fix bad lowering. We shouldn't see TyVars here.
    // Assert(instrLoad->GetDst()->GetType() == TyInt32);

    bool isInt = false;
    bool isNotInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *helper = nullptr;
    IR::LabelInstr *labelFloat = nullptr;
    IR::LabelInstr *done = nullptr;

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }

    if (src1->IsEqual(instrLoad->GetDst()) == false)
    {
        // Copy the source so the original Var survives the conversion
        // (dst and src may be the same register otherwise).
        // r1 = MOV src1
        IR::RegOpnd *r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
        r1->SetValueType(src1->GetValueType());
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func));
        src1 = r1;
    }

    const ValueType src1ValueType(src1->GetValueType());
    // The float fast path is skipped for int-only bailout kinds, which must
    // bail out rather than convert a float.
    const bool doFloatToIntFastPath =
        (src1ValueType.IsLikelyFloat() || src1ValueType.IsLikelyUntaggedInt()) &&
        !(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger));

    if (isNotInt)
    {
        // Known to be non-integer. If we are required to bail out on helper call, just re-jit.
        if (!doFloatToIntFastPath && bailOutOnHelper)
        {
            if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
            {
                // Array access helper call removal is already off for some reason. Prevent trying to rejit again
                // because it won't help and the same thing will happen again. Just abort jitting this function.
                if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
                {
                    Output::Print(_u("    Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
                    Output::Flush();
                }
                throw Js::OperationAbortedException();
            }

            throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
        }
    }
    else
    {
        // It could be an integer in this case.
        if (!isInt)
        {
            // Not statically known to be tagged: test at runtime and jump to
            // the float fast path (if enabled) or the helper.
            if(doFloatToIntFastPath)
            {
                labelFloat = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, false);
            }
            else
            {
                helper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }

            this->lowererMD->GenerateSmIntTest(src1, instrLoad, labelFloat ? labelFloat : helper);
        }

        IR::RegOpnd *src132 = src1->UseWithNewType(TyInt32, instrLoad->m_func)->AsRegOpnd();

#if !INT32VAR
        // Shift-tagged build: undo the tag with an arithmetic shift.
        // src1 = SAR src1, VarTag_Shift
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::SAR,
            src132,
            src132,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrLoad->m_func),
            instrLoad->m_func));

        // r_dst = MOV src1

        // This is only a MOV (and not a MOVSXD) because we do a signed shift right, but we'll copy
        // all 64 bits.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV,
            dst->UseWithNewType(TyMachReg, instrLoad->m_func),
            src1,
            instrLoad->m_func));
#else
        // INT32VAR build: the payload is the low 32 bits; truncate-copy it.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC,
            dst->UseWithNewType(TyInt32, instrLoad->m_func),
            src132,
            instrLoad->m_func));
#endif

        if (!isInt)
        {
            // JMP $done
            done = instrLoad->GetOrCreateContinueLabel();
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
        }
    }

    if (!isInt)
    {
        if(doFloatToIntFastPath)
        {
            if(labelFloat)
            {
                instrLoad->InsertBefore(labelFloat);
            }
            if(!helper)
            {
                helper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }
            if(!done)
            {
                done = instrLoad->GetOrCreateContinueLabel();
            }

            // FLOATVAR builds untag the float in place; otherwise the double
            // is loaded from the JavascriptNumber object's value slot.
#if FLOATVAR
            IR::RegOpnd* floatOpnd = this->lowererMD->CheckFloatAndUntag(src1, instrLoad, helper);
#else
            this->lowererMD->GenerateFloatTest(src1, instrLoad, helper, instrLoad->HasBailOutInfo());
            IR::IndirOpnd* floatOpnd = IR::IndirOpnd::New(src1, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
#endif
            this->lowererMD->ConvertFloatToInt32(instrLoad->GetDst(), floatOpnd, helper, done, instrLoad);
        }

        // $helper:
        if (helper)
        {
            instrLoad->InsertBefore(helper);
        }
        if(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger))
        {
            // Avoid bailout if we have a JavascriptNumber whose value is a signed 32-bit integer
            lowererMD->m_lowerer->LoadInt32FromUntaggedVar(instrLoad);

            // Need to bail out instead of calling a helper
            return true;
        }

        if (bailOutOnHelper)
        {
            Assert(labelBailOut);
            lowererMD->m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrLoad);
            instrLoad->Remove();
        }
        else if (conversionFromObjectAllowed)
        {
            lowererMD->m_lowerer->LowerUnaryHelperMem(instrLoad, IR::HelperConv_ToInt32);
        }
        else
        {
            lowererMD->m_lowerer->LowerUnaryHelperMemWithBoolReference(instrLoad, IR::HelperConv_ToInt32_NoObjects, true /*useBoolForBailout*/);
        }
    }
    else
    {
        instrLoad->Remove();
    }

    return false;
}
// Load a double from `opndOrig` (a Var) into `opndFloat`. Tagged ints are
// converted with CVTSI2SD and jump to labelInline; tagged floats (FLOATVAR
// encoding) are untagged by XORing off FloatTag_Value and MOVD'd into the
// XMM register; anything else jumps to labelHelper. Returns the first
// emitted instruction so the caller can anchor further insertions.
IR::Instr *
LowererMDArch::LoadCheckedFloat(IR::RegOpnd *opndOrig, IR::RegOpnd *opndFloat, IR::LabelInstr *labelInline, IR::LabelInstr *labelHelper, IR::Instr *instrInsert, const bool checkForNullInLoopBody)
{
    //
    //   if (TaggedInt::Is(opndOrig))
    //       opndFloat = CVTSI2SD opndOrig_32
    //                   JMP $labelInline
    //   else
    //                   JMP $labelOpndIsNotInt
    //
    // $labelOpndIsNotInt:
    //   if (TaggedFloat::Is(opndOrig))
    //       s2        = MOV opndOrig
    //       s2        = XOR FloatTag_Value
    //       opndFloat = MOVD s2
    //   else
    //                   JMP $labelHelper
    //
    // $labelInline:
    //

    IR::Instr *instrFirst = nullptr;

    IR::LabelInstr *labelOpndIsNotInt = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    lowererMD->GenerateSmIntTest(opndOrig, instrInsert, labelOpndIsNotInt, &instrFirst);

    if (opndOrig->GetValueType().IsLikelyFloat())
    {
        // Make this path helper if value is likely a float
        instrInsert->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true));
    }

    // Tagged-int path: convert the 32-bit payload directly.
    IR::Opnd *opndOrig_32 = opndOrig->UseWithNewType(TyInt32, this->m_func);
    IR::Instr *cvtsi2sd = IR::Instr::New(Js::OpCode::CVTSI2SD, opndFloat, opndOrig_32, this->m_func);
    instrInsert->InsertBefore(cvtsi2sd);

    IR::Instr *jmpInline = IR::BranchInstr::New(Js::OpCode::JMP, labelInline, this->m_func);
    instrInsert->InsertBefore(jmpInline);

    instrInsert->InsertBefore(labelOpndIsNotInt);

    // Not a tagged int: verify it is a float (else jump to labelHelper) ...
    lowererMD->GenerateFloatTest(opndOrig, instrInsert, labelHelper, checkForNullInLoopBody);

    // ... then strip the float tag and move the raw bits into the XMM register.
    IR::RegOpnd *s2 = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, s2, opndOrig, this->m_func);
    instrInsert->InsertBefore(mov);

    IR::Instr *xorTag = IR::Instr::New(Js::OpCode::XOR,
                                       s2,
                                       s2,
                                       IR::IntConstOpnd::New(Js::FloatTag_Value,
                                                             TyMachReg,
                                                             this->m_func,
                                                             /* dontEncode = */ true),
                                       this->m_func);
    instrInsert->InsertBefore(xorTag);
    LowererMD::Legalize(xorTag);

    IR::Instr *movFloat = IR::Instr::New(Js::OpCode::MOVD, opndFloat, s2, this->m_func);
    instrInsert->InsertBefore(movFloat);

    return instrFirst;
}
IR::LabelInstr *
LowererMDArch::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
{
    // No x64-specific stack-restore code is emitted here; the exit target
    // label is returned unchanged.
    return exitTargetInstr;
}
bool LowererMDArch::GenerateFastAnd(IR::Instr * instrAnd)
{
    // No x64-specific fast path is generated for And; return true so the
    // caller proceeds with its default handling.
    return true;
}
  2480. bool LowererMDArch::GenerateFastDivAndRem_Signed(IR::Instr* instrDiv)
  2481. {
  2482. Assert(instrDiv->m_opcode == Js::OpCode::Div_I4 || instrDiv->m_opcode == Js::OpCode::Rem_I4);
  2483. IR::Opnd* divident = instrDiv->GetSrc1(); // nominator
  2484. IR::Opnd* divisor = instrDiv->GetSrc2(); // denominator
  2485. IR::Opnd* dst = instrDiv->GetDst();
  2486. int constDivisor = divisor->AsIntConstOpnd()->AsInt32();
  2487. IR::Opnd* result = IR::RegOpnd::New(TyInt32, this->m_func);
  2488. bool isNegDevisor = false;
  2489. Assert(divisor->GetType() == TyInt32); // constopnd->AsInt32() currently does silent casts between int32 and uint32
  2490. if (constDivisor < 0)
  2491. {
  2492. isNegDevisor = true;
  2493. constDivisor *= -1;
  2494. }
  2495. if (constDivisor <= 0 || constDivisor > INT32_MAX - 1)
  2496. {
  2497. return false;
  2498. }
  2499. if (constDivisor == 1)
  2500. {
  2501. // Wasm expects x/-1 not to be folded to -x.
  2502. if (m_func->GetJITFunctionBody()->IsWasmFunction() && isNegDevisor == true)
  2503. {
  2504. return false;
  2505. }
  2506. Lowerer::InsertMove(dst, divident, instrDiv);
  2507. }
  2508. else if (Math::IsPow2(constDivisor)) // Power of two
  2509. {
  2510. // Negative dividents needs the result incremented by 1
  2511. // Following sequence avoids branch
  2512. // For q = n/d and d = 2^k
  2513. // sar q n k-1 //2^(k-1) if n < 0 else 0
  2514. // shr q q 32-k
  2515. // add q q n
  2516. // sar q q k
  2517. int k = Math::Log2(constDivisor);
  2518. Lowerer::InsertShift(Js::OpCode::Shr_A, false, result, divident, IR::IntConstOpnd::New(k - 1, TyInt8, this->m_func), instrDiv);
  2519. Lowerer::InsertShift(Js::OpCode::ShrU_A, false, result, result, IR::IntConstOpnd::New(32 - k, TyInt8, this->m_func), instrDiv);
  2520. Lowerer::InsertAdd(false, result, result, divident, instrDiv);
  2521. Lowerer::InsertShift(Js::OpCode::Shr_A, false, dst, result, IR::IntConstOpnd::New(k, TyInt8, this->m_func), instrDiv);
  2522. }
  2523. else
  2524. {
  2525. // Ref: Warren's Hacker's Delight, Chapter 10
  2526. //
  2527. // For q = n/d where d is a signed constant
  2528. // Calculate magic_number (multiplier) and shift amounts (shiftAmt) and replace div with mul and shift
  2529. Js::NumberUtilities::DivMagicNumber magic_number(Js::NumberUtilities::GenerateDivMagicNumber(constDivisor));
  2530. int32 multiplier = magic_number.multiplier;
  2531. // Compute mulhs divident, multiplier
  2532. IR::Opnd* quotient64reg = IR::RegOpnd::New(TyInt64, this->m_func);
  2533. IR::Opnd* divident64Reg = IR::RegOpnd::New(TyInt64, this->m_func);
  2534. Lowerer::InsertMove(divident64Reg, divident, instrDiv);
  2535. IR::Instr* imul = IR::Instr::New(LowererMD::MDImulOpcode, quotient64reg, IR::IntConstOpnd::New(multiplier, TyInt32, this->m_func), divident64Reg, this->m_func);
  2536. instrDiv->InsertBefore(imul);
  2537. LowererMD::Legalize(imul);
  2538. Lowerer::InsertShift(Js::OpCode::Shr_A, false, quotient64reg, quotient64reg, IR::IntConstOpnd::New(32, TyInt8, this->m_func), instrDiv);
  2539. Lowerer::InsertMove(result, quotient64reg, instrDiv);
  2540. // Special handling when divisor is of type 5 and 7.
  2541. if (multiplier < 0)
  2542. {
  2543. Lowerer::InsertAdd(false, result, result, divident, instrDiv);
  2544. }
  2545. if (magic_number.shiftAmt > 0)
  2546. {
  2547. Lowerer::InsertShift(Js::OpCode::Shr_A, false, result, result, IR::IntConstOpnd::New(magic_number.shiftAmt, TyInt8, this->m_func), instrDiv);
  2548. }
  2549. IR::Opnd* tmpReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
  2550. Lowerer::InsertMove(tmpReg2, divident, instrDiv);
  2551. // Add 1 if divisor is less than 0
  2552. Lowerer::InsertShift(Js::OpCode::ShrU_A, false, tmpReg2, tmpReg2, IR::IntConstOpnd::New(31, TyInt8, this->m_func), instrDiv); // 1 if divident < 0, 0 otherwise
  2553. Lowerer::InsertAdd(false, dst, result, tmpReg2, instrDiv);
  2554. }
  2555. // Negate results if divident is less than zero
  2556. if (isNegDevisor)
  2557. {
  2558. Lowerer::InsertSub(false, dst, IR::IntConstOpnd::New(0, TyInt8, this->m_func), dst, instrDiv);
  2559. }
  2560. return true;
  2561. }
// Lower an unsigned DivU_I4/RemU_I4 by a *constant* divisor into a
// mul/shift sequence, avoiding the expensive DIV. The caller guarantees
// src2 is an IntConstOpnd. Returns false when the divisor is unsuitable
// (0 or UINT32_MAX); on success dst holds the quotient and the caller
// computes the remainder from it if needed.
bool LowererMDArch::GenerateFastDivAndRem_Unsigned(IR::Instr* instrDiv)
{
    Assert(instrDiv->m_opcode == Js::OpCode::DivU_I4 || instrDiv->m_opcode == Js::OpCode::RemU_I4);

    IR::Opnd* divident = instrDiv->GetSrc1(); // nominator
    IR::Opnd* divisor = instrDiv->GetSrc2(); // denominator
    IR::Opnd* dst = instrDiv->GetDst();
    uint constDivisor = divisor->AsIntConstOpnd()->AsUint32();
    Assert(divisor->GetType() == TyUint32); // IR::Opnd->AsInt32() and IR::Opnd->AsUint32() allows silent casts.

    // constDivisor is unsigned, so "<= 0" only excludes 0; the upper check
    // excludes UINT32_MAX.
    if (constDivisor <= 0 || constDivisor > UINT32_MAX - 1)
    {
        return false;
    }
    if (constDivisor == 1)
    {
        Lowerer::InsertMove(dst, divident, instrDiv);
    }
    else if (Math::IsPow2(constDivisor)) // Power of two
    {
        // Unsigned divide by 2^k is a plain logical shift right by k.
        int k = Math::Log2(constDivisor);
        Lowerer::InsertShift(Js::OpCode::ShrU_A, false, dst, divident, IR::IntConstOpnd::New(k, TyInt8, this->m_func), instrDiv);
    }
    else
    {
        // Ref: Warren's Hacker's Delight, Chapter 10
        //
        // For q = n/d where d is an unsigned constant
        // Calculate magic_number (multiplier) and shift amounts (shiftAmt) and replace div with mul and shift
        Js::NumberUtilities::DivMagicNumber magic_number(Js::NumberUtilities::GenerateDivMagicNumber(constDivisor));
        uint multiplier = magic_number.multiplier;
        int addIndicator = magic_number.addIndicator;

        // Compute the high 32 bits of divident * multiplier via a 64-bit
        // multiply followed by a shift.
        IR::Opnd* quotient64Reg = IR::RegOpnd::New(TyUint64, this->m_func);
        IR::Opnd* multiplierReg = IR::RegOpnd::New(TyUint64, this->m_func);
        Lowerer::InsertMove(multiplierReg, IR::IntConstOpnd::New(multiplier, TyInt64, this->m_func), instrDiv);
        IR::Instr* imul = IR::Instr::New(LowererMD::MDImulOpcode, quotient64Reg, divident, multiplierReg, this->m_func);
        instrDiv->InsertBefore(imul);
        LowererMD::Legalize(imul);

        if (!addIndicator) // Simple case type 3, 5..
        {
            Lowerer::InsertShift(Js::OpCode::ShrU_A, false, quotient64Reg, quotient64Reg, IR::IntConstOpnd::New(32 + magic_number.shiftAmt, TyInt8, this->m_func), instrDiv);
            Lowerer::InsertMove(dst, quotient64Reg, instrDiv);
        }
        else // Special case type 7..
        {
            // The magic multiplier overflowed 32 bits: correct with
            // q = (((n - hi) >> 1) + hi) >> (shiftAmt - 1).
            IR::Opnd* tmpReg = IR::RegOpnd::New(TyUint32, this->m_func);
            Lowerer::InsertMove(dst, divident, instrDiv);
            Lowerer::InsertShift(Js::OpCode::ShrU_A, false, quotient64Reg, quotient64Reg, IR::IntConstOpnd::New(32, TyInt8, this->m_func), instrDiv);
            Lowerer::InsertMove(tmpReg, quotient64Reg, instrDiv);
            Lowerer::InsertSub(false, dst, dst, tmpReg, instrDiv);
            Lowerer::InsertShift(Js::OpCode::ShrU_A, false, dst, dst, IR::IntConstOpnd::New(1, TyInt8, this->m_func), instrDiv);
            Lowerer::InsertAdd(false, dst, dst, tmpReg, instrDiv);
            Lowerer::InsertShift(Js::OpCode::ShrU_A, false, dst, dst, IR::IntConstOpnd::New(magic_number.shiftAmt-1, TyInt8, this->m_func), instrDiv);
        }
    }
    return true;
}
  2617. bool LowererMDArch::GenerateFastDivAndRem(IR::Instr* instrDiv, IR::LabelInstr* bailOutLabel)
  2618. {
  2619. Assert(instrDiv);
  2620. IR::Opnd* divident = instrDiv->GetSrc1(); // nominator
  2621. IR::Opnd* divisor = instrDiv->GetSrc2(); // denominator
  2622. IR::Opnd* dst = instrDiv->GetDst();
  2623. if (divident->GetType() != TyInt32 && divident->GetType() != TyUint32)
  2624. {
  2625. return false;
  2626. }
  2627. if (divident->IsRegOpnd() && divident->AsRegOpnd()->IsSameRegUntyped(dst))
  2628. {
  2629. if (instrDiv->m_opcode == Js::OpCode::Rem_I4 || instrDiv->m_opcode == Js::OpCode::RemU_I4 || bailOutLabel)
  2630. {
  2631. divident = IR::RegOpnd::New(TyInt32, instrDiv->m_func);
  2632. Lowerer::InsertMove(divident, instrDiv->GetSrc1(), instrDiv);
  2633. }
  2634. }
  2635. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || !divisor->IsIntConstOpnd())
  2636. {
  2637. return false;
  2638. }
  2639. bool success = false;
  2640. if (instrDiv->m_opcode == Js::OpCode::DivU_I4 || instrDiv->m_opcode == Js::OpCode::RemU_I4)
  2641. {
  2642. success = GenerateFastDivAndRem_Unsigned(instrDiv);
  2643. }
  2644. else if (instrDiv->m_opcode == Js::OpCode::Div_I4 || instrDiv->m_opcode == Js::OpCode::Rem_I4)
  2645. {
  2646. success = GenerateFastDivAndRem_Signed(instrDiv);
  2647. }
  2648. if (!success)
  2649. {
  2650. return false;
  2651. }
  2652. // For reminder/mod ops
  2653. if (instrDiv->m_opcode == Js::OpCode::Rem_I4 || instrDiv->m_opcode == Js::OpCode::RemU_I4 || bailOutLabel)
  2654. {
  2655. // For q = n/d
  2656. // mul dst, dst, divident
  2657. // sub dst, divident, dst
  2658. IR::Opnd* reminderOpnd = dst;;
  2659. if (bailOutLabel)
  2660. {
  2661. reminderOpnd = IR::RegOpnd::New(TyInt32, instrDiv->m_func);
  2662. }
  2663. IR::Instr* imul = IR::Instr::New(LowererMD::MDImulOpcode, reminderOpnd, dst, divisor, instrDiv->m_func);
  2664. instrDiv->InsertBefore(imul);
  2665. LowererMD::Legalize(imul);
  2666. Lowerer::InsertSub(false, reminderOpnd, divident, reminderOpnd, instrDiv);
  2667. if (bailOutLabel)
  2668. {
  2669. Lowerer::InsertTestBranch(reminderOpnd, reminderOpnd, Js::OpCode::BrNeq_A, bailOutLabel, instrDiv);
  2670. }
  2671. }
  2672. // DIV/REM has been optimized and can be removed now.
  2673. instrDiv->Remove();
  2674. return true;
  2675. }
bool LowererMDArch::GenerateFastXor(IR::Instr * instrXor)
{
    // No x64-specific fast path is generated for Xor; return true so the
    // caller proceeds with its default handling.
    return true;
}
// Fast-path generator for Or on this architecture: intentionally a no-op stub.
// NOTE(review): returning true seems to signal "nothing further to emit";
// confirm against the caller's contract.
bool LowererMDArch::GenerateFastOr(IR::Instr * instrOr)
{
    return true;
}
// Fast-path generator for Not on this architecture: intentionally a no-op stub.
// NOTE(review): returning true seems to signal "nothing further to emit";
// confirm against the caller's contract.
bool LowererMDArch::GenerateFastNot(IR::Instr * instrNot)
{
    return true;
}
// Fast-path generator for shift-left on this architecture: intentionally a
// no-op stub (contrast with GenerateFastShiftRight below, which does emit an
// inline sequence). NOTE(review): returning true seems to signal "nothing
// further to emit"; confirm against the caller's contract.
bool LowererMDArch::GenerateFastShiftLeft(IR::Instr * instrShift)
{
    return true;
}
  2692. bool LowererMDArch::GenerateFastShiftRight(IR::Instr * instrShift)
  2693. {
  2694. // Given:
  2695. //
  2696. // dst = Shr/ShrU src1, src2
  2697. //
  2698. // Generate:
  2699. //
  2700. // (If not 2 Int31's, jump to $helper.)
  2701. // s1 = MOV src1
  2702. //RCX = MOV src2
  2703. // TEST RCX, 0x1F [unsigned only] // Bail if unsigned and not shifting,
  2704. // JEQ $helper [unsigned only] // as we may not end up with a taggable int
  2705. // s1 = SAR/SHR s1, RCX
  2706. // BTS s1, VarTag_Shift
  2707. //dst = MOV s1
  2708. // JMP $fallthru
  2709. // $helper:
  2710. // (caller generates helper call)
  2711. // $fallthru:
  2712. IR::Instr * instr;
  2713. IR::LabelInstr * labelHelper;
  2714. IR::LabelInstr * labelFallThru;
  2715. IR::Opnd * opndReg;
  2716. IR::Opnd * opndSrc1;
  2717. IR::Opnd * opndSrc2;
  2718. Assert(instrShift->m_opcode == Js::OpCode::ShrU_A || instrShift->m_opcode == Js::OpCode::Shr_A);
  2719. bool isUnsigned = (instrShift->m_opcode == Js::OpCode::ShrU_A);
  2720. opndSrc1 = instrShift->GetSrc1();
  2721. opndSrc2 = instrShift->GetSrc2();
  2722. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");
  2723. // Not int?
  2724. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  2725. {
  2726. return true;
  2727. }
  2728. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  2729. {
  2730. return true;
  2731. }
  2732. // Tagged ints?
  2733. bool isTaggedInts = false;
  2734. if (opndSrc1->IsTaggedInt())
  2735. {
  2736. if (opndSrc2->IsTaggedInt())
  2737. {
  2738. isTaggedInts = true;
  2739. }
  2740. }
  2741. IntConstType s2Value = 0;
  2742. bool src2IsIntConst = false;
  2743. if (isUnsigned)
  2744. {
  2745. if (opndSrc2->IsRegOpnd())
  2746. {
  2747. src2IsIntConst = opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst();
  2748. if (src2IsIntConst)
  2749. {
  2750. s2Value = opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue();
  2751. }
  2752. }
  2753. else
  2754. {
  2755. AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
  2756. "Expect src2 of shift right to be reg or Var.");
  2757. src2IsIntConst = true;
  2758. s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
  2759. }
  2760. // 32-bit Shifts only uses the bottom 5 bits.
  2761. s2Value &= 0x1F;
  2762. // Unsigned shift by 0 could yield a value not encodable as a tagged int.
  2763. if (isUnsigned && src2IsIntConst && s2Value == 0)
  2764. {
  2765. return true;
  2766. }
  2767. }
  2768. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  2769. if (!isTaggedInts)
  2770. {
  2771. // (If not 2 Int31's, jump to $helper.)
  2772. this->lowererMD->GenerateSmIntPairTest(instrShift, opndSrc1, opndSrc2, labelHelper);
  2773. }
  2774. opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
  2775. if (src2IsIntConst)
  2776. {
  2777. opndSrc2 = IR::IntConstOpnd::New(s2Value, TyInt32, this->m_func);
  2778. }
  2779. else
  2780. {
  2781. // RCX = MOV src2
  2782. opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
  2783. opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2784. opndReg->AsRegOpnd()->SetReg(this->GetRegShiftCount());
  2785. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc2, this->m_func);
  2786. instrShift->InsertBefore(instr);
  2787. opndSrc2 = opndReg;
  2788. }
  2789. if (!src2IsIntConst && isUnsigned)
  2790. {
  2791. // TEST RCX, 0x1F [unsigned only] // Bail if unsigned and not shifting,
  2792. instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  2793. instr->SetSrc1(opndSrc2);
  2794. instr->SetSrc2(IR::IntConstOpnd::New(0x1F, TyInt32, this->m_func));
  2795. instrShift->InsertBefore(instr);
  2796. // JEQ $helper [unsigned only] // as we may not end up with a taggable int
  2797. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  2798. instrShift->InsertBefore(instr);
  2799. }
  2800. // s1 = MOV src1
  2801. opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2802. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
  2803. instrShift->InsertBefore(instr);
  2804. // s1 = SAR/SHR s1, RCX
  2805. instr = IR::Instr::New(isUnsigned ? Js::OpCode::SHR : Js::OpCode::SAR, opndReg, opndReg, opndSrc2, this->m_func);
  2806. instrShift->InsertBefore(instr);
  2807. //
  2808. // Convert TyInt32 operand, back to TyMachPtr type.
  2809. //
  2810. if(TyMachReg != opndReg->GetType())
  2811. {
  2812. opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
  2813. }
  2814. // BTS s1, VarTag_Shift
  2815. this->lowererMD->GenerateInt32ToVarConversion(opndReg, instrShift);
  2816. // dst = MOV s1
  2817. instr = IR::Instr::New(Js::OpCode::MOV, instrShift->GetDst(), opndReg, this->m_func);
  2818. instrShift->InsertBefore(instr);
  2819. // JMP $fallthru
  2820. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  2821. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
  2822. instrShift->InsertBefore(instr);
  2823. // $helper:
  2824. // (caller generates helper call)
  2825. // $fallthru:
  2826. instrShift->InsertBefore(labelHelper);
  2827. instrShift->InsertAfter(labelFallThru);
  2828. return true;
  2829. }
// Final machine-dependent lowering pass: walks the whole instruction list
// backwards (editing in place) and patches up pseudo-opcodes whose operands
// are only known after earlier phases (frame sizes, EH continuations), and
// strips bookkeeping operands regalloc needed but the encoder must not see.
void
LowererMDArch::FinalLower()
{
    IR::IntConstOpnd *intOpnd;
    FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, this->m_func->m_tailInstr, this->m_func->m_headInstr)
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::Ret:
            // The Ret pseudo-op carries no machine semantics at this point; drop it.
            instr->Remove();
            break;

        case Js::OpCode::LdArgSize:
            // Only expected for functions with try regions: materialize the
            // now-final args size as a MOV of an immediate.
            Assert(this->m_func->HasTry());
            instr->m_opcode = Js::OpCode::MOV;
            intOpnd = IR::IntConstOpnd::New(this->m_func->GetArgsSize(), TyUint32, this->m_func);
            instr->SetSrc1(intOpnd);
            LowererMD::Legalize(instr);
            break;

        case Js::OpCode::LdSpillSize:
            // Same pattern as LdArgSize, but for the frame's spill area size.
            Assert(this->m_func->HasTry());
            instr->m_opcode = Js::OpCode::MOV;
            intOpnd = IR::IntConstOpnd::New(this->m_func->GetSpillSize(), TyUint32, this->m_func);
            instr->SetSrc1(intOpnd);
            LowererMD::Legalize(instr);
            break;

        case Js::OpCode::Leave:
            // LowerLeave may insert instructions; it returns the new resume point
            // for the backward walk, which we store into instrPrev.
            Assert(this->m_func->DoOptimizeTry() && !this->m_func->IsLoopBodyInTry());
            instrPrev = this->lowererMD->m_lowerer->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), true /*fromFinalLower*/);
            break;

        case Js::OpCode::CMOVA:
        case Js::OpCode::CMOVAE:
        case Js::OpCode::CMOVB:
        case Js::OpCode::CMOVBE:
        case Js::OpCode::CMOVE:
        case Js::OpCode::CMOVG:
        case Js::OpCode::CMOVGE:
        case Js::OpCode::CMOVL:
        case Js::OpCode::CMOVLE:
        case Js::OpCode::CMOVNE:
        case Js::OpCode::CMOVNO:
        case Js::OpCode::CMOVNP:
        case Js::OpCode::CMOVNS:
        case Js::OpCode::CMOVO:
        case Js::OpCode::CMOVP:
        case Js::OpCode::CMOVS:
            // Get rid of fake src1.
            if (instr->GetSrc2())
            {
                // CMOV inserted before regalloc have a dummy src1 to simulate the fact that
                // CMOV is not a definite def of the dst.
                instr->SwapOpnds();
                instr->FreeSrc2();
            }
            break;

        case Js::OpCode::LOCKCMPXCHG8B:
        case Js::OpCode::CMPXCHG8B:
            // Get rid of the deps and srcs
            instr->FreeDst();
            instr->FreeSrc2();
            break;
        }
    } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
}
  2893. IR::Opnd*
  2894. LowererMDArch::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
  2895. {
  2896. return this->lowererMD->m_lowerer->GenerateArgOutForStackArgs(callInstr, stackArgsInstr);
  2897. }
  2898. void
  2899. LowererMDArch::LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
  2900. {
  2901. this->lowererMD->m_lowerer->LowerInlineSpreadArgOutLoopUsingRegisters(callInstr, indexOpnd, arrayElementsStartOpnd);
  2902. }
  2903. IR::Instr *
  2904. LowererMDArch::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  2905. {
  2906. IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  2907. // Load the continuation address into the return register.
  2908. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, retReg, targetOpnd, this->m_func));
  2909. // MOV REG_EH_SPILL_SIZE, spillSize
  2910. IR::Instr *movSpillSize = IR::Instr::New(Js::OpCode::LdSpillSize,
  2911. IR::RegOpnd::New(nullptr, REG_EH_SPILL_SIZE, TyMachReg, m_func),
  2912. m_func);
  2913. insertBeforeInstr->InsertBefore(movSpillSize);
  2914. // MOV REG_EH_ARGS_SIZE, argsSize
  2915. IR::Instr *movArgsSize = IR::Instr::New(Js::OpCode::LdArgSize,
  2916. IR::RegOpnd::New(nullptr, REG_EH_ARGS_SIZE, TyMachReg, m_func),
  2917. m_func);
  2918. insertBeforeInstr->InsertBefore(movArgsSize);
  2919. // MOV REG_EH_TARGET, amd64_ReturnFromCallWithFakeFrame
  2920. // PUSH REG_EH_TARGET
  2921. // RET
  2922. IR::Opnd *endCallWithFakeFrame = endCallWithFakeFrame =
  2923. IR::RegOpnd::New(nullptr, REG_EH_TARGET, TyMachReg, m_func);
  2924. IR::Instr *movTarget = IR::Instr::New(Js::OpCode::MOV,
  2925. endCallWithFakeFrame,
  2926. IR::HelperCallOpnd::New(IR::HelperOp_ReturnFromCallWithFakeFrame, m_func),
  2927. m_func);
  2928. insertBeforeInstr->InsertBefore(movTarget);
  2929. IR::Instr *push = IR::Instr::New(Js::OpCode::PUSH, m_func);
  2930. push->SetSrc1(endCallWithFakeFrame);
  2931. insertBeforeInstr->InsertBefore(push);
  2932. #if 0
  2933. // TODO: This block gets deleted if we emit a JMP instead of a RET.
  2934. IR::BranchInstr *jmp = IR::BranchInstr::New(Js::OpCode::JMP,
  2935. nullptr,
  2936. targetOpnd,
  2937. m_func);
  2938. leaveInstr->InsertBefore(jmp);
  2939. #endif
  2940. IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  2941. IR::Instr * retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  2942. retInstr->SetSrc1(intSrc);
  2943. retInstr->SetSrc2(retReg);
  2944. insertBeforeInstr->InsertBefore(retInstr);
  2945. // return the last instruction inserted
  2946. return retInstr;
  2947. }
  2948. IR::BranchInstr*
  2949. LowererMDArch::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr)
  2950. {
  2951. Assert(compareSrc->IsFloat64() && missingItemOpnd->IsUint64());
  2952. IR::Opnd * compareSrcUint64Opnd = IR::RegOpnd::New(TyUint64, m_func);
  2953. if (compareSrc->IsRegOpnd())
  2954. {
  2955. this->lowererMD->EmitReinterpretPrimitive(compareSrcUint64Opnd, compareSrc, insertBeforeInstr);
  2956. }
  2957. else if (compareSrc->IsIndirOpnd())
  2958. {
  2959. compareSrcUint64Opnd = compareSrc->UseWithNewType(TyUint64, m_func);
  2960. }
  2961. return this->lowererMD->m_lowerer->InsertCompareBranch(missingItemOpnd, compareSrcUint64Opnd, opcode, target, insertBeforeInstr);
  2962. }