LowererMDArch.cpp 125 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
7327832793280328132823283328432853286328732883289329032913292329332943295329632973298329933003301330233033304330533063307330833093310331133123313331433153316331733183319332033213322332333243325332633273328332933303331333233333334333533363337333833393340334133423343334433453346334733483349335033513352335333543355335633573358335933603361336233633364336533663367336833693370337133723373337433753376337733783379338033813382338333843385338633873388338933903391339233933394339533963397339833993400340134023403340434053406340734083409341034113412341334143415341634173418341934203421342234233424342534263427342834293430343134323433343434353436343734383439344034413442344334443445344634473448344934503451345234533454345534563457345834593460346134623463346434653466346734683469347034713472347334743475347634773478347934803481348234833484348534863487348834893490349134923493349434953496349734983499350035013502350335043505350635073508350935103511351235133514351535163517351835193520352135223523352435253526352735283529353035313532353335343535353635373538353935403541354235433544354535463547354835493550355135523553355435553556355735583559356035613562356335643565356635673568356935703571357235733574357535763577357835793580358135823583358435853586358735883589359035913592359335943595359635973598359936003601360236033604360536063607360836093610361136123613361436153616361736183619362036213622362336243625362636273628362936303631363236333634363536363637363836393640364136423643364436453646364736483649365036513652365336543655365636573658365936603661366236633664366536663667366836693670367136723673367436753676367736783679368036813682368336843685368636873688368936903691369236933694369536963697369836993700370137023703370437053706370737083709
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "BackEnd.h"
  6. #include "LowererMDArch.h"
  7. #include "Library\JavascriptGeneratorFunction.h"
// Machine-dependent opcode used when a value must be "extended" to 32 bits.
// On 32-bit x86 the machine word is already 32 bits, so a plain MOV suffices
// (no widening instruction such as MOVSXD is needed).
const Js::OpCode LowererMD::MDExtend32Opcode = Js::OpCode::MOV;
// Default scale factor for indirect (base + index*scale) operands.
// IndirScale4 scales the index by 4 — the slot size on 32-bit x86.
BYTE
LowererMDArch::GetDefaultIndirScale()
{
    return IndirScale4;
}
// Register that must hold a variable shift count.
// x86 SHL/SHR/SAR with a variable count require the count in CL, i.e. ECX.
RegNum
LowererMDArch::GetRegShiftCount()
{
    return RegECX;
}
  19. RegNum
  20. LowererMDArch::GetRegReturn(IRType type)
  21. {
  22. return ( IRType_IsFloat(type) || IRType_IsSimd128(type) ) ? RegNOREG : RegEAX;
  23. }
  24. RegNum
  25. LowererMDArch::GetRegReturnAsmJs(IRType type)
  26. {
  27. if (IRType_IsFloat(type))
  28. {
  29. return RegXMM0;
  30. }
  31. else if (IRType_IsSimd128(type))
  32. {
  33. return RegXMM0;
  34. }
  35. else
  36. {
  37. return RegEAX;
  38. }
  39. }
// Stack pointer register (ESP on x86).
RegNum
LowererMDArch::GetRegStackPointer()
{
    return RegESP;
}
// Block pointer register; same as the frame pointer (EBP) on x86.
RegNum
LowererMDArch::GetRegBlockPointer()
{
    return RegEBP;
}
// Frame pointer register (EBP on x86).
RegNum
LowererMDArch::GetRegFramePointer()
{
    return RegEBP;
}
// Register carrying the allocation-size argument for a chkstk probe.
// The MSVC x86 _chkstk helper takes its size in EAX.
RegNum
LowererMDArch::GetRegChkStkParam()
{
    return RegEAX;
}
// Destination register for the low half of a widening multiply:
// x86 MUL/IMUL write the low 32 bits of the product to EAX.
RegNum
LowererMDArch::GetRegIMulDestLower()
{
    return RegEAX;
}
// Register receiving the high half of a widening multiply:
// x86 MUL/IMUL produce the product in the EDX:EAX pair, with EDX holding
// the upper 32 bits.
RegNum
LowererMDArch::GetRegIMulHighDestLower()
{
    return RegEDX;
}
// Register for the Nth integer argument. On x86 all arguments are passed on
// the stack, so there is no register assignment (argNum is ignored).
RegNum
LowererMDArch::GetRegArgI4(int32 argNum)
{
    return RegNOREG;
}
// Register for the Nth double argument. On x86 all arguments are passed on
// the stack, so there is no register assignment (argNum is ignored).
RegNum
LowererMDArch::GetRegArgR8(int32 argNum)
{
    return RegNOREG;
}
  80. Js::OpCode
  81. LowererMDArch::GetAssignOp(IRType type)
  82. {
  83. switch (type)
  84. {
  85. case TyFloat64:
  86. return Js::OpCode::MOVSD;
  87. case TyFloat32:
  88. return Js::OpCode::MOVSS;
  89. case TySimd128F4:
  90. case TySimd128I4:
  91. case TySimd128D2:
  92. return Js::OpCode::MOVUPS;
  93. default:
  94. return Js::OpCode::MOV;
  95. }
  96. }
  97. void
  98. LowererMDArch::Init(LowererMD *lowererMD)
  99. {
  100. this->lowererMD = lowererMD;
  101. this->helperCallArgsCount = 0;
  102. }
///----------------------------------------------------------------------------
///
/// LowererMD::LoadInputParamPtr
///
/// Load the address of the start of the passed-in parameters not including
/// the this parameter.
///
/// instrInsert    - instruction before which the load is emitted
/// optionalDstOpnd - destination register to use; a fresh TyMachPtr register
///                   is created when nullptr
/// Returns the last instruction emitted (its dst holds the address).
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadInputParamPtr(IR::Instr *instrInsert, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
{
    if (this->m_func->GetJnFunction()->IsGenerator())
    {
        // Generator frames keep their arguments behind a separate pointer.
        // LEA dst, [argsPtr + 1*MachPtr] skips the first slot — presumably
        // "this" — to reach the first real argument (NOTE(review): confirm
        // the generator args buffer layout against LoadGeneratorArgsPtr).
        IR::RegOpnd * argPtrRegOpnd = Lowerer::LoadGeneratorArgsPtr(instrInsert);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(argPtrRegOpnd, 1 * MachPtr, TyMachPtr, this->m_func);
        IR::RegOpnd * dstOpnd = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachPtr, this->m_func);
        return Lowerer::InsertLea(dstOpnd, indirOpnd, instrInsert);
    }
    else
    {
        // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
        // So the first in-param after "this" sits 5 machine pointers past EBP.
        StackSym *paramSym = StackSym::New(TyVar, this->m_func);
        this->m_func->SetArgOffset(paramSym, 5 * MachPtr);
        IR::Instr *instr = this->lowererMD->LoadStackAddress(paramSym, optionalDstOpnd);
        instrInsert->InsertBefore(instr);
        return instr;
    }
}
// Lower a load of the stack-arguments pointer. Emits, in pseudo-code:
//
//   if (actual count >= formal count)
//       dst = ebp + 5 * sizeof(Var)  -- point to the first input parameter after "this"
//   else
//       sub esp, (size of formals)   -- we'll copy the input params to the callee frame, since the caller frame
//                                       doesn't have space for them all
//       dst = esp + 3 * sizeof(var)  -- point to the location of the first input param (after "this")
//                                       within the area we just allocated on the callee frame
//
// Returns the first instruction of the emitted sequence (instrPrev), so the
// caller can continue lowering from before it.
IR::Instr *
LowererMDArch::LoadStackArgPtr(IR::Instr * instrArgPtr)
{
    IR::Instr * instrPrev = instrArgPtr;
    IR::LabelInstr * instrLabelExtra = nullptr;
    IR::Instr * instr;
    IR::Opnd * opnd;
    Js::ArgSlot formalsCount = this->m_func->GetInParamsCount();
    // Only need to check the number of actuals if there's at least 1 formal (plus "this")
    if (formalsCount > 1)
    {
        // CMP actuals, formals ; JB $extra — take the slow path when the
        // caller passed fewer actuals than this function's formals.
        instrPrev = this->lowererMD->LoadInputParamCount(instrArgPtr);
        IR::Opnd * opndActuals = instrPrev->GetDst();
        IR::Opnd * opndFormals =
            IR::IntConstOpnd::New(formalsCount, TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(opndActuals);
        instr->SetSrc2(opndFormals);
        instrArgPtr->InsertBefore(instr);
        instrLabelExtra = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JB, instrLabelExtra, this->m_func);
        instrArgPtr->InsertBefore(instr);
    }
    // Modify the original instruction to load the addr of the input parameters on the caller's frame.
    instr = LoadInputParamPtr(instrArgPtr, instrArgPtr->UnlinkDst()->AsRegOpnd());
    instrArgPtr->Remove();
    instrArgPtr = instr;
    if (instrLabelExtra)
    {
        // Fast path jumps over the slow path: JMP $done ; $extra: ... ; $done:
        IR::LabelInstr *instrLabelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, instrLabelDone, this->m_func);
        instrArgPtr->InsertAfter(instr);
        instr->InsertAfter(instrLabelExtra);
        instrLabelExtra->InsertAfter(instrLabelDone);
        // Allocate space on the callee's frame for a copy of the formals, plus the callee object pointer
        // and the callinfo.
        // Be sure to double-align the allocation.
        // REVIEW: Do we ever need to generate a chkstk call here?
        int formalsBytes = (formalsCount + 2) * sizeof(Js::Var);
        formalsBytes = Math::Align<size_t>(formalsBytes, MachStackAlignment);
        // LEA esp, [esp - formalsBytes] adjusts the stack pointer without
        // touching flags (unlike SUB).
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
        opnd = IR::IndirOpnd::New(espOpnd, -formalsBytes, TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
        instrLabelDone->InsertBefore(instr);
        // Result is the pointer to the address where we'll store the first input param
        // (after "this") in the callee's frame.
        opnd = IR::IndirOpnd::New(espOpnd, 3 * sizeof(Js::Var), TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, instrArgPtr->GetDst(), opnd, this->m_func);
        instrLabelDone->InsertBefore(instr);
    }
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadHeapArguments
///
/// Load the heap-based arguments object
///
/// instrArgs          - the LdHeapArguments/LdLetHeapArguments instruction
///                      being lowered
/// force              - when true, always emit the helper call even if the
///                      function could use stack args
/// opndInputParamCount- optional pre-computed actual-arg count (without
///                      "this"); computed here when nullptr
/// Returns the instruction preceding the lowered sequence.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadHeapArguments(IR::Instr *instrArgs, bool force, IR::Opnd* opndInputParamCount)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr * instrPrev = instrArgs->m_prev;
    if (!force && func->GetHasStackArgs() && this->m_func->GetHasStackArgs()) //both inlinee & inliner has stack args. We don't support other scenarios.
    {
        // The initial args slot value is zero. (TODO: it should be possible to dead-store the LdHeapArgs in this case.)
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::IntConstOpnd::New(0, TyMachReg, func));
        instrArgs->FreeSrc2();
    }
    else
    {
        // Helper arguments are pushed last-to-first (s7 down to s1):
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = array of property ID's
        // s4 = local frame instance
        // s3 = address of first actual argument (after "this")
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArguments(s1, s2, s3, s4, s5, s6, s7)
        // s7 = formals are let decls
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(instrArgs->m_opcode == Js::OpCode::LdLetHeapArguments ? TRUE : FALSE, TyUint8, func));
        // s6 = memory context
        instrPrev = this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);
        // s5 = array of property ID's
        IR::Opnd *argArray = instrArgs->UnlinkSrc2();
        this->LoadHelperArgument(instrArgs, argArray);
        // s4 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);
        if (func->IsInlinee())
        {
            /*
             * s3 = address of first actual argument (after "this").
             * Stack looks like arg 1 ('this')  <-- low address
             *                  ...
             *                  arg N
             *                  arguments object
             *                  function object
             *                  argc             <-- frameStartSym
             */
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            // Bump past the "this" slot to reach the first real argument.
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->LoadStackAddress(firstRealArgSlotSym);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s2 = actual argument count (without counting "this").
            instr = IR::Instr::New(Js::OpCode::MOV,
                IR::RegOpnd::New(TyMachReg, func),
                IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
                func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());
            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                argObjSlotOpnd,
                instrArgs->GetDst(),
                func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s3 = address of first actual argument (after "this")
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s2 = actual argument count (without counting "this")
            if (opndInputParamCount == nullptr)
            {
                instr = this->lowererMD->LoadInputParamCount(instrArgs, -1);
                opndInputParamCount = instr->GetDst();
            }
            this->LoadHelperArgument(instrArgs, opndInputParamCount);
            // s1 = current function
            StackSym *paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);  // [ebp + 8]: function object slot
            IR::Opnd *srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            if (this->m_func->GetJnFunction()->IsGenerator())
            {
                // the function object for generator calls is a GeneratorVirtualScriptFunction object
                // and we need to pass the real JavascriptGeneratorFunction object so grab it instead
                IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
                LowererMD::CreateAssign(tmpOpnd, srcOpnd, instrArgs);
                srcOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
            }
            this->LoadHelperArgument(instrArgs, srcOpnd);
            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }
        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArguments);
    }
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadHeapArgsCached
///
/// Load the heap-based arguments object using a cached scope
///
/// instrArgs - the LdHeapArgsCached/LdLetHeapArgsCached instruction being
///             lowered into a helper call.
/// Returns the instruction preceding the lowered sequence.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadHeapArgsCached(IR::Instr *instrArgs)
{
    // Helper arguments are pushed last-to-first (s7 down to s1):
    // s7 = formals are let decls
    // s6 = memory context
    // s5 = local frame instance
    // s4 = address of first actual argument (after "this")
    // s3 = formal argument count
    // s2 = actual argument count
    // s1 = current function
    // dst = JavascriptOperators::LoadArguments(s1, s2, s3, s4, s5, s6, s7)
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr *instrPrev = instrArgs->m_prev;
    // s7 = formals are let decls
    IR::Opnd * formalsAreLetDecls = IR::IntConstOpnd::New((IntConstType)(instrArgs->m_opcode == Js::OpCode::LdLetHeapArgsCached), TyUint8, func);
    this->LoadHelperArgument(instrArgs, formalsAreLetDecls);
    // s6 = memory context
    this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);
    // s5 = local frame instance
    IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
    this->LoadHelperArgument(instrArgs, frameObj);
    if (func->IsInlinee())
    {
        // s4 = address of first actual argument (after "this")
        StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
        // Bump past the "this" slot to reach the first real argument.
        this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
        IR::Instr *instr = this->lowererMD->LoadStackAddress(firstRealArgSlotSym);
        instrArgs->InsertBefore(instr);
        this->LoadHelperArgument(instrArgs, instr->GetDst());
        // s3 = formal argument count (without counting "this")
        uint32 formalsCount = func->GetJnFunction()->GetInParamsCount() - 1;
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyMachReg, func));
        // s2 = actual argument count (without counting "this").
        instr = IR::Instr::New(Js::OpCode::MOV,
            IR::RegOpnd::New(TyMachReg, func),
            IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
            func);
        instrArgs->InsertBefore(instr);
        this->LoadHelperArgument(instrArgs, instr->GetDst());
        // s1 = current function.
        this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());
        // Save the newly-created args object to its dedicated stack slot.
        IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
        instr = IR::Instr::New(Js::OpCode::MOV,
            argObjSlotOpnd,
            instrArgs->GetDst(),
            func);
        instrArgs->InsertAfter(instr);
    }
    else
    {
        // s4 = address of first actual argument (after "this")
        IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
        this->LoadHelperArgument(instrArgs, instr->GetDst());
        // s3 = formal argument count (without counting "this")
        uint32 formalsCount = func->GetInParamsCount() - 1;
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyMachReg, func));
        // s2 = actual argument count (without counting "this"):
        // load the raw count, then DEC to drop the "this" slot.
        instr = this->lowererMD->LoadInputParamCount(instrArgs);
        instr = IR::Instr::New(Js::OpCode::DEC, instr->GetDst(), instr->GetDst(), func);
        instrArgs->InsertBefore(instr);
        this->LoadHelperArgument(instrArgs, instr->GetDst());
        // s1 = current function
        StackSym *paramSym = StackSym::New(TyMachReg, func);
        this->m_func->SetArgOffset(paramSym, 2 * MachPtr);  // [ebp + 8]: function object slot
        IR::Opnd *srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
        this->LoadHelperArgument(instrArgs, srcOpnd);
        // Save the newly-created args object to its dedicated stack slot.
        IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
        instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
        instrArgs->InsertAfter(instr);
    }
    this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArgsCached);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadFuncExpression
///
/// Load the function expression to src1 from [ebp + 8]
///
/// instrFuncExpr - the LdFuncExpr-style instruction; its dst receives the
///                 current function object. Returns the same instruction,
///                 converted into an assignment.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadFuncExpression(IR::Instr *instrFuncExpr)
{
    ASSERT_INLINEE_FUNC(instrFuncExpr);
    Func *func = instrFuncExpr->m_func;
    IR::Opnd *paramOpnd = nullptr;
    if (func->IsInlinee())
    {
        // Inlinees have a dedicated slot for their function object.
        paramOpnd = func->GetInlineeFunctionObjectSlotOpnd();
    }
    else
    {
        //
        // dst = current function ([ebp + 8])
        //
        StackSym *paramSym = StackSym::New(TyMachReg, func);
        this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
        paramOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
    }
    if (this->m_func->GetJnFunction()->IsGenerator())
    {
        // the function object for generator calls is a GeneratorVirtualScriptFunction object
        // and we need to return the real JavascriptGeneratorFunction object so grab it before
        // assigning to the dst
        IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
        LowererMD::CreateAssign(tmpOpnd, paramOpnd, instrFuncExpr);
        paramOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
    }
    // mov dst, param
    instrFuncExpr->SetSrc1(paramOpnd);
    LowererMD::ChangeToAssign(instrFuncExpr);
    return instrFuncExpr;
}
  420. //
  421. // Load the parameter in the first argument slot
  422. //
  423. IR::Instr *
  424. LowererMDArch::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
  425. {
  426. // No need to do anything different for spread calls on x86 since we push args.
  427. IR::SymOpnd * argOpnd = IR::SymOpnd::New(this->m_func->m_symTable->GetArgSlotSym(1), TyVar, this->m_func);
  428. IR::Instr * argInstr = LowererMD::CreateAssign(argOpnd, dst, instr);
  429. return argInstr;
  430. }
//
// Emit a runtime check that the call target is an object (not a tagged value),
// raising JSERR_NeedFunction otherwise. When the operand is statically known to
// be non-tagged, nothing is emitted. Under the debugger, a branch to
// continueAfterExLabel is emitted after the error so execution can resume safely.
// NOTE(review): the isHelper parameter is currently unused here (the label is
// created without it; see the commented-out argument below).
//
void
LowererMDArch::GenerateFunctionObjectTest(IR::Instr * callInstr, IR::RegOpnd *functionObjOpnd, bool isHelper, IR::LabelInstr* continueAfterExLabel /* = nullptr */)
{
    AssertMsg(!m_func->IsJitInDebugMode() || continueAfterExLabel, "When jit is in debug mode, continueAfterExLabel must be provided otherwise continue after exception may cause AV.");
    if (!functionObjOpnd->IsNotTaggedValue())
    {
        IR::Instr * insertBeforeInstr = callInstr;
        // Need check and error if we are calling a tagged int.
        if (!functionObjOpnd->IsTaggedInt())
        {
            // TEST s1, 1
            // JEQ $callLabel
            IR::LabelInstr * callLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func /*, isHelper*/);
            this->lowererMD->GenerateObjectTest(functionObjOpnd, callInstr, callLabel, true);
#if DBG
            // The object test should have produced exactly one branch to callLabel;
            // mark it as a helper-to-non-helper transition for verification.
            int count = 0;
            FOREACH_SLIST_ENTRY(IR::BranchInstr *, branchInstr, &callLabel->labelRefs)
            {
                branchInstr->m_isHelperToNonHelperBranch = true;
                count++;
            } NEXT_SLIST_ENTRY;
            Assert(count == 1);
#endif
            callInstr->InsertBefore(callLabel);
            // The error path falls through here (taken when the operand is tagged).
            insertBeforeInstr = callLabel;
        }
        lowererMD->m_lowerer->GenerateRuntimeError(insertBeforeInstr, JSERR_NeedFunction);
        if (continueAfterExLabel)
        {
            // Under debugger the RuntimeError (exception) can be ignored, generate branch right after RunTimeError instr
            // to jmp to a safe place (which would normally be debugger bailout check).
            IR::BranchInstr* continueAfterEx = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueAfterExLabel, this->m_func);
            insertBeforeInstr->InsertBefore(continueAfterEx);
        }
    }
}
//
// Emit the countdown loop that pushes each element of a spread array as a
// dynamic ArgOut before a call. indexOpnd holds the element count on entry;
// arrayElementsStartOpnd points at the first element.
//
void
LowererMDArch::LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    Func *const func = callInstr->m_func;

    // Align frame
    // OR index, 1 — presumably rounds the push count up to odd so that the total
    // pushes keep the stack 8-byte aligned; TODO confirm against callers.
    IR::Instr *orInstr = IR::Instr::New(Js::OpCode::OR, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func), this->m_func);
    callInstr->InsertBefore(orInstr);

    // Build the loop-top label and its register-allocation metadata so the
    // allocator knows the index and base registers are live across the back edge.
    IR::LabelInstr *startLoopLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    startLoopLabel->m_isLoopTop = true;
    Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
    startLoopLabel->SetLoop(loop);
    loop->SetLoopTopInstr(startLoopLabel);
    loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
    loop->regAlloc.liveOnBackEdgeSyms->Set(indexOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(arrayElementsStartOpnd->m_sym->m_id);
    callInstr->InsertBefore(startLoopLabel);

    // index-- (count down toward zero; elements are pushed last-to-first)
    this->lowererMD->m_lowerer->InsertSub(false, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt8, func), callInstr);

    // elemPtr = [base + index * defaultScale]
    IR::IndirOpnd *elemPtrOpnd = IR::IndirOpnd::New(arrayElementsStartOpnd, indexOpnd, GetDefaultIndirScale(), TyMachPtr, func);

    // Generate argout for n+2 arg (skipping function object + this)
    IR::Instr *argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(elemPtrOpnd);
    callInstr->InsertBefore(argout);
    this->lowererMD->LoadDynamicArgument(argout);

    // Loop back while index != 0.
    this->lowererMD->m_lowerer->InsertCompareBranch(indexOpnd,
        IR::IntConstOpnd::New(0, TyUint8, func),
        Js::OpCode::BrNeq_A,
        true,
        startLoopLabel,
        callInstr);
}
//
// Lower a call whose actual argument count is only known at runtime
// (e.g. Function.apply / spread). Pushes "this" and the callInfo, emits the
// call, then restores ESP by the dynamic amount that was pushed.
// NOTE(review): insertBeforeInstrForCFG is unused on this path — confirm
// whether it is needed for parity with other architectures.
//
IR::Instr *
LowererMDArch::LowerCallIDynamic(IR::Instr * callInstr, IR::Instr*saveThisArgOutInstr, IR::Opnd *argsLength, ushort callFlags, IR::Instr * insertBeforeInstrForCFG)
{
    callInstr->InsertBefore(saveThisArgOutInstr); //Move this Argout next to call;
    this->LoadDynamicArgument(saveThisArgOutInstr);

    Func *func = callInstr->m_func;
    bool bIsInlinee = func->IsInlinee();
    if (bIsInlinee)
    {
        // For an inlinee the actual count is a compile-time constant.
        Assert(argsLength->AsIntConstOpnd()->GetValue() == callInstr->m_func->actualCount);
    }
    else
    {
        Assert(argsLength->IsRegOpnd());
        /*callInfo*/
        // Add 1 to the dynamic length to account for the "this" argument.
        callInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, argsLength, argsLength, IR::IntConstOpnd::New(1, TyInt8, this->m_func), this->m_func));
    }

    // Push the callInfo (arg count) itself as a dynamic argument.
    IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, this->m_func);
    argout->SetSrc1(argsLength);
    callInstr->InsertBefore(argout);
    this->LoadDynamicArgument(argout);

    // load native entry point from script function into eax
    AssertMsg(callInstr->GetSrc1()->IsRegOpnd() && callInstr->GetSrc1()->AsRegOpnd()->m_sym->IsStackSym(),
        "Expected call src to be stackSym");
    IR::RegOpnd * functionWrapOpnd = callInstr->UnlinkSrc1()->AsRegOpnd();
    GeneratePreCall(callInstr, functionWrapOpnd);
    // argCount 0: the ESP restore is emitted explicitly below instead.
    LowerCall(callInstr, 0);

    //Restore stack back to original state.
    IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, RegESP, TyMachReg, this->m_func);
    if (bIsInlinee)
    {
        // +2 for callInfo & function object;
        // (actualCount & 1) adds the padding slot used for 8-byte stack alignment.
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(espOpnd, (callInstr->m_func->actualCount + (callInstr->m_func->actualCount&1) + 2) * MachPtr, TyMachReg, this->m_func);
        callInstr->InsertAfter(IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, this->m_func));
    }
    else
    {
        IR::RegOpnd *argsLengthRegOpnd = argsLength->AsRegOpnd();
        //Account for callInfo & function object in argsLength
        IR::Instr * addInstr = IR::Instr::New(Js::OpCode::ADD, argsLengthRegOpnd, argsLengthRegOpnd, IR::IntConstOpnd::New(2, TyInt8, this->m_func), this->m_func);
        callInstr->InsertBefore(addInstr);

        // The fixup below executes after the call returns.
        IR::Instr *insertInstr = callInstr->m_next;

        // Align stack
        //
        // INC argLengthReg
        IR::Instr * incInstr = IR::Instr::New(Js::OpCode::INC, argsLengthRegOpnd, argsLengthRegOpnd, this->m_func);
        insertInstr->InsertBefore(incInstr);

        // AND argLengthReg, (~1)   — round up to an even slot count
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, argsLengthRegOpnd, argsLengthRegOpnd, IR::IntConstOpnd::New(~1, TyInt32, this->m_func, true), this->m_func);
        insertInstr->InsertBefore(andInstr);

        // LEA ESP, [ESP + argsLengthReg*4]
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(espOpnd, argsLengthRegOpnd, IndirScale4, TyMachReg, this->m_func);
        addInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, this->m_func);
        insertInstr->InsertBefore(addInstr);
    }
    return argout;
}
  554. void
  555. LowererMDArch::GeneratePreCall(IR::Instr * callInstr, IR::Opnd *functionObjOpnd)
  556. {
  557. IR::RegOpnd* functionTypeRegOpnd = nullptr;
  558. // For calls to fixed functions we load the function's type directly from the known (hard-coded) function object address.
  559. // For other calls, we need to load it from the function object stored in a register operand.
  560. if (functionObjOpnd->IsAddrOpnd() && functionObjOpnd->AsAddrOpnd()->m_isFunction)
  561. {
  562. functionTypeRegOpnd = this->lowererMD->m_lowerer->GenerateFunctionTypeFromFixedFunctionObject(callInstr, functionObjOpnd);
  563. }
  564. else if (functionObjOpnd->IsRegOpnd())
  565. {
  566. AssertMsg(functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call target to be stackSym");
  567. functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  568. // functionTypeRegOpnd = MOV function->type
  569. IR::IndirOpnd* functionTypeIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
  570. Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  571. IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionTypeIndirOpnd, this->m_func);
  572. callInstr->InsertBefore(instr);
  573. }
  574. else
  575. {
  576. AssertMsg(false, "Unexpected call target operand type.");
  577. }
  578. // Push function object
  579. this->LoadHelperArgument(callInstr, functionObjOpnd);
  580. int entryPointOffset = Js::Type::GetOffsetOfEntryPoint();
  581. IR::IndirOpnd* entryPointOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachPtr, this->m_func);
  582. callInstr->SetSrc1(entryPointOpnd);
  583. // Atom prefers "CALL reg" over "CALL [reg]"
  584. IR::Instr * hoistedCallSrcInstr = nullptr;
  585. hoistedCallSrcInstr = callInstr->HoistSrc1(Js::OpCode::MOV);
  586. #if defined(_CONTROL_FLOW_GUARD)
  587. if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
  588. {
  589. this->lowererMD->GenerateCFGCheck(hoistedCallSrcInstr->GetDst(), callInstr);
  590. }
  591. #endif
  592. }
//
// Lower a JavaScript call: lower the arg chain, emit the function-object
// runtime check (unless the target is fixed or this is a NewScObject call
// where the check already happened), generate the pre-call sequence, emit the
// CALL, and hook up SimpleJit call profiling when requested.
// NOTE(review): insertBeforeInstrForCFG is unused on this x86 path.
//
IR::Instr *
LowererMDArch::LowerCallI(IR::Instr *callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    // We need to get the calculated CallInfo in SimpleJit because that doesn't include any changes for stack alignment
    IR::IntConstOpnd *callInfo;
    int32 argCount = this->LowerCallArgs(callInstr, callFlags, 1, &callInfo);

    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();

    // If this is a call for new, we already pass the function operand through NewScObject,
    // which checks if the function operand is a real function or not, don't need to add a check again
    // If this is a call to a fixed function, we've already verified that the target is, indeed, a function.
    if (callInstr->m_opcode != Js::OpCode::CallIFixed && !(callFlags & Js::CallFlags_New))
    {
        AssertMsg(functionObjOpnd->IsRegOpnd() && functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call src to be stackSym");
        IR::LabelInstr* continueAfterExLabel = Lowerer::InsertContinueAfterExceptionLabelForDebugger(m_func, callInstr, isHelper);
        GenerateFunctionObjectTest(callInstr, functionObjOpnd->AsRegOpnd(), isHelper, continueAfterExLabel);
    }

    // Can't assert until we remove unreachable code if we have proved that it is a tagged int.
    // Assert((callFlags & Js::CallFlags_New) || !functionWrapOpnd->IsTaggedInt());
    GeneratePreCall(callInstr, functionObjOpnd);

    // Capture the destination before LowerCall sinks it into a MOV from the return register.
    IR::Opnd *const finalDst = callInstr->GetDst();

    IR::Instr* ret = this->LowerCall(callInstr, argCount);

    IR::AutoReuseOpnd autoReuseSavedFunctionObjOpnd;
    if (callInstr->IsJitProfilingInstr())
    {
        Assert(callInstr->m_func->IsSimpleJit());
        Assert(!Js::FunctionBody::IsNewSimpleJit());

        if(finalDst &&
            finalDst->IsRegOpnd() &&
            functionObjOpnd->IsRegOpnd() &&
            finalDst->AsRegOpnd()->m_sym == functionObjOpnd->AsRegOpnd()->m_sym)
        {
            // The function object sym is going to be overwritten, so save it in a temp for profiling
            IR::RegOpnd *const savedFunctionObjOpnd = IR::RegOpnd::New(functionObjOpnd->GetType(), callInstr->m_func);
            autoReuseSavedFunctionObjOpnd.Initialize(savedFunctionObjOpnd, callInstr->m_func);
            Lowerer::InsertMove(savedFunctionObjOpnd, functionObjOpnd, callInstr->m_next);
            functionObjOpnd = savedFunctionObjOpnd;
        }

        auto instr = callInstr->AsJitProfilingInstr();
        ret = this->lowererMD->m_lowerer->GenerateCallProfiling(
            instr->profileId,
            instr->inlineCacheIndex,
            instr->GetDst(),
            functionObjOpnd,
            callInfo,
            instr->isProfiledReturnCall,
            callInstr,
            ret);
    }
    return ret;
}
  643. IR::Instr *
  644. LowererMDArch::LowerAsmJsCallE(IR::Instr *callInstr)
  645. {
  646. IR::IntConstOpnd *callInfo;
  647. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 1, &callInfo);
  648. IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
  649. GeneratePreCall(callInstr, functionObjOpnd);
  650. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  651. return ret;
  652. }
//
// Lower an asm.js internal call. Walks the ArgOut chain back to the StartCall,
// turning each ArgOut into a store to its arg slot, then pushes the function
// object, loads the entry point via Type -> EntryPointInfo -> address, and
// emits the CALL (with an optional CFG check on the hoisted target).
//
IR::Instr *
LowererMDArch::LowerAsmJsCallI(IR::Instr * callInstr)
{
    IR::Instr * argInstr;
    int32 argCount = 0;

    // Lower args and look for StartCall
    argInstr = callInstr;
    IR::Opnd *src2 = argInstr->UnlinkSrc2();
    while (src2->IsSymOpnd())
    {
        IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(m_func);
        argInstr = argLinkSym->m_instrDef;
        // Mov each arg to it's argSlot
        src2 = argInstr->UnlinkSrc2();
        lowererMD->ChangeToAssign(argInstr);
        ++argCount;
    }

    // increment again for FunctionObject
    ++argCount;

    // The head of the chain must be the StartCall.
    IR::RegOpnd * argLinkOpnd = src2->AsRegOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    IR::Instr * startCallInstr = argLinkSym->m_instrDef;
    Assert(startCallInstr->m_opcode == Js::OpCode::StartCall);
    Assert(startCallInstr->GetSrc1()->IsIntConstOpnd());

    int32 stackAlignment = LowerStartCallAsmJs(startCallInstr, startCallInstr, callInstr);

    // +1 for the callinfo-style slot accounting (see LowerCallArgs).
    const uint32 argSlots = argCount + (stackAlignment / 4) + 1;
    m_func->m_argSlotsForFunctionsCalled = max(m_func->m_argSlotsForFunctionsCalled, argSlots);

    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();

    // we will not have function object mem ref in the case of function table calls, so we cannot calculate the call address ahead of time
    Assert(functionObjOpnd->IsRegOpnd() && functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym());

    // Push function object
    IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, callInstr->m_func);
    pushInstr->SetSrc1(functionObjOpnd);
    callInstr->InsertBefore(pushInstr);

    // functionTypeRegOpnd = function->type
    IR::RegOpnd* functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    IR::IndirOpnd* functionInfoIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetTypeOffset(), TyMachReg, m_func);
    IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
    callInstr->InsertBefore(instr);

    // functionTypeRegOpnd = type->entryPointInfo
    functionInfoIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::ScriptFunctionType::GetEntryPointInfoOffset(), TyMachReg, m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
    callInstr->InsertBefore(instr);

    // call [entryPointInfo + addressOffset]
    uint32 entryPointOffset = Js::ProxyEntryPointInfo::GetAddressOffset();
    IR::Opnd * entryPointOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachReg, m_func);
    callInstr->SetSrc1(entryPointOpnd);

    // Atom prefers "CALL reg" over "CALL [reg]"
    IR::Instr * hoistedCallSrcInstr = callInstr->HoistSrc1(Js::OpCode::MOV);

#if defined(_CONTROL_FLOW_GUARD)
    if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
    {
        this->lowererMD->GenerateCFGCheck(hoistedCallSrcInstr->GetDst(), callInstr);
    }
#else
    Unused(hoistedCallSrcInstr);
#endif

    IR::Instr * retInstr = callInstr;
    callInstr->m_opcode = Js::OpCode::CALL;

    // Sink the dst into a MOV from the type-appropriate return register.
    if (callInstr->GetDst())
    {
        IRType dstType = callInstr->GetDst()->GetType();
        IR::Instr * movInstr = callInstr->SinkDst(GetAssignOp(dstType));
        RegNum returnReg = GetRegReturn(dstType);
        callInstr->GetDst()->AsRegOpnd()->SetReg(returnReg);
        movInstr->GetSrc1()->AsRegOpnd()->SetReg(returnReg);
        retInstr = movInstr;
    }
    return retInstr;
}
//
// Lower the out-of-bounds helper path for an asm.js heap load. Emits a bounds
// compare against the heap size (src2); in-bounds falls to the load, while
// out-of-bounds either raises a range error (SIMD loads) or produces a default
// value (NaN for floats, 0 for ints). Returns the done label that follows the
// load path.
//
IR::Instr*
LowererMDArch::LowerAsmJsLdElemHelper(IR::Instr * instr, bool isSimdLoad /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Opnd * src1 = instr->UnlinkSrc1();
    IRType type = src1->GetType();
    // Label layout: $helper (OOB handling), $load (default-value path), $done.
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
    IR::RegOpnd * indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();
    IR::Opnd * cmpOpnd;
    const uint8 dataWidth = instr->dataWidth;

    Assert(isSimdLoad == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);

    // Compare either the dynamic index or the constant offset against the size.
    if (indexOpnd)
    {
        cmpOpnd = indexOpnd;
    }
    else
    {
        cmpOpnd = IR::IntConstOpnd::New(src1->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
    }

    // if dataWidth != byte per element, we need to check end offset
    if (isSimdLoad && checkEndOffset)
    {
        IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);

        // MOV tmp, cmpOnd
        Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);

        // ADD tmp, dataWidth
        Lowerer::InsertAdd(false, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, TyInt8, m_func, true), helperLabel);

        // CMP tmp, size
        // JG $helper
        lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    }
    else
    {
        lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
    }

    // In-bounds: skip the helper block entirely.
    Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);

    if (isSimdLoad)
    {
        // SIMD loads throw on out-of-bounds access.
        lowererMD->m_lowerer->GenerateRuntimeError(loadLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
    }
    else
    {
        // Scalar loads yield a default value: NaN for floats, 0 for ints.
        if (IRType_IsFloat(type))
        {
            Lowerer::InsertMove(instr->UnlinkDst(), IR::FloatConstOpnd::New(Js::NumberConstants::NaN, type, m_func), loadLabel);
        }
        else
        {
            Lowerer::InsertMove(instr->UnlinkDst(), IR::IntConstOpnd::New(0, TyInt8, m_func), loadLabel);
        }
    }
    Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, loadLabel);

    return doneLabel;
}
//
// Lower the out-of-bounds helper path for an asm.js heap store. Mirrors
// LowerAsmJsLdElemHelper: compares the index/offset against the heap size
// (src2); out-of-bounds SIMD stores raise a range error, while out-of-bounds
// scalar stores are simply skipped. Returns the done label.
//
IR::Instr*
LowererMDArch::LowerAsmJsStElemHelper(IR::Instr * instr, bool isSimdStore /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Opnd * dst = instr->UnlinkDst();
    // Label layout: $helper (OOB handling), $store, $done.
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * storeLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
    IR::Opnd * cmpOpnd;
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;

    Assert(isSimdStore == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);

    // Compare either the dynamic index or the constant offset against the size.
    if (indexOpnd)
    {
        cmpOpnd = indexOpnd;
    }
    else
    {
        cmpOpnd = IR::IntConstOpnd::New(dst->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
    }

    // For wide (SIMD) accesses, ensure index + dataWidth stays within the heap.
    if (isSimdStore && checkEndOffset)
    {
        IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);

        // MOV tmp, cmpOnd
        Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);

        // ADD tmp, dataWidth
        Lowerer::InsertAdd(false, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, TyInt8, m_func, true), helperLabel);

        // CMP tmp, size
        // JG $helper
        lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    }
    else
    {
        lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
    }

    // SIMD stores throw on out-of-bounds; scalar stores fall through and skip.
    if (isSimdStore)
    {
        lowererMD->m_lowerer->GenerateRuntimeError(storeLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
    }

    Lowerer::InsertBranch(Js::OpCode::Br, storeLabel, helperLabel);
    Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, storeLabel);

    return doneLabel;
}
  821. IR::Instr *
  822. LowererMDArch::LowerCallPut(IR::Instr *callInstr)
  823. {
  824. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_None);
  825. // load native entry point from script function into eax
  826. IR::Opnd * functionWrapOpnd = callInstr->UnlinkSrc1();
  827. AssertMsg(functionWrapOpnd->IsRegOpnd() && functionWrapOpnd->AsRegOpnd()->m_sym->IsStackSym(),
  828. "Expected call src to be stackSym");
  829. // push function wrapper
  830. this->LoadHelperArgument(callInstr, functionWrapOpnd);
  831. IR::HelperCallOpnd *helperCallOpnd = IR::HelperCallOpnd::New(IR::HelperOp_InvokePut, this->m_func);
  832. callInstr->SetSrc1(helperCallOpnd);
  833. return this->LowerCall(callInstr, argCount);
  834. }
//
// Walk the ArgOut chain of a call back to its StartCall, converting each
// ArgOut into a store to its arg slot, lower the StartCall into the stack
// adjustment, and push the callInfo constant. Returns the total slot count
// (args + alignment padding + callinfo + extraArgs) used to restore ESP.
// If callInfoOpndRef is non-null it receives the callInfo operand (with an
// extra use reference taken).
//
int32
LowererMDArch::LowerCallArgs(IR::Instr *callInstr, ushort callFlags, Js::ArgSlot extraArgs, IR::IntConstOpnd **callInfoOpndRef)
{
    IR::Instr * argInstr;
    uint32 argCount = 0;

    // Lower args and look for StartCall
    argInstr = callInstr;
    IR::Opnd *src2 = argInstr->UnlinkSrc2();
    while (src2->IsSymOpnd())
    {
        IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(this->m_func);
        argInstr = argLinkSym->m_instrDef;
        // Mov each arg to it's argSlot
        src2 = argInstr->UnlinkSrc2();
        this->lowererMD->ChangeToAssign(argInstr);
        argCount++;
    }

    // The head of the chain is a RegOpnd defined by the StartCall.
    IR::RegOpnd * argLinkOpnd = src2->AsRegOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    IR::Instr *startCallInstr = argLinkSym->m_instrDef;

    if (callInstr->m_opcode == Js::OpCode::NewScObject ||
        callInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        callInstr->m_opcode == Js::OpCode::NewScObjArray ||
        callInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
    {
        // These push an extra arg.
        argCount++;
    }

    AssertMsg(startCallInstr->m_opcode == Js::OpCode::StartCall || startCallInstr->m_opcode == Js::OpCode::LoweredStartCall, "Problem with arg chain.");
    AssertMsg(m_func->GetJnFunction()->GetIsAsmjsMode() || startCallInstr->GetArgOutCount(/*getInterpreterArgOutCount*/ false) == argCount, "ArgCount doesn't match StartCall count");
    //
    // Machine dependent lowering
    //

    // For cloned calls the stack adjustment goes at the last arg instead of the StartCall.
    IR::Instr * insertInstr;
    if (callInstr->IsCloned())
    {
        insertInstr = argInstr;
    }
    else
    {
        insertInstr = startCallInstr;
    }

    // Lower the StartCall into the ESP adjustment; record any padding added.
    int32 stackAlignment;
    if (callInstr->m_opcode == Js::OpCode::AsmJsCallE)
    {
        stackAlignment = LowerStartCallAsmJs(startCallInstr, insertInstr, callInstr);
    }
    else
    {
        stackAlignment = LowerStartCall(startCallInstr, insertInstr);
    }

    startCallInstr->SetIsCloned(callInstr->IsCloned());

    // Push argCount
    IR::IntConstOpnd * argCountOpnd = Lowerer::MakeCallInfoConst(callFlags, argCount, m_func);
    if(callInfoOpndRef)
    {
        // Take an extra use so the caller can reference the operand too.
        argCountOpnd->Use(m_func);
        *callInfoOpndRef = argCountOpnd;
    }
    this->LoadHelperArgument(callInstr, argCountOpnd);

    uint32 argSlots;
    argSlots = argCount + (stackAlignment / 4) + 1 + extraArgs; // + 1 for call flags
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled, argSlots);

    return argSlots;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LowerCall
///
/// Machine dependent (x86) lowering for calls.
/// Adds an "ADD ESP, argCount*4" if argCount is not 0.
///
/// The dst, if any, is sunk into a MOV from the type-appropriate return
/// register; float returns are popped off the x87 stack via FSTP first.
/// NOTE(review): the regNum parameter is unused in this body — confirm whether
/// other architectures' overloads need it.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LowerCall(IR::Instr * callInstr, uint32 argCount, RegNum regNum)
{
    IR::Instr *retInstr = callInstr;
    callInstr->m_opcode = Js::OpCode::CALL;

    if (callInstr->GetDst())
    {
        IR::Opnd * dstOpnd = callInstr->GetDst();
        IRType dstType = dstOpnd->GetType();
        Js::OpCode assignOp = GetAssignOp(dstType);
        IR::Instr * movInstr = nullptr;
        RegNum reg = GetRegReturn(dstType);

        if (IRType_IsFloat(dstType))
        {
            // We should only generate this if sse2 is available
            AssertMsg(AutoSystemInfo::Data.SSE2Available(), "SSE2 not supported");

            AssertMsg(reg == RegNOREG, "No register should be assigned for float Reg");

            // We pop the Float X87 stack using FSTP for the return value of the CALL, instead of storing in XMM0 directly.

            //Before: oldDst = CALL xxx

            //After:
            // CALL xxx
            // newDstOpnd = FSTP
            // oldDst = MOVSD [newDstOpnd]

            IR::Instr * floatPopInstr = IR::Instr::New(Js::OpCode::FSTP, m_func);
            IR::Opnd * oldDst = callInstr->UnlinkDst();

            // Spill the popped value to a fresh stack slot, then MOVSD it to the real dst.
            StackSym * newDstStackSym = StackSym::New(dstType, this->m_func);
            Assert(dstType == TyMachDouble);
            this->m_func->StackAllocate(newDstStackSym, MachDouble);
            IR::SymOpnd * newDstOpnd = IR::SymOpnd::New(newDstStackSym, dstType, this->m_func);

            floatPopInstr->SetDst(newDstOpnd);
            callInstr->InsertAfter(floatPopInstr);

            movInstr = IR::Instr::New(Js::OpCode::MOVSD, oldDst, newDstOpnd, this->m_func);
            floatPopInstr->InsertAfter(movInstr);
        }
        else
        {
            // Non-float: sink dst into "dst = MOV returnReg".
            movInstr = callInstr->SinkDst(assignOp);
            callInstr->GetDst()->AsRegOpnd()->SetReg(reg);
            movInstr->GetSrc1()->AsRegOpnd()->SetReg(reg);
        }
        Assert(movInstr);
        retInstr = movInstr;
    }

    // Pop the pushed arguments: LEA ESP, [ESP + argCount * MachPtr].
    if (argCount)
    {
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, RegESP, TyMachReg, this->m_func);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(espOpnd, argCount * MachPtr, TyMachReg, this->m_func);
        IR::Instr * addInstr = IR::Instr::New(Js::OpCode::LEA,
            espOpnd, indirOpnd, this->m_func);
        callInstr->InsertAfter(addInstr);
    }

    // Reset the helper-argument push counter for the next helper call.
    this->helperCallArgsCount = 0;

    return retInstr;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LowerStartCall
///
/// Lower StartCall to a "SUB ESP, argCount * 4"
///
/// For allocations larger than a page, emits a _chkstk call instead so the
/// stack is probed page-by-page. Returns the alignment padding (in bytes)
/// added to keep the stack 8-byte aligned.
///
///----------------------------------------------------------------------------
int32
LowererMDArch::LowerStartCall(IR::Instr * startCallInstr, IR::Instr* insertInstr)
{
    AssertMsg(startCallInstr->GetSrc1()->IsIntConstOpnd(), "Bad src on StartCall");

    IR::IntConstOpnd *sizeOpnd = startCallInstr->GetSrc1()->AsIntConstOpnd();
    IntConstType sizeValue = sizeOpnd->GetValue();

    // Maintain 8 byte alignment of the stack.
    // We do this by adjusting the SUB for stackCall to make sure it maintains 8 byte alignment.
    int32 stackAlignment = Math::Align<int32>(sizeValue*MachPtr, MachStackAlignment) - sizeValue*MachPtr;

    if (stackAlignment != 0)
    {
        // One extra slot of padding (sizeValue is in slots until the multiply below).
        sizeValue += 1;
    }
    sizeValue *= MachPtr;

    IR::Instr* newStartCall;
    if ((uint32)sizeValue > AutoSystemInfo::PageSize) {

        // Convert StartCall into a chkstk
        //     mov eax, sizeOpnd->m_value
        //     call _chkstk
        IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, this->GetRegChkStkParam(), TyMachReg, this->m_func);
        this->lowererMD->CreateAssign(eaxOpnd, IR::IntConstOpnd::New(sizeValue, TyInt32, this->m_func, /*dontEncode*/true), insertInstr);

        newStartCall = IR::Instr::New(Js::OpCode::Call, this->m_func);

        newStartCall->SetSrc1(IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func));
        insertInstr->InsertBefore(newStartCall);
        this->LowerCall(newStartCall, 0);

    } else {

        // Convert StartCall into
        //     lea esp, [esp - sizeValue]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
        newStartCall = IR::Instr::New(Js::OpCode::LEA, espOpnd, IR::IndirOpnd::New(espOpnd, -sizeValue, TyMachReg, this->m_func), this->m_func);
        insertInstr->InsertBefore(newStartCall);
    }
    newStartCall->SetByteCodeOffset(startCallInstr);

    // Mark the start call as being lowered - this is required by the bailout encoding logic
    startCallInstr->m_opcode = Js::OpCode::LoweredStartCall;

    return stackAlignment;
}
//
// Asm.js variant of LowerStartCall. The StartCall size here is already in
// bytes; for AsmJsCallI one MachPtr is subtracted because the function object
// is pushed separately. Returns the alignment padding (in bytes) added to keep
// the stack 8-byte aligned.
//
int32
LowererMDArch::LowerStartCallAsmJs(IR::Instr * startCallInstr, IR::Instr * insertInstr, IR::Instr * callInstr)
{
    AssertMsg(startCallInstr->GetSrc1()->IsIntConstOpnd(), "Bad src on StartCall");

    IR::IntConstOpnd * sizeOpnd = startCallInstr->GetSrc1()->AsIntConstOpnd();
    IntConstType sizeValue = sizeOpnd->GetValue();
    if (callInstr->m_opcode == Js::OpCode::AsmJsCallI)
    {
        // we will push FunctionObject, so don't need to worry about that
        sizeValue -= MachPtr;
    }

    // Maintain 8 byte alignment of the stack.
    // We do this by adjusting the SUB for stackCall to make sure it maintains 8 byte alignment.
    int32 stackAlignment = Math::Align<int32>(sizeValue, MachStackAlignment) - sizeValue;
    if (stackAlignment != 0)
    {
        sizeValue += MachPtr;
    }

    IR::Instr* newStartCall;
    if ((uint32)sizeValue > AutoSystemInfo::PageSize) {
        // Convert StartCall into a chkstk so large frames are probed page-by-page:
        //     mov eax, sizeOpnd->m_value
        //     call _chkstk
        IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, GetRegChkStkParam(), TyMachReg, m_func);
        lowererMD->CreateAssign(eaxOpnd, IR::IntConstOpnd::New(sizeValue, TyInt32, m_func, /*dontEncode*/true), insertInstr);

        newStartCall = IR::Instr::New(Js::OpCode::Call, m_func);
        newStartCall->SetSrc1(IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, m_func));
        insertInstr->InsertBefore(newStartCall);
        LowerCall(newStartCall, 0);
    }
    else {
        // Convert StartCall into
        //     lea esp, [esp - sizeValue]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, m_func);
        newStartCall = IR::Instr::New(Js::OpCode::LEA, espOpnd, IR::IndirOpnd::New(espOpnd, -sizeValue, TyMachReg, m_func), m_func);
        insertInstr->InsertBefore(newStartCall);
    }
    newStartCall->SetByteCodeOffset(startCallInstr);

    // Mark the start call as being lowered - this is required by the bailout encoding logic
    startCallInstr->m_opcode = Js::OpCode::LoweredStartCall;

    return stackAlignment;
}
  1052. ///----------------------------------------------------------------------------
  1053. ///
  1054. /// LowererMDArch::LoadHelperArgument
  1055. ///
  1056. /// Change to a PUSH.
  1057. ///
  1058. ///----------------------------------------------------------------------------
  1059. IR::Instr *
  1060. LowererMDArch::LoadHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  1061. {
  1062. IR::Instr * pushInstr;
  1063. pushInstr = IR::Instr::New(Js::OpCode::PUSH, instr->m_func);
  1064. if(TySize[opndArg->GetType()] < TySize[TyMachReg])
  1065. {
  1066. Assert(!opndArg->IsMemoryOpnd()); // if it's a memory opnd, it would need to be loaded into a register first
  1067. opndArg = opndArg->UseWithNewType(TyMachReg, instr->m_func);
  1068. }
  1069. pushInstr->SetSrc1(opndArg);
  1070. instr->InsertBefore(pushInstr);
  1071. this->helperCallArgsCount++;
  1072. AssertMsg(helperCallArgsCount <= LowererMDArch::MaxArgumentsToHelper, "The # of arguments to the helper is too big.");
  1073. return pushInstr;
  1074. }
  1075. IR::Instr *
  1076. LowererMDArch::LoadDynamicArgument(IR::Instr * instr, uint argNumber /*ignore for x86*/)
  1077. {
  1078. //Convert to push instruction.
  1079. instr->m_opcode = Js::OpCode::PUSH;
  1080. return instr;
  1081. }
  1082. IR::Instr *
  1083. LowererMDArch::LoadDoubleHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1084. {
  1085. IR::Instr * instrPrev;
  1086. IR::Instr * instr;
  1087. IR::Opnd * opnd;
  1088. IR::Opnd * float64Opnd;
  1089. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
  1090. opnd = IR::IndirOpnd::New(espOpnd, -8, TyMachReg, this->m_func);
  1091. instrPrev = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
  1092. instrInsert->InsertBefore(instrPrev);
  1093. opnd = IR::IndirOpnd::New(espOpnd, (int32)0, TyFloat64, this->m_func);
  1094. if (opndArg->GetType() == TyFloat32)
  1095. {
  1096. float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
  1097. instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, opndArg, this->m_func);
  1098. instrInsert->InsertBefore(instr);
  1099. }
  1100. else
  1101. {
  1102. float64Opnd = opndArg;
  1103. }
  1104. instr = IR::Instr::New(Js::OpCode::MOVSD, opnd, float64Opnd, this->m_func);
  1105. instrInsert->InsertBefore(instr);
  1106. LowererMD::Legalize(instr);
  1107. return instrPrev;
  1108. }
  1109. ///----------------------------------------------------------------------------
  1110. ///
  1111. /// LowererMDArch::LowerEntryInstr
  1112. ///
  1113. /// Emit prolog.
  1114. ///
  1115. ///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LowerEntryInstr(IR::EntryInstr * entryInstr)
{
    // Emit the x86 prolog. NOTE: every instruction here is inserted with
    // InsertAfter(entryInstr), so this function builds the prolog in *reverse*
    // execution order — the last InsertAfter below (PUSH EBP) is the first
    // instruction that actually runs.
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.IsEnabled(Js::CheckAlignmentFlag))
    {
        // Debug-only: call the alignment-check helper at the end of the prolog.
        IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
        callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrFunc_CheckAlignment, this->m_func));
        entryInstr->InsertAfter(callInstr);
        this->LowerCall(callInstr, 0, RegEAX);
    }
#endif

    int32 bytesOnStack = MachRegInt+MachRegInt; // Account for return address+push EBP...

    // PUSH used callee-saved registers
    for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg+1))
    {
        if (LinearScan::IsCalleeSaved(reg) && (this->m_func->m_regsUsed.Test(reg)))
        {
            IR::RegOpnd * regOpnd = IR::RegOpnd::New(nullptr, reg, TyMachReg, this->m_func);
            IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
            pushInstr->SetSrc1(regOpnd);
            entryInstr->InsertAfter(pushInstr);
            bytesOnStack += MachRegInt;
        }
    }

    // Allocate frame
    IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, this->GetRegBlockPointer(), TyMachReg, this->m_func);
    IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);

    // Dedicated argument slot is already included in the m_localStackHeight (see Func ctor)
    // Allocate the inlined arg out stack in the locals. Allocate an additional slot so that
    // we can unconditionally clear the argc slot of the next frame.
    this->m_func->m_localStackHeight += ((this->m_func->GetMaxInlineeArgOutCount() + 1) * MachPtr);
    bytesOnStack += this->m_func->m_localStackHeight;

    int32 alignment = Math::Align<int32>(bytesOnStack, MachStackAlignment) - bytesOnStack;
    // Make sure this frame allocation maintains 8-byte alignment. Our point of reference is the return address
    this->m_func->m_localStackHeight += alignment;
    bytesOnStack += alignment;
    Assert(Math::Align<int32>(bytesOnStack, MachStackAlignment) == bytesOnStack);
    Assert(this->m_func->hasBailout || this->bailOutStackRestoreLabel == nullptr);
    this->m_func->frameSize = bytesOnStack;

    if (this->m_func->GetMaxInlineeArgOutCount())
    {
        // Record the final frame height on the function body so inlinee frames
        // can be located at runtime.
        this->m_func->m_workItem->GetFunctionBody()->SetFrameHeight(this->m_func->m_workItem->GetEntryPoint(), this->m_func->m_localStackHeight);
    }

    // Zero initialize the first inlinee frames argc.
    if (this->m_func->GetMaxInlineeArgOutCount())
    {
        StackSym *sym = this->m_func->m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
        sym->m_isInlinedArgSlot = true;
        sym->m_offset = 0;
        IR::Opnd *dst = IR::SymOpnd::New(sym, TyMachReg, this->m_func);
        entryInstr->InsertAfter(IR::Instr::New(Js::OpCode::MOV,
            dst,
            IR::AddrOpnd::NewNull(this->m_func),
            this->m_func));
    }

    if (this->m_func->m_localStackHeight != 0)
    {
        int32 stackSize = this->m_func->m_localStackHeight;
        if (this->m_func->HasArgumentSlot())
        {
            // We separately push the stack argument slot below
            stackSize -= MachPtr;
        }
        if (this->m_func->m_localStackHeight <= PAGESIZE)
        {
            // Generate LEA ESP, [esp - stackSize] // Atom prefers LEA for address computations
            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(espOpnd, -stackSize, TyMachReg, this->m_func);
            IR::Instr * subInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, this->m_func);
            entryInstr->InsertAfter(subInstr);
        }
        else
        {
            // Generate chkstk call
            // Frames larger than a page go through _chkstk. Reversed-order note:
            // the CreateAssign below inserts the size MOV at entryInstr->m_next,
            // i.e. *before* the call in final execution order.
            IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, this->GetRegChkStkParam(), TyMachReg, this->m_func);
            IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, eaxOpnd,
                IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func), this->m_func);
            entryInstr->InsertAfter(callInstr);
            this->LowerCall(callInstr, 0, RegECX);
            IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(stackSize, TyMachReg, this->m_func);
            this->lowererMD->CreateAssign(eaxOpnd, stackSizeOpnd, entryInstr->m_next);
        }
    }

    // Zero-initialize dedicated arguments slot
    if (this->m_func->HasArgumentSlot())
    {
        IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        pushInstr->SetSrc1(IR::IntConstOpnd::New(0, TyMachPtr, this->m_func));
        entryInstr->InsertAfter(pushInstr);
    }

    // Probe for the whole worst-case frame: locals + outgoing argument slots
    // (+1) + the JIT's minimum stack headroom.
    size_t frameSize = bytesOnStack + ((this->m_func->m_argSlotsForFunctionsCalled + 1) * MachPtr) + Js::Constants::MinStackJIT;
    this->GeneratePrologueStackProbe(entryInstr, frameSize);

    // Generate MOV EBP, ESP (runs right after the PUSH EBP emitted below).
    IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, ebpOpnd, espOpnd, this->m_func);
    entryInstr->InsertAfter(movInstr);

    // Generate PUSH EBP
    IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
    pushInstr->SetSrc1(ebpOpnd);
    entryInstr->InsertAfter(pushInstr);

    return entryInstr;
}
IR::Instr *
LowererMDArch::LowerEntryInstrAsmJs(IR::EntryInstr * entryInstr)
{
    // Emit the asm.js prolog. Unlike LowerEntryInstr, instructions are inserted
    // with InsertBefore(insertInstr), so they appear here in execution order:
    // PUSH EBP
    // MOV EBP, ESP
    // StackProbe
    // MOV EAX, LocalStackHeight / LEA ESP, [ESP - stackSize]
    // CALL chkstk /
    // PUSH used nonvolatiles

    // Calculate stack size
    int32 bytesOnStack = MachRegInt + MachRegInt; // Account for return address+push EBP...
    for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg + 1))
    {
        if (LinearScan::IsCalleeSaved(reg) && (m_func->m_regsUsed.Test(reg)))
        {
            bytesOnStack += MachRegInt;
        }
    }
    // One extra machine slot on top of the locals.
    m_func->m_localStackHeight += MachPtr;
    bytesOnStack += m_func->m_localStackHeight;

    int32 alignment = Math::Align<int32>(bytesOnStack, MachStackAlignment) - bytesOnStack;
    // Make sure this frame allocation maintains 8-byte alignment. Our point of reference is the return address
    m_func->m_localStackHeight += alignment;
    bytesOnStack += alignment;
    Assert(Math::Align<int32>(bytesOnStack, MachStackAlignment) == bytesOnStack);
    m_func->frameSize = bytesOnStack;

    IR::Instr * insertInstr = entryInstr->m_next;
    IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, GetRegBlockPointer(), TyMachReg, m_func);
    IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, m_func);

    // Generate PUSH EBP
    IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, m_func);
    pushInstr->SetSrc1(ebpOpnd);
    insertInstr->InsertBefore(pushInstr);

    // Generate MOV EBP, ESP
    IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, ebpOpnd, espOpnd, m_func);
    insertInstr->InsertBefore(movInstr);

    // Generate StackProbe
    size_t frameSize = bytesOnStack + m_func->m_argSlotsForFunctionsCalled * MachPtr + Js::Constants::MinStackJIT;
    GeneratePrologueStackProbe(insertInstr->m_prev, frameSize);

    if (m_func->m_localStackHeight != 0)
    {
        // The extra MachPtr added above is excluded from the explicit allocation.
        int32 stackSize = m_func->m_localStackHeight - MachPtr;
        if (m_func->m_localStackHeight <= PAGESIZE)
        {
            // Generate LEA ESP, [ESP - LocalStackHeight] // Atom prefers LEA for address computations
            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(espOpnd, -stackSize, TyMachReg, m_func);
            IR::Instr * subInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, m_func);
            insertInstr->InsertBefore(subInstr);
        }
        else
        {
            // Frame larger than a page: allocate through _chkstk.
            IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, GetRegChkStkParam(), TyMachReg, m_func);
            // Generate MOV EAX, LocalStackHeight
            IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(stackSize, TyMachReg, m_func);
            lowererMD->CreateAssign(eaxOpnd, stackSizeOpnd, insertInstr);
            // Generate CALL chkstk
            IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, eaxOpnd,
                IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, m_func), m_func);
            insertInstr->InsertBefore(callInstr);
            LowerCall(callInstr, 0, RegECX);
        }
    }

    // PUSH used callee-saved registers
    // Iterated high-to-low here; the epilog (LowerExitInstrCommon) pops
    // low-to-high, so the push/pop orders mirror each other.
    for (RegNum reg = (RegNum)(RegNumCount - 1); reg > RegNOREG; reg = (RegNum)(reg - 1))
    {
        if (LinearScan::IsCalleeSaved(reg) && (m_func->m_regsUsed.Test(reg)))
        {
            IR::RegOpnd * regOpnd = IR::RegOpnd::New(nullptr, reg, TyMachReg, m_func);
            IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, m_func);
            pushInstr->SetSrc1(regOpnd);
            insertInstr->InsertBefore(pushInstr);
        }
    }

#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.IsEnabled(Js::CheckAlignmentFlag))
    {
        // CALL CheckAlignment
        IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
        callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrFunc_CheckAlignment, m_func));
        insertInstr->InsertBefore(callInstr);
        LowerCall(callInstr, 0, RegEAX);
    }
#endif
    return entryInstr;
}
void
LowererMDArch::GeneratePrologueStackProbe(IR::Instr *entryInstr, size_t frameSize)
{
    //
    // Generate a stack overflow check. This can be as simple as a cmp esp, const
    // because this function is guaranteed to be called on its base thread only.
    // If the check fails call ThreadContext::ProbeCurrentStack which will check again and throw if needed.
    //
    //       cmp  esp, ThreadContext::scriptStackLimit + frameSize
    //       jg   done
    //       push frameSize
    //       call ThreadContext::ProbeCurrentStack
    // For thread-agile thread context
    //       mov  eax, [ThreadContext::stackLimitForCurrentThread]
    //       add  eax, frameSize
    //       cmp  esp, eax
    //       jg   done
    //       push frameSize
    //       call ThreadContext::ProbeCurrentStack
    // done:
    //
    // For thread context with script interrupt enabled:
    //       mov  eax, [ThreadContext::stackLimitForCurrentThread]
    //       add  eax, frameSize
    //       jo   $helper
    //       cmp  esp, eax
    //       jg   done
    // $helper:
    //       push frameSize
    //       call ThreadContext::ProbeCurrentStack
    // done:
    //

    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    // All probe code is inserted before the instruction currently following entryInstr.
    IR::Instr *insertInstr = entryInstr->m_next;
    IR::Instr *instr;
    IR::Opnd *stackLimitOpnd;
    ThreadContext *threadContext = this->m_func->GetScriptContext()->GetThreadContext();
    bool doInterruptProbe = threadContext->DoInterruptProbe(this->m_func->GetJnFunction());

    if (doInterruptProbe || !threadContext->GetIsThreadBound())
    {
        // Load the current stack limit from the ThreadContext, then increment this value by the size of the
        // current frame. This is the value we'll compare against below.
        stackLimitOpnd = IR::RegOpnd::New(nullptr, RegEAX, TyMachReg, this->m_func);
        void *pLimit = threadContext->GetAddressOfStackLimitForCurrentThread();
        IR::MemRefOpnd * memOpnd = IR::MemRefOpnd::New(pLimit, TyMachReg, this->m_func);
        this->lowererMD->CreateAssign(stackLimitOpnd, memOpnd, insertInstr);

        instr = IR::Instr::New(Js::OpCode::ADD, stackLimitOpnd, stackLimitOpnd,
            IR::AddrOpnd::New((void*)frameSize, IR::AddrOpndKindConstant, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        if (doInterruptProbe)
        {
            // If this add overflows, then we need to call out to the helper.
            // NOTE(review): presumably the interrupt mechanism stores a sentinel
            // limit that makes this ADD wrap — confirm against ThreadContext.
            instr = IR::BranchInstr::New(Js::OpCode::JO, helperLabel, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }
    else
    {
        // The incremented stack limit is a compile-time constant.
        size_t scriptStackLimit = (size_t)threadContext->GetScriptStackLimit();
        stackLimitOpnd = IR::AddrOpnd::New((void *)(frameSize + scriptStackLimit), IR::AddrOpndKindDynamicMisc, this->m_func);
    }

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
    if (!IS_FAULTINJECT_STACK_PROBE_ON) // Do stack check fastpath only if not doing StackProbe fault injection
    {
        // Fast path: CMP esp, limit; JGT done — the frame fits, skip the helper.
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func));
        instr->SetSrc2(stackLimitOpnd);
        insertInstr->InsertBefore(instr);

        instr = IR::BranchInstr::New(Js::OpCode::JGT, doneLabel, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    insertInstr->InsertBefore(helperLabel);

    // Make sure we have zero where we expect to find the stack nested func pointer relative to EBP.
    LoadHelperArgument(insertInstr, IR::IntConstOpnd::New(0, TyMachReg, m_func));
    LoadHelperArgument(insertInstr, IR::IntConstOpnd::New(0, TyMachReg, m_func));

    // Load the arguments to the probe helper and do the call.
    lowererMD->m_lowerer->LoadScriptContext(insertInstr);
    this->lowererMD->LoadHelperArgument(
        insertInstr, IR::AddrOpnd::New((void*)frameSize, IR::AddrOpndKindConstant, this->m_func));
    instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProbeCurrentStack2, this->m_func));
    insertInstr->InsertBefore(instr);
    this->LowerCall(instr, 0, RegEAX);

    insertInstr->InsertBefore(doneLabel);
    // Insert randomized padding after the probe (see Security::InsertRandomFunctionPad).
    Security::InsertRandomFunctionPad(doneLabel);
}
  1388. ///----------------------------------------------------------------------------
  1389. ///
  1390. /// LowererMDArch::LowerExitInstr
  1391. ///
  1392. /// Emit epilog.
  1393. ///
  1394. ///----------------------------------------------------------------------------
  1395. IR::Instr *
  1396. LowererMDArch::LowerExitInstr(IR::ExitInstr * exitInstr)
  1397. {
  1398. exitInstr = LowerExitInstrCommon(exitInstr);
  1399. // Insert RET
  1400. IR::IntConstOpnd * intSrc = IR::IntConstOpnd::New(0, TyMachReg, this->m_func);
  1401. IR::RegOpnd *eaxReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1402. IR::Instr *retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  1403. retInstr->SetSrc1(intSrc);
  1404. retInstr->SetSrc2(eaxReg);
  1405. exitInstr->InsertBefore(retInstr);
  1406. return exitInstr;
  1407. }
  1408. IR::Instr *
  1409. LowererMDArch::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
  1410. {
  1411. exitInstr = LowerExitInstrCommon(exitInstr);
  1412. // get asm.js return type
  1413. Js::AsmJsRetType asmRetType = m_func->GetJnFunction()->GetAsmJsFunctionInfo()->GetReturnType();
  1414. IRType regType;
  1415. if (asmRetType.which() == Js::AsmJsRetType::Double)
  1416. {
  1417. regType = TyFloat64;
  1418. }
  1419. else if (asmRetType.which() == Js::AsmJsRetType::Float)
  1420. {
  1421. regType = TyFloat32;
  1422. }
  1423. else if (asmRetType.toVarType().isFloat32x4())
  1424. {
  1425. regType = TySimd128F4;
  1426. }
  1427. else if (asmRetType.toVarType().isInt32x4())
  1428. {
  1429. regType = TySimd128I4;
  1430. }
  1431. else if (asmRetType.toVarType().isFloat64x2())
  1432. {
  1433. regType = TySimd128D2;
  1434. }
  1435. else
  1436. {
  1437. Assert(asmRetType.which() == Js::AsmJsRetType::Signed || asmRetType.which() == Js::AsmJsRetType::Void);
  1438. regType = TyInt32;
  1439. }
  1440. if (m_func->IsLoopBody())
  1441. {
  1442. // Insert RET
  1443. IR::IntConstOpnd * intSrc = IR::IntConstOpnd::New(0, TyMachReg, this->m_func);
  1444. IR::RegOpnd *eaxReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1445. IR::Instr *retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  1446. retInstr->SetSrc1(intSrc);
  1447. retInstr->SetSrc2(eaxReg);
  1448. exitInstr->InsertBefore(retInstr);
  1449. }
  1450. else
  1451. {
  1452. // Generate RET
  1453. int32 alignedSize = Math::Align<int32>(m_func->GetJnFunction()->GetAsmJsFunctionInfo()->GetArgByteSize(), MachStackAlignment);
  1454. IR::IntConstOpnd * intSrc = IR::IntConstOpnd::New(alignedSize + MachPtr, TyMachReg, m_func);
  1455. IR::RegOpnd * retReg = IR::RegOpnd::New(nullptr, GetRegReturnAsmJs(regType), regType, m_func);
  1456. IR::Instr *retInstr = IR::Instr::New(Js::OpCode::RET, m_func);
  1457. retInstr->SetSrc1(intSrc);
  1458. retInstr->SetSrc2(retReg);
  1459. exitInstr->InsertBefore(retInstr);
  1460. }
  1461. return exitInstr;
  1462. }
  1463. IR::ExitInstr *
  1464. LowererMDArch::LowerExitInstrCommon(IR::ExitInstr * exitInstr)
  1465. {
  1466. IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, GetRegBlockPointer(), TyMachReg, m_func);
  1467. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, m_func);
  1468. // POP used callee-saved registers
  1469. for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg + 1))
  1470. {
  1471. if (LinearScan::IsCalleeSaved(reg) && (m_func->m_regsUsed.Test(reg)))
  1472. {
  1473. IR::RegOpnd * regOpnd = IR::RegOpnd::New(nullptr, reg, TyMachReg, m_func);
  1474. IR::Instr * popInstr = IR::Instr::New(Js::OpCode::POP, regOpnd, m_func);
  1475. exitInstr->InsertBefore(popInstr);
  1476. }
  1477. }
  1478. // Restore frame
  1479. // Generate MOV ESP, EBP
  1480. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, espOpnd, ebpOpnd, m_func);
  1481. exitInstr->InsertBefore(movInstr);
  1482. // Generate POP EBP
  1483. IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::POP, ebpOpnd, m_func);
  1484. exitInstr->InsertBefore(pushInstr);
  1485. return exitInstr;
  1486. }
  1487. void
  1488. LowererMDArch::EmitPtrInstr(IR::Instr *instr)
  1489. {
  1490. bool legalize = false;
  1491. switch (instr->m_opcode)
  1492. {
  1493. case Js::OpCode::Add_Ptr:
  1494. LowererMD::ChangeToAdd(instr, false /* needFlags */);
  1495. legalize = true;
  1496. break;
  1497. default:
  1498. AssertMsg(UNREACHED, "Un-implemented ptr opcode");
  1499. }
  1500. // OpEq's
  1501. if (legalize)
  1502. {
  1503. LowererMD::Legalize(instr);
  1504. }
  1505. else
  1506. {
  1507. LowererMD::MakeDstEquSrc1(instr);
  1508. }
  1509. }
void
LowererMDArch::EmitInt4Instr(IR::Instr *instr)
{
    // Lower the machine-independent *_I4 (int32) opcodes to concrete x86
    // instructions. Most cases just substitute the opcode in place; Div/Rem
    // expand into the x86 DIV/IDIV register-pinned sequence, and the branches
    // expand into TEST/CMP + conditional jump. Two-operand forms are fixed up
    // at the bottom (Legalize or MakeDstEquSrc1).
    IR::Instr *newInstr;
    IR::Opnd *src1, *src2;
    IR::RegOpnd *regEDX;
    bool legalize = false;
    switch(instr->m_opcode)
    {
    case Js::OpCode::Neg_I4:
        instr->m_opcode = Js::OpCode::NEG;
        break;

    case Js::OpCode::Not_I4:
        instr->m_opcode = Js::OpCode::NOT;
        break;

    case Js::OpCode::Add_I4:
        LowererMD::ChangeToAdd(instr, false /* needFlags */);
        legalize = true;
        break;

    case Js::OpCode::Sub_I4:
        LowererMD::ChangeToSub(instr, false /* needFlags */);
        legalize = true;
        break;

    case Js::OpCode::Mul_I4:
        instr->m_opcode = Js::OpCode::IMUL2;
        break;

    case Js::OpCode::Div_I4:
        // x86 DIV/IDIV leaves the quotient in EAX.
        instr->SinkDst(Js::OpCode::MOV, RegEAX);
        goto idiv_common;
    case Js::OpCode::Rem_I4:
        // x86 DIV/IDIV leaves the remainder in EDX.
        instr->SinkDst(Js::OpCode::MOV, RegEDX);
idiv_common:
        // Unsigned operands use DIV; signed operands use IDIV.
        if (instr->GetSrc1()->GetType() == TyUint32)
        {
            Assert(instr->GetSrc2()->GetType() == TyUint32);
            instr->m_opcode = Js::OpCode::DIV;
        }
        else
        {
            instr->m_opcode = Js::OpCode::IDIV;
        }
        // The dividend must be in EAX.
        instr->HoistSrc1(Js::OpCode::MOV, RegEAX);

        regEDX = IR::RegOpnd::New(TyInt32, instr->m_func);
        regEDX->SetReg(RegEDX);
        if (instr->GetSrc1()->GetType() == TyUint32)
        {
            // Unsigned divide: zero EDX (the high half of the dividend).
            // we need to ensure that register allocator doesn't muck about with edx
            instr->HoistSrc2(Js::OpCode::MOV, RegECX);

            newInstr = IR::Instr::New(Js::OpCode::Ld_I4, regEDX, IR::IntConstOpnd::New(0, TyInt32, instr->m_func), instr->m_func);
            instr->InsertBefore(newInstr);
            LowererMD::ChangeToAssign(newInstr);
            // NOP ensures that the EDX = Ld_I4 0 doesn't get deadstored, will be removed in peeps
            instr->InsertBefore(IR::Instr::New(Js::OpCode::NOP, regEDX, regEDX, instr->m_func));
        }
        else
        {
            // DIV/IDIV takes a register or memory divisor; hoist immediates.
            if (instr->GetSrc2()->IsImmediateOpnd())
            {
                instr->HoistSrc2(Js::OpCode::MOV);
            }
            // Signed divide: CDQ sign-extends EAX into EDX.
            instr->InsertBefore(IR::Instr::New(Js::OpCode::CDQ, regEDX, instr->m_func));
        }
        return;

    case Js::OpCode::Or_I4:
        instr->m_opcode = Js::OpCode::OR;
        break;

    case Js::OpCode::Xor_I4:
        instr->m_opcode = Js::OpCode::XOR;
        break;

    case Js::OpCode::And_I4:
        instr->m_opcode = Js::OpCode::AND;
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
        LowererMD::ChangeToShift(instr, false /* needFlags */);
        legalize = true;
        break;

    // One-operand branches: lower to TEST src1, src1 + JNE/JEQ.
    case Js::OpCode::BrTrue_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br1_Common;
    case Js::OpCode::BrFalse_I4:
        instr->m_opcode = Js::OpCode::JEQ;
br1_Common:
        src1 = instr->UnlinkSrc1();
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        return;

    // Two-operand branches: lower to CMP src1, src2 + the matching jcc
    // (JA/JAE/JBE/JB for unsigned, JGT/JGE/JLE/JLT for signed).
    case Js::OpCode::BrEq_I4:
        instr->m_opcode = Js::OpCode::JEQ;
        goto br2_Common;
    case Js::OpCode::BrNeq_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br2_Common;
    case Js::OpCode::BrUnGt_I4:
        instr->m_opcode = Js::OpCode::JA;
        goto br2_Common;
    case Js::OpCode::BrUnGe_I4:
        instr->m_opcode = Js::OpCode::JAE;
        goto br2_Common;
    case Js::OpCode::BrUnLe_I4:
        instr->m_opcode = Js::OpCode::JBE;
        goto br2_Common;
    case Js::OpCode::BrUnLt_I4:
        instr->m_opcode = Js::OpCode::JB;
        goto br2_Common;
    case Js::OpCode::BrGt_I4:
        instr->m_opcode = Js::OpCode::JGT;
        goto br2_Common;
    case Js::OpCode::BrGe_I4:
        instr->m_opcode = Js::OpCode::JGE;
        goto br2_Common;
    case Js::OpCode::BrLe_I4:
        instr->m_opcode = Js::OpCode::JLE;
        goto br2_Common;
    case Js::OpCode::BrLt_I4:
        instr->m_opcode = Js::OpCode::JLT;
br2_Common:
        src1 = instr->UnlinkSrc1();
        src2 = instr->UnlinkSrc2();
        newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src2);
        return;

    default:
        AssertMsg(UNREACHED, "Un-implemented int4 opcode");
    }

    if(legalize)
    {
        LowererMD::Legalize(instr);
    }
    else
    {
        // OpEq's
        LowererMD::MakeDstEquSrc1(instr);
    }
}
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Convert an int32/uint32 in src1 into a tagged var in dst. Fast path:
    // shift the value left by VarTag_Shift and set the tag bit with INC; if
    // the shift overflows (JO) — or, for uint32, if the high bit was shifted
    // out (JB via CF) — fall back to EmitLoadVarNoCheck to box the number.
    //
    //       s2 = MOV src1
    //       s2 = SHL s2, Js::VarTag_Shift  -- restore the var tag on the result
    //            JO  $ToVar
    //            JB  $ToVar     [isFromUint32]
    //       s2 = INC s2
    //      dst = MOV s2
    //            JMP $done
    // $ToVar:
    //       EmitLoadVarNoCheck
    // $Done:

    AssertMsg(instrLoad->GetSrc1()->IsRegOpnd(), "Should be regOpnd");
    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelToVar = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    IR::Instr *instr;

    // Use static knowledge about src1 to skip the check or the fast path entirely.
    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }

    if (!isNotInt)
    {
        // s2 = MOV s1
        // The arithmetic below is done on the 32-bit views of the operands.
        IR::Opnd * opnd32src1 = src1->UseWithNewType(TyInt32, this->m_func);
        IR::RegOpnd * opndReg2 = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::Opnd * opnd32Reg2 = opndReg2->UseWithNewType(TyInt32, this->m_func);

        instr = IR::Instr::New(Js::OpCode::MOV, opnd32Reg2, opnd32src1, this->m_func);
        instrLoad->InsertBefore(instr);

        // s2 = SHL s2, Js::VarTag_Shift -- restore the var tag on the result
        instr = IR::Instr::New(Js::OpCode::SHL, opnd32Reg2, opnd32Reg2,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
            this->m_func);
        instrLoad->InsertBefore(instr);

        if (!isInt)
        {
            // JO $ToVar -- the shifted value doesn't fit in a tagged int.
            labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr = IR::BranchInstr::New(Js::OpCode::JO, labelToVar, this->m_func);
            instrLoad->InsertBefore(instr);

            if (isFromUint32)
            {
                // JB $ToVar [isFromUint32] -- CF holds the bit shifted out;
                // if set, the uint32 is too large to tag.
                instr = IR::BranchInstr::New(Js::OpCode::JB, labelToVar, this->m_func);
                instrLoad->InsertBefore(instr);
            }
        }

        // s2 = INC s2 -- set the atom tag bit.
        instr = IR::Instr::New(Js::OpCode::INC, opndReg2, opndReg2, this->m_func);
        instrLoad->InsertBefore(instr);

        // dst = MOV s2
        instr = IR::Instr::New(Js::OpCode::MOV, instrLoad->GetDst(), opndReg2, this->m_func);
        instrLoad->InsertBefore(instr);

        if (!isInt)
        {
            // JMP $done -- skip the boxing path.
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
            instrLoad->InsertBefore(instr);
        }
    }

    if (!isInt)
    {
        //$ToVar:
        if (labelToVar)
        {
            instrLoad->InsertBefore(labelToVar);
        }
        // Slow path: box the number into a heap var.
        this->lowererMD->EmitLoadVarNoCheck(instrLoad->GetDst()->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper || labelToVar != nullptr);
    }
    //$Done:
    if (labelDone)
    {
        instrLoad->InsertAfter(labelDone);
    }
    // The original Ld instruction has been fully expanded; drop it.
    instrLoad->Remove();
}
  1734. void
  1735. LowererMDArch::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  1736. {
  1737. // We should only generate this if sse2 is available
  1738. Assert(AutoSystemInfo::Data.SSE2Available());
  1739. Assert(dst->IsRegOpnd() && dst->IsFloat());
  1740. Assert(src->IsRegOpnd() && (src->GetType() == TyInt32 || src->GetType() == TyUint32));
  1741. instrInsert->InsertBefore(IR::Instr::New(dst->IsFloat64() ? Js::OpCode::CVTSI2SD : Js::OpCode::CVTSI2SS, dst, src, this->m_func));
  1742. }
  1743. void
  1744. LowererMDArch::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  1745. {
  1746. // We should only generate this if sse2 is available
  1747. Assert(AutoSystemInfo::Data.SSE2Available());
  1748. this->lowererMD->EmitIntToFloat(dst, src, instrInsert);
  1749. IR::RegOpnd * highestBitOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  1750. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, highestBitOpnd, src, this->m_func);
  1751. instrInsert->InsertBefore(instr);
  1752. instr = IR::Instr::New(Js::OpCode::SHR, highestBitOpnd, highestBitOpnd,
  1753. IR::IntConstOpnd::New(31, TyInt8, this->m_func, true), this->m_func);
  1754. instrInsert->InsertBefore(instr);
  1755. // TODO: Encode indir with base as address opnd instead
  1756. IR::RegOpnd * baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  1757. instr = IR::Instr::New(Js::OpCode::MOV, baseOpnd, IR::AddrOpnd::New((Js::Var)&Js::JavascriptNumber::UIntConvertConst,
  1758. IR::AddrOpndKindDynamicMisc, this->m_func), this->m_func);
  1759. instrInsert->InsertBefore(instr);
  1760. instr = IR::Instr::New(Js::OpCode::ADDSD, dst, dst, IR::IndirOpnd::New(baseOpnd,
  1761. highestBitOpnd, IndirScale8, TyFloat64, this->m_func), this->m_func);
  1762. instrInsert->InsertBefore(instr);
  1763. }
  1764. bool
  1765. LowererMDArch::EmitLoadInt32(IR::Instr *instrLoad)
  1766. {
  1767. // if(doShiftFirst)
  1768. // {
  1769. // r1 = MOV src1
  1770. // r1 = SAR r1, VarTag_Shift (move last-shifted bit into CF)
  1771. // JAE (CF == 0) $helper or $float
  1772. // }
  1773. // else
  1774. // {
  1775. // TEST src1, AtomTag
  1776. // JEQ $helper or $float
  1777. // r1 = MOV src1
  1778. // r1 = SAR r1, VarTag_Shift
  1779. // }
  1780. // dst = MOV r1
  1781. // JMP $Done
  1782. // $float:
  1783. // dst = ConvertToFloat(src1, $helper)
  1784. // $Helper
  1785. // dst = ToInt32(src1)
  1786. // $Done
  1787. AssertMsg(instrLoad->GetSrc1()->IsRegOpnd(), "Should be regOpnd");
  1788. bool isInt = false;
  1789. bool isNotInt = false;
  1790. IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
  1791. IR::LabelInstr *labelHelper = nullptr;
  1792. IR::LabelInstr *labelDone = nullptr;
  1793. IR::LabelInstr* labelFloat = nullptr;
  1794. IR::Instr *instr;
  1795. if (src1->IsTaggedInt())
  1796. {
  1797. isInt = true;
  1798. }
  1799. else if (src1->IsNotInt())
  1800. {
  1801. isNotInt = true;
  1802. }
  1803. const ValueType src1ValueType(src1->GetValueType());
  1804. const bool doShiftFirst = src1ValueType.IsLikelyTaggedInt(); // faster to shift and check flags if it's likely tagged
  1805. const bool doFloatToIntFastPath =
  1806. (src1ValueType.IsLikelyFloat() || src1ValueType.IsLikelyUntaggedInt()) &&
  1807. !(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger)) &&
  1808. AutoSystemInfo::Data.SSE2Available();
  1809. IR::RegOpnd * r1 = nullptr;
  1810. if(doShiftFirst)
  1811. {
  1812. // r1 = MOV src1
  1813. r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
  1814. r1->SetValueType(src1->GetValueType());
  1815. instr = IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func);
  1816. instrLoad->InsertBefore(instr);
  1817. }
  1818. // It could be an integer in this case
  1819. if (!isNotInt)
  1820. {
  1821. if(doShiftFirst)
  1822. {
  1823. // r1 = SAR r1, VarTag_Shift (move last-shifted bit into CF)
  1824. Assert(r1);
  1825. instr = IR::Instr::New(Js::OpCode::SAR, r1, r1,
  1826. IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrLoad->m_func), instrLoad->m_func);
  1827. instrLoad->InsertBefore(instr);
  1828. }
  1829. // We do not know for sure it is an integer - add a Smint test
  1830. if (!isInt)
  1831. {
  1832. if(doFloatToIntFastPath)
  1833. {
  1834. labelFloat = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  1835. }
  1836. else
  1837. {
  1838. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  1839. }
  1840. if(doShiftFirst)
  1841. {
  1842. // JAE (CF == 0) $helper or $float
  1843. instrLoad->InsertBefore(
  1844. IR::BranchInstr::New(Js::OpCode::JAE, labelFloat ? labelFloat : labelHelper, this->m_func));
  1845. }
  1846. else
  1847. {
  1848. // TEST src1, AtomTag
  1849. // JEQ $helper or $float
  1850. this->lowererMD->GenerateSmIntTest(src1, instrLoad, labelFloat ? labelFloat : labelHelper);
  1851. }
  1852. }
  1853. if(!doShiftFirst)
  1854. {
  1855. if(src1->IsEqual(instrLoad->GetDst()))
  1856. {
  1857. // Go ahead and change src1, since it was already confirmed that we won't bail out or go to helper where src1
  1858. // may be used
  1859. r1 = src1;
  1860. }
  1861. else
  1862. {
  1863. // r1 = MOV src1
  1864. Assert(!r1);
  1865. r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
  1866. r1->SetValueType(src1->GetValueType());
  1867. instr = IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func);
  1868. instrLoad->InsertBefore(instr);
  1869. }
  1870. // r1 = SAR r1, VarTag_Shift
  1871. Assert(r1);
  1872. instr = IR::Instr::New(Js::OpCode::SAR, r1, r1,
  1873. IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrLoad->m_func), instrLoad->m_func);
  1874. instrLoad->InsertBefore(instr);
  1875. }
  1876. // dst = MOV r1
  1877. Assert(r1);
  1878. instr = IR::Instr::New(Js::OpCode::MOV, instrLoad->GetDst(), r1, instrLoad->m_func);
  1879. instrLoad->InsertBefore(instr);
  1880. if (!isInt)
  1881. {
  1882. // JMP $Done
  1883. labelDone = instrLoad->GetOrCreateContinueLabel();
  1884. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
  1885. instrLoad->InsertBefore(instr);
  1886. }
  1887. }
  1888. // if it is not an int - we need to convert.
  1889. if (!isInt)
  1890. {
  1891. if(doFloatToIntFastPath)
  1892. {
  1893. if(labelFloat)
  1894. {
  1895. instrLoad->InsertBefore(labelFloat);
  1896. }
  1897. if(!labelHelper)
  1898. {
  1899. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  1900. }
  1901. if(!labelDone)
  1902. {
  1903. labelDone = instrLoad->GetOrCreateContinueLabel();
  1904. }
  1905. this->lowererMD->GenerateFloatTest(src1, instrLoad, labelHelper, instrLoad->HasBailOutInfo());
  1906. IR::Opnd* floatOpnd = IR::IndirOpnd::New(src1, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
  1907. this->lowererMD->ConvertFloatToInt32(instrLoad->GetDst(), floatOpnd, labelHelper, labelDone, instrLoad);
  1908. }
  1909. // $Helper
  1910. // dst = ToInt32(r1)
  1911. // $Done
  1912. if (labelHelper)
  1913. {
  1914. instrLoad->InsertBefore(labelHelper);
  1915. }
  1916. if(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger))
  1917. {
  1918. // Avoid bailout if we have a JavascriptNumber whose value is a signed 32-bit integer
  1919. lowererMD->m_lowerer->LoadInt32FromUntaggedVar(instrLoad);
  1920. // Need to bail out instead of calling a helper
  1921. return true;
  1922. }
  1923. this->lowererMD->m_lowerer->LowerUnaryHelperMem(instrLoad, IR::HelperConv_ToInt32);
  1924. }
  1925. else
  1926. {
  1927. instrLoad->Remove();
  1928. }
  1929. return false;
  1930. }
  1931. IR::Instr *
  1932. LowererMDArch::LoadCheckedFloat(
  1933. IR::RegOpnd *opndOrig,
  1934. IR::RegOpnd *opndFloat,
  1935. IR::LabelInstr *labelInline,
  1936. IR::LabelInstr *labelHelper,
  1937. IR::Instr *instrInsert,
  1938. const bool checkForNullInLoopBody)
  1939. {
  1940. // Load one floating-point var into an XMM register, inserting checks to see if it's really a float:
  1941. // TEST src, 1
  1942. // JNE $non-int
  1943. // t0 = MOV src // convert a tagged int to float
  1944. // t0 = SAR t0, 1
  1945. // flt = CVTSI2SD t0
  1946. // JMP $labelInline
  1947. // $non-int
  1948. // CMP [src], JavascriptNumber::`vtable'
  1949. // JNE $labelHelper
  1950. // flt = MOVSD [t0 + offset(value)]
  1951. IR::Opnd * opnd;
  1952. IR::Instr * instr;
  1953. IR::Instr * instrFirst = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  1954. instrFirst->SetSrc1(opndOrig);
  1955. instrFirst->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func));
  1956. instrInsert->InsertBefore(instrFirst);
  1957. IR::LabelInstr * labelVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  1958. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelVar, this->m_func);
  1959. instrInsert->InsertBefore(instr);
  1960. if (opndOrig->GetValueType().IsLikelyFloat())
  1961. {
  1962. // Make this path helper if value is likely a float
  1963. instrInsert->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true));
  1964. }
  1965. opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  1966. instr = IR::Instr::New(Js::OpCode::MOV, opnd, opndOrig, this->m_func);
  1967. instrInsert->InsertBefore(instr);
  1968. instr = IR::Instr::New(
  1969. Js::OpCode::SAR, opnd, opnd, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  1970. instrInsert->InsertBefore(instr);
  1971. instr = IR::Instr::New(Js::OpCode::CVTSI2SD, opndFloat, opnd, this->m_func);
  1972. instrInsert->InsertBefore(instr);
  1973. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelInline, this->m_func);
  1974. instrInsert->InsertBefore(instr);
  1975. instrInsert->InsertBefore(labelVar);
  1976. lowererMD->GenerateFloatTest(opndOrig, instrInsert, labelHelper, checkForNullInLoopBody);
  1977. opnd = IR::IndirOpnd::New(opndOrig, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
  1978. instr = IR::Instr::New(Js::OpCode::MOVSD, opndFloat, opnd, this->m_func);
  1979. instrInsert->InsertBefore(instr);
  1980. return instrFirst;
  1981. }
  1982. IR::LabelInstr *
  1983. LowererMDArch::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
  1984. {
  1985. IR::Instr * exitPrevInstr = exitTargetInstr->m_prev;
  1986. // On x86 we push and pop the out param area, but the start call can be moved passed the bailout instruction
  1987. // which we don't keep track of. There isn't a flow based pass after lowerer,
  1988. // So we don't know how much stack we need to pop. Instead, generate a landing area to restore the stack
  1989. // Via EBP, the prolog/epilog phase will fix up the size from EBP we need to restore to ESP before the epilog
  1990. if (bailOutInfo->startCallCount != 0)
  1991. {
  1992. if (this->bailOutStackRestoreLabel == nullptr)
  1993. {
  1994. if (exitPrevInstr->HasFallThrough())
  1995. {
  1996. // Branch around the stack reload
  1997. IR::BranchInstr * branchToExit = IR::BranchInstr::New(Js::OpCode::JMP, exitTargetInstr, this->m_func);
  1998. exitPrevInstr->InsertAfter(branchToExit);
  1999. exitPrevInstr = branchToExit;
  2000. }
  2001. this->bailOutStackRestoreLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  2002. IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, RegEBP, TyMachReg, this->m_func);
  2003. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, RegESP, TyMachReg, this->m_func);
  2004. // -4 for now, fix up in prolog/epilog phase
  2005. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(ebpOpnd, (size_t)-4, TyMachReg, this->m_func);
  2006. // Lower this after register allocation, once we know the frame size.
  2007. IR::Instr *bailOutStackRestoreInstr = IR::Instr::New(Js::OpCode::BailOutStackRestore, espOpnd, indirOpnd, this->m_func);
  2008. exitPrevInstr->InsertAfter(bailOutStackRestoreInstr);
  2009. exitPrevInstr->InsertAfter(this->bailOutStackRestoreLabel);
  2010. }
  2011. // Jump to the stack restore label instead
  2012. exitTargetInstr = this->bailOutStackRestoreLabel;
  2013. }
  2014. return exitTargetInstr;
  2015. }
  2016. ///----------------------------------------------------------------------------
  2017. ///
  2018. /// LowererMDArch::GenerateFastShiftLeft
  2019. ///
  2020. ///----------------------------------------------------------------------------
bool
LowererMDArch::GenerateFastShiftLeft(IR::Instr * instrShift)
{
    // Fast path for Shl: when both operands are (or can be proven at runtime to
    // be) tagged ints, untag, shift inline, and re-tag the result. If re-tagging
    // overflows, box the raw int32 via an Op_Int32ToAtom helper call. Returns
    // true, meaning the caller must still emit the generic helper call, which is
    // reached through $helper when the operands are not both tagged ints.
    //
    // Given:
    //
    // dst = Shl src1, src2
    //
    // Generate:
    //
    //       (If not 2 Int31's, jump to $helper.)
    // s1  = MOV src1
    // s1  = SAR s1, Js::VarTag_Shift  -- Remove the var tag from the value to be shifted
    // s2  = MOV src2
    // s2  = SAR s2, Js::VarTag_Shift  -- extract the real shift amount from the var
    // s1  = SHL s1, s2                -- do the inline shift
    // s3  = MOV s1
    // s3  = SHL s3, Js::VarTag_Shift  -- restore the var tag on the result
    //       JO $ToVar                 -- tag restore overflowed: result doesn't fit in an Int31
    // s3  = INC s3                    -- set the AtomTag bit
    // dst = MOV s3
    //       JMP $fallthru
    //$ToVar:
    //       PUSH scriptContext
    //       PUSH s1
    // dst = ToVar()
    //       JMP $fallthru
    // $helper:
    //       (caller generates helper call)
    // $fallthru:

    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru;
    IR::Instr * instr;
    IR::RegOpnd * opndReg1;
    IR::RegOpnd * opndReg2;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;

    opndSrc1 = instrShift->GetSrc1();
    opndSrc2 = instrShift->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Shl instruction");

    // Not tagged ints? Then the fast path can never be taken -- let the caller
    // generate just the helper call (return true without emitting anything).
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // Tagged ints? If src2 is a known int constant, fold the shift count now
    // (masked to 5 bits, per the ECMAScript shift semantics).
    bool isTaggedInts = false;
    bool src2IsIntConst = false;
    IntConstType s2Value = 0;
    if (opndSrc2->IsRegOpnd())
    {
        if (opndSrc2->AsRegOpnd()->IsTaggedInt())
        {
            if (opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst())
            {
                src2IsIntConst = true;
                s2Value = opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue();
                s2Value = (s2Value & 0x1F);
            }
            if (opndSrc1->IsTaggedInt())
            {
                isTaggedInts = true;
            }
        }
    }
    else
    {
        // NOTE(review): assert message says "shift right" -- looks copy/pasted
        // from the Shr lowering; message only, behavior unaffected.
        AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
            "Expect src2 of shift right to be reg or Var.");
        src2IsIntConst = true;
        s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
        s2Value = (s2Value & 0x1F);
        if (opndSrc1->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        lowererMD->GenerateSmIntPairTest(instrShift, opndSrc1, opndSrc2, labelHelper);
    }

    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // s1 = SAR s1, Js::VarTag_Shift
    //
    // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
    //
    IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
    instr = IR::Instr::New(
        Js::OpCode::SAR, opnd32Reg1, opnd32Reg1,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
    instrShift->InsertBefore(instr);

    IR::Opnd *countOpnd;

    if (src2IsIntConst)
    {
        // Constant shift count: use the pre-masked immediate directly.
        countOpnd = IR::IntConstOpnd::New(s2Value, TyMachReg, instrShift->m_func);
    }
    else
    {
        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);

        // Shift count needs to be in ECX
        opndReg2->SetReg(this->GetRegShiftCount());
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, instrShift->m_func);
        instrShift->InsertBefore(instr);

        // s2 = SAR s2, Js::VarTag_Shift
        //
        // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
        //
        IR::Opnd * opnd32Reg2 = opndReg2->UseWithNewType(TyInt32, instrShift->m_func);
        instr = IR::Instr::New(
            Js::OpCode::SAR, opnd32Reg2, opnd32Reg2,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
        instrShift->InsertBefore(instr);
        countOpnd = opndReg2;
    }

    // s1 = SHL s1, s2
    //
    // Ecmascript spec says we only need mask the shift amount by 0x1F. But intel uses 0x3F
    // for 64 operands. Hence using 32 bits. opnd32Reg1 is already refined. reusing that.
    //
    instr = IR::Instr::New(Js::OpCode::SHL, opnd32Reg1, opnd32Reg1, countOpnd, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // s3 = MOV s1
    IR::RegOpnd * opndReg3 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
    IR::Opnd * opnd32Reg3 = opndReg3->UseWithNewType(TyInt32, instrShift->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opnd32Reg3, opnd32Reg1, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // s3 = SHL s3, Js::VarTag_Shift -- restore the var tag on the result
    //
    // Ecmascript spec says we only need mask the shift amount by 0x1F. But intel uses 0x3F
    // for 64 operands. Hence using 32 bits. opnd32Reg1 is already refined. reusing that.
    //
    instr = IR::Instr::New(
        Js::OpCode::SHL, opnd32Reg3, opnd32Reg3,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
    instrShift->InsertBefore(instr);

    // JO $ToVar -- the re-tag shift overflowed, so the result can't be a tagged int.
    IR::LabelInstr *labelToVar = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelToVar, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // s3 = INC s3 -- set the AtomTag bit on the shifted value.
    instr = IR::Instr::New(Js::OpCode::INC, opndReg3, opndReg3, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // dst = MOV s3
    instr = IR::Instr::New(Js::OpCode::MOV, instrShift->GetDst(), opndReg3, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrShift->m_func);
    instrShift->InsertBefore(instr);

    //$ToVar:
    instrShift->InsertBefore(labelToVar);

    // Box the raw int32 in s1. If dst can be a temp number, write into a stack
    // slot in place instead of allocating on the heap.
    IR::JnHelperMethod helperMethod;
    IR::Opnd *dst;
    dst = instrShift->GetDst();
    if (instrShift->dstIsTempNumber)
    {
        IR::Opnd *tempOpnd;
        helperMethod = IR::HelperOp_Int32ToAtomInPlace;
        Assert(dst->IsRegOpnd());
        StackSym * tempNumberSym = lowererMD->GetLowerer()->GetTempNumberSym(dst, instrShift->dstIsTempNumberTransferred);
        IR::Instr *load = lowererMD->LoadStackAddress(tempNumberSym);
        instrShift->InsertBefore(load);
        tempOpnd = load->GetDst();
        this->LoadHelperArgument(instrShift, tempOpnd);
    }
    else
    {
        helperMethod = IR::HelperOp_Int32ToAtom;
    }

    // PUSH scriptContext
    this->lowererMD->m_lowerer->LoadScriptContext(instrShift);

    // PUSH s1
    this->LoadHelperArgument(instrShift, opndReg1);

    // dst = ToVar()
    instr = IR::Instr::New(Js::OpCode::Call, dst,
        IR::HelperCallOpnd::New(helperMethod, instrShift->m_func), instrShift->m_func);
    instrShift->InsertBefore(instr);
    this->LowerCall(instr, 0);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrShift->InsertBefore(labelHelper);
    instrShift->InsertAfter(labelFallThru);

    return true;
}
  2218. ///----------------------------------------------------------------------------
  2219. ///
  2220. /// LowererMDArch::GenerateFastShiftRight
  2221. ///
  2222. ///----------------------------------------------------------------------------
bool
LowererMDArch::GenerateFastShiftRight(IR::Instr * instrShift)
{
    // Fast path for Shr_A / ShrU_A: convert each operand to a machine integer
    // inline when it is a tagged int (falling back to Conv_ToInt32/ToUInt32
    // helper calls otherwise), do the shift, then re-tag the result -- boxing
    // via EmitLoadVarNoCheck when the result does not fit in a tagged int.
    // Returns false: instrShift itself is rewritten into the final MOV, so the
    // caller must NOT also emit the generic helper call.
    //
    // Given:
    //
    // dst = Shr/Sar src1, src2
    //
    // Generate:
    //
    // s1 = MOV src1
    //      TEST s1, 1
    //      JEQ $S1ToInt
    // s1 = SAR s1, VarTag_Shift  -- extract the real shift amount from the var
    //      JMP $src2
    //$S1ToInt:
    //      PUSH scriptContext
    //      PUSH s1
    // s1 = ToInt32()/ToUInt32
    //$src2:
    //      Load s2
    //      TEST s2, 1
    //      JEQ $S2ToUInt
    // s2 = SAR s2, VarTag_Shift  -- extract the real shift amount from the var
    //      JMP $Shr
    //$S2ToUInt:
    //      PUSH scriptContext
    //      PUSH s2
    // s2 = ToUInt32()
    //$Shr:
    // s1 = SHR/SAR s1, s2        -- do the inline shift
    // s3 = MOV s1
    //ECX = MOV s2
    // s3 = SHL s3, ECX           -- To tagInt
    //      JO $ToVar
    //      JS $ToVar
    // s3 = INC s3
    //      JMP $done
    //$ToVar:
    //      EmitLoadVarNoCheck
    //$Done:
    // dst = MOV s3

    IR::LabelInstr * labelS1ToInt = nullptr;
    IR::LabelInstr * labelSrc2 = nullptr;
    IR::LabelInstr * labelS2ToUInt = nullptr;
    IR::LabelInstr * labelShr = nullptr;
    IR::LabelInstr * labelToVar = nullptr;
    IR::LabelInstr * labelDone = nullptr;
    IR::Instr * instr;
    IR::RegOpnd * opndReg1;
    IR::RegOpnd * opndReg2;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;
    bool src1IsInt = false;
    bool src1IsNotInt = false;
    bool src2IsInt = false;
    bool src2IsIntConst = false;
    bool src2IsNotInt = false;
    bool resultIsTaggedInt = false;
    bool isUnsignedShift = (instrShift->m_opcode == Js::OpCode::ShrU_A);

    opndSrc1 = instrShift->UnlinkSrc1();
    opndSrc2 = instrShift->UnlinkSrc2();
    // NOTE(review): assert message says "Shl" -- looks copy/pasted; message
    // only, behavior unaffected.
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Shl instruction");

    if (instrShift->HasBailOutInfo())
    {
        // The helper calls below can trigger implicit calls; split out and lower
        // the bail-on-implicit-call check before emitting them.
        IR::Instr * bailOutInstr = this->lowererMD->m_lowerer->SplitBailOnImplicitCall(instrShift);
        this->lowererMD->m_lowerer->LowerBailOnEqualOrNotEqual(bailOutInstr);
    }

    AssertMsg(opndSrc1->IsRegOpnd(), "We expect this to be a regOpnd");
    opndReg1 = opndSrc1->AsRegOpnd();
    src1IsInt = opndReg1->IsTaggedInt();
    if (src1IsInt && !isUnsignedShift)
    {
        // A signed shift of an int31 always fits back in an int31.
        // -1 >>> 0 != taggedInt... (so this does NOT hold for unsigned shifts)
        resultIsTaggedInt = true;
    }
    src1IsNotInt = opndReg1->IsNotInt();

    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, instrShift->m_func);
    instrShift->InsertBefore(instr);

    IR::Opnd *dst = instrShift->GetDst();
    AssertMsg(dst->IsRegOpnd(), "We expect this to be a regOpnd");

    IntConstType s2Value = 0;

    if (opndSrc2->IsRegOpnd())
    {
        opndReg2 = opndSrc2->AsRegOpnd();
        src2IsInt = opndReg2->IsTaggedInt();
        src2IsIntConst = opndReg2->m_sym->IsTaggableIntConst();
        src2IsNotInt = opndReg2->IsNotInt();
    }
    else
    {
        AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
            "Expect src2 of shift right to be reg or Var.");
        src2IsInt = src2IsIntConst = true;
        opndReg2 = nullptr;
    }

    if (isUnsignedShift)
    {
        // We use the src2IsIntConst to combine the tag shifting with the actual shift.
        // The tag shift however needs to be a signed shift...
        src2IsIntConst = false;

        if (opndSrc2->IsAddrOpnd())
        {
            // Materialize the constant var into a register so the generic
            // (non-constant) shift-count path below can consume it.
            instr = lowererMD->CreateAssign(
                IR::RegOpnd::New(opndSrc2->GetType(), instrShift->m_func),
                opndSrc2, instrShift);

            opndSrc2 = instr->GetDst();
            opndReg2 = opndSrc2->AsRegOpnd();
        }
    }

    if (src2IsIntConst)
    {
        if (opndSrc2->IsRegOpnd())
        {
            AnalysisAssert(opndReg2);
            s2Value = opndReg2->m_sym->GetIntConstValue();
        }
        else
        {
            s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
        }

        // Mask to 5 bits, per ECMAScript shift-count semantics.
        s2Value = (s2Value & 0x1F);

        if (s2Value >= Js::VarTag_Shift)
        {
            // Shifting right by at least the tag width guarantees the result
            // fits back in a tagged int.
            resultIsTaggedInt = true;

            if ((unsigned)(s2Value + Js::VarTag_Shift) > 0x1f)
            {
                // Can't combine the SHR with the AtomTag shift if we overflow...
                s2Value = 0;
                src2IsIntConst = false;
            }
        }
    }

    if (!src1IsNotInt)
    {
        if (!src1IsInt)
        {
            // TEST s1, AtomTag
            instr = IR::Instr::New(Js::OpCode::TEST, instrShift->m_func);
            instr->SetSrc1(opndReg1);
            instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrShift->m_func));
            instrShift->InsertBefore(instr);

            // JEQ $S1ToInt  (tag bit clear => not a tagged int, convert via helper)
            labelS1ToInt = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
            instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelS1ToInt, instrShift->m_func);
            instrShift->InsertBefore(instr);
        }

        // s1 = SAR s1, VarTag_Shift -- extract the real shift amount from the var
        // (when src2 is a usable constant, s2Value is folded in here, combining
        // the untag with the actual shift)
        //
        // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
        //
        IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
        instr = IR::Instr::New(Js::OpCode::SAR, opnd32Reg1, opnd32Reg1,
            IR::IntConstOpnd::New(Js::VarTag_Shift + s2Value, TyInt8, instrShift->m_func), instrShift->m_func);
        instrShift->InsertBefore(instr);

        // JMP $src2
        labelSrc2 = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelSrc2, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }

    if (!src1IsInt)
    {
        if (labelS1ToInt)
        {
            //$S1ToInt:
            instrShift->InsertBefore(labelS1ToInt);
        }

        // PUSH scriptContext
        this->lowererMD->m_lowerer->LoadScriptContext(instrShift);

        // PUSH s1
        this->LoadHelperArgument(instrShift, opndReg1);

        // s1 = ToInt32()/ToUint32
        instr = IR::Instr::New(Js::OpCode::Call, opndReg1,
            IR::HelperCallOpnd::New((isUnsignedShift ? IR::HelperConv_ToUInt32_Full : IR::HelperConv_ToInt32_Full), instrShift->m_func),
            instrShift->m_func);
        instrShift->InsertBefore(instr);
        this->LowerCall(instr, 0);

        if (src2IsIntConst && s2Value != 0)
        {
            // The fast path above folded the constant shift into the untag SAR;
            // the helper path gets a raw int32, so apply the shift separately.
            //
            // s1 = SHR/SAR s1, s2 -- do the inline shift
            //
            // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
            //
            IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
            instr = IR::Instr::New(isUnsignedShift ? Js::OpCode::SHR : Js::OpCode::SAR,
                opnd32Reg1, opnd32Reg1, IR::IntConstOpnd::New(s2Value, TyInt8, instrShift->m_func), instrShift->m_func);
            instrShift->InsertBefore(instr);
        }
    }

    //$src2:
    if (labelSrc2)
    {
        instrShift->InsertBefore(labelSrc2);
    }

    if (!src2IsIntConst)
    {
        // Load s2
        opndReg2 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }

    if (!src2IsNotInt)
    {
        if (!src2IsInt)
        {
            // TEST s2, AtomTag
            instr = IR::Instr::New(Js::OpCode::TEST, instrShift->m_func);
            instr->SetSrc1(opndReg2);
            instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrShift->m_func));
            instrShift->InsertBefore(instr);

            // JEQ $S2ToUInt  (tag bit clear => not a tagged int, convert via helper)
            labelS2ToUInt = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
            instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelS2ToUInt, instrShift->m_func);
            instrShift->InsertBefore(instr);
        }

        if (!src2IsIntConst)
        {
            // s2 = SAR s2, VarTag_Shift -- extract the real shift amount from the var
            //
            // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
            //
            IR::Opnd * opnd32Reg2 = opndReg2->UseWithNewType(TyInt32, instrShift->m_func);
            instr = IR::Instr::New(Js::OpCode::SAR, opnd32Reg2, opnd32Reg2,
                IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
            instrShift->InsertBefore(instr);
        }

        // JMP $shr
        labelShr = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelShr, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }

    if (!src2IsInt)
    {
        if (labelS2ToUInt)
        {
            //$S2ToUInt:
            instrShift->InsertBefore(labelS2ToUInt);
        }

        // PUSH scriptContext
        this->lowererMD->m_lowerer->LoadScriptContext(instrShift);

        // PUSH s2
        this->LoadHelperArgument(instrShift, opndReg2);

        // s2 = ToUInt32()
        instr = IR::Instr::New(Js::OpCode::Call, opndReg2,
            IR::HelperCallOpnd::New(IR::HelperConv_ToUInt32_Full, instrShift->m_func), instrShift->m_func);
        instrShift->InsertBefore(instr);
        this->LowerCall(instr, 0);
    }

    //$Shr:
    if (labelShr)
    {
        instrShift->InsertBefore(labelShr);
    }

    if (!src2IsIntConst)
    {
        // s1 = SHR/SAR s1, s2 -- do the inline shift
        // (variable shift count must live in ECX on x86)
        //
        // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
        //
        IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
        IR::RegOpnd * opnd32Ecx = IR::RegOpnd::New(TyInt32, this->m_func);
        opnd32Ecx->SetReg(this->GetRegShiftCount());
        instr = IR::Instr::New(Js::OpCode::MOV, opnd32Ecx, opndReg2, this->m_func);
        instrShift->InsertBefore(instr);

        instr = IR::Instr::New(isUnsignedShift ? Js::OpCode::SHR : Js::OpCode::SAR,
            opnd32Reg1, opnd32Reg1, opnd32Ecx, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }

    // s3 = MOV s1
    IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
    IR::RegOpnd * opndReg3 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
    IR::Opnd * opnd32Reg3 = opndReg3->UseWithNewType(TyInt32, instrShift->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opnd32Reg3, opnd32Reg1, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // s3 = SHL s3, VarTag_Shift -- To tagInt
    //
    // Ecmascript spec says we only need mask the shift amount by 0x1F. But intel uses 0x3F
    // for 64 operands. Hence using 32 bits.
    //
    instr = IR::Instr::New(Js::OpCode::SHL, opnd32Reg3, opnd32Reg3,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
    instrShift->InsertBefore(instr);

    if (!resultIsTaggedInt)
    {
        // JO $ToVar -- re-tag overflowed the int31 range
        labelToVar = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JO, labelToVar, instrShift->m_func);
        instrShift->InsertBefore(instr);

        if (isUnsignedShift)
        {
            // JS $ToVar -- an unsigned result with the sign bit set is > INT31_MAX
            instr = IR::BranchInstr::New(Js::OpCode::JSB, labelToVar, instrShift->m_func);
            instrShift->InsertBefore(instr);
        }
    }

    // s3 = INC s3 -- set the AtomTag bit
    instr = IR::Instr::New(Js::OpCode::INC, opndReg3, opndReg3, instrShift->m_func);
    instrShift->InsertBefore(instr);

    if (!src1IsInt || !src2IsInt || !resultIsTaggedInt)
    {
        // JMP $done
        labelDone = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }

    if (!resultIsTaggedInt)
    {
        //$ToVar:
        instrShift->InsertBefore(labelToVar);
        // Box the raw int32 in s1 into s3 (signed or unsigned as appropriate).
        this->lowererMD->EmitLoadVarNoCheck(opndReg3, opndReg1, instrShift, isUnsignedShift, true);
    }

    if (labelDone)
    {
        //$Done:
        instrShift->InsertBefore(labelDone);
    }

    // dst = MOV s3 -- rewrite instrShift itself into the final move.
    instrShift->m_opcode = Js::OpCode::MOV;
    instrShift->SetSrc1(opndReg3);

    // Skip lowering call to helper
    return false;
}
  2546. ///----------------------------------------------------------------------------
  2547. ///
  2548. /// LowererMDArch::GenerateFastAnd
  2549. ///
  2550. ///----------------------------------------------------------------------------
bool
LowererMDArch::GenerateFastAnd(IR::Instr * instrAnd)
{
    // Fast path for And: AND the raw var bits directly. Because AtomTag is the
    // low bit, the tag survives the AND exactly when it was set in BOTH
    // operands -- so a single TEST of the result verifies that both inputs were
    // tagged ints AND produces the correctly tagged result in one step.
    // Returns true when the caller must still emit the generic helper call
    // (i.e. unless both operands are statically known tagged ints).
    //
    // Given:
    //
    // dst = And src1, src2
    //
    // Generate:
    //
    // s1 = MOV src1
    // s1 = AND s1, src2  -- try an inline and
    //      TEST s1, 1    -- if both opnds are ints, the int tag will be set in the result
    //      JEQ $helper
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper sequence)
    // $fallthru:

    IR::Instr * instr;
    IR::LabelInstr * labelHelper=nullptr;
    IR::LabelInstr * labelFallThru;
    IR::Opnd * opndReg;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;

    opndSrc1 = instrAnd->GetSrc1();
    opndSrc2 = instrAnd->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on And instruction");

    // Not tagged ints? Then the fast path can never succeed -- emit nothing and
    // let the caller generate just the helper call.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // Tagged ints? If both are statically known ints, the runtime TEST/JEQ
    // guard (and the helper path) can be skipped entirely.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyMachReg, instrAnd->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, instrAnd->m_func);
    instrAnd->InsertBefore(instr);

    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst())
    {
        // Fold a constant src2 into an immediate tagged-var address operand so
        // the AND needs no second register.
        Js::Var value = Js::TaggedInt::ToVarUnchecked(opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue());
        opndSrc2 = IR::AddrOpnd::New(value, IR::AddrOpndKindConstantVar, instrAnd->m_func);
    }

    // s1 = AND s1, src2
    instr = IR::Instr::New(Js::OpCode::AND, opndReg, opndReg, opndSrc2, instrAnd->m_func);
    instrAnd->InsertBefore(instr);

    if (!isTaggedInts)
    {
        // TEST s1, 1 -- the tag bit is set in the result iff both operands carried it
        instr = IR::Instr::New(Js::OpCode::TEST, instrAnd->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrAnd->m_func));
        instrAnd->InsertBefore(instr);

        // JEQ $helper -- tag bit clear: at least one operand was not a tagged int
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrAnd->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, instrAnd->m_func);
        instrAnd->InsertBefore(instr);
    }

    // dst = MOV s1
    if (isTaggedInts)
    {
        // Reuse the existing instruction
        instrAnd->m_opcode = Js::OpCode::MOV;
        instrAnd->ReplaceSrc1(opndReg);
        instrAnd->FreeSrc2();

        // Skip lowering call to helper
        return false;
    }

    instr = IR::Instr::New(Js::OpCode::MOV, instrAnd->GetDst(), opndReg, instrAnd->m_func);
    instrAnd->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrAnd->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrAnd->m_func);
    instrAnd->InsertBefore(instr);

    // $helper:
    //      (caller generates helper sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrAnd->InsertBefore(labelHelper);
    instrAnd->InsertAfter(labelFallThru);

    return true;
}
  2644. ///----------------------------------------------------------------------------
  2645. ///
  2646. /// LowererMDArch::GenerateFastOr
  2647. ///
  2648. ///----------------------------------------------------------------------------
  2649. bool
  2650. LowererMDArch::GenerateFastOr(IR::Instr * instrOr)
  2651. {
  2652. // Given:
  2653. //
  2654. // dst = Or src1, src2
  2655. //
  2656. // Generate:
  2657. //
  2658. // (If not 2 Int31's, jump to $helper.)
  2659. //
  2660. // s1 = MOV src1
  2661. // s1 = OR s1, src2 -- try an inline OR
  2662. // dst = MOV s1
  2663. // JMP $fallthru
  2664. // $helper:
  2665. // (caller generates helper sequence)
  2666. // $fallthru:
  2667. IR::Instr * instr;
  2668. IR::LabelInstr * labelHelper=nullptr;
  2669. IR::LabelInstr * labelFallThru;
  2670. IR::Opnd * opndReg;
  2671. IR::Opnd * opndSrc1;
  2672. IR::Opnd * opndSrc2;
  2673. opndSrc1 = instrOr->GetSrc1();
  2674. opndSrc2 = instrOr->GetSrc2();
  2675. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Or instruction");
  2676. // Not tagged ints?
  2677. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  2678. {
  2679. return true;
  2680. }
  2681. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  2682. {
  2683. return true;
  2684. }
  2685. // Tagged ints?
  2686. bool isTaggedInts = false;
  2687. if (opndSrc1->IsTaggedInt())
  2688. {
  2689. if (opndSrc2->IsTaggedInt())
  2690. {
  2691. isTaggedInts = true;
  2692. }
  2693. }
  2694. if (!isTaggedInts)
  2695. {
  2696. // (If not 2 Int31's, jump to $helper.)
  2697. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrOr->m_func, true);
  2698. lowererMD->GenerateSmIntPairTest(instrOr, opndSrc1, opndSrc2, labelHelper);
  2699. }
  2700. // s1 = MOV src1
  2701. opndReg = IR::RegOpnd::New(TyMachReg, instrOr->m_func);
  2702. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, instrOr->m_func);
  2703. instrOr->InsertBefore(instr);
  2704. // s1 = OR s1, src2
  2705. instr = IR::Instr::New(Js::OpCode::OR, opndReg, opndReg, opndSrc2, instrOr->m_func);
  2706. instrOr->InsertBefore(instr);
  2707. // dst = MOV s1
  2708. if (isTaggedInts)
  2709. {
  2710. // Reuse the existing instruction
  2711. instrOr->m_opcode = Js::OpCode::MOV;
  2712. instrOr->ReplaceSrc1(opndReg);
  2713. instrOr->FreeSrc2();
  2714. // Skip lowering call to helper
  2715. return false;
  2716. }
  2717. instr = IR::Instr::New(Js::OpCode::MOV, instrOr->GetDst(), opndReg, instrOr->m_func);
  2718. instrOr->InsertBefore(instr);
  2719. // JMP $fallthru
  2720. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrOr->m_func);
  2721. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrOr->m_func);
  2722. instrOr->InsertBefore(instr);
  2723. // $helper:
  2724. // (caller generates helper sequence)
  2725. // $fallthru:
  2726. AssertMsg(labelHelper, "Should not be NULL");
  2727. instrOr->InsertBefore(labelHelper);
  2728. instrOr->InsertAfter(labelFallThru);
  2729. return true;
  2730. }
  2731. ///----------------------------------------------------------------------------
  2732. ///
/// LowererMDArch::GenerateFastXor
  2734. ///
  2735. ///----------------------------------------------------------------------------
  2736. bool
  2737. LowererMDArch::GenerateFastXor(IR::Instr * instrXor)
  2738. {
  2739. // Given:
  2740. //
  2741. // dst = Xor src1, src2
  2742. //
  2743. // Generate:
  2744. //
  2745. // (If not 2 Int31's, jump to $helper.)
  2746. //
  2747. // s1 = MOV src1
  2748. // s1 = XOR s1, src2 -- try an inline XOR
  2749. // s1 = INC s1
  2750. // dst = MOV s1
  2751. // JMP $fallthru
  2752. // $helper:
  2753. // (caller generates helper sequence)
  2754. // $fallthru:
  2755. IR::Instr * instr;
  2756. IR::LabelInstr * labelHelper=nullptr;
  2757. IR::LabelInstr * labelFallThru;
  2758. IR::Opnd * opndReg;
  2759. IR::Opnd * opndSrc1;
  2760. IR::Opnd * opndSrc2;
  2761. opndSrc1 = instrXor->GetSrc1();
  2762. opndSrc2 = instrXor->GetSrc2();
  2763. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Xor instruction");
  2764. // Not tagged ints?
  2765. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  2766. {
  2767. return true;
  2768. }
  2769. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  2770. {
  2771. return true;
  2772. }
  2773. // Tagged ints?
  2774. bool isTaggedInts = false;
  2775. if (opndSrc1->IsTaggedInt())
  2776. {
  2777. if (opndSrc2->IsTaggedInt())
  2778. {
  2779. isTaggedInts = true;
  2780. }
  2781. }
  2782. if (!isTaggedInts)
  2783. {
  2784. // (If not 2 Int31's, jump to $helper.)
  2785. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrXor->m_func, true);
  2786. lowererMD->GenerateSmIntPairTest(instrXor, opndSrc1, opndSrc2, labelHelper);
  2787. }
  2788. // s1 = MOV src1
  2789. opndReg = IR::RegOpnd::New(TyMachReg, instrXor->m_func);
  2790. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, instrXor->m_func);
  2791. instrXor->InsertBefore(instr);
  2792. // s1 = XOR s1, src2
  2793. instr = IR::Instr::New(Js::OpCode::XOR, opndReg, opndReg, opndSrc2, instrXor->m_func);
  2794. instrXor->InsertBefore(instr);
  2795. // s1 = INC s1
  2796. instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, instrXor->m_func);
  2797. instrXor->InsertBefore(instr);
  2798. // dst = MOV s1
  2799. if (isTaggedInts)
  2800. {
  2801. // Reuse the existing instruction
  2802. instrXor->m_opcode = Js::OpCode::MOV;
  2803. instrXor->ReplaceSrc1(opndReg);
  2804. instrXor->FreeSrc2();
  2805. // Skip lowering call to helper
  2806. return false;
  2807. }
  2808. instr = IR::Instr::New(Js::OpCode::MOV, instrXor->GetDst(), opndReg, instrXor->m_func);
  2809. instrXor->InsertBefore(instr);
  2810. // JMP $fallthru
  2811. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrXor->m_func);
  2812. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrXor->m_func);
  2813. instrXor->InsertBefore(instr);
  2814. // $helper:
  2815. // (caller generates helper sequence)
  2816. // $fallthru:
  2817. AssertMsg(labelHelper, "Should not be NULL");
  2818. instrXor->InsertBefore(labelHelper);
  2819. instrXor->InsertAfter(labelFallThru);
  2820. return true;
  2821. }
  2822. //----------------------------------------------------------------------------
  2823. //
// LowererMDArch::GenerateFastNot
  2825. //
  2826. //----------------------------------------------------------------------------
  2827. bool
  2828. LowererMDArch::GenerateFastNot(IR::Instr * instrNot)
  2829. {
  2830. // Given:
  2831. //
  2832. // dst = Not src
  2833. //
  2834. // Generate:
  2835. //
  2836. // TEST src, 1 -- test for int src
  2837. // JEQ $helper
  2838. // dst = MOV src
  2839. // dst = NOT dst -- do an inline NOT
  2840. // dst = INC dst -- restore the var tag on the result (!1 becomes 0, INC to get 1 again)
  2841. // JMP $fallthru
  2842. // $helper:
  2843. // (caller generates helper call)
  2844. // $fallthru:
  2845. IR::Instr * instr;
  2846. IR::LabelInstr * labelHelper = nullptr;
  2847. IR::LabelInstr * labelFallThru = nullptr;
  2848. IR::Opnd * opndSrc1;
  2849. IR::Opnd * opndDst;
  2850. opndSrc1 = instrNot->GetSrc1();
  2851. AssertMsg(opndSrc1, "Expected src opnd on Not instruction");
  2852. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->IsIntConst())
  2853. {
  2854. IntConstType value = opndSrc1->AsRegOpnd()->m_sym->GetIntConstValue();
  2855. value = ~value;
  2856. instrNot->ClearBailOutInfo();
  2857. instrNot->FreeSrc1();
  2858. instrNot->SetSrc1(IR::AddrOpnd::NewFromNumber(value, instrNot->m_func));
  2859. instrNot = this->lowererMD->ChangeToAssign(instrNot);
  2860. // Skip lowering call to helper
  2861. return false;
  2862. }
  2863. bool isInt = (opndSrc1->IsTaggedInt());
  2864. if (!isInt)
  2865. {
  2866. // TEST src1, AtomTag
  2867. instr = IR::Instr::New(Js::OpCode::TEST, instrNot->m_func);
  2868. instr->SetSrc1(opndSrc1);
  2869. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrNot->m_func));
  2870. instrNot->InsertBefore(instr);
  2871. // JEQ $helper
  2872. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrNot->m_func, true);
  2873. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, instrNot->m_func);
  2874. instrNot->InsertBefore(instr);
  2875. }
  2876. // dst = MOV src
  2877. opndDst = instrNot->GetDst();
  2878. instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndSrc1, instrNot->m_func);
  2879. instrNot->InsertBefore(instr);
  2880. // dst = NOT dst
  2881. instr = IR::Instr::New(Js::OpCode::NOT, opndDst, opndDst, instrNot->m_func);
  2882. instrNot->InsertBefore(instr);
  2883. // dst = INC dst
  2884. instr = IR::Instr::New(Js::OpCode::INC, opndDst, opndDst, instrNot->m_func);
  2885. instrNot->InsertBefore(instr);
  2886. if (isInt)
  2887. {
  2888. instrNot->Remove();
  2889. // Skip lowering call to helper
  2890. return false;
  2891. }
  2892. // JMP $fallthru
  2893. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrNot->m_func);
  2894. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrNot->m_func);
  2895. instrNot->InsertBefore(instr);
  2896. // $helper:
  2897. // (caller generates helper sequence)
  2898. // $fallthru:
  2899. AssertMsg(labelHelper, "Should not be NULL");
  2900. instrNot->InsertBefore(labelHelper);
  2901. instrNot->InsertAfter(labelFallThru);
  2902. return true;
  2903. }
  2904. void
  2905. LowererMDArch::FinalLower()
  2906. {
  2907. int32 offset;
  2908. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, this->m_func->m_tailInstr, this->m_func->m_headInstr)
  2909. {
  2910. switch (instr->m_opcode)
  2911. {
  2912. case Js::OpCode::Leave:
  2913. Assert(this->m_func->DoOptimizeTryCatch() && !this->m_func->IsLoopBodyInTry());
  2914. this->lowererMD->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), true /*fromFinalLower*/);
  2915. break;
  2916. case Js::OpCode::BailOutStackRestore:
  2917. // We don't know the frameSize at lower time...
  2918. instr->m_opcode = Js::OpCode::LEA;
  2919. // exclude the EBP and return address
  2920. instr->GetSrc1()->AsIndirOpnd()->SetOffset(-(int)(this->m_func->frameSize) + 2 * MachPtr);
  2921. break;
  2922. case Js::OpCode::RestoreOutParam:
  2923. Assert(instr->GetDst() != nullptr);
  2924. Assert(instr->GetDst()->IsIndirOpnd());
  2925. offset = instr->GetDst()->AsIndirOpnd()->GetOffset();
  2926. offset -= this->m_func->frameSize;
  2927. offset += 2 * sizeof(void*);
  2928. instr->GetDst()->AsIndirOpnd()->SetOffset(offset, true);
  2929. instr->m_opcode = Js::OpCode::MOV;
  2930. break;
  2931. case Js::OpCode::CMOVA:
  2932. case Js::OpCode::CMOVAE:
  2933. case Js::OpCode::CMOVB:
  2934. case Js::OpCode::CMOVBE:
  2935. case Js::OpCode::CMOVE:
  2936. case Js::OpCode::CMOVG:
  2937. case Js::OpCode::CMOVGE:
  2938. case Js::OpCode::CMOVL:
  2939. case Js::OpCode::CMOVLE:
  2940. case Js::OpCode::CMOVNE:
  2941. case Js::OpCode::CMOVNO:
  2942. case Js::OpCode::CMOVNP:
  2943. case Js::OpCode::CMOVNS:
  2944. case Js::OpCode::CMOVO:
  2945. case Js::OpCode::CMOVP:
  2946. case Js::OpCode::CMOVS:
  2947. // Get rid of fake src1.
  2948. if (instr->GetSrc2())
  2949. {
  2950. // CMOV inserted before regalloc have a dummy src1 to simulate the fact that
  2951. // CMOV is not an definite def of the dst.
  2952. instr->SwapOpnds();
  2953. instr->FreeSrc2();
  2954. }
  2955. break;
  2956. }
  2957. }
  2958. NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
  2959. }
// This depends on the calling convention, so it is hard to share a common implementation here.
  2961. IR::Opnd*
  2962. LowererMDArch::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
  2963. {
  2964. // x86:
  2965. // s25.i32 = LdLen_A s4.var
  2966. // s26.i32 = Ld_A s25.i32
  2967. // s25.i32 = Or_I4 s25.i32, 1 // For alignment
  2968. // $L2:
  2969. // s10.var = LdElemI_A [s4.var+s25.i32].var
  2970. // ArgOut_A_Dynamic s10.var
  2971. // s25.i32 = SUB_I4 s25.i32, 0x1
  2972. // JNE $L2
  2973. // $L3
  2974. GenerateFunctionObjectTest(callInstr, callInstr->GetSrc1()->AsRegOpnd(), false);
  2975. if (callInstr->m_func->IsInlinee())
  2976. {
  2977. return this->lowererMD->m_lowerer->GenerateArgOutForInlineeStackArgs(callInstr, stackArgsInstr);
  2978. }
  2979. Assert(stackArgsInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
  2980. Assert(callInstr->m_opcode == Js::OpCode::CallIDynamic);
  2981. Func *func = callInstr->m_func;
  2982. IR::RegOpnd* stackArgs = stackArgsInstr->GetSrc1()->AsRegOpnd();
  2983. IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, func);
  2984. IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, stackArgs, func);
  2985. ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); // LdLen_A works only on stack arguments
  2986. callInstr->InsertBefore(ldLen);
  2987. this->lowererMD->m_lowerer->GenerateFastRealStackArgumentsLdLen(ldLen);
  2988. IR::Instr* saveLenInstr = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyUint32, func), ldLenDstOpnd, func);
  2989. saveLenInstr->GetDst()->SetValueType(ValueType::GetTaggedInt());
  2990. callInstr->InsertBefore(saveLenInstr);
  2991. // Align frame
  2992. IR::Instr* orInstr = IR::Instr::New(Js::OpCode::OR, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func), this->m_func);
  2993. callInstr->InsertBefore(orInstr);
  2994. IR::LabelInstr* startLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
  2995. startLoop->m_isLoopTop = true;
  2996. Loop *loop = JitAnew(this->m_func->m_alloc, Loop, this->m_func->m_alloc, this->m_func);
  2997. startLoop->SetLoop(loop);
  2998. loop->SetLoopTopInstr(startLoop);
  2999. loop->regAlloc.liveOnBackEdgeSyms = JitAnew(func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
  3000. callInstr->InsertBefore(startLoop);
  3001. IR::IndirOpnd *nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
  3002. nthArgument->SetOffset(-1);
  3003. IR::RegOpnd* ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
  3004. IR::Instr* ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
  3005. callInstr->InsertBefore(ldElem);
  3006. this->lowererMD->m_lowerer->GenerateFastStackArgumentsLdElemI(ldElem);
  3007. IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
  3008. argout->SetSrc1(ldElemDstOpnd);
  3009. callInstr->InsertBefore(argout);
  3010. this->LoadDynamicArgument(argout);
  3011. IR::Instr *subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func),func);
  3012. callInstr->InsertBefore(subInstr);
  3013. this->lowererMD->EmitInt4Instr(subInstr);
  3014. IR::BranchInstr *tailBranch = IR::BranchInstr::New(Js::OpCode::JNE, startLoop, func);
  3015. callInstr->InsertBefore(tailBranch);
  3016. loop->regAlloc.liveOnBackEdgeSyms->Set(ldLenDstOpnd->m_sym->m_id);
  3017. // return the length which will be used for callInfo generations & stack allocation
  3018. return saveLenInstr->GetDst()->AsRegOpnd();
  3019. }
  3020. IR::Instr *
  3021. LowererMDArch::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  3022. {
  3023. IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  3024. // Load the continuation address into the return register.
  3025. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, retReg, targetOpnd, this->m_func));
  3026. IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  3027. IR::Instr * retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  3028. retInstr->SetSrc1(intSrc);
  3029. retInstr->SetSrc2(retReg);
  3030. insertBeforeInstr->InsertBefore(retInstr);
  3031. // return the last instruction inserted
  3032. return retInstr;
  3033. }