LowererMDArch.cpp 144 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "LowererMDArch.h"
  7. #include "Library/JavascriptGeneratorFunction.h"
// Opcode used to widen a 32-bit value to machine width. On x86 (32-bit target)
// the machine word is already 32 bits, so a plain MOV suffices — no explicit
// sign/zero-extension instruction is needed.
const Js::OpCode LowererMD::MDExtend32Opcode = Js::OpCode::MOV;
// Returns the default scale used when forming indirect (base + index*scale)
// operands: a scale of 4, matching the 4-byte Var/pointer size on x86.
BYTE
LowererMDArch::GetDefaultIndirScale()
{
    return IndirScale4;
}
// Returns the register that must hold a variable shift count.
// x86 shift instructions with a variable count require the count in CL (ECX).
RegNum
LowererMDArch::GetRegShiftCount()
{
    return RegECX;
}
  19. RegNum
  20. LowererMDArch::GetRegReturn(IRType type)
  21. {
  22. return ( IRType_IsFloat(type) || IRType_IsSimd128(type) || IRType_IsInt64(type) ) ? RegNOREG : RegEAX;
  23. }
  24. RegNum
  25. LowererMDArch::GetRegReturnAsmJs(IRType type)
  26. {
  27. if (IRType_IsFloat(type) || IRType_IsSimd128(type))
  28. {
  29. return RegXMM0;
  30. }
  31. else
  32. {
  33. Assert(type == TyInt32 || type == TyInt64);
  34. return RegEAX;
  35. }
  36. }
// Returns the architectural stack-pointer register (ESP on x86).
RegNum
LowererMDArch::GetRegStackPointer()
{
    return RegESP;
}
// Returns the block-pointer register. On x86 this is the frame register EBP
// (same register GetRegFramePointer reports).
RegNum
LowererMDArch::GetRegBlockPointer()
{
    return RegEBP;
}
// Returns the frame-pointer register (EBP on x86).
RegNum
LowererMDArch::GetRegFramePointer()
{
    return RegEBP;
}
// Returns the register used to pass the allocation-size parameter to the
// stack-probe (chkstk) helper; EAX per the MSVC __chkstk convention.
RegNum
LowererMDArch::GetRegChkStkParam()
{
    return RegEAX;
}
// Returns the register that receives the low half of a widening IMUL result
// (x86 one-operand IMUL writes its product into EDX:EAX — low part in EAX).
RegNum
LowererMDArch::GetRegIMulDestLower()
{
    return RegEAX;
}
// Returns the register that receives the high half of a widening IMUL result
// (EDX in the EDX:EAX pair). NOTE(review): "Lower" in the name presumably
// refers to the low machine word of the high result on wider targets — the
// x86 answer is simply EDX.
RegNum
LowererMDArch::GetRegIMulHighDestLower()
{
    return RegEDX;
}
// Returns the register for the argNum'th integer argument. x86 passes all
// arguments on the stack in this convention, so there is no register: RegNOREG.
RegNum
LowererMDArch::GetRegArgI4(int32 argNum)
{
    return RegNOREG;
}
// Returns the register for the argNum'th double argument. As with integer
// arguments, x86 passes these on the stack, so RegNOREG is returned.
RegNum
LowererMDArch::GetRegArgR8(int32 argNum)
{
    return RegNOREG;
}
  77. Js::OpCode
  78. LowererMDArch::GetAssignOp(IRType type)
  79. {
  80. switch (type)
  81. {
  82. case TyFloat64:
  83. return Js::OpCode::MOVSD;
  84. case TyFloat32:
  85. return Js::OpCode::MOVSS;
  86. case TySimd128F4:
  87. case TySimd128I4:
  88. case TySimd128I8:
  89. case TySimd128I16:
  90. case TySimd128U4:
  91. case TySimd128U8:
  92. case TySimd128U16:
  93. case TySimd128B4:
  94. case TySimd128B8:
  95. case TySimd128B16:
  96. case TySimd128D2:
  97. return Js::OpCode::MOVUPS;
  98. default:
  99. return Js::OpCode::MOV;
  100. }
  101. }
  102. void
  103. LowererMDArch::Init(LowererMD *lowererMD)
  104. {
  105. this->lowererMD = lowererMD;
  106. this->helperCallArgsCount = 0;
  107. }
  108. ///----------------------------------------------------------------------------
  109. ///
/// LowererMDArch::LoadInputParamPtr
  111. ///
  112. /// Load the address of the start of the passed-in parameters not including
  113. /// the this parameter.
  114. ///
  115. ///----------------------------------------------------------------------------
// Emits, before instrInsert, an instruction that computes the address of the
// first input parameter after "this", and returns that instruction.
// If optionalDstOpnd is provided the address is written there; otherwise a
// fresh TyMachPtr register is allocated.
IR::Instr *
LowererMDArch::LoadInputParamPtr(IR::Instr *instrInsert, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
{
    if (this->m_func->GetJITFunctionBody()->IsCoroutine())
    {
        // Generator/coroutine args live in a heap-allocated args array rather
        // than on the stack: skip one MachPtr slot (the "this" arg) and LEA
        // the address of the first real argument.
        IR::RegOpnd * argPtrRegOpnd = Lowerer::LoadGeneratorArgsPtr(instrInsert);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(argPtrRegOpnd, 1 * MachPtr, TyMachPtr, this->m_func);
        IR::RegOpnd * dstOpnd = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachPtr, this->m_func);
        return Lowerer::InsertLea(dstOpnd, indirOpnd, instrInsert);
    }
    else
    {
        // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
        // So the first post-"this" parameter sits at EBP + 5 * MachPtr.
        StackSym *paramSym = StackSym::New(TyVar, this->m_func);
        this->m_func->SetArgOffset(paramSym, 5 * MachPtr);
        IR::Instr *instr = this->lowererMD->LoadStackAddress(paramSym, optionalDstOpnd);
        instrInsert->InsertBefore(instr);
        return instr;
    }
}
// Lowers a "load stack-args pointer" instruction. Replaces instrArgPtr with
// code that yields the address of the first input param (after "this"):
// normally that is the caller-frame location, but when fewer actuals than
// formals were passed, space for the formals is carved out of the callee
// frame instead (the caller frame doesn't hold them all). Returns the first
// instruction of the emitted sequence so the caller can resume lowering there.
IR::Instr *
LowererMDArch::LoadStackArgPtr(IR::Instr * instrArgPtr)
{
    // if (actual count >= formal count)
    //     dst = ebp + 5 * sizeof(Var)   -- point to the first input parameter after "this"
    // else
    //     sub esp, (size of formals)    -- we'll copy the input params to the callee frame, since the caller frame
    //                                      doesn't have space for them all
    //     dst = esp + 3 * sizeof(var)   -- point to the location of the first input param (after "this")
    //                                      within the area we just allocated on the callee frame
    IR::Instr * instrPrev = instrArgPtr;
    IR::LabelInstr * instrLabelExtra = nullptr;
    IR::Instr * instr;
    IR::Opnd * opnd;

    Js::ArgSlot formalsCount = this->m_func->GetInParamsCount();

    // Only need to check the number of actuals if there's at least 1 formal (plus "this")
    if (formalsCount > 1)
    {
        instrPrev = this->lowererMD->LoadInputParamCount(instrArgPtr);
        IR::Opnd * opndActuals = instrPrev->GetDst();
        IR::Opnd * opndFormals =
            IR::IntConstOpnd::New(formalsCount, TyMachReg, this->m_func);

        // CMP actuals, formals; JB (unsigned below, i.e. actuals < formals)
        // branches to the "allocate on callee frame" path.
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(opndActuals);
        instr->SetSrc2(opndFormals);
        instrArgPtr->InsertBefore(instr);

        instrLabelExtra = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JB, instrLabelExtra, this->m_func);
        instrArgPtr->InsertBefore(instr);
    }

    // Modify the original instruction to load the addr of the input parameters on the caller's frame.
    instr = LoadInputParamPtr(instrArgPtr, instrArgPtr->UnlinkDst()->AsRegOpnd());
    instrArgPtr->Remove();
    instrArgPtr = instr;

    if (instrLabelExtra)
    {
        // Common path jumps over the extra-allocation code:
        //   JMP done; extra: ...; done:
        IR::LabelInstr *instrLabelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, instrLabelDone, this->m_func);
        instrArgPtr->InsertAfter(instr);
        instr->InsertAfter(instrLabelExtra);
        instrLabelExtra->InsertAfter(instrLabelDone);

        // Allocate space on the callee's frame for a copy of the formals, plus the callee object pointer
        // and the callinfo.
        // Be sure to double-align the allocation.
        // REVIEW: Do we ever need to generate a chkstk call here?
        int formalsBytes = (formalsCount + 2) * sizeof(Js::Var);
        formalsBytes = Math::Align<size_t>(formalsBytes, MachStackAlignment);
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
        // LEA esp, [esp - formalsBytes] adjusts the stack pointer without
        // touching flags (unlike SUB).
        opnd = IR::IndirOpnd::New(espOpnd, -formalsBytes, TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
        instrLabelDone->InsertBefore(instr);

        // Result is the pointer to the address where we'll store the first input param
        // (after "this") in the callee's frame.
        opnd = IR::IndirOpnd::New(espOpnd, 3 * sizeof(Js::Var), TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, instrArgPtr->GetDst(), opnd, this->m_func);
        instrLabelDone->InsertBefore(instr);
    }

    return instrPrev;
}
  195. ///----------------------------------------------------------------------------
  196. ///
  197. /// LowererMDArch::LoadHeapArguments
  198. ///
  199. /// Load the heap-based arguments object
  200. ///
  201. ///----------------------------------------------------------------------------
// Lowers LdHeapArguments/LdLetHeapArguments. When the stack-args optimization
// applies, the instruction degenerates to "dst = 0" (no heap object needed);
// otherwise it is rewritten as a call to the HelperOp_LoadHeapArguments
// runtime helper, pushing the seven helper arguments in reverse order
// (x86 helpers take stack arguments). Returns the instruction preceding the
// lowered sequence.
IR::Instr *
LowererMDArch::LoadHeapArguments(IR::Instr *instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;

    IR::Instr * instrPrev = instrArgs->m_prev;
    if (func->IsStackArgsEnabled()) //both inlinee & inliner has stack args. We don't support other scenarios.
    {
        // The initial args slot value is zero. (TODO: it should be possible to dead-store the LdHeapArgs in this case.)
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::IntConstOpnd::New(0, TyMachReg, func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = array of property ID's
        // s4 = local frame instance
        // s3 = address of first actual argument (after "this")
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArguments(s1, s2, s3, s4, s5, s6, s7)

        // s7 = formals are let decls
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(instrArgs->m_opcode == Js::OpCode::LdLetHeapArguments ? TRUE : FALSE, TyUint8, func));

        // s6 = memory context
        instrPrev = this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);

        // s5 = array of property ID's
        intptr_t formalsPropIdArray = instrArgs->m_func->GetJITFunctionBody()->GetFormalsPropIdArrayAddr();
        if (!formalsPropIdArray)
        {
            // No formals-propid array recorded for this body: pass null.
            formalsPropIdArray = instrArgs->m_func->GetScriptContextInfo()->GetNullAddr();
        }

        IR::Opnd * argArray = IR::AddrOpnd::New(formalsPropIdArray, IR::AddrOpndKindDynamicMisc, m_func);
        this->LoadHelperArgument(instrArgs, argArray);

        // s4 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);

        if (func->IsInlinee())
        {
            /*
             * s3 = address of first actual argument (after "this").
             * Stack looks like arg 1 ('this')       <-- low address
             *                  ...
             *                  arg N
             *                  arguments object
             *                  function object
             *                  argc                 <-- frameStartSym
             */
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->LoadStackAddress(firstRealArgSlotSym);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s2 = actual argument count (without counting "this").
            instr = IR::Instr::New(Js::OpCode::MOV,
                                   IR::RegOpnd::New(TyMachReg, func),
                                   IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
                                   func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());

            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                                   argObjSlotOpnd,
                                   instrArgs->GetDst(),
                                   func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s3 = address of first actual argument (after "this")
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s2 = actual argument count (without counting "this")
            instr = this->lowererMD->LoadInputParamCount(instrArgs, -1);
            IR::Opnd* opndInputParamCount = instr->GetDst();
            this->LoadHelperArgument(instrArgs, opndInputParamCount);

            // s1 = current function
            // The callee function object sits at EBP + 2 * MachPtr (above the
            // saved EBP and the return address).
            StackSym *paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd *srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);

            if (this->m_func->GetJITFunctionBody()->IsCoroutine())
            {
                // the function object for generator calls is a GeneratorVirtualScriptFunction object
                // and we need to pass the real JavascriptGeneratorFunction object so grab it instead
                IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
                LowererMD::CreateAssign(tmpOpnd, srcOpnd, instrArgs);

                srcOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
            }

            this->LoadHelperArgument(instrArgs, srcOpnd);

            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }

        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArguments);
    }
    return instrPrev;
}
  308. ///----------------------------------------------------------------------------
  309. ///
  310. /// LowererMDArch::LoadHeapArgsCached
  311. ///
  312. /// Load the heap-based arguments object using a cached scope
  313. ///
  314. ///----------------------------------------------------------------------------
// Lowers LdHeapArgsCached/LdLetHeapArgsCached (the cached-scope variant of
// heap-arguments creation). When the stack-args optimization applies, the
// instruction becomes "dst = null"; otherwise it is rewritten as a call to
// the HelperOp_LoadHeapArgsCached runtime helper with its seven arguments
// pushed in reverse order. Returns the instruction preceding the lowered
// sequence.
IR::Instr *
LowererMDArch::LoadHeapArgsCached(IR::Instr *instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr *instrPrev = instrArgs->m_prev;

    if (instrArgs->m_func->IsStackArgsEnabled())
    {
        // The initial args slot value is zero. (TODO: it should be possible to dead-store the LdHeapArgs in this case.)
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));

        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = local frame instance
        // s4 = address of first actual argument (after "this")
        // s3 = formal argument count
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadArguments(s1, s2, s3, s4, s5, s6, s7)

        // s7 = formals are let decls
        IR::Opnd * formalsAreLetDecls = IR::IntConstOpnd::New((IntConstType)(instrArgs->m_opcode == Js::OpCode::LdLetHeapArgsCached), TyUint8, func);
        this->LoadHelperArgument(instrArgs, formalsAreLetDecls);

        // s6 = memory context
        this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);

        // s5 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);

        if (func->IsInlinee())
        {
            // s4 = address of first actual argument (after "this")
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->LoadStackAddress(firstRealArgSlotSym);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s3 = formal argument count (without counting "this")
            uint32 formalsCount = func->GetJITFunctionBody()->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyMachReg, func));

            // s2 = actual argument count (without counting "this").
            instr = IR::Instr::New(Js::OpCode::MOV,
                                   IR::RegOpnd::New(TyMachReg, func),
                                   IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
                                   func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());

            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                                   argObjSlotOpnd,
                                   instrArgs->GetDst(),
                                   func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s4 = address of first actual argument (after "this")
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s3 = formal argument count (without counting "this")
            uint32 formalsCount = func->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyMachReg, func));

            // s2 = actual argument count (without counting "this")
            // LoadInputParamCount yields the raw count; DEC drops "this".
            instr = this->lowererMD->LoadInputParamCount(instrArgs);
            instr = IR::Instr::New(Js::OpCode::DEC, instr->GetDst(), instr->GetDst(), func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s1 = current function
            // The callee function object sits at EBP + 2 * MachPtr (above the
            // saved EBP and the return address).
            StackSym *paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd *srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            this->LoadHelperArgument(instrArgs, srcOpnd);

            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }

        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArgsCached);
    }
    return instrPrev;
}
  405. //
  406. // Load the parameter in the first argument slot
  407. //
  408. IR::Instr *
  409. LowererMDArch::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
  410. {
  411. // No need to do anything different for spread calls on x86 since we push args.
  412. IR::SymOpnd * argOpnd = IR::SymOpnd::New(this->m_func->m_symTable->GetArgSlotSym(1), TyVar, this->m_func);
  413. IR::Instr * argInstr = LowererMD::CreateAssign(argOpnd, dst, instr);
  414. return argInstr;
  415. }
// Emit a runtime check that the call target is a real object (not a tagged value),
// raising JSERR_NeedFunction otherwise. No code is emitted when the operand is
// already proven not to be a tagged value.
void
LowererMDArch::GenerateFunctionObjectTest(IR::Instr * callInstr, IR::RegOpnd *functionObjOpnd, bool isHelper, IR::LabelInstr* continueAfterExLabel /* = nullptr */)
{
    AssertMsg(!m_func->IsJitInDebugMode() || continueAfterExLabel, "When jit is in debug mode, continueAfterExLabel must be provided otherwise continue after exception may cause AV.");
    if (!functionObjOpnd->IsNotTaggedValue())
    {
        IR::Instr * insertBeforeInstr = callInstr;
        // Need check and error if we are calling a tagged int.
        if (!functionObjOpnd->IsTaggedInt())
        {
            // TEST s1, 1
            // JEQ $callLabel
            IR::LabelInstr * callLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func /*, isHelper*/);
            this->lowererMD->GenerateObjectTest(functionObjOpnd, callInstr, callLabel, true);
#if DBG
            // The object test above should have produced exactly one branch to
            // callLabel; mark it as a helper-to-non-helper branch for IR checks.
            int count = 0;
            FOREACH_SLIST_ENTRY(IR::BranchInstr *, branchInstr, &callLabel->labelRefs)
            {
                branchInstr->m_isHelperToNonHelperBranch = true;
                count++;
            } NEXT_SLIST_ENTRY;
            Assert(count == 1);
#endif
            callInstr->InsertBefore(callLabel);
            insertBeforeInstr = callLabel;
        }
        // Falling into this point means the target is a tagged value, i.e. not callable.
        lowererMD->m_lowerer->GenerateRuntimeError(insertBeforeInstr, JSERR_NeedFunction);
        if (continueAfterExLabel)
        {
            // Under debugger the RuntimeError (exception) can be ignored, generate branch right after RunTimeError instr
            // to jmp to a safe place (which would normally be debugger bailout check).
            IR::BranchInstr* continueAfterEx = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueAfterExLabel, this->m_func);
            insertBeforeInstr->InsertBefore(continueAfterEx);
        }
    }
}
// Emit, before callInstr, a countdown loop that pushes each spread-array element
// (addressed via arrayElementsStartOpnd indexed by indexOpnd) as a dynamic ArgOut.
void
LowererMDArch::LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    Func *const func = callInstr->m_func;
    // Align frame
    IR::Instr *orInstr = IR::Instr::New(Js::OpCode::OR, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func), this->m_func);
    callInstr->InsertBefore(orInstr);
    IR::LabelInstr *startLoopLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    startLoopLabel->m_isLoopTop = true;
    // Build an explicit Loop structure so the register allocator knows which
    // syms are live across the back edge of this lowered loop.
    Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
    startLoopLabel->SetLoop(loop);
    loop->SetLoopTopInstr(startLoopLabel);
    loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
    loop->regAlloc.liveOnBackEdgeSyms->Set(indexOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(arrayElementsStartOpnd->m_sym->m_id);
    callInstr->InsertBefore(startLoopLabel);
    // Decrement the index, then push element [start + index * scale].
    this->lowererMD->m_lowerer->InsertSub(false, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt8, func), callInstr);
    IR::IndirOpnd *elemPtrOpnd = IR::IndirOpnd::New(arrayElementsStartOpnd, indexOpnd, GetDefaultIndirScale(), TyMachPtr, func);
    // Generate argout for n+2 arg (skipping function object + this)
    IR::Instr *argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(elemPtrOpnd);
    callInstr->InsertBefore(argout);
    this->lowererMD->LoadDynamicArgument(argout);
    // Loop back to the top while the index has not reached zero.
    this->lowererMD->m_lowerer->InsertCompareBranch(indexOpnd,
        IR::IntConstOpnd::New(0, TyUint8, func),
        Js::OpCode::BrNeq_A,
        true,
        startLoopLabel,
        callInstr);
}
// Lower a dynamic-argument-count call (e.g. func.apply): push "this" and the
// callInfo, emit the call through the function's entry point, and restore ESP
// afterwards since the pushed argument count is only known at runtime (unless inlined).
IR::Instr *
LowererMDArch::LowerCallIDynamic(IR::Instr * callInstr, IR::Instr*saveThisArgOutInstr, IR::Opnd *argsLength, ushort callFlags, IR::Instr * insertBeforeInstrForCFG)
{
    callInstr->InsertBefore(saveThisArgOutInstr); //Move this Argout next to call;
    this->LoadDynamicArgument(saveThisArgOutInstr);
    Func *func = callInstr->m_func;
    bool bIsInlinee = func->IsInlinee();
    if (bIsInlinee)
    {
        // For inlinees the actual count is a compile-time constant.
        Assert(argsLength->AsIntConstOpnd()->GetValue() == callInstr->m_func->actualCount);
    }
    else
    {
        Assert(argsLength->IsRegOpnd());
        /*callInfo*/
        // +1 to include "this" in the argument count pushed as callInfo.
        callInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, argsLength, argsLength, IR::IntConstOpnd::New(1, TyUint32, this->m_func), this->m_func));
    }
    IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, this->m_func);
    argout->SetSrc1(argsLength);
    callInstr->InsertBefore(argout);
    this->LoadDynamicArgument(argout);
    // load native entry point from script function into eax
    AssertMsg(callInstr->GetSrc1()->IsRegOpnd() && callInstr->GetSrc1()->AsRegOpnd()->m_sym->IsStackSym(),
        "Expected call src to be stackSym");
    IR::RegOpnd * functionWrapOpnd = callInstr->UnlinkSrc1()->AsRegOpnd();
    GeneratePreCall(callInstr, functionWrapOpnd);
    // argCount 0: ESP is restored explicitly below instead of by LowerCall.
    LowerCall(callInstr, 0);
    //Restore stack back to original state.
    IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, RegESP, TyMachReg, this->m_func);
    if (bIsInlinee)
    {
        // +2 for callInfo & function object;
        // (actualCount & 1) accounts for the alignment slot pushed when the count is odd.
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(espOpnd, (callInstr->m_func->actualCount + (callInstr->m_func->actualCount&1) + 2) * MachPtr, TyMachReg, this->m_func);
        callInstr->InsertAfter(IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, this->m_func));
    }
    else
    {
        IR::RegOpnd *argsLengthRegOpnd = argsLength->AsRegOpnd();
        //Account for callInfo & function object in argsLength
        IR::Instr * addInstr = IR::Instr::New(Js::OpCode::ADD, argsLengthRegOpnd, argsLengthRegOpnd, IR::IntConstOpnd::New(2, TyUint32, this->m_func), this->m_func);
        callInstr->InsertBefore(addInstr);
        IR::Instr *insertInstr = callInstr->m_next;
        // Align stack
        //
        // INC argLengthReg
        IR::Instr * incInstr = IR::Instr::New(Js::OpCode::INC, argsLengthRegOpnd, argsLengthRegOpnd, this->m_func);
        insertInstr->InsertBefore(incInstr);
        // AND argLengthReg, (~1)
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, argsLengthRegOpnd, argsLengthRegOpnd, IR::IntConstOpnd::New(~1, TyInt32, this->m_func, true), this->m_func);
        insertInstr->InsertBefore(andInstr);
        // LEA ESP, [ESP + argsLengthReg*4]
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(espOpnd, argsLengthRegOpnd, IndirScale4, TyMachReg, this->m_func);
        addInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, this->m_func);
        insertInstr->InsertBefore(addInstr);
    }
    return argout;
}
// Emit the pre-call sequence shared by call lowering: load the callee's type,
// push the function object, and rewrite the call target to the entry point
// loaded from the type (hoisted into a register, with an optional CFG check).
void
LowererMDArch::GeneratePreCall(IR::Instr * callInstr, IR::Opnd *functionObjOpnd)
{
    IR::RegOpnd* functionTypeRegOpnd = nullptr;
    // For calls to fixed functions we load the function's type directly from the known (hard-coded) function object address.
    // For other calls, we need to load it from the function object stored in a register operand.
    if (functionObjOpnd->IsAddrOpnd() && functionObjOpnd->AsAddrOpnd()->m_isFunction)
    {
        functionTypeRegOpnd = this->lowererMD->m_lowerer->GenerateFunctionTypeFromFixedFunctionObject(callInstr, functionObjOpnd);
    }
    else if (functionObjOpnd->IsRegOpnd())
    {
        AssertMsg(functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call target to be stackSym");
        functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        // functionTypeRegOpnd = MOV function->type
        IR::IndirOpnd* functionTypeIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
            Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionTypeIndirOpnd, this->m_func);
        callInstr->InsertBefore(instr);
    }
    else
    {
        AssertMsg(false, "Unexpected call target operand type.");
    }
    // Push function object
    this->LoadHelperArgument(callInstr, functionObjOpnd);
    // The call target becomes the entry point read out of the function's type.
    int entryPointOffset = Js::Type::GetOffsetOfEntryPoint();
    IR::IndirOpnd* entryPointOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachPtr, this->m_func);
    callInstr->SetSrc1(entryPointOpnd);
    // Atom prefers "CALL reg" over "CALL [reg]"
    IR::Instr * hoistedCallSrcInstr = nullptr;
    hoistedCallSrcInstr = callInstr->HoistSrc1(Js::OpCode::MOV);
#if defined(_CONTROL_FLOW_GUARD)
    if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
    {
        // Validate the hoisted indirect call target before the call.
        this->lowererMD->GenerateCFGCheck(hoistedCallSrcInstr->GetDst(), callInstr);
    }
#endif
}
// Lower a CallI: lower the arg chain, verify the target is callable when
// needed, emit the pre-call/call sequence, and attach call profiling in
// SimpleJit profiling mode.
IR::Instr *
LowererMDArch::LowerCallI(IR::Instr *callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    // We need to get the calculated CallInfo in SimpleJit because that doesn't include any changes for stack alignment
    IR::IntConstOpnd *callInfo;
    int32 argCount = this->LowerCallArgs(callInstr, callFlags, 1, &callInfo);
    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
    // If this is a call for new, we already pass the function operand through NewScObject,
    // which checks if the function operand is a real function or not, don't need to add a check again
    // If this is a call to a fixed function, we've already verified that the target is, indeed, a function.
    if (callInstr->m_opcode != Js::OpCode::CallIFixed && !(callFlags & Js::CallFlags_New))
    {
        AssertMsg(functionObjOpnd->IsRegOpnd() && functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call src to be stackSym");
        IR::LabelInstr* continueAfterExLabel = Lowerer::InsertContinueAfterExceptionLabelForDebugger(m_func, callInstr, isHelper);
        GenerateFunctionObjectTest(callInstr, functionObjOpnd->AsRegOpnd(), isHelper, continueAfterExLabel);
    }
    // Can't assert until we remove unreachable code if we have proved that it is a tagged int.
    // Assert((callFlags & Js::CallFlags_New) || !functionWrapOpnd->IsTaggedInt());
    GeneratePreCall(callInstr, functionObjOpnd);
    IR::Opnd *const finalDst = callInstr->GetDst();
    IR::Instr* ret = this->LowerCall(callInstr, argCount);
    IR::AutoReuseOpnd autoReuseSavedFunctionObjOpnd;
    if (callInstr->IsJitProfilingInstr())
    {
        Assert(callInstr->m_func->IsSimpleJit());
        Assert(!CONFIG_FLAG(NewSimpleJit));
        if(finalDst &&
            finalDst->IsRegOpnd() &&
            functionObjOpnd->IsRegOpnd() &&
            finalDst->AsRegOpnd()->m_sym == functionObjOpnd->AsRegOpnd()->m_sym)
        {
            // The function object sym is going to be overwritten, so save it in a temp for profiling
            IR::RegOpnd *const savedFunctionObjOpnd = IR::RegOpnd::New(functionObjOpnd->GetType(), callInstr->m_func);
            autoReuseSavedFunctionObjOpnd.Initialize(savedFunctionObjOpnd, callInstr->m_func);
            Lowerer::InsertMove(savedFunctionObjOpnd, functionObjOpnd, callInstr->m_next);
            functionObjOpnd = savedFunctionObjOpnd;
        }
        auto instr = callInstr->AsJitProfilingInstr();
        ret = this->lowererMD->m_lowerer->GenerateCallProfiling(
            instr->profileId,
            instr->inlineCacheIndex,
            instr->GetDst(),
            functionObjOpnd,
            callInfo,
            instr->isProfiledReturnCall,
            callInstr,
            ret);
    }
    return ret;
}
  628. IR::Instr *
  629. LowererMDArch::LowerAsmJsCallE(IR::Instr *callInstr)
  630. {
  631. IR::IntConstOpnd *callInfo;
  632. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 1, &callInfo);
  633. IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
  634. GeneratePreCall(callInstr, functionObjOpnd);
  635. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  636. return ret;
  637. }
// Lower an int64-producing call's dst on x86: the 64-bit result comes back in
// the edx:eax register pair, so split the dst into its low/high 32-bit halves.
IR::Instr *
LowererMDArch::LowerInt64CallDst(IR::Instr * callInstr)
{
    Assert(IRType_IsInt64(callInstr->GetDst()->GetType()));
    RegNum lowReturnReg = RegEAX;
    RegNum highReturnReg = RegEDX;
    IR::Instr * movInstr;
    Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(callInstr->GetDst());
    // Narrow the call's dst to 32 bits and sink the low half out of eax.
    callInstr->GetDst()->SetType(TyInt32);
    movInstr = callInstr->SinkDst(GetAssignOp(TyInt32), lowReturnReg);
    movInstr->UnlinkDst();
    movInstr->SetDst(dstPair.low);
    // Make edx alive as it contains the high bits for the int64 return value
    IR::RegOpnd* highReg = IR::RegOpnd::New(TyInt32, this->m_func);
    highReg->SetReg(highReturnReg);
    // todo:: Remove the NOP in peeps
    IR::Instr* nopInstr = IR::Instr::New(Js::OpCode::NOP, highReg, this->m_func);
    movInstr->InsertBefore(nopInstr);
    // Copy the high half out of edx into the dst pair's high sym.
    IR::Instr* mov2Instr = IR::Instr::New(GetAssignOp(TyInt32), dstPair.high, highReg, this->m_func);
    movInstr->InsertAfter(mov2Instr);
    return mov2Instr;
}
// Lower an asm.js internal call: walk the ArgOut chain down to the StartCall,
// turning each ArgOut into a plain assign to its arg slot, push the function
// object, load the entry point through type -> entry point info -> address,
// and finally sink the dst into the appropriate return register(s).
IR::Instr *
LowererMDArch::LowerAsmJsCallI(IR::Instr * callInstr)
{
    IR::Instr * argInstr;
    int32 argCount = 0;
    // Lower args and look for StartCall
    argInstr = callInstr;
    IR::Opnd *src2 = argInstr->UnlinkSrc2();
    while (src2->IsSymOpnd())
    {
        IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(m_func);
        argInstr = argLinkSym->m_instrDef;
        // Mov each arg to it's argSlot
        src2 = argInstr->UnlinkSrc2();
        IR::Opnd* dst = argInstr->GetDst();
        if (dst && IRType_IsInt64(dst->GetType()))
        {
            // int64 args need a pair of 32-bit assigns on x86.
            argInstr = LowerInt64Assign(argInstr);
        }
        else
        {
            lowererMD->ChangeToAssign(argInstr);
        }
        ++argCount;
    }
    // increment again for FunctionObject
    ++argCount;
    // The chain ends at the StartCall's dst reg.
    IR::RegOpnd * argLinkOpnd = src2->AsRegOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    IR::Instr * startCallInstr = argLinkSym->m_instrDef;
    Assert(startCallInstr->m_opcode == Js::OpCode::StartCall);
    Assert(startCallInstr->GetSrc1()->IsIntConstOpnd());
    int32 stackAlignment = LowerStartCallAsmJs(startCallInstr, startCallInstr, callInstr);
    // + 1 for the function object slot; alignment padding is counted in 4-byte slots.
    const uint32 argSlots = argCount + (stackAlignment / 4) + 1;
    m_func->m_argSlotsForFunctionsCalled = max(m_func->m_argSlotsForFunctionsCalled, argSlots);
    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
    // we will not have function object mem ref in the case of function table calls, so we cannot calculate the call address ahead of time
    Assert(functionObjOpnd->IsRegOpnd() && functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym());
    // Push function object
    IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, callInstr->m_func);
    pushInstr->SetSrc1(functionObjOpnd);
    callInstr->InsertBefore(pushInstr);
    // functionTypeRegOpnd = function->type
    IR::RegOpnd* functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    IR::IndirOpnd* functionInfoIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
    IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
    callInstr->InsertBefore(instr);
    // functionTypeRegOpnd = type->entryPointInfo (register reused)
    functionInfoIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::ScriptFunctionType::GetEntryPointInfoOffset(), TyMachReg, m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
    callInstr->InsertBefore(instr);
    // Call target = entryPointInfo->address
    uint32 entryPointOffset = Js::ProxyEntryPointInfo::GetAddressOffset();
    IR::Opnd * entryPointOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachReg, m_func);
    callInstr->SetSrc1(entryPointOpnd);
    // Atom prefers "CALL reg" over "CALL [reg]"
    IR::Instr * hoistedCallSrcInstr = callInstr->HoistSrc1(Js::OpCode::MOV);
#if defined(_CONTROL_FLOW_GUARD)
    if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
    {
        this->lowererMD->GenerateCFGCheck(hoistedCallSrcInstr->GetDst(), callInstr);
    }
#else
    Unused(hoistedCallSrcInstr);
#endif
    IR::Instr * retInstr = callInstr;
    callInstr->m_opcode = Js::OpCode::CALL;
    if (callInstr->GetDst())
    {
        IRType dstType = callInstr->GetDst()->GetType();
        if (IRType_IsInt64(dstType))
        {
            // int64 results come back in a register pair.
            retInstr = LowerInt64CallDst(callInstr);
        }
        else
        {
            RegNum returnReg = GetRegReturnAsmJs(dstType);
            IR::Instr * movInstr;
            movInstr = callInstr->SinkDst(GetAssignOp(dstType), returnReg);
            retInstr = movInstr;
        }
    }
    return retInstr;
}
// Lower the bounds check for a WebAssembly memory access: compare
// index + access size against the buffer length and raise
// WASMERR_ArrayIndexOutOfRange when out of range. Returns the done label
// that follows the load/store code.
IR::Instr *
LowererMDArch::LowerWasmMemOp(IR::Instr * instr, IR::Opnd *addrOpnd)
{
    IR::IndirOpnd * indirOpnd = addrOpnd->AsIndirOpnd();
    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    uint32 offset = indirOpnd->GetOffset();
    IR::Opnd *arrayLenOpnd = instr->GetSrc2();
    // Constant part of the end offset: access size + static offset (64-bit to avoid overflow).
    int64 constOffset = (int64)addrOpnd->GetSize() + (int64)offset;
    CompileAssert(Js::ArrayBuffer::MaxArrayBufferLength <= UINT32_MAX);
    IR::IntConstOpnd * constOffsetOpnd = IR::IntConstOpnd::New((uint32)constOffset, TyUint32, m_func);
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
    IR::Opnd *cmpOpnd;
    if (indexOpnd != nullptr)
    {
        // Compare index + memop access length and array buffer length, and generate RuntimeError if greater
        cmpOpnd = IR::RegOpnd::New(TyUint32, m_func);
        Lowerer::InsertAdd(true, cmpOpnd, indexOpnd, constOffsetOpnd, helperLabel);
        // JB: unsigned overflow of the add means the access is out of range.
        Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
    }
    else
    {
        cmpOpnd = constOffsetOpnd;
    }
    lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, arrayLenOpnd, Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    // Helper path raises the Wasm range error; in-range path jumps over it to the load.
    lowererMD->m_lowerer->GenerateRuntimeError(loadLabel, WASMERR_ArrayIndexOutOfRange, IR::HelperOp_WebAssemblyRuntimeError);
    Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);
    return doneLabel;
}
// Lower the bounds-check helper path for an asm.js typed-array load.
// Out-of-bounds scalar loads produce a default value (NaN for floats, 0 for ints);
// out-of-bounds SIMD loads raise a range error. Returns the done label.
IR::Instr*
LowererMDArch::LowerAsmJsLdElemHelper(IR::Instr * instr, bool isSimdLoad /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Opnd * src1 = instr->UnlinkSrc1();
    IRType type = src1->GetType();
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
    IR::RegOpnd * indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();
    IR::Opnd * cmpOpnd;
    const uint8 dataWidth = instr->dataWidth;
    Assert(isSimdLoad == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
    if (indexOpnd)
    {
        cmpOpnd = indexOpnd;
    }
    else
    {
        // No index register: the access offset is a compile-time constant.
        cmpOpnd = IR::IntConstOpnd::New(src1->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
    }
    // if dataWidth != byte per element, we need to check end offset
    if (isSimdLoad && checkEndOffset)
    {
        IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
        // MOV tmp, cmpOnd
        Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
        // ADD tmp, dataWidth
        Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, tmp->GetType(), m_func, true), helperLabel);
        // JB helper
        Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
        // CMP tmp, size
        // JG $helper
        lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    }
    else
    {
        lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
    }
    // In-bounds path: skip the helper block and go straight to the load.
    Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);
    if (isSimdLoad)
    {
        lowererMD->m_lowerer->GenerateRuntimeError(loadLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
    }
    else
    {
        // Scalar out-of-bounds loads yield NaN (float) or 0 (int) instead of trapping.
        if (IRType_IsFloat(type))
        {
            Lowerer::InsertMove(instr->UnlinkDst(), IR::FloatConstOpnd::New(Js::NumberConstants::NaN, type, m_func), loadLabel);
        }
        else
        {
            Lowerer::InsertMove(instr->UnlinkDst(), IR::IntConstOpnd::New(0, TyInt8, m_func), loadLabel);
        }
    }
    Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, loadLabel);
    return doneLabel;
}
// Lower the bounds-check helper path for an asm.js typed-array store.
// Out-of-bounds scalar stores are skipped silently; out-of-bounds SIMD stores
// raise a range error. Returns the done label.
IR::Instr*
LowererMDArch::LowerAsmJsStElemHelper(IR::Instr * instr, bool isSimdStore /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Opnd * dst = instr->UnlinkDst();
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * storeLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
    IR::Opnd * cmpOpnd;
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;
    Assert(isSimdStore == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
    if (indexOpnd)
    {
        cmpOpnd = indexOpnd;
    }
    else
    {
        // No index register: the access offset is a compile-time constant.
        cmpOpnd = IR::IntConstOpnd::New(dst->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
    }
    // For SIMD stores wider than one element, check the end offset too.
    if (isSimdStore && checkEndOffset)
    {
        IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
        // MOV tmp, cmpOnd
        Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
        // ADD tmp, dataWidth
        Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, tmp->GetType(), m_func, true), helperLabel);
        // JB helper
        Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
        // CMP tmp, size
        // JG $helper
        lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    }
    else
    {
        lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
    }
    if (isSimdStore)
    {
        lowererMD->m_lowerer->GenerateRuntimeError(storeLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
    }
    Lowerer::InsertBranch(Js::OpCode::Br, storeLabel, helperLabel);
    Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, storeLabel);
    return doneLabel;
}
  876. IR::Instr *
  877. LowererMDArch::LowerCallPut(IR::Instr *callInstr)
  878. {
  879. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_None);
  880. // load native entry point from script function into eax
  881. IR::Opnd * functionWrapOpnd = callInstr->UnlinkSrc1();
  882. AssertMsg(functionWrapOpnd->IsRegOpnd() && functionWrapOpnd->AsRegOpnd()->m_sym->IsStackSym(),
  883. "Expected call src to be stackSym");
  884. // push function wrapper
  885. this->LoadHelperArgument(callInstr, functionWrapOpnd);
  886. IR::HelperCallOpnd *helperCallOpnd = IR::HelperCallOpnd::New(IR::HelperOp_InvokePut, this->m_func);
  887. callInstr->SetSrc1(helperCallOpnd);
  888. return this->LowerCall(callInstr, argCount);
  889. }
// Walk the ArgOut chain of callInstr down to its StartCall, lowering each
// ArgOut into an assign to its arg slot, lower the StartCall (stack
// reservation), and push the callInfo. Returns the total number of 4-byte
// argument slots consumed (args + alignment + callInfo + extraArgs);
// optionally hands back the callInfo operand via callInfoOpndRef.
int32
LowererMDArch::LowerCallArgs(IR::Instr *callInstr, ushort callFlags, Js::ArgSlot extraArgs, IR::IntConstOpnd **callInfoOpndRef)
{
    IR::Instr * argInstr;
    uint32 argCount = 0;
    // Lower args and look for StartCall
    argInstr = callInstr;
    IR::Opnd *src2 = argInstr->UnlinkSrc2();
    while (src2->IsSymOpnd())
    {
        IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(this->m_func);
        argInstr = argLinkSym->m_instrDef;
        // Mov each arg to it's argSlot
        src2 = argInstr->UnlinkSrc2();
        this->lowererMD->ChangeToAssign(argInstr);
        argCount++;
    }
    // The chain terminates at the StartCall's dst reg.
    IR::RegOpnd * argLinkOpnd = src2->AsRegOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    if (callInstr->m_opcode == Js::OpCode::NewScObject ||
        callInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        callInstr->m_opcode == Js::OpCode::NewScObjArray ||
        callInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
    {
        // These push an extra arg.
        argCount++;
    }
    AssertMsg(startCallInstr->m_opcode == Js::OpCode::StartCall || startCallInstr->m_opcode == Js::OpCode::LoweredStartCall, "Problem with arg chain.");
    AssertMsg(m_func->GetJITFunctionBody()->IsAsmJsMode() || startCallInstr->GetArgOutCount(/*getInterpreterArgOutCount*/ false) == argCount, "ArgCount doesn't match StartCall count");
    //
    // Machine dependent lowering
    //
    // For cloned calls, insert the stack adjustment at the last arg instead of the StartCall.
    IR::Instr * insertInstr;
    if (callInstr->IsCloned())
    {
        insertInstr = argInstr;
    }
    else
    {
        insertInstr = startCallInstr;
    }
    int32 stackAlignment;
    if (callInstr->m_opcode == Js::OpCode::AsmJsCallE)
    {
        stackAlignment = LowerStartCallAsmJs(startCallInstr, insertInstr, callInstr);
    }
    else
    {
        stackAlignment = LowerStartCall(startCallInstr, insertInstr);
    }
    startCallInstr->SetIsCloned(callInstr->IsCloned());
    // Push argCount
    IR::IntConstOpnd * argCountOpnd = Lowerer::MakeCallInfoConst(callFlags, argCount, m_func);
    if(callInfoOpndRef)
    {
        // Caller wants the callInfo operand too; add a use so it stays alive.
        argCountOpnd->Use(m_func);
        *callInfoOpndRef = argCountOpnd;
    }
    this->LoadHelperArgument(callInstr, argCountOpnd);
    uint32 argSlots;
    argSlots = argCount + (stackAlignment / 4) + 1 + extraArgs; // + 1 for call flags
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled, argSlots);
    return argSlots;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LowerCall
///
///     Machine dependent (x86) lowering for calls.
///     Adds an "ADD ESP, argCount*4" if argCount is not 0.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LowerCall(IR::Instr * callInstr, uint32 argCount, RegNum regNum)
{
    IR::Instr *retInstr = callInstr;
    callInstr->m_opcode = Js::OpCode::CALL;
    // This is required here due to calls created during lowering
    callInstr->m_func->SetHasCalls();
    if (callInstr->GetDst())
    {
        IR::Opnd * dstOpnd = callInstr->GetDst();
        IRType dstType = dstOpnd->GetType();
        Js::OpCode assignOp = GetAssignOp(dstType);
        IR::Instr * movInstr = nullptr;
        RegNum reg = GetRegReturn(dstType);
        if (IRType_IsFloat(dstType))
        {
            // We should only generate this if sse2 is available
            AssertMsg(AutoSystemInfo::Data.SSE2Available(), "SSE2 not supported");
            AssertMsg(reg == RegNOREG, "No register should be assigned for float Reg");
            // We pop the Float X87 stack using FSTP for the return value of the CALL, instead of storing in XMM0 directly.
            //Before: oldDst = CALL xxx
            //After:
            // CALL xxx
            // newDstOpnd = FSTP
            // oldDst = MOVSD [newDstOpnd]
            IR::Instr * floatPopInstr = IR::Instr::New(Js::OpCode::FSTP, m_func);
            IR::Opnd * oldDst = callInstr->UnlinkDst();
            // Spill slot used to transfer the value from the x87 stack to SSE.
            StackSym * newDstStackSym = StackSym::New(dstType, this->m_func);
            Assert(dstOpnd->IsFloat());
            this->m_func->StackAllocate(newDstStackSym, TySize[dstType]);
            IR::SymOpnd * newDstOpnd = IR::SymOpnd::New(newDstStackSym, dstType, this->m_func);
            floatPopInstr->SetDst(newDstOpnd);
            callInstr->InsertAfter(floatPopInstr);
            movInstr = IR::Instr::New(assignOp, oldDst, newDstOpnd, this->m_func);
            floatPopInstr->InsertAfter(movInstr);
        }
        else if (IRType_IsInt64(dstType))
        {
            // int64 results come back in the edx:eax pair.
            retInstr = movInstr = LowerInt64CallDst(callInstr);
        }
        else
        {
            // Sink the dst through the return register (eax for integers/vars).
            movInstr = callInstr->SinkDst(assignOp);
            callInstr->GetDst()->AsRegOpnd()->SetReg(reg);
            movInstr->GetSrc1()->AsRegOpnd()->SetReg(reg);
        }
        Assert(movInstr);
        retInstr = movInstr;
    }
    if (argCount)
    {
        // Pop the pushed arguments: LEA ESP, [ESP + argCount*4]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, RegESP, TyMachReg, this->m_func);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(espOpnd, argCount * MachPtr, TyMachReg, this->m_func);
        IR::Instr * addInstr = IR::Instr::New(Js::OpCode::LEA,
            espOpnd, indirOpnd, this->m_func);
        callInstr->InsertAfter(addInstr);
    }
    // All pending helper arguments have now been consumed by this call.
    this->helperCallArgsCount = 0;
    return retInstr;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LowerStartCall
///
///     Lower StartCall to a "SUB ESP, argCount * 4"
///     (emitted as an LEA, or a chkstk call for reservations larger than a
///     page). Returns the number of alignment padding bytes added.
///
///----------------------------------------------------------------------------
int32
LowererMDArch::LowerStartCall(IR::Instr * startCallInstr, IR::Instr* insertInstr)
{
    AssertMsg(startCallInstr->GetSrc1()->IsIntConstOpnd(), "Bad src on StartCall");
    IR::IntConstOpnd *sizeOpnd = startCallInstr->GetSrc1()->AsIntConstOpnd();
    IntConstType sizeValue = sizeOpnd->GetValue();
    // Maintain 8 byte alignment of the stack.
    // We do this by adjusting the SUB for stackCall to make sure it maintains 8 byte alignment.
    int32 stackAlignment = Math::Align<int32>(sizeValue*MachPtr, MachStackAlignment) - sizeValue*MachPtr;
    if (stackAlignment != 0)
    {
        // Reserve one extra slot so the total stays 8-byte aligned.
        sizeValue += 1;
    }
    sizeValue *= MachPtr;
    IR::Instr* newStartCall;
    if ((uint32)sizeValue > AutoSystemInfo::PageSize) {
        // Convert StartCall into a chkstk
        // mov eax, sizeOpnd->m_value
        // call _chkstk
        // (chkstk probes each page so guard pages are touched in order)
        IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, this->GetRegChkStkParam(), TyMachReg, this->m_func);
        this->lowererMD->CreateAssign(eaxOpnd, IR::IntConstOpnd::New(sizeValue, TyInt32, this->m_func, /*dontEncode*/true), insertInstr);
        newStartCall = IR::Instr::New(Js::OpCode::Call, this->m_func);
        newStartCall->SetSrc1(IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func));
        insertInstr->InsertBefore(newStartCall);
        this->LowerCall(newStartCall, 0);
    } else {
        // Convert StartCall into
        // lea esp, [esp - sizeValue]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
        newStartCall = IR::Instr::New(Js::OpCode::LEA, espOpnd, IR::IndirOpnd::New(espOpnd, -sizeValue, TyMachReg, this->m_func), this->m_func);
        insertInstr->InsertBefore(newStartCall);
    }
    newStartCall->SetByteCodeOffset(startCallInstr);
    // Mark the start call as being lowered - this is required by the bailout encoding logic
    startCallInstr->m_opcode = Js::OpCode::LoweredStartCall;
    return stackAlignment;
}
int32
LowererMDArch::LowerStartCallAsmJs(IR::Instr * startCallInstr, IR::Instr * insertInstr, IR::Instr * callInstr)
{
    // Asm.js variant of LowerStartCall. Here src1 of StartCall is already a
    // size in BYTES (unlike LowerStartCall, where it is a slot count).
    AssertMsg(startCallInstr->GetSrc1()->IsIntConstOpnd(), "Bad src on StartCall");
    IR::IntConstOpnd * sizeOpnd = startCallInstr->GetSrc1()->AsIntConstOpnd();
    IntConstType sizeValue = sizeOpnd->GetValue();

    if (callInstr->m_opcode == Js::OpCode::AsmJsCallI)
    {
        // we will push FunctionObject, so don't need to worry about that
        sizeValue -= MachPtr;
    }

    // Maintain 8 byte alignment of the stack.
    // We do this by adjusting the SUB for stackCall to make sure it maintains 8 byte alignment.
    int32 stackAlignment = Math::Align<int32>(sizeValue, MachStackAlignment) - sizeValue;
    if (stackAlignment != 0)
    {
        // sizeValue is already in bytes, so pad by a machine word directly.
        sizeValue += MachPtr;
    }

    IR::Instr* newStartCall;
    if ((uint32)sizeValue > AutoSystemInfo::PageSize) {
        // Allocations larger than a page go through _chkstk so each new stack
        // page is touched in order (required to grow the guard page).
        // Convert StartCall into a chkstk
        //      mov eax, sizeOpnd->m_value
        //      call _chkstk
        IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, GetRegChkStkParam(), TyMachReg, m_func);
        lowererMD->CreateAssign(eaxOpnd, IR::IntConstOpnd::New(sizeValue, TyInt32, m_func, /*dontEncode*/true), insertInstr);

        newStartCall = IR::Instr::New(Js::OpCode::Call, m_func);
        newStartCall->SetSrc1(IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, m_func));
        insertInstr->InsertBefore(newStartCall);
        LowerCall(newStartCall, 0);
    }
    else {
        // Convert StartCall into
        //      lea esp, [esp - sizeValue]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, m_func);
        newStartCall = IR::Instr::New(Js::OpCode::LEA, espOpnd, IR::IndirOpnd::New(espOpnd, -sizeValue, TyMachReg, m_func), m_func);
        insertInstr->InsertBefore(newStartCall);
    }
    newStartCall->SetByteCodeOffset(startCallInstr);

    // Mark the start call as being lowered - this is required by the bailout encoding logic
    startCallInstr->m_opcode = Js::OpCode::LoweredStartCall;

    // Return the alignment padding in bytes (0 if none was needed).
    return stackAlignment;
}
  1113. ///----------------------------------------------------------------------------
  1114. ///
  1115. /// LowererMDArch::LoadHelperArgument
  1116. ///
  1117. /// Change to a PUSH.
  1118. ///
  1119. ///----------------------------------------------------------------------------
  1120. IR::Instr *
  1121. LowererMDArch::LoadHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  1122. {
  1123. IR::Instr * pushInstr;
  1124. pushInstr = IR::Instr::New(Js::OpCode::PUSH, instr->m_func);
  1125. if(TySize[opndArg->GetType()] < TySize[TyMachReg])
  1126. {
  1127. Assert(!opndArg->IsMemoryOpnd()); // if it's a memory opnd, it would need to be loaded into a register first
  1128. opndArg = opndArg->UseWithNewType(TyMachReg, instr->m_func);
  1129. }
  1130. pushInstr->SetSrc1(opndArg);
  1131. instr->InsertBefore(pushInstr);
  1132. this->helperCallArgsCount++;
  1133. AssertMsg(helperCallArgsCount <= LowererMDArch::MaxArgumentsToHelper, "The # of arguments to the helper is too big.");
  1134. return pushInstr;
  1135. }
IR::Instr *
LowererMDArch::LoadDynamicArgument(IR::Instr * instr, uint argNumber /*ignore for x86*/)
{
    // On x86 every dynamic argument goes on the stack, so the argument
    // position is irrelevant and the instruction can be converted in place.
    //Convert to push instruction.
    instr->m_opcode = Js::OpCode::PUSH;
    return instr;
}
IR::Instr *
LowererMDArch::LoadInt64HelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
{
    // Passes a 64-bit integer helper argument on the stack as two 32-bit
    // halves:
    //      LEA esp, [esp - 8]      ; reserve the 8-byte slot
    //      MOV [esp], low          ; low 32 bits at the lower address
    //      MOV [esp + 4], high     ; high 32 bits above
    // Returns the first (LEA) instruction of the emitted sequence.
    IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
    IR::Opnd * opnd = IR::IndirOpnd::New(espOpnd, -8, TyMachReg, this->m_func);
    IR::Instr * instrPrev = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
    instrInsert->InsertBefore(instrPrev);

    // Split the 64-bit operand into its low/high 32-bit register pair.
    Int64RegPair argPair = m_func->FindOrCreateInt64Pair(opndArg);

    opnd = IR::IndirOpnd::New(espOpnd, 0, TyInt32, this->m_func);
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opnd, argPair.low, this->m_func);
    instrInsert->InsertBefore(instr);
    LowererMD::Legalize(instr);

    opnd = IR::IndirOpnd::New(espOpnd, 4, TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opnd, argPair.high, this->m_func);
    instrInsert->InsertBefore(instr);
    LowererMD::Legalize(instr);

    return instrPrev;
}
  1161. IR::Instr *
  1162. LowererMDArch::LoadDoubleHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1163. {
  1164. IR::Instr * instrPrev;
  1165. IR::Instr * instr;
  1166. IR::Opnd * opnd;
  1167. IR::Opnd * float64Opnd;
  1168. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
  1169. opnd = IR::IndirOpnd::New(espOpnd, -8, TyMachReg, this->m_func);
  1170. instrPrev = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
  1171. instrInsert->InsertBefore(instrPrev);
  1172. opnd = IR::IndirOpnd::New(espOpnd, (int32)0, TyFloat64, this->m_func);
  1173. if (opndArg->GetType() == TyFloat32)
  1174. {
  1175. float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
  1176. instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, opndArg, this->m_func);
  1177. instrInsert->InsertBefore(instr);
  1178. }
  1179. else
  1180. {
  1181. float64Opnd = opndArg;
  1182. }
  1183. instr = IR::Instr::New(Js::OpCode::MOVSD, opnd, float64Opnd, this->m_func);
  1184. instrInsert->InsertBefore(instr);
  1185. LowererMD::Legalize(instr);
  1186. return instrPrev;
  1187. }
  1188. IR::Instr *
  1189. LowererMDArch::LoadFloatHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1190. {
  1191. IR::Instr * instrPrev;
  1192. IR::Instr * instr;
  1193. IR::Opnd * opnd;
  1194. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
  1195. opnd = IR::IndirOpnd::New(espOpnd, -4, TyMachReg, this->m_func);
  1196. instrPrev = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
  1197. instrInsert->InsertBefore(instrPrev);
  1198. opnd = IR::IndirOpnd::New(espOpnd, (int32)0, TyFloat32, this->m_func);
  1199. instr = IR::Instr::New(Js::OpCode::MOVSS, opnd, opndArg, this->m_func);
  1200. instrInsert->InsertBefore(instr);
  1201. LowererMD::Legalize(instr);
  1202. return instrPrev;
  1203. }
  1204. ///----------------------------------------------------------------------------
  1205. ///
  1206. /// LowererMDArch::LowerEntryInstr
  1207. ///
  1208. /// Emit prolog.
  1209. ///
  1210. ///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LowerEntryInstr(IR::EntryInstr * entryInstr)
{
    // Emits the x86 prolog. Note that everything is inserted with
    // entryInstr->InsertAfter(), so the prolog is CREATED in reverse of its
    // final execution order: the PUSH EBP created at the bottom of this
    // function executes first.
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.IsEnabled(Js::CheckAlignmentFlag))
    {
        // Debug-only: verify stack alignment on entry.
        IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
        callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrFunc_CheckAlignment, this->m_func));
        entryInstr->InsertAfter(callInstr);
        this->LowerCall(callInstr, 0, RegEAX);
    }
#endif

    int32 bytesOnStack = MachRegInt+MachRegInt; // Account for return address+push EBP...

    // PUSH used callee-saved registers
    for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg+1))
    {
        if (LinearScan::IsCalleeSaved(reg) && (this->m_func->m_regsUsed.Test(reg)))
        {
            IR::RegOpnd * regOpnd = IR::RegOpnd::New(nullptr, reg, TyMachReg, this->m_func);
            IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
            pushInstr->SetSrc1(regOpnd);
            entryInstr->InsertAfter(pushInstr);
            bytesOnStack += MachRegInt;
        }
    }

    // Allocate frame
    IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, this->GetRegBlockPointer(), TyMachReg, this->m_func);
    IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);

    // Dedicated argument slot is already included in the m_localStackHeight (see Func ctor)
    // Allocate the inlined arg out stack in the locals. Allocate an additional slot so that
    // we can unconditionally clear the argc slot of the next frame.
    this->m_func->m_localStackHeight += ((this->m_func->GetMaxInlineeArgOutCount() + 1) * MachPtr);
    bytesOnStack += this->m_func->m_localStackHeight;

    int32 alignment = Math::Align<int32>(bytesOnStack, MachStackAlignment) - bytesOnStack;

    // Make sure this frame allocation maintains 8-byte alignment. Our point of reference is the return address
    this->m_func->m_localStackHeight += alignment;
    bytesOnStack += alignment;

    Assert(Math::Align<int32>(bytesOnStack, MachStackAlignment) == bytesOnStack);
    Assert(this->m_func->hasBailout || this->bailOutStackRestoreLabel == nullptr);
    this->m_func->frameSize = bytesOnStack;

    if (this->m_func->GetMaxInlineeArgOutCount())
    {
        // Publish the final frame height to the JIT output when inlinees
        // have outgoing-argument areas in this frame.
        this->m_func->GetJITOutput()->SetFrameHeight(this->m_func->m_localStackHeight);
    }

    // Zero initialize the first inlinee frames argc.
    if (this->m_func->GetMaxInlineeArgOutCount())
    {
        StackSym *sym = this->m_func->m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
        sym->m_isInlinedArgSlot = true;
        sym->m_offset = 0;
        IR::Opnd *dst = IR::SymOpnd::New(sym, TyMachReg, this->m_func);
        entryInstr->InsertAfter(IR::Instr::New(Js::OpCode::MOV,
            dst,
            IR::AddrOpnd::NewNull(this->m_func),
            this->m_func));
    }

    if (this->m_func->m_localStackHeight != 0)
    {
        int32 stackSize = this->m_func->m_localStackHeight;
        if (this->m_func->HasArgumentSlot())
        {
            // We separately push the stack argument slot below
            stackSize -= MachPtr;
        }

        if (this->m_func->m_localStackHeight <= PAGESIZE)
        {
            // Generate LEA ESP, [esp - stackSize] // Atom prefers LEA for address computations
            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(espOpnd, -stackSize, TyMachReg, this->m_func);
            IR::Instr * subInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, this->m_func);
            entryInstr->InsertAfter(subInstr);
        }
        else
        {
            // Generate chkstk call (probes each new stack page). The CALL is
            // inserted first; the MOV of the size is then placed before it by
            // passing entryInstr->m_next as the insertion point.
            IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, this->GetRegChkStkParam(), TyMachReg, this->m_func);
            IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, eaxOpnd,
                IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func), this->m_func);
            entryInstr->InsertAfter(callInstr);
            this->LowerCall(callInstr, 0, RegECX);
            IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(stackSize, TyMachReg, this->m_func);
            this->lowererMD->CreateAssign(eaxOpnd, stackSizeOpnd, entryInstr->m_next);
        }
    }

    // Zero-initialize dedicated arguments slot
    if (this->m_func->HasArgumentSlot())
    {
        IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        pushInstr->SetSrc1(IR::IntConstOpnd::New(0, TyMachPtr, this->m_func));
        entryInstr->InsertAfter(pushInstr);
    }

    // Probe for the worst-case stack use: this frame, outgoing call argument
    // slots, and the JIT's reserved headroom.
    size_t frameSize = bytesOnStack + ((this->m_func->m_argSlotsForFunctionsCalled + 1) * MachPtr) + Js::Constants::MinStackJIT;
    this->GeneratePrologueStackProbe(entryInstr, frameSize);

    // Generate MOV EBP, ESP
    IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, ebpOpnd, espOpnd, this->m_func);
    entryInstr->InsertAfter(movInstr);

    // Generate PUSH EBP
    IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
    pushInstr->SetSrc1(ebpOpnd);
    entryInstr->InsertAfter(pushInstr);

    return entryInstr;
}
IR::Instr *
LowererMDArch::LowerEntryInstrAsmJs(IR::EntryInstr * entryInstr)
{
    // Emits the asm.js prolog. Unlike LowerEntryInstr, this builds the prolog
    // in FORWARD order by inserting before entryInstr->m_next:
    //      PUSH EBP
    //      MOV EBP, ESP
    //      StackProbe
    //      MOV EAX, LocalStackHeight / LEA ESP, [ESP - stackSize]
    //      CALL chkstk              /
    //      PUSH used nonvolatiles

    // Calculate stack size
    int32 bytesOnStack = MachRegInt + MachRegInt; // Account for return address+push EBP...

    for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg + 1))
    {
        if (LinearScan::IsCalleeSaved(reg) && (m_func->m_regsUsed.Test(reg)))
        {
            bytesOnStack += MachRegInt;
        }
    }

    // Extra machine word in the locals area (accounted for in stackSize below).
    m_func->m_localStackHeight += MachPtr;
    bytesOnStack += m_func->m_localStackHeight;

    int32 alignment = Math::Align<int32>(bytesOnStack, MachStackAlignment) - bytesOnStack;

    // Make sure this frame allocation maintains 8-byte alignment. Our point of reference is the return address
    m_func->m_localStackHeight += alignment;
    bytesOnStack += alignment;
    Assert(Math::Align<int32>(bytesOnStack, MachStackAlignment) == bytesOnStack);

    m_func->frameSize = bytesOnStack;

    IR::Instr * insertInstr = entryInstr->m_next;

    IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, GetRegBlockPointer(), TyMachReg, m_func);
    IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, m_func);

    // Generate PUSH EBP
    IR::Instr * pushEbpInstr = IR::Instr::New(Js::OpCode::PUSH, m_func);
    pushEbpInstr->SetSrc1(ebpOpnd);
    insertInstr->InsertBefore(pushEbpInstr);

    // Generate MOV EBP, ESP
    IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, ebpOpnd, espOpnd, m_func);
    insertInstr->InsertBefore(movInstr);

    // Generate StackProbe
    size_t frameSize = bytesOnStack + m_func->m_argSlotsForFunctionsCalled * MachPtr + Js::Constants::MinStackJIT;
    GeneratePrologueStackProbe(insertInstr->m_prev, frameSize);

    if (m_func->m_localStackHeight != 0)
    {
        // The extra MachPtr added above is not part of the LEA/chkstk amount.
        int32 stackSize = m_func->m_localStackHeight - MachPtr;
        if (m_func->m_localStackHeight <= PAGESIZE)
        {
            // Generate LEA ESP, [ESP - LocalStackHeight] // Atom prefers LEA for address computations
            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(espOpnd, -stackSize, TyMachReg, m_func);
            IR::Instr * subInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, m_func);
            insertInstr->InsertBefore(subInstr);
        }
        else
        {
            // Large frame: probe each page via chkstk.
            IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, GetRegChkStkParam(), TyMachReg, m_func);

            // Generate MOV EAX, LocalStackHeight
            IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(stackSize, TyMachReg, m_func);
            lowererMD->CreateAssign(eaxOpnd, stackSizeOpnd, insertInstr);

            // Generate CALL chkstk
            IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, eaxOpnd,
                IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, m_func), m_func);
            insertInstr->InsertBefore(callInstr);
            LowerCall(callInstr, 0, RegECX);
        }
    }

    // PUSH used callee-saved registers (iterated in descending order; the
    // epilog pops them back in ascending order)
    for (RegNum reg = (RegNum)(RegNumCount - 1); reg > RegNOREG; reg = (RegNum)(reg - 1))
    {
        if (LinearScan::IsCalleeSaved(reg) && (m_func->m_regsUsed.Test(reg)))
        {
            IR::RegOpnd * regOpnd = IR::RegOpnd::New(nullptr, reg, TyMachReg, m_func);
            IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, m_func);
            pushInstr->SetSrc1(regOpnd);
            insertInstr->InsertBefore(pushInstr);
        }
    }

#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.IsEnabled(Js::CheckAlignmentFlag))
    {
        // CALL CheckAlignment
        IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
        callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrFunc_CheckAlignment, m_func));
        insertInstr->InsertBefore(callInstr);
        LowerCall(callInstr, 0, RegEAX);
    }
#endif

    return entryInstr;
}
void
LowererMDArch::GeneratePrologueStackProbe(IR::Instr *entryInstr, size_t frameSize)
{
    //
    // Generate a stack overflow check. This can be as simple as a cmp esp, const
    // because this function is guaranteed to be called on its base thread only.
    // If the check fails call ThreadContext::ProbeCurrentStack which will check again and throw if needed.
    //
    //       cmp  esp, ThreadContext::scriptStackLimit + frameSize
    //       jg   done
    //       push frameSize
    //       call ThreadContext::ProbeCurrentStack
    //
    // For thread-agile thread context
    //       mov  eax, [ThreadContext::stackLimitForCurrentThread]
    //       add  eax, frameSize
    //       cmp  esp, eax
    //       jg   done
    //       push frameSize
    //       call ThreadContext::ProbeCurrentStack
    // done:
    //
    // For thread context with script interrupt enabled:
    //       mov  eax, [ThreadContext::stackLimitForCurrentThread]
    //       add  eax, frameSize
    //       jo   $helper
    //       cmp  esp, eax
    //       jg   done
    // $helper:
    //       push frameSize
    //       call ThreadContext::ProbeCurrentStack
    // done:
    //

    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *insertInstr = entryInstr->m_next;
    IR::Instr *instr;
    IR::Opnd *stackLimitOpnd;
    bool doInterruptProbe = m_func->GetJITFunctionBody()->DoInterruptProbe();

    if (doInterruptProbe || !m_func->GetThreadContextInfo()->IsThreadBound())
    {
        // Load the current stack limit from the ThreadContext, then increment this value by the size of the
        // current frame. This is the value we'll compare against below.
        stackLimitOpnd = IR::RegOpnd::New(nullptr, RegEAX, TyMachReg, this->m_func);
        intptr_t pLimit = m_func->GetThreadContextInfo()->GetThreadStackLimitAddr();
        IR::MemRefOpnd * memOpnd = IR::MemRefOpnd::New(pLimit, TyMachReg, this->m_func);
        this->lowererMD->CreateAssign(stackLimitOpnd, memOpnd, insertInstr);

        instr = IR::Instr::New(Js::OpCode::ADD, stackLimitOpnd, stackLimitOpnd,
            IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        if (doInterruptProbe)
        {
            // If this add overflows, then we need to call out to the helper.
            // (The interrupt mechanism can set the limit to a huge value so
            // that the overflow forces the slow path -- see top comment.)
            instr = IR::BranchInstr::New(Js::OpCode::JO, helperLabel, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }
    else
    {
        // The incremented stack limit is a compile-time constant.
        size_t scriptStackLimit = (size_t)m_func->GetThreadContextInfo()->GetScriptStackLimit();
        stackLimitOpnd = IR::IntConstOpnd::New((frameSize + scriptStackLimit), TyMachReg, this->m_func);
    }

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
    if (!IS_FAULTINJECT_STACK_PROBE_ON) // Do stack check fastpath only if not doing StackProbe fault injection
    {
        // cmp esp, stackLimit; jg done -- the fast path skips the helper call.
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func));
        instr->SetSrc2(stackLimitOpnd);
        insertInstr->InsertBefore(instr);

        instr = IR::BranchInstr::New(Js::OpCode::JGT, doneLabel, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    insertInstr->InsertBefore(helperLabel);

    // Make sure we have zero where we expect to find the stack nested func pointer relative to EBP.
    // (Pushed first, so these two zeros are the deepest/last parameters.)
    LoadHelperArgument(insertInstr, IR::IntConstOpnd::New(0, TyMachReg, m_func));
    LoadHelperArgument(insertInstr, IR::IntConstOpnd::New(0, TyMachReg, m_func));

    // Load the arguments to the probe helper and do the call.
    lowererMD->m_lowerer->LoadScriptContext(insertInstr);
    this->lowererMD->LoadHelperArgument(
        insertInstr, IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func));

    instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProbeCurrentStack2, this->m_func));
    insertInstr->InsertBefore(instr);
    this->LowerCall(instr, 0, RegEAX);

    insertInstr->InsertBefore(doneLabel);
    Security::InsertRandomFunctionPad(doneLabel);
}
  1482. ///----------------------------------------------------------------------------
  1483. ///
  1484. /// LowererMDArch::LowerExitInstr
  1485. ///
  1486. /// Emit epilog.
  1487. ///
  1488. ///----------------------------------------------------------------------------
  1489. IR::Instr *
  1490. LowererMDArch::LowerExitInstr(IR::ExitInstr * exitInstr)
  1491. {
  1492. exitInstr = LowerExitInstrCommon(exitInstr);
  1493. // Insert RET
  1494. IR::IntConstOpnd * intSrc = IR::IntConstOpnd::New(0, TyMachReg, this->m_func);
  1495. IR::RegOpnd *eaxReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1496. IR::Instr *retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  1497. retInstr->SetSrc1(intSrc);
  1498. retInstr->SetSrc2(eaxReg);
  1499. exitInstr->InsertBefore(retInstr);
  1500. return exitInstr;
  1501. }
  1502. IR::Instr *
  1503. LowererMDArch::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
  1504. {
  1505. exitInstr = LowerExitInstrCommon(exitInstr);
  1506. // get asm.js return type
  1507. IR::IntConstOpnd* intSrc = nullptr;
  1508. if (m_func->IsLoopBody())
  1509. {
  1510. // Insert RET
  1511. intSrc = IR::IntConstOpnd::New(0, TyMachReg, this->m_func);
  1512. }
  1513. else
  1514. {
  1515. // Generate RET
  1516. int32 alignedSize = Math::Align<int32>(m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetArgByteSize(), MachStackAlignment);
  1517. intSrc = IR::IntConstOpnd::New(alignedSize + MachPtr, TyMachReg, m_func);
  1518. }
  1519. IR::Instr *retInstr = IR::Instr::New(Js::OpCode::RET, m_func);
  1520. retInstr->SetSrc1(intSrc);
  1521. exitInstr->InsertBefore(retInstr);
  1522. return exitInstr;
  1523. }
  1524. IR::ExitInstr *
  1525. LowererMDArch::LowerExitInstrCommon(IR::ExitInstr * exitInstr)
  1526. {
  1527. IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, GetRegBlockPointer(), TyMachReg, m_func);
  1528. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, m_func);
  1529. // POP used callee-saved registers
  1530. for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg + 1))
  1531. {
  1532. if (LinearScan::IsCalleeSaved(reg) && (m_func->m_regsUsed.Test(reg)))
  1533. {
  1534. IR::RegOpnd * regOpnd = IR::RegOpnd::New(nullptr, reg, TyMachReg, m_func);
  1535. IR::Instr * popInstr = IR::Instr::New(Js::OpCode::POP, regOpnd, m_func);
  1536. exitInstr->InsertBefore(popInstr);
  1537. }
  1538. }
  1539. // Restore frame
  1540. // Generate MOV ESP, EBP
  1541. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, espOpnd, ebpOpnd, m_func);
  1542. exitInstr->InsertBefore(movInstr);
  1543. // Generate POP EBP
  1544. IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::POP, ebpOpnd, m_func);
  1545. exitInstr->InsertBefore(pushInstr);
  1546. return exitInstr;
  1547. }
IR::Instr *
LowererMDArch::LowerInt64Assign(IR::Instr * instr)
{
    // Splits a 64-bit assignment into two 32-bit moves (low and high halves),
    // since x86 has no 64-bit integer registers. Returns the instruction
    // preceding the lowered sequence; returns 'instr' unchanged if the
    // operands aren't a supported shape.
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    if (dst && (dst->IsRegOpnd() || dst->IsSymOpnd() || dst->IsIndirOpnd()) && src1)
    {
        int dstSize = dst->GetSize();
        int srcSize = src1->GetSize();
        // Get the low/high 32-bit halves of each operand.
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);
        Int64RegPair src1Pair = m_func->FindOrCreateInt64Pair(src1);

        // Low half is always assigned.
        IR::Instr* lowLoadInstr = IR::Instr::New(Js::OpCode::Ld_I4, dstPair.low, src1Pair.low, m_func);
        instr->InsertBefore(lowLoadInstr);
        lowererMD->ChangeToAssign(lowLoadInstr);

        // Do not store to memory if we wanted less than 8 bytes
        const bool canAssignHigh = !dst->IsIndirOpnd() || dstSize == 8;
        const bool isLoadFromWordMem = src1->IsIndirOpnd() && srcSize < 8;
        if (canAssignHigh)
        {
            if (!isLoadFromWordMem)
            {
                // Normal case, assign source's high bits to dst's high bits
                IR::Instr* highLoadInstr = IR::Instr::New(Js::OpCode::Ld_I4, dstPair.high, src1Pair.high, m_func);
                instr->InsertBefore(highLoadInstr);
                lowererMD->ChangeToAssign(highLoadInstr);
            }
            else
            {
                // Do not load from memory if we wanted less than 8 bytes
                src1Pair.high->Free(m_func);
                if (IRType_IsUnsignedInt(src1->GetType()))
                {
                    // If this is an unsigned assign from memory, we can simply set the high bits to 0
                    IR::Instr* highLoadInstr = IR::Instr::New(Js::OpCode::Ld_I4, dstPair.high, IR::IntConstOpnd::New(0, TyInt32, m_func), m_func);
                    // NOTE(review): ChangeToAssign is called before InsertBefore
                    // here, the reverse of the other branches -- confirm the
                    // ordering is intentional.
                    lowererMD->ChangeToAssign(highLoadInstr);
                    instr->InsertBefore(highLoadInstr);
                }
                else
                {
                    // If this is a signed assign from memory, we need to extend the sign:
                    // copy the low half into the high half, then SAR by 31 to
                    // broadcast the sign bit.
                    IR::Instr* highExtendInstr = IR::Instr::New(Js::OpCode::Ld_I4, dstPair.high, dstPair.low, m_func);
                    instr->InsertBefore(highExtendInstr);
                    lowererMD->ChangeToAssign(highExtendInstr);

                    highExtendInstr = IR::Instr::New(Js::OpCode::SAR, dstPair.high, dstPair.high, IR::IntConstOpnd::New(31, TyInt32, m_func), m_func);
                    instr->InsertBefore(highExtendInstr);
                }
            }
        }

        // The original 64-bit assign has been fully replaced.
        instr->Remove();
        return lowLoadInstr->m_prev;
    }
    return instr;
}
void
LowererMDArch::EmitInt64Instr(IR::Instr *instr)
{
    // Lowers a 64-bit integer instruction for x86. Bitwise ops and add/sub
    // are split into low/high 32-bit operations; shifts, mul, div, rem and
    // bit-counting ops are dispatched to C++ helpers that take the 64-bit
    // operands on the stack.
    if (instr->IsBranchInstr())
    {
        LowerInt64Branch(instr);
        return;
    }

    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd* src2 = instr->GetSrc2();
    Assert(!dst || dst->IsInt64());
    Assert(!src1 || src1->IsInt64());
    Assert(!src2 || src2->IsInt64());

    // Replace 'instr' with a helper call. Arguments are loaded src2 first so
    // that src1 ends up as the first stack argument; the original instruction
    // is removed and the new Call takes over its dst.
    const auto LowerToHelper = [&](IR::JnHelperMethod helper) {
        if (src2)
        {
            LoadInt64HelperArgument(instr, src2);
        }
        Assert(src1);
        LoadInt64HelperArgument(instr, src1);

        IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, dst, this->m_func);
        instr->InsertBefore(callInstr);
        lowererMD->ChangeToHelperCall(callInstr, helper);
        instr->Remove();
        return callInstr;
    };
    Js::OpCode lowOpCode, highOpCode;
    switch (instr->m_opcode)
    {
    case Js::OpCode::Xor_A:
    case Js::OpCode::Xor_I4:
        lowOpCode = Js::OpCode::XOR;
        highOpCode = Js::OpCode::XOR;
        goto binopCommon;
    case Js::OpCode::Or_A:
    case Js::OpCode::Or_I4:
        lowOpCode = Js::OpCode::OR;
        highOpCode = Js::OpCode::OR;
        goto binopCommon;
    case Js::OpCode::And_A:
    case Js::OpCode::And_I4:
        lowOpCode = Js::OpCode::AND;
        highOpCode = Js::OpCode::AND;
        goto binopCommon;
    case Js::OpCode::Add_A:
    case Js::OpCode::Add_I4:
        // 64-bit add: ADD on the low halves, ADC on the high halves so the
        // carry from the low ADD propagates.
        lowOpCode = Js::OpCode::ADD;
        highOpCode = Js::OpCode::ADC;
        goto binopCommon;
    case Js::OpCode::Sub_A:
    case Js::OpCode::Sub_I4:
        // 64-bit sub: SUB low, SBB high (borrow propagation).
        lowOpCode = Js::OpCode::SUB;
        highOpCode = Js::OpCode::SBB;
binopCommon:
    {
        // Emit the low-half op first, then convert the original instruction
        // in place into the high-half op. Order matters for ADD/ADC and
        // SUB/SBB, which communicate through the carry flag.
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);
        Int64RegPair src1Pair = m_func->FindOrCreateInt64Pair(src1);
        Int64RegPair src2Pair = m_func->FindOrCreateInt64Pair(src2);
        IR::Instr* lowInstr = IR::Instr::New(lowOpCode, dstPair.low, src1Pair.low, src2Pair.low, m_func);
        instr->InsertBefore(lowInstr);
        LowererMD::Legalize(lowInstr);

        instr->ReplaceDst(dstPair.high);
        instr->ReplaceSrc1(src1Pair.high);
        instr->ReplaceSrc2(src2Pair.high);
        instr->m_opcode = highOpCode;
        LowererMD::Legalize(instr);
        break;
    }
    case Js::OpCode::ShrU_A:
    case Js::OpCode::ShrU_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64ShrU);
        break;
    case Js::OpCode::Shr_A:
    case Js::OpCode::Shr_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Shr);
        break;
    case Js::OpCode::Shl_A:
    case Js::OpCode::Shl_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Shl);
        break;
    case Js::OpCode::Rol_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Rol);
        break;
    case Js::OpCode::Ror_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Ror);
        break;
    case Js::OpCode::InlineMathClz:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Clz);
        break;
    case Js::OpCode::Ctz:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Ctz);
        break;
    case Js::OpCode::PopCnt:
        instr = LowerToHelper(IR::HelperPopCnt64);
        break;
    case Js::OpCode::Mul_A:
    case Js::OpCode::Mul_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Mul);
        break;
    // Division/remainder helpers take the script context as an extra
    // argument (loaded before the int64 operands).
    case Js::OpCode::DivU_I4:
        this->lowererMD->m_lowerer->LoadScriptContext(instr);
        instr = LowerToHelper(IR::HelperDirectMath_Int64DivU);
        break;
    case Js::OpCode::Div_A:
    case Js::OpCode::Div_I4:
        this->lowererMD->m_lowerer->LoadScriptContext(instr);
        instr = LowerToHelper(IR::HelperDirectMath_Int64DivS);
        break;
    case Js::OpCode::RemU_I4:
        this->lowererMD->m_lowerer->LoadScriptContext(instr);
        instr = LowerToHelper(IR::HelperDirectMath_Int64RemU);
        break;
    case Js::OpCode::Rem_A:
    case Js::OpCode::Rem_I4:
        this->lowererMD->m_lowerer->LoadScriptContext(instr);
        instr = LowerToHelper(IR::HelperDirectMath_Int64RemS);
        break;
    default:
        AssertMsg(UNREACHED, "Int64 opcode not supported");
    }
}
// Lowers a conditional branch on 64-bit integer sources for x86, where an i64
// value is represented as a {low, high} pair of 32-bit registers.
// Strategy: compare the high halves first; if that alone decides the branch,
// jump; otherwise (highs equal) compare the low halves, always with an
// unsigned condition since the low half carries no sign bit.
void LowererMDArch::LowerInt64Branch(IR::Instr *instr)
{
    AssertOrFailFast(instr->IsBranchInstr());
    IR::BranchInstr* branchInstr = instr->AsBranchInstr();
    Assert(branchInstr->IsConditional());
    // destination label
    IR::LabelInstr* jmpLabel = branchInstr->GetTarget();
    // Label to use when we know the condition is false after checking only the high bits
    IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    branchInstr->InsertAfter(doneLabel);
    // Single-source branches (BrTrue/BrFalse) get an implicit 0 for src2
    IR::Opnd* src1 = instr->UnlinkSrc1();
    IR::Opnd* src2 = instr->GetSrc2() ? instr->UnlinkSrc2() : IR::Int64ConstOpnd::New(0, TyInt64, this->m_func);
    Assert(src1 && src1->IsInt64());
    Assert(src2 && src2->IsInt64());
    Int64RegPair src1Pair = m_func->FindOrCreateInt64Pair(src1);
    Int64RegPair src2Pair = m_func->FindOrCreateInt64Pair(src2);
    // JNE doneLabel -- used right after a high-halves CMP to bail out when they differ
    const auto insertJNE = [&]()
    {
        IR::Instr* newInstr = IR::BranchInstr::New(Js::OpCode::JNE, doneLabel, m_func);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
    };
    // CMP src1.high, src2.high ; Jcc label
    const auto cmpHighAndJump = [&](Js::OpCode jumpOp, IR::LabelInstr* label)
    {
        IR::Instr* newInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        newInstr->SetSrc1(src1Pair.high);
        newInstr->SetSrc2(src2Pair.high);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        newInstr = IR::BranchInstr::New(jumpOp, label, this->m_func);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
    };
    // CMP src1.low, src2.low ; the original branch instruction itself is reused as the Jcc
    const auto cmpLowAndJump = [&](Js::OpCode jumpOp)
    {
        IR::Instr* newInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src2Pair.low);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        branchInstr->m_opcode = jumpOp;
    };
    // Shared pattern for all relational (<, <=, >, >=) comparisons
    const auto cmpInt64Common = [&](Js::OpCode cmpHighJmpOp, Js::OpCode cmpLowJmpOp)
    {
        // CMP src1.high, src2.high
        // JCC target
        // JNE done ;; not equal means it's inverse of JCC, do not change in case cmp opnd are swapped
        // ;; Fallthrough src1.high == src2.high
        // CMP src1.low, src2.low
        // JCC target ;; Must do unsigned comparison on low bits
        //done:
        cmpHighAndJump(cmpHighJmpOp, jmpLabel);
        insertJNE();
        cmpLowAndJump(cmpLowJmpOp);
    };
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrTrue_A:
    case Js::OpCode::BrTrue_I4:
    {
        // For BrTrue, we only need to check the low bits
        // NOTE(review): a value with only high bits set would test as false here;
        // presumably such conditions cannot reach this path -- confirm with callers.
        // TEST src1.low, src1.low
        // JNE target
        IR::Instr* newInstr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src1Pair.low);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        // If src1 is not 0, jump to destination
        branchInstr->m_opcode = Js::OpCode::JNE;
        // Don't need the doneLabel for this case
        doneLabel->Remove();
        break;
    }
    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrFalse_I4:
    {
        // For BrFalse, we only need to check the low bits
        // TEST src1.low, src1.low
        // JEQ target
        IR::Instr* newInstr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src1Pair.low);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        // If src1 is 0, jump to destination
        branchInstr->m_opcode = Js::OpCode::JEQ;
        // Don't need the doneLabel for this case
        doneLabel->Remove();
        break;
    }
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrEq_I4:
        // Equal iff both halves are equal:
        // CMP src1.high, src2.high
        // JNE done
        // CMP src1.low, src2.low
        // JEQ target
        //done:
        cmpHighAndJump(Js::OpCode::JNE, doneLabel);
        cmpLowAndJump(Js::OpCode::JEQ);
        break;
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNeq_I4:
        // Not equal iff either half differs:
        // CMP src1.high, src2.high
        // JNE target
        // CMP src1.low, src2.low
        // JNE target
        //done:
        cmpHighAndJump(Js::OpCode::JNE, jmpLabel);
        cmpLowAndJump(Js::OpCode::JNE);
        // Don't need the doneLabel for this case
        doneLabel->Remove();
        break;
    // Unsigned compares: unsigned condition on both halves
    case Js::OpCode::BrUnGt_I4: cmpInt64Common(Js::OpCode::JA, Js::OpCode::JA); break;
    case Js::OpCode::BrUnGe_I4: cmpInt64Common(Js::OpCode::JA, Js::OpCode::JAE); break;
    case Js::OpCode::BrUnLt_I4: cmpInt64Common(Js::OpCode::JB, Js::OpCode::JB); break;
    case Js::OpCode::BrUnLe_I4: cmpInt64Common(Js::OpCode::JB, Js::OpCode::JBE); break;
    // Signed compares: signed condition on the high half (it carries the sign bit),
    // unsigned condition on the low half
    case Js::OpCode::BrGt_A: // Fall through
    case Js::OpCode::BrGt_I4: cmpInt64Common(Js::OpCode::JGT, Js::OpCode::JA); break;
    case Js::OpCode::BrGe_A: // Fall through
    case Js::OpCode::BrGe_I4: cmpInt64Common(Js::OpCode::JGT, Js::OpCode::JAE); break;
    case Js::OpCode::BrLt_A: // Fall through
    case Js::OpCode::BrLt_I4: cmpInt64Common(Js::OpCode::JLT, Js::OpCode::JB); break;
    case Js::OpCode::BrLe_A: // Fall through
    case Js::OpCode::BrLe_I4: cmpInt64Common(Js::OpCode::JLT, Js::OpCode::JBE); break;
    default:
        AssertMsg(UNREACHED, "Int64 branch opcode not supported");
        branchInstr->m_opcode = Js::OpCode::Nop;
    }
}
void
LowererMDArch::EmitInt4Instr(IR::Instr *instr)
{
    // Lowers a machine-independent 32-bit integer opcode into concrete x86
    // instruction(s). Simple arithmetic/bitwise ops map to one two-address
    // instruction (Legalize / MakeDstEquSrc1 fix up operand forms afterwards);
    // division and remainder expand to the x86 EDX:EAX idiom; branches expand
    // to TEST/CMP followed by the appropriate Jcc (the branch instr is reused).
    IR::Instr *newInstr;
    IR::Opnd *src1, *src2;
    IR::RegOpnd *regEDX;
    bool legalize = false;
    switch(instr->m_opcode)
    {
    case Js::OpCode::Neg_I4:
        instr->m_opcode = Js::OpCode::NEG;
        break;
    case Js::OpCode::Not_I4:
        instr->m_opcode = Js::OpCode::NOT;
        break;
    case Js::OpCode::Add_I4:
        LowererMD::ChangeToAdd(instr, false /* needFlags */);
        legalize = true;
        break;
    case Js::OpCode::Sub_I4:
        LowererMD::ChangeToSub(instr, false /* needFlags */);
        legalize = true;
        break;
    case Js::OpCode::Mul_I4:
        instr->m_opcode = Js::OpCode::IMUL2;
        break;
    case Js::OpCode::DivU_I4:
    case Js::OpCode::Div_I4:
        // x86 DIV/IDIV leaves the quotient in EAX
        instr->SinkDst(Js::OpCode::MOV, RegEAX);
        goto idiv_common;
    case Js::OpCode::RemU_I4:
    case Js::OpCode::Rem_I4:
        // x86 DIV/IDIV leaves the remainder in EDX
        instr->SinkDst(Js::OpCode::MOV, RegEDX);
    idiv_common:
        // DIV/IDIV divides the 64-bit value in EDX:EAX by the src2 operand.
        if (instr->GetSrc1()->IsUInt32())
        {
            Assert(instr->GetSrc2()->IsUInt32());
            Assert(instr->m_opcode == Js::OpCode::RemU_I4 || instr->m_opcode == Js::OpCode::DivU_I4);
            instr->m_opcode = Js::OpCode::DIV;
        }
        else
        {
            instr->m_opcode = Js::OpCode::IDIV;
        }
        // Dividend must be in EAX
        instr->HoistSrc1(Js::OpCode::MOV, RegEAX);
        regEDX = IR::RegOpnd::New(TyInt32, instr->m_func);
        regEDX->SetReg(RegEDX);
        // NOTE(review): GetSrc1() now refers to the operand produced by the hoist;
        // presumably it keeps the original type, so this re-check matches the one above.
        if (instr->GetSrc1()->IsUInt32())
        {
            // Unsigned divide: zero-extend the dividend by clearing EDX.
            // we need to ensure that register allocator doesn't muck about with edx
            instr->HoistSrc2(Js::OpCode::MOV, RegECX);
            newInstr = IR::Instr::New(Js::OpCode::Ld_I4, regEDX, IR::IntConstOpnd::New(0, TyInt32, instr->m_func), instr->m_func);
            instr->InsertBefore(newInstr);
            LowererMD::ChangeToAssign(newInstr);
            // NOP ensures that the EDX = Ld_I4 0 doesn't get deadstored, will be removed in peeps
            instr->InsertBefore(IR::Instr::New(Js::OpCode::NOP, regEDX, regEDX, instr->m_func));
        }
        else
        {
            if (instr->GetSrc2()->IsImmediateOpnd())
            {
                // DIV/IDIV has no immediate form; load the divisor into a register
                instr->HoistSrc2(Js::OpCode::MOV);
            }
            // Signed divide: sign-extend EAX into EDX
            instr->InsertBefore(IR::Instr::New(Js::OpCode::CDQ, regEDX, instr->m_func));
        }
        return;
    case Js::OpCode::Or_I4:
        instr->m_opcode = Js::OpCode::OR;
        break;
    case Js::OpCode::Xor_I4:
        instr->m_opcode = Js::OpCode::XOR;
        break;
    case Js::OpCode::And_I4:
        instr->m_opcode = Js::OpCode::AND;
        break;
    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
    case Js::OpCode::Rol_I4:
    case Js::OpCode::Ror_I4:
        LowererMD::ChangeToShift(instr, false /* needFlags */);
        legalize = true;
        break;
    case Js::OpCode::BrTrue_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br1_Common;
    case Js::OpCode::BrFalse_I4:
        instr->m_opcode = Js::OpCode::JEQ;
    br1_Common:
        // Single-operand branch: TEST src, src sets ZF iff src == 0,
        // then the original instr (now JNE/JEQ) consumes the flags.
        src1 = instr->UnlinkSrc1();
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        return;
    case Js::OpCode::BrEq_I4:
        instr->m_opcode = Js::OpCode::JEQ;
        goto br2_Common;
    case Js::OpCode::BrNeq_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br2_Common;
    case Js::OpCode::BrUnGt_I4:
        instr->m_opcode = Js::OpCode::JA;
        goto br2_Common;
    case Js::OpCode::BrUnGe_I4:
        instr->m_opcode = Js::OpCode::JAE;
        goto br2_Common;
    case Js::OpCode::BrUnLe_I4:
        instr->m_opcode = Js::OpCode::JBE;
        goto br2_Common;
    case Js::OpCode::BrUnLt_I4:
        instr->m_opcode = Js::OpCode::JB;
        goto br2_Common;
    case Js::OpCode::BrGt_I4:
        instr->m_opcode = Js::OpCode::JGT;
        goto br2_Common;
    case Js::OpCode::BrGe_I4:
        instr->m_opcode = Js::OpCode::JGE;
        goto br2_Common;
    case Js::OpCode::BrLe_I4:
        instr->m_opcode = Js::OpCode::JLE;
        goto br2_Common;
    case Js::OpCode::BrLt_I4:
        instr->m_opcode = Js::OpCode::JLT;
    br2_Common:
        // Two-operand branch: CMP src1, src2 then the Jcc chosen above.
        src1 = instr->UnlinkSrc1();
        src2 = instr->UnlinkSrc2();
        newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src2);
        return;
    default:
        AssertMsg(UNREACHED, "Un-implemented int4 opcode");
    }
    // Cases that fell through changed only the opcode; fix up the operand forms.
    if(legalize)
    {
        LowererMD::Legalize(instr);
    }
    else
    {
        // OpEq's: x86 two-address form requires dst and src1 to be the same register
        LowererMD::MakeDstEquSrc1(instr);
    }
}
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Converts an int32/uint32 value to a tagged var, boxing into a heap number
    // only when the value does not fit in a tagged int:
    // s2 = MOV src1
    // s2 = SHL s2, Js::VarTag_Shift -- restore the var tag on the result
    // JO $ToVar                     -- shift overflowed: value too wide for a tagged int
    // JB $ToVar [isFromUint32]      -- CF holds the shifted-out top bit; set means the
    //                                  uint32 value is too large for a tagged int
    // s2 = INC s2                   -- set the tag bit (+1)
    // dst = MOV s2
    // JMP $done
    //$ToVar:
    // EmitLoadVarNoCheck            -- slow path: box into a number object
    //$Done:
    AssertMsg(instrLoad->GetSrc1()->IsRegOpnd(), "Should be regOpnd");
    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelToVar = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    IR::Instr *instr;
    if (src1->IsTaggedInt())
    {
        // Known tagged int: the fast path always succeeds, no checks needed
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        // Known not-int: skip the fast path entirely
        isNotInt = true;
    }
    if (!isNotInt)
    {
        // s2 = MOV s1 -- operate on the 32-bit views so the flags come out right
        IR::Opnd * opnd32src1 = src1->UseWithNewType(TyInt32, this->m_func);
        IR::RegOpnd * opndReg2 = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::Opnd * opnd32Reg2 = opndReg2->UseWithNewType(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opnd32Reg2, opnd32src1, this->m_func);
        instrLoad->InsertBefore(instr);
        // s2 = SHL s2, Js::VarTag_Shift -- restore the var tag on the result
        instr = IR::Instr::New(Js::OpCode::SHL, opnd32Reg2, opnd32Reg2,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
            this->m_func);
        instrLoad->InsertBefore(instr);
        if (!isInt)
        {
            // JO $ToVar -- signed overflow during the shift: must box
            labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr = IR::BranchInstr::New(Js::OpCode::JO, labelToVar, this->m_func);
            instrLoad->InsertBefore(instr);
            if (isFromUint32)
            {
                // JB $ToVar [isFromUint32] -- CF set means the top bit was shifted out,
                // i.e. the unsigned value is too large for a tagged int
                instr = IR::BranchInstr::New(Js::OpCode::JB, labelToVar, this->m_func);
                instrLoad->InsertBefore(instr);
            }
        }
        // s2 = INC s2 -- set the tag bit
        instr = IR::Instr::New(Js::OpCode::INC, opndReg2, opndReg2, this->m_func);
        instrLoad->InsertBefore(instr);
        // dst = MOV s2
        instr = IR::Instr::New(Js::OpCode::MOV, instrLoad->GetDst(), opndReg2, this->m_func);
        instrLoad->InsertBefore(instr);
        if (!isInt)
        {
            // JMP $done -- skip the boxing path
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
            instrLoad->InsertBefore(instr);
        }
    }
    if (!isInt)
    {
        //$ToVar:
        if (labelToVar)
        {
            instrLoad->InsertBefore(labelToVar);
        }
        // Slow path: box the value
        this->lowererMD->EmitLoadVarNoCheck(instrLoad->GetDst()->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper || labelToVar != nullptr);
    }
    //$Done:
    if (labelDone)
    {
        instrLoad->InsertAfter(labelDone);
    }
    // The original load has been fully expanded in place; remove it
    instrLoad->Remove();
}
  2082. void
  2083. LowererMDArch::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2084. {
  2085. // We should only generate this if sse2 is available
  2086. Assert(AutoSystemInfo::Data.SSE2Available());
  2087. Assert(dst->IsRegOpnd() && dst->IsFloat());
  2088. Assert(src->IsRegOpnd() && (src->GetType() == TyInt32 || src->GetType() == TyUint32));
  2089. instrInsert->InsertBefore(IR::Instr::New(dst->IsFloat64() ? Js::OpCode::CVTSI2SD : Js::OpCode::CVTSI2SS, dst, src, this->m_func));
  2090. }
  2091. void
  2092. LowererMDArch::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2093. {
  2094. // We should only generate this if sse2 is available
  2095. Assert(AutoSystemInfo::Data.SSE2Available());
  2096. IR::Opnd* origDst = nullptr;
  2097. if (dst->IsFloat32())
  2098. {
  2099. origDst = dst;
  2100. dst = IR::RegOpnd::New(TyFloat64, this->m_func);
  2101. }
  2102. this->lowererMD->EmitIntToFloat(dst, src, instrInsert);
  2103. IR::RegOpnd * highestBitOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  2104. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, highestBitOpnd, src, this->m_func);
  2105. instrInsert->InsertBefore(instr);
  2106. instr = IR::Instr::New(Js::OpCode::SHR, highestBitOpnd, highestBitOpnd,
  2107. IR::IntConstOpnd::New(31, TyInt8, this->m_func, true), this->m_func);
  2108. instrInsert->InsertBefore(instr);
  2109. // TODO: Encode indir with base as address opnd instead
  2110. IR::RegOpnd * baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  2111. instr = IR::Instr::New(Js::OpCode::MOV, baseOpnd, IR::AddrOpnd::New(m_func->GetThreadContextInfo()->GetUIntConvertConstAddr(),
  2112. IR::AddrOpndKindDynamicMisc, this->m_func), this->m_func);
  2113. instrInsert->InsertBefore(instr);
  2114. instr = IR::Instr::New(Js::OpCode::ADDSD, dst, dst, IR::IndirOpnd::New(baseOpnd,
  2115. highestBitOpnd, IndirScale8, TyFloat64, this->m_func), this->m_func);
  2116. instrInsert->InsertBefore(instr);
  2117. if (origDst)
  2118. {
  2119. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSD2SS, origDst, dst, this->m_func));
  2120. }
  2121. }
  2122. void
  2123. LowererMDArch::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2124. {
  2125. Assert(dst->IsRegOpnd() && dst->IsInt64());
  2126. Assert(src->IsInt32());
  2127. Func* func = instrInsert->m_func;
  2128. Int64RegPair dstPair = func->FindOrCreateInt64Pair(dst);
  2129. IR::RegOpnd *regEAX = IR::RegOpnd::New(TyMachPtr, func);
  2130. regEAX->SetReg(RegEAX);
  2131. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, regEAX, src, func));
  2132. IR::RegOpnd *regEDX = IR::RegOpnd::New(TyMachPtr, func);
  2133. regEDX->SetReg(RegEDX);
  2134. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CDQ, regEDX, func));
  2135. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dstPair.low, regEAX, func));
  2136. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dstPair.high, regEDX, func));
  2137. }
  2138. void
  2139. LowererMDArch::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2140. {
  2141. Assert(dst->IsRegOpnd() && dst->IsInt64());
  2142. Assert(src->IsUInt32());
  2143. Func* func = instrInsert->m_func;
  2144. Int64RegPair dstPair = func->FindOrCreateInt64Pair(dst);
  2145. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dstPair.high, IR::IntConstOpnd::New(0, TyInt32, func), func));
  2146. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dstPair.low, src, func));
  2147. }
  2148. void
  2149. LowererMDArch::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2150. {
  2151. Assert(dst->IsRegOpnd() && dst->IsInt32());
  2152. Assert(src->IsInt64());
  2153. Func* func = instrInsert->m_func;
  2154. Int64RegPair srcPair = func->FindOrCreateInt64Pair(src);
  2155. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, srcPair.low, func));
  2156. }
bool
LowererMDArch::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    // Converts a var source to an int32 destination, with an inline fast path
    // for tagged ints and an optional fast path for JavascriptNumber floats.
    // Returns true iff the caller must still emit bailout handling (the
    // int-only bailout case where LoadInt32FromUntaggedVar was emitted).
    //
    // if(doShiftFirst)
    // {
    //     r1 = MOV src1
    //     r1 = SAR r1, VarTag_Shift (move last-shifted bit into CF)
    //     JAE (CF == 0) $helper or $float
    // }
    // else
    // {
    //     TEST src1, AtomTag
    //     JEQ $helper or $float
    //     r1 = MOV src1
    //     r1 = SAR r1, VarTag_Shift
    // }
    // dst = MOV r1
    //       JMP $Done
    // $float:
    //     dst = ConvertToFloat(src1, $helper)
    // $Helper
    //     dst = ToInt32(src1)
    // $Done
    AssertMsg(instrLoad->GetSrc1()->IsRegOpnd(), "Should be regOpnd");
    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelHelper = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    IR::LabelInstr* labelFloat = nullptr;
    IR::Instr *instr;
    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    const ValueType src1ValueType(src1->GetValueType());
    const bool doShiftFirst = src1ValueType.IsLikelyTaggedInt(); // faster to shift and check flags if it's likely tagged
    // Inline float->int conversion only when profitable and not under an
    // int-only bailout (which must not accept floats), and only with SSE2.
    const bool doFloatToIntFastPath =
        (src1ValueType.IsLikelyFloat() || src1ValueType.IsLikelyUntaggedInt()) &&
        !(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger)) &&
        AutoSystemInfo::Data.SSE2Available();
    IR::RegOpnd * r1 = nullptr;
    if(doShiftFirst)
    {
        // r1 = MOV src1 -- copy first so the SAR doesn't clobber src1
        r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
        r1->SetValueType(src1->GetValueType());
        instr = IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func);
        instrLoad->InsertBefore(instr);
    }
    if (isNotInt)
    {
        // Known to be non-integer. If we are required to bail out on helper call, just re-jit.
        if (!doFloatToIntFastPath && bailOutOnHelper)
        {
            if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
            {
                // Array access helper call removal is already off for some reason. Prevent trying to rejit again
                // because it won't help and the same thing will happen again. Just abort jitting this function.
                if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
                {
                    Output::Print(_u("    Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
                    Output::Flush();
                }
                throw Js::OperationAbortedException();
            }
            throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
        }
    }
    else
    {
        // It could be an integer in this case
        if(doShiftFirst)
        {
            // r1 = SAR r1, VarTag_Shift (move last-shifted bit into CF)
            Assert(r1);
            instr = IR::Instr::New(Js::OpCode::SAR, r1, r1,
                IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrLoad->m_func), instrLoad->m_func);
            instrLoad->InsertBefore(instr);
        }
        // We do not know for sure it is an integer - add a Smint test
        if (!isInt)
        {
            if(doFloatToIntFastPath)
            {
                labelFloat = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
            }
            else
            {
                labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            }
            if(doShiftFirst)
            {
                // JAE (CF == 0) $helper or $float -- CF was the tag bit shifted out by SAR
                instrLoad->InsertBefore(
                    IR::BranchInstr::New(Js::OpCode::JAE, labelFloat ? labelFloat : labelHelper, this->m_func));
            }
            else
            {
                // TEST src1, AtomTag
                // JEQ $helper or $float
                this->lowererMD->GenerateSmIntTest(src1, instrLoad, labelFloat ? labelFloat : labelHelper);
            }
        }
        if(!doShiftFirst)
        {
            if(src1->IsEqual(instrLoad->GetDst()))
            {
                // Go ahead and change src1, since it was already confirmed that we won't bail out or go to helper where src1
                // may be used
                r1 = src1;
            }
            else
            {
                // r1 = MOV src1
                Assert(!r1);
                r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
                r1->SetValueType(src1->GetValueType());
                instr = IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func);
                instrLoad->InsertBefore(instr);
            }
            // r1 = SAR r1, VarTag_Shift -- strip the tag, recovering the int32 value
            Assert(r1);
            instr = IR::Instr::New(Js::OpCode::SAR, r1, r1,
                IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrLoad->m_func), instrLoad->m_func);
            instrLoad->InsertBefore(instr);
        }
        // dst = MOV r1
        Assert(r1);
        instr = IR::Instr::New(Js::OpCode::MOV, instrLoad->GetDst(), r1, instrLoad->m_func);
        instrLoad->InsertBefore(instr);
        if (!isInt)
        {
            // JMP $Done -- skip the float/helper paths
            labelDone = instrLoad->GetOrCreateContinueLabel();
            instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
            instrLoad->InsertBefore(instr);
        }
    }
    // if it is not an int - we need to convert.
    if (!isInt)
    {
        if(doFloatToIntFastPath)
        {
            if(labelFloat)
            {
                instrLoad->InsertBefore(labelFloat);
            }
            if(!labelHelper)
            {
                labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            }
            if(!labelDone)
            {
                labelDone = instrLoad->GetOrCreateContinueLabel();
            }
            // Verify it really is a JavascriptNumber, then convert its double value inline
            this->lowererMD->GenerateFloatTest(src1, instrLoad, labelHelper, instrLoad->HasBailOutInfo());
            IR::Opnd* floatOpnd = IR::IndirOpnd::New(src1, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
            this->lowererMD->ConvertFloatToInt32(instrLoad->GetDst(), floatOpnd, labelHelper, labelDone, instrLoad);
        }
        // $Helper
        // dst = ToInt32(r1)
        // $Done
        if (labelHelper)
        {
            instrLoad->InsertBefore(labelHelper);
        }
        if(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger))
        {
            // Avoid bailout if we have a JavascriptNumber whose value is a signed 32-bit integer
            lowererMD->m_lowerer->LoadInt32FromUntaggedVar(instrLoad);
            // Need to bail out instead of calling a helper
            return true;
        }
        if (bailOutOnHelper)
        {
            // Caller wants a bailout rather than a helper call
            Assert(labelBailOut);
            lowererMD->m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrLoad);
            instrLoad->Remove();
        }
        else if (conversionFromObjectAllowed)
        {
            lowererMD->m_lowerer->LowerUnaryHelperMem(instrLoad, IR::HelperConv_ToInt32);
        }
        else
        {
            lowererMD->m_lowerer->LowerUnaryHelperMemWithBoolReference(instrLoad, IR::HelperConv_ToInt32_NoObjects, true /*useBoolForBailout*/);
        }
    }
    else
    {
        // Known tagged int: the inline path fully replaced the load
        instrLoad->Remove();
    }
    return false;
}
IR::Instr *
LowererMDArch::LoadCheckedFloat(
    IR::RegOpnd *opndOrig,
    IR::RegOpnd *opndFloat,
    IR::LabelInstr *labelInline,
    IR::LabelInstr *labelHelper,
    IR::Instr *instrInsert,
    const bool checkForNullInLoopBody)
{
    // Load one floating-point var into an XMM register, inserting checks to see if it's really a float:
    // TEST src, 1                           -- AtomTag test: tagged int?
    // JEQ $non-int                          -- tag bit clear: not a tagged int
    // t0 = MOV src                          -- convert a tagged int to float
    // t0 = SAR t0, 1                        -- strip the tag, recovering the int32
    // flt = CVTSI2SD t0
    // JMP $labelInline
    // $non-int
    // CMP [src], JavascriptNumber::`vtable' -- emitted by GenerateFloatTest
    // JNE $labelHelper
    // flt = MOVSD [src + offset(value)]
    // Returns the first instruction inserted (the TEST).
    IR::Opnd * opnd;
    IR::Instr * instr;
    IR::Instr * instrFirst = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instrFirst->SetSrc1(opndOrig);
    instrFirst->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func));
    instrInsert->InsertBefore(instrFirst);
    // labelVar marks the non-tagged-int (object) path
    IR::LabelInstr * labelVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelVar, this->m_func);
    instrInsert->InsertBefore(instr);
    if (opndOrig->GetValueType().IsLikelyFloat())
    {
        // Make this path helper if value is likely a float
        instrInsert->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true));
    }
    // Tagged-int path: untag and convert to double
    opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opnd, opndOrig, this->m_func);
    instrInsert->InsertBefore(instr);
    instr = IR::Instr::New(
        Js::OpCode::SAR, opnd, opnd, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);
    instr = IR::Instr::New(Js::OpCode::CVTSI2SD, opndFloat, opnd, this->m_func);
    instrInsert->InsertBefore(instr);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelInline, this->m_func);
    instrInsert->InsertBefore(instr);
    // Non-int path: verify it is a JavascriptNumber, then load its double value
    instrInsert->InsertBefore(labelVar);
    lowererMD->GenerateFloatTest(opndOrig, instrInsert, labelHelper, checkForNullInLoopBody);
    opnd = IR::IndirOpnd::New(opndOrig, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVSD, opndFloat, opnd, this->m_func);
    instrInsert->InsertBefore(instr);
    return instrFirst;
}
  2407. IR::LabelInstr *
  2408. LowererMDArch::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
  2409. {
  2410. IR::Instr * exitPrevInstr = exitTargetInstr->m_prev;
  2411. // On x86 we push and pop the out param area, but the start call can be moved passed the bailout instruction
  2412. // which we don't keep track of. There isn't a flow based pass after lowerer,
  2413. // So we don't know how much stack we need to pop. Instead, generate a landing area to restore the stack
  2414. // Via EBP, the prolog/epilog phase will fix up the size from EBP we need to restore to ESP before the epilog
  2415. if (bailOutInfo->startCallCount != 0)
  2416. {
  2417. if (this->bailOutStackRestoreLabel == nullptr)
  2418. {
  2419. if (exitPrevInstr->HasFallThrough())
  2420. {
  2421. // Branch around the stack reload
  2422. IR::BranchInstr * branchToExit = IR::BranchInstr::New(Js::OpCode::JMP, exitTargetInstr, this->m_func);
  2423. exitPrevInstr->InsertAfter(branchToExit);
  2424. exitPrevInstr = branchToExit;
  2425. }
  2426. this->bailOutStackRestoreLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  2427. IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, RegEBP, TyMachReg, this->m_func);
  2428. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, RegESP, TyMachReg, this->m_func);
  2429. // -4 for now, fix up in prolog/epilog phase
  2430. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(ebpOpnd, (size_t)-4, TyMachReg, this->m_func);
  2431. // Lower this after register allocation, once we know the frame size.
  2432. IR::Instr *bailOutStackRestoreInstr = IR::Instr::New(Js::OpCode::BailOutStackRestore, espOpnd, indirOpnd, this->m_func);
  2433. exitPrevInstr->InsertAfter(bailOutStackRestoreInstr);
  2434. exitPrevInstr->InsertAfter(this->bailOutStackRestoreLabel);
  2435. }
  2436. // Jump to the stack restore label instead
  2437. exitTargetInstr = this->bailOutStackRestoreLabel;
  2438. }
  2439. return exitTargetInstr;
  2440. }
  2441. ///----------------------------------------------------------------------------
  2442. ///
  2443. /// LowererMDArch::GenerateFastShiftLeft
  2444. ///
  2445. ///----------------------------------------------------------------------------
  2446. bool
  2447. LowererMDArch::GenerateFastShiftLeft(IR::Instr * instrShift)
  2448. {
  2449. // Given:
  2450. //
  2451. // dst = Shl src1, src2
  2452. //
  2453. // Generate:
  2454. //
  2455. // (If not 2 Int31's, jump to $helper.)
  2456. // s1 = MOV src1
  2457. // s1 = SAR s1, Js::VarTag_Shift -- Remove the var tag from the value to be shifted
  2458. // s2 = MOV src2
  2459. // s2 = SAR s2, Js::VarTag_Shift -- extract the real shift amount from the var
  2460. // s1 = SHL s1, s2 -- do the inline shift
  2461. // s3 = MOV s1
  2462. // s3 = SHL s3, Js::VarTag_Shift -- restore the var tag on the result
  2463. // JO $ToVar
  2464. // s3 = INC s3
  2465. // dst = MOV s3
  2466. // JMP $fallthru
  2467. //$ToVar:
  2468. // PUSH scriptContext
  2469. // PUSH s1
  2470. // dst = ToVar()
  2471. // JMP $fallthru
  2472. // $helper:
  2473. // (caller generates helper call)
  2474. // $fallthru:
  2475. IR::LabelInstr * labelHelper = nullptr;
  2476. IR::LabelInstr * labelFallThru;
  2477. IR::Instr * instr;
  2478. IR::RegOpnd * opndReg1;
  2479. IR::RegOpnd * opndReg2;
  2480. IR::Opnd * opndSrc1;
  2481. IR::Opnd * opndSrc2;
  2482. opndSrc1 = instrShift->GetSrc1();
  2483. opndSrc2 = instrShift->GetSrc2();
  2484. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Shl instruction");
  2485. // Not tagged ints?
  2486. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  2487. {
  2488. return true;
  2489. }
  2490. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  2491. {
  2492. return true;
  2493. }
  2494. // Tagged ints?
  2495. bool isTaggedInts = false;
  2496. bool src2IsIntConst = false;
  2497. IntConstType s2Value = 0;
  2498. if (opndSrc2->IsRegOpnd())
  2499. {
  2500. if (opndSrc2->AsRegOpnd()->IsTaggedInt())
  2501. {
  2502. if (opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst())
  2503. {
  2504. src2IsIntConst = true;
  2505. s2Value = opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue();
  2506. s2Value = (s2Value & 0x1F);
  2507. }
  2508. if (opndSrc1->IsTaggedInt())
  2509. {
  2510. isTaggedInts = true;
  2511. }
  2512. }
  2513. }
  2514. else
  2515. {
  2516. AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
  2517. "Expect src2 of shift right to be reg or Var.");
  2518. src2IsIntConst = true;
  2519. s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
  2520. s2Value = (s2Value & 0x1F);
  2521. if (opndSrc1->IsTaggedInt())
  2522. {
  2523. isTaggedInts = true;
  2524. }
  2525. }
  2526. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
  2527. if (!isTaggedInts)
  2528. {
  2529. // (If not 2 Int31's, jump to $helper.)
  2530. lowererMD->GenerateSmIntPairTest(instrShift, opndSrc1, opndSrc2, labelHelper);
  2531. }
  2532. // s1 = MOV src1
  2533. opndReg1 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
  2534. instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, instrShift->m_func);
  2535. instrShift->InsertBefore(instr);
  2536. // s1 = SAR s1, Js::VarTag_Shift
  2537. //
  2538. // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
  2539. //
  2540. IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
  2541. instr = IR::Instr::New(
  2542. Js::OpCode::SAR, opnd32Reg1, opnd32Reg1,
  2543. IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
  2544. instrShift->InsertBefore(instr);
  2545. IR::Opnd *countOpnd;
  2546. if (src2IsIntConst)
  2547. {
  2548. countOpnd = IR::IntConstOpnd::New(s2Value, TyMachReg, instrShift->m_func);
  2549. }
  2550. else
  2551. {
  2552. // s2 = MOV src2
  2553. opndReg2 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
  2554. // Shift count needs to be in ECX
  2555. opndReg2->SetReg(this->GetRegShiftCount());
  2556. instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, instrShift->m_func);
  2557. instrShift->InsertBefore(instr);
  2558. // s2 = SAR s2, Js::VarTag_Shift
  2559. //
  2560. // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
  2561. //
  2562. IR::Opnd * opnd32Reg2 = opndReg2->UseWithNewType(TyInt32, instrShift->m_func);
  2563. instr = IR::Instr::New(
  2564. Js::OpCode::SAR, opnd32Reg2, opnd32Reg2,
  2565. IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
  2566. instrShift->InsertBefore(instr);
  2567. countOpnd = opndReg2;
  2568. }
  2569. // s1 = SHL s1, s2
  2570. //
  2571. // Ecmascript spec says we only need mask the shift amount by 0x1F. But intel uses 0x3F
  2572. // for 64 operands. Hence using 32 bits. opnd32Reg1 is already refined. reusing that.
  2573. //
  2574. instr = IR::Instr::New(Js::OpCode::SHL, opnd32Reg1, opnd32Reg1, countOpnd, instrShift->m_func);
  2575. instrShift->InsertBefore(instr);
  2576. // s3 = MOV s1
  2577. IR::RegOpnd * opndReg3 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
  2578. IR::Opnd * opnd32Reg3 = opndReg3->UseWithNewType(TyInt32, instrShift->m_func);
  2579. instr = IR::Instr::New(Js::OpCode::MOV, opnd32Reg3, opnd32Reg1, instrShift->m_func);
  2580. instrShift->InsertBefore(instr);
  2581. // s3 = SHL s3, Js::VarTag_Shift -- restore the var tag on the result
  2582. //
  2583. // Ecmascript spec says we only need mask the shift amount by 0x1F. But intel uses 0x3F
  2584. // for 64 operands. Hence using 32 bits. opnd32Reg1 is already refined. reusing that.
  2585. //
  2586. instr = IR::Instr::New(
  2587. Js::OpCode::SHL, opnd32Reg3, opnd32Reg3,
  2588. IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
  2589. instrShift->InsertBefore(instr);
  2590. // JO $ToVar
  2591. IR::LabelInstr *labelToVar = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
  2592. instr = IR::BranchInstr::New(Js::OpCode::JO, labelToVar, instrShift->m_func);
  2593. instrShift->InsertBefore(instr);
  2594. // s3 = INC s3
  2595. instr = IR::Instr::New(Js::OpCode::INC, opndReg3, opndReg3, instrShift->m_func);
  2596. instrShift->InsertBefore(instr);
  2597. // dst = MOV s3
  2598. instr = IR::Instr::New(Js::OpCode::MOV, instrShift->GetDst(), opndReg3, instrShift->m_func);
  2599. instrShift->InsertBefore(instr);
  2600. // JMP $fallthru
  2601. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
  2602. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrShift->m_func);
  2603. instrShift->InsertBefore(instr);
  2604. //$ToVar:
  2605. instrShift->InsertBefore(labelToVar);
  2606. IR::JnHelperMethod helperMethod;
  2607. IR::Opnd *dst;
  2608. dst = instrShift->GetDst();
  2609. if (instrShift->dstIsTempNumber)
  2610. {
  2611. IR::Opnd *tempOpnd;
  2612. helperMethod = IR::HelperOp_Int32ToAtomInPlace;
  2613. Assert(dst->IsRegOpnd());
  2614. StackSym * tempNumberSym = lowererMD->GetLowerer()->GetTempNumberSym(dst, instrShift->dstIsTempNumberTransferred);
  2615. IR::Instr *load = lowererMD->LoadStackAddress(tempNumberSym);
  2616. instrShift->InsertBefore(load);
  2617. tempOpnd = load->GetDst();
  2618. this->LoadHelperArgument(instrShift, tempOpnd);
  2619. }
  2620. else
  2621. {
  2622. helperMethod = IR::HelperOp_Int32ToAtom;
  2623. }
  2624. // PUSH scriptContext
  2625. this->lowererMD->m_lowerer->LoadScriptContext(instrShift);
  2626. // PUSH s1
  2627. this->LoadHelperArgument(instrShift, opndReg1);
  2628. // dst = ToVar()
  2629. instr = IR::Instr::New(Js::OpCode::Call, dst,
  2630. IR::HelperCallOpnd::New(helperMethod, instrShift->m_func), instrShift->m_func);
  2631. instrShift->InsertBefore(instr);
  2632. this->LowerCall(instr, 0);
  2633. // JMP $fallthru
  2634. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrShift->m_func);
  2635. instrShift->InsertBefore(instr);
  2636. // $helper:
  2637. // (caller generates helper call)
  2638. // $fallthru:
  2639. instrShift->InsertBefore(labelHelper);
  2640. instrShift->InsertAfter(labelFallThru);
  2641. return true;
  2642. }
  2643. ///----------------------------------------------------------------------------
  2644. ///
  2645. /// LowererMDArch::GenerateFastShiftRight
  2646. ///
  2647. ///----------------------------------------------------------------------------
bool
LowererMDArch::GenerateFastShiftRight(IR::Instr * instrShift)
{
    // Emits the inline fast path for a Var shift-right (Shr_A emits a signed
    // SAR; ShrU_A emits an unsigned SHR). Each source is untagged inline when
    // it is (or tests as) a tagged int, otherwise converted through a helper
    // call. The 32-bit result is re-tagged inline when that provably cannot
    // overflow; otherwise it is boxed via EmitLoadVarNoCheck.
    // Returns false: the caller must NOT lower an additional helper call —
    // instrShift itself is rewritten into the final MOV.
    //
    // Given:
    //
    // dst = Shr/Sar src1, src2
    //
    // Generate:
    //
    // s1 = MOV src1
    // TEST s1, 1
    // JEQ $S1ToInt
    // s1 = SAR s1, VarTag_Shift -- extract the real shift amount from the var
    // JMP $src2
    //$S1ToInt:
    // PUSH scriptContext
    // PUSH s1
    // s1 = ToInt32()/ToUInt32
    //$src2:
    // Load s2
    // TEST s2, 1
    // JEQ $S2ToUInt
    // s2 = SAR s2, VarTag_Shift -- extract the real shift amount from the var
    // JMP $Shr
    //$S2ToUInt:
    // PUSH scriptContext
    // PUSH s2
    // s2 = ToUInt32()
    //$Shr:
    // s1 = SHR/SAR s1, s2 -- do the inline shift
    // s3 = MOV s1
    //ECX = MOV s2
    // s3 = SHL s3, ECX -- To tagInt
    // JO $ToVar
    // JS $ToVar
    // s3 = INC s3
    // JMP $done
    //$ToVar:
    // EmitLoadVarNoCheck
    //$Done:
    // dst = MOV s3
    IR::LabelInstr * labelS1ToInt = nullptr;
    IR::LabelInstr * labelSrc2 = nullptr;
    IR::LabelInstr * labelS2ToUInt = nullptr;
    IR::LabelInstr * labelShr = nullptr;
    IR::LabelInstr * labelToVar = nullptr;
    IR::LabelInstr * labelDone = nullptr;
    IR::Instr * instr;
    IR::RegOpnd * opndReg1;
    IR::RegOpnd * opndReg2;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;
    bool src1IsInt = false;
    bool src1IsNotInt = false;
    bool src2IsInt = false;
    bool src2IsIntConst = false;
    bool src2IsNotInt = false;
    bool resultIsTaggedInt = false;
    bool isUnsignedShift = (instrShift->m_opcode == Js::OpCode::ShrU_A);
    opndSrc1 = instrShift->UnlinkSrc1();
    opndSrc2 = instrShift->UnlinkSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Shl instruction");
    if (instrShift->HasBailOutInfo())
    {
        // Split the implicit-call bailout off the shift first so the helper
        // calls emitted below are not folded under the bailout check.
        IR::Instr * bailOutInstr = this->lowererMD->m_lowerer->SplitBailOnImplicitCall(instrShift);
        this->lowererMD->m_lowerer->LowerBailOnEqualOrNotEqual(bailOutInstr);
    }
    AssertMsg(opndSrc1->IsRegOpnd(), "We expect this to be a regOpnd");
    opndReg1 = opndSrc1->AsRegOpnd();
    src1IsInt = opndReg1->IsTaggedInt();
    if (src1IsInt && !isUnsignedShift)
    {
        // A signed shift of a tagged int always fits back into a tagged int.
        // Not so for unsigned: -1 >>> 0 != taggedInt...
        resultIsTaggedInt = true;
    }
    src1IsNotInt = opndReg1->IsNotInt();
    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, instrShift->m_func);
    instrShift->InsertBefore(instr);
    IR::Opnd *dst = instrShift->GetDst();
    AssertMsg(dst->IsRegOpnd(), "We expect this to be a regOpnd");
    IntConstType s2Value = 0;
    if (opndSrc2->IsRegOpnd())
    {
        opndReg2 = opndSrc2->AsRegOpnd();
        src2IsInt = opndReg2->IsTaggedInt();
        src2IsIntConst = opndReg2->m_sym->IsTaggableIntConst();
        src2IsNotInt = opndReg2->IsNotInt();
    }
    else
    {
        AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
            "Expect src2 of shift right to be reg or Var.");
        src2IsInt = src2IsIntConst = true;
        opndReg2 = nullptr;
    }
    if (isUnsignedShift)
    {
        // We use the src2IsIntConst to combine the tag shifting with the actual shift.
        // The tag shift however needs to be a signed shift...
        src2IsIntConst = false;
        if (opndSrc2->IsAddrOpnd())
        {
            // Materialize the address constant into a register so it can be
            // untagged at runtime like any other src2.
            instr = lowererMD->CreateAssign(
                IR::RegOpnd::New(opndSrc2->GetType(), instrShift->m_func),
                opndSrc2, instrShift);
            opndSrc2 = instr->GetDst();
            opndReg2 = opndSrc2->AsRegOpnd();
        }
    }
    if (src2IsIntConst)
    {
        if (opndSrc2->IsRegOpnd())
        {
            AnalysisAssert(opndReg2);
            s2Value = opndReg2->m_sym->GetIntConstValue();
        }
        else
        {
            s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
        }
        // ECMAScript only observes the low 5 bits of the shift count.
        s2Value = (s2Value & 0x1F);
        if (s2Value >= Js::VarTag_Shift)
        {
            // Shifting right by >= VarTag_Shift leaves enough headroom for the
            // re-tagging SHL to never overflow.
            resultIsTaggedInt = true;
            if ((unsigned)(s2Value + Js::VarTag_Shift) > 0x1f)
            {
                // Can't combine the SHR with the AtomTag shift if we overflow...
                s2Value = 0;
                src2IsIntConst = false;
            }
        }
    }
    if (!src1IsNotInt)
    {
        if (!src1IsInt)
        {
            // TEST s1, AtomTag
            instr = IR::Instr::New(Js::OpCode::TEST, instrShift->m_func);
            instr->SetSrc1(opndReg1);
            instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrShift->m_func));
            instrShift->InsertBefore(instr);
            // JEQ $S1ToInt
            labelS1ToInt = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
            instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelS1ToInt, instrShift->m_func);
            instrShift->InsertBefore(instr);
        }
        // s1 = SAR s1, VarTag_Shift -- extract the real shift amount from the var
        //
        // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
        //
        // When src2 is a usable constant, s2Value is folded into this untagging
        // shift (VarTag_Shift + s2Value) so only one SAR is emitted.
        IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
        instr = IR::Instr::New(Js::OpCode::SAR, opnd32Reg1, opnd32Reg1,
            IR::IntConstOpnd::New(Js::VarTag_Shift + s2Value, TyInt8, instrShift->m_func), instrShift->m_func);
        instrShift->InsertBefore(instr);
        // JMP $src2
        labelSrc2 = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelSrc2, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }
    if (!src1IsInt)
    {
        if (labelS1ToInt)
        {
            //$S1ToInt:
            instrShift->InsertBefore(labelS1ToInt);
        }
        // PUSH scriptContext
        this->lowererMD->m_lowerer->LoadScriptContext(instrShift);
        // PUSH s1
        this->LoadHelperArgument(instrShift, opndReg1);
        // s1 = ToInt32()/ToUint32
        instr = IR::Instr::New(Js::OpCode::Call, opndReg1,
            IR::HelperCallOpnd::New((isUnsignedShift ? IR::HelperConv_ToUInt32_Full : IR::HelperConv_ToInt32_Full), instrShift->m_func),
            instrShift->m_func);
        instrShift->InsertBefore(instr);
        this->LowerCall(instr, 0);
        if (src2IsIntConst && s2Value != 0)
        {
            // The fast path above folded the constant shift into the untagging
            // SAR; the helper result is already untagged, so the shift must be
            // applied separately on this path.
            // s1 = SHR/SAR s1, s2 -- do the inline shift
            //
            // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
            //
            IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
            instr = IR::Instr::New(isUnsignedShift ? Js::OpCode::SHR : Js::OpCode::SAR,
                opnd32Reg1, opnd32Reg1, IR::IntConstOpnd::New(s2Value, TyInt8, instrShift->m_func), instrShift->m_func);
            instrShift->InsertBefore(instr);
        }
    }
    //$src2:
    if (labelSrc2)
    {
        instrShift->InsertBefore(labelSrc2);
    }
    if (!src2IsIntConst)
    {
        // Load s2
        opndReg2 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }
    if (!src2IsNotInt)
    {
        if (!src2IsInt)
        {
            // TEST s2, AtomTag
            instr = IR::Instr::New(Js::OpCode::TEST, instrShift->m_func);
            instr->SetSrc1(opndReg2);
            instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrShift->m_func));
            instrShift->InsertBefore(instr);
            // JEQ $S2ToUInt
            labelS2ToUInt = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
            instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelS2ToUInt, instrShift->m_func);
            instrShift->InsertBefore(instr);
        }
        if (!src2IsIntConst)
        {
            // s2 = SAR s2, VarTag_Shift -- extract the real shift amount from the var
            //
            // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
            //
            IR::Opnd * opnd32Reg2 = opndReg2->UseWithNewType(TyInt32, instrShift->m_func);
            instr = IR::Instr::New(Js::OpCode::SAR, opnd32Reg2, opnd32Reg2,
                IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
            instrShift->InsertBefore(instr);
        }
        // JMP $shr
        labelShr = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelShr, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }
    if (!src2IsInt)
    {
        if (labelS2ToUInt)
        {
            //$S2ToUInt:
            instrShift->InsertBefore(labelS2ToUInt);
        }
        // PUSH scriptContext
        this->lowererMD->m_lowerer->LoadScriptContext(instrShift);
        // PUSH s2
        this->LoadHelperArgument(instrShift, opndReg2);
        // s2 = ToUInt32()
        instr = IR::Instr::New(Js::OpCode::Call, opndReg2,
            IR::HelperCallOpnd::New(IR::HelperConv_ToUInt32_Full, instrShift->m_func), instrShift->m_func);
        instrShift->InsertBefore(instr);
        this->LowerCall(instr, 0);
    }
    //$Shr:
    if (labelShr)
    {
        instrShift->InsertBefore(labelShr);
    }
    if (!src2IsIntConst)
    {
        // s1 = SHR/SAR s1, s2 -- do the inline shift
        //
        // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
        //
        // A variable shift count must live in the shift-count register
        // (GetRegShiftCount, i.e. ECX/CL on x86/x64).
        IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
        IR::RegOpnd * opnd32Ecx = IR::RegOpnd::New(TyInt32, this->m_func);
        opnd32Ecx->SetReg(this->GetRegShiftCount());
        instr = IR::Instr::New(Js::OpCode::MOV, opnd32Ecx, opndReg2, this->m_func);
        instrShift->InsertBefore(instr);
        instr = IR::Instr::New(isUnsignedShift ? Js::OpCode::SHR : Js::OpCode::SAR,
            opnd32Reg1, opnd32Reg1, opnd32Ecx, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }
    // s3 = MOV s1
    IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
    IR::RegOpnd * opndReg3 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
    IR::Opnd * opnd32Reg3 = opndReg3->UseWithNewType(TyInt32, instrShift->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opnd32Reg3, opnd32Reg1, instrShift->m_func);
    instrShift->InsertBefore(instr);
    // s3 = SHL s3, VarTag_Shift -- To tagInt
    //
    // Ecmascript spec says we only need mask the shift amount by 0x1F. But intel uses 0x3F
    // for 64 operands. Hence using 32 bits.
    //
    instr = IR::Instr::New(Js::OpCode::SHL, opnd32Reg3, opnd32Reg3,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
    instrShift->InsertBefore(instr);
    if (!resultIsTaggedInt)
    {
        // JO $ToVar -- the re-tagging SHL overflowed the tagged-int range
        labelToVar = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JO, labelToVar, instrShift->m_func);
        instrShift->InsertBefore(instr);
        if (isUnsignedShift)
        {
            // JS $ToVar -- an unsigned result with the sign bit set cannot be a tagged int
            instr = IR::BranchInstr::New(Js::OpCode::JSB, labelToVar, instrShift->m_func);
            instrShift->InsertBefore(instr);
        }
    }
    // s3 = INC s3 -- set the AtomTag bit
    instr = IR::Instr::New(Js::OpCode::INC, opndReg3, opndReg3, instrShift->m_func);
    instrShift->InsertBefore(instr);
    if (!src1IsInt || !src2IsInt || !resultIsTaggedInt)
    {
        // JMP $done
        labelDone = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }
    if (!resultIsTaggedInt)
    {
        //$ToVar:
        instrShift->InsertBefore(labelToVar);
        // Box the untagged 32-bit value into a Var (s3) -- presumably
        // (dst, src) order per EmitLoadVarNoCheck's signature; verify there.
        this->lowererMD->EmitLoadVarNoCheck(opndReg3, opndReg1, instrShift, isUnsignedShift, true);
    }
    if (labelDone)
    {
        //$Done:
        instrShift->InsertBefore(labelDone);
    }
    // dst = MOV s3 -- reuse the original instruction as the final move
    instrShift->m_opcode = Js::OpCode::MOV;
    instrShift->SetSrc1(opndReg3);
    // Skip lowering call to helper
    return false;
}
  2971. ///----------------------------------------------------------------------------
  2972. ///
  2973. /// LowererMDArch::GenerateFastAnd
  2974. ///
  2975. ///----------------------------------------------------------------------------
  2976. bool
  2977. LowererMDArch::GenerateFastAnd(IR::Instr * instrAnd)
  2978. {
  2979. // Given:
  2980. //
  2981. // dst = And src1, src2
  2982. //
  2983. // Generate:
  2984. //
  2985. // s1 = MOV src1
  2986. // s1 = AND s1, src2 -- try an inline add
  2987. // TEST s1, 1 -- if both opnds are ints, the int tag will be set in the result
  2988. // JEQ $helper
  2989. // dst = MOV s1
  2990. // JMP $fallthru
  2991. // $helper:
  2992. // (caller generates helper sequence)
  2993. // $fallthru:
  2994. IR::Instr * instr;
  2995. IR::LabelInstr * labelHelper=nullptr;
  2996. IR::LabelInstr * labelFallThru;
  2997. IR::Opnd * opndReg;
  2998. IR::Opnd * opndSrc1;
  2999. IR::Opnd * opndSrc2;
  3000. opndSrc1 = instrAnd->GetSrc1();
  3001. opndSrc2 = instrAnd->GetSrc2();
  3002. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on And instruction");
  3003. // Not tagged ints?
  3004. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  3005. {
  3006. return true;
  3007. }
  3008. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  3009. {
  3010. return true;
  3011. }
  3012. // Tagged ints?
  3013. bool isTaggedInts = false;
  3014. if (opndSrc1->IsTaggedInt())
  3015. {
  3016. if (opndSrc2->IsTaggedInt())
  3017. {
  3018. isTaggedInts = true;
  3019. }
  3020. }
  3021. // s1 = MOV src1
  3022. opndReg = IR::RegOpnd::New(TyMachReg, instrAnd->m_func);
  3023. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, instrAnd->m_func);
  3024. instrAnd->InsertBefore(instr);
  3025. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst())
  3026. {
  3027. Js::Var value = Js::TaggedInt::ToVarUnchecked(opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue());
  3028. opndSrc2 = IR::AddrOpnd::New(value, IR::AddrOpndKindConstantVar, instrAnd->m_func);
  3029. }
  3030. // s1 = AND s1, src2
  3031. instr = IR::Instr::New(Js::OpCode::AND, opndReg, opndReg, opndSrc2, instrAnd->m_func);
  3032. instrAnd->InsertBefore(instr);
  3033. if (!isTaggedInts)
  3034. {
  3035. // TEST s1, 1
  3036. instr = IR::Instr::New(Js::OpCode::TEST, instrAnd->m_func);
  3037. instr->SetSrc1(opndReg);
  3038. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrAnd->m_func));
  3039. instrAnd->InsertBefore(instr);
  3040. // JNE $helper
  3041. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrAnd->m_func, true);
  3042. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, instrAnd->m_func);
  3043. instrAnd->InsertBefore(instr);
  3044. }
  3045. // dst = MOV s1
  3046. if (isTaggedInts)
  3047. {
  3048. // Reuse the existing instruction
  3049. instrAnd->m_opcode = Js::OpCode::MOV;
  3050. instrAnd->ReplaceSrc1(opndReg);
  3051. instrAnd->FreeSrc2();
  3052. // Skip lowering call to helper
  3053. return false;
  3054. }
  3055. instr = IR::Instr::New(Js::OpCode::MOV, instrAnd->GetDst(), opndReg, instrAnd->m_func);
  3056. instrAnd->InsertBefore(instr);
  3057. // JMP $fallthru
  3058. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrAnd->m_func);
  3059. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrAnd->m_func);
  3060. instrAnd->InsertBefore(instr);
  3061. // $helper:
  3062. // (caller generates helper sequence)
  3063. // $fallthru:
  3064. AssertMsg(labelHelper, "Should not be NULL");
  3065. instrAnd->InsertBefore(labelHelper);
  3066. instrAnd->InsertAfter(labelFallThru);
  3067. return true;
  3068. }
  3069. ///----------------------------------------------------------------------------
  3070. ///
  3071. /// LowererMDArch::GenerateFastOr
  3072. ///
  3073. ///----------------------------------------------------------------------------
  3074. bool
  3075. LowererMDArch::GenerateFastOr(IR::Instr * instrOr)
  3076. {
  3077. // Given:
  3078. //
  3079. // dst = Or src1, src2
  3080. //
  3081. // Generate:
  3082. //
  3083. // (If not 2 Int31's, jump to $helper.)
  3084. //
  3085. // s1 = MOV src1
  3086. // s1 = OR s1, src2 -- try an inline OR
  3087. // dst = MOV s1
  3088. // JMP $fallthru
  3089. // $helper:
  3090. // (caller generates helper sequence)
  3091. // $fallthru:
  3092. IR::Instr * instr;
  3093. IR::LabelInstr * labelHelper=nullptr;
  3094. IR::LabelInstr * labelFallThru;
  3095. IR::Opnd * opndReg;
  3096. IR::Opnd * opndSrc1;
  3097. IR::Opnd * opndSrc2;
  3098. opndSrc1 = instrOr->GetSrc1();
  3099. opndSrc2 = instrOr->GetSrc2();
  3100. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Or instruction");
  3101. // Not tagged ints?
  3102. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  3103. {
  3104. return true;
  3105. }
  3106. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  3107. {
  3108. return true;
  3109. }
  3110. // Tagged ints?
  3111. bool isTaggedInts = false;
  3112. if (opndSrc1->IsTaggedInt())
  3113. {
  3114. if (opndSrc2->IsTaggedInt())
  3115. {
  3116. isTaggedInts = true;
  3117. }
  3118. }
  3119. if (!isTaggedInts)
  3120. {
  3121. // (If not 2 Int31's, jump to $helper.)
  3122. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrOr->m_func, true);
  3123. lowererMD->GenerateSmIntPairTest(instrOr, opndSrc1, opndSrc2, labelHelper);
  3124. }
  3125. // s1 = MOV src1
  3126. opndReg = IR::RegOpnd::New(TyMachReg, instrOr->m_func);
  3127. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, instrOr->m_func);
  3128. instrOr->InsertBefore(instr);
  3129. // s1 = OR s1, src2
  3130. instr = IR::Instr::New(Js::OpCode::OR, opndReg, opndReg, opndSrc2, instrOr->m_func);
  3131. instrOr->InsertBefore(instr);
  3132. // dst = MOV s1
  3133. if (isTaggedInts)
  3134. {
  3135. // Reuse the existing instruction
  3136. instrOr->m_opcode = Js::OpCode::MOV;
  3137. instrOr->ReplaceSrc1(opndReg);
  3138. instrOr->FreeSrc2();
  3139. // Skip lowering call to helper
  3140. return false;
  3141. }
  3142. instr = IR::Instr::New(Js::OpCode::MOV, instrOr->GetDst(), opndReg, instrOr->m_func);
  3143. instrOr->InsertBefore(instr);
  3144. // JMP $fallthru
  3145. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrOr->m_func);
  3146. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrOr->m_func);
  3147. instrOr->InsertBefore(instr);
  3148. // $helper:
  3149. // (caller generates helper sequence)
  3150. // $fallthru:
  3151. AssertMsg(labelHelper, "Should not be NULL");
  3152. instrOr->InsertBefore(labelHelper);
  3153. instrOr->InsertAfter(labelFallThru);
  3154. return true;
  3155. }
  3156. ///----------------------------------------------------------------------------
  3157. ///
  3158. /// LowererMD::GenerateFastXor
  3159. ///
  3160. ///----------------------------------------------------------------------------
  3161. bool
  3162. LowererMDArch::GenerateFastXor(IR::Instr * instrXor)
  3163. {
  3164. // Given:
  3165. //
  3166. // dst = Xor src1, src2
  3167. //
  3168. // Generate:
  3169. //
  3170. // (If not 2 Int31's, jump to $helper.)
  3171. //
  3172. // s1 = MOV src1
  3173. // s1 = XOR s1, src2 -- try an inline XOR
  3174. // s1 = INC s1
  3175. // dst = MOV s1
  3176. // JMP $fallthru
  3177. // $helper:
  3178. // (caller generates helper sequence)
  3179. // $fallthru:
  3180. IR::Instr * instr;
  3181. IR::LabelInstr * labelHelper=nullptr;
  3182. IR::LabelInstr * labelFallThru;
  3183. IR::Opnd * opndReg;
  3184. IR::Opnd * opndSrc1;
  3185. IR::Opnd * opndSrc2;
  3186. opndSrc1 = instrXor->GetSrc1();
  3187. opndSrc2 = instrXor->GetSrc2();
  3188. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Xor instruction");
  3189. // Not tagged ints?
  3190. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  3191. {
  3192. return true;
  3193. }
  3194. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  3195. {
  3196. return true;
  3197. }
  3198. // Tagged ints?
  3199. bool isTaggedInts = false;
  3200. if (opndSrc1->IsTaggedInt())
  3201. {
  3202. if (opndSrc2->IsTaggedInt())
  3203. {
  3204. isTaggedInts = true;
  3205. }
  3206. }
  3207. if (!isTaggedInts)
  3208. {
  3209. // (If not 2 Int31's, jump to $helper.)
  3210. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrXor->m_func, true);
  3211. lowererMD->GenerateSmIntPairTest(instrXor, opndSrc1, opndSrc2, labelHelper);
  3212. }
  3213. // s1 = MOV src1
  3214. opndReg = IR::RegOpnd::New(TyMachReg, instrXor->m_func);
  3215. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, instrXor->m_func);
  3216. instrXor->InsertBefore(instr);
  3217. // s1 = XOR s1, src2
  3218. instr = IR::Instr::New(Js::OpCode::XOR, opndReg, opndReg, opndSrc2, instrXor->m_func);
  3219. instrXor->InsertBefore(instr);
  3220. // s1 = INC s1
  3221. instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, instrXor->m_func);
  3222. instrXor->InsertBefore(instr);
  3223. // dst = MOV s1
  3224. if (isTaggedInts)
  3225. {
  3226. // Reuse the existing instruction
  3227. instrXor->m_opcode = Js::OpCode::MOV;
  3228. instrXor->ReplaceSrc1(opndReg);
  3229. instrXor->FreeSrc2();
  3230. // Skip lowering call to helper
  3231. return false;
  3232. }
  3233. instr = IR::Instr::New(Js::OpCode::MOV, instrXor->GetDst(), opndReg, instrXor->m_func);
  3234. instrXor->InsertBefore(instr);
  3235. // JMP $fallthru
  3236. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrXor->m_func);
  3237. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrXor->m_func);
  3238. instrXor->InsertBefore(instr);
  3239. // $helper:
  3240. // (caller generates helper sequence)
  3241. // $fallthru:
  3242. AssertMsg(labelHelper, "Should not be NULL");
  3243. instrXor->InsertBefore(labelHelper);
  3244. instrXor->InsertAfter(labelFallThru);
  3245. return true;
  3246. }
  3247. //----------------------------------------------------------------------------
  3248. //
  3249. // LowererMD::GenerateFastNot
  3250. //
  3251. //----------------------------------------------------------------------------
  3252. bool
  3253. LowererMDArch::GenerateFastNot(IR::Instr * instrNot)
  3254. {
  3255. // Given:
  3256. //
  3257. // dst = Not src
  3258. //
  3259. // Generate:
  3260. //
  3261. // TEST src, 1 -- test for int src
  3262. // JEQ $helper
  3263. // dst = MOV src
  3264. // dst = NOT dst -- do an inline NOT
  3265. // dst = INC dst -- restore the var tag on the result (!1 becomes 0, INC to get 1 again)
  3266. // JMP $fallthru
  3267. // $helper:
  3268. // (caller generates helper call)
  3269. // $fallthru:
  3270. IR::Instr * instr;
  3271. IR::LabelInstr * labelHelper = nullptr;
  3272. IR::LabelInstr * labelFallThru = nullptr;
  3273. IR::Opnd * opndSrc1;
  3274. IR::Opnd * opndDst;
  3275. opndSrc1 = instrNot->GetSrc1();
  3276. AssertMsg(opndSrc1, "Expected src opnd on Not instruction");
  3277. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->IsIntConst())
  3278. {
  3279. IntConstType value = opndSrc1->AsRegOpnd()->m_sym->GetIntConstValue();
  3280. value = ~value;
  3281. instrNot->ClearBailOutInfo();
  3282. instrNot->FreeSrc1();
  3283. instrNot->SetSrc1(IR::AddrOpnd::NewFromNumber(value, instrNot->m_func));
  3284. instrNot = this->lowererMD->ChangeToAssign(instrNot);
  3285. // Skip lowering call to helper
  3286. return false;
  3287. }
  3288. bool isInt = (opndSrc1->IsTaggedInt());
  3289. if (!isInt)
  3290. {
  3291. // TEST src1, AtomTag
  3292. instr = IR::Instr::New(Js::OpCode::TEST, instrNot->m_func);
  3293. instr->SetSrc1(opndSrc1);
  3294. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrNot->m_func));
  3295. instrNot->InsertBefore(instr);
  3296. // JEQ $helper
  3297. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrNot->m_func, true);
  3298. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, instrNot->m_func);
  3299. instrNot->InsertBefore(instr);
  3300. }
  3301. // dst = MOV src
  3302. opndDst = instrNot->GetDst();
  3303. instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndSrc1, instrNot->m_func);
  3304. instrNot->InsertBefore(instr);
  3305. // dst = NOT dst
  3306. instr = IR::Instr::New(Js::OpCode::NOT, opndDst, opndDst, instrNot->m_func);
  3307. instrNot->InsertBefore(instr);
  3308. // dst = INC dst
  3309. instr = IR::Instr::New(Js::OpCode::INC, opndDst, opndDst, instrNot->m_func);
  3310. instrNot->InsertBefore(instr);
  3311. if (isInt)
  3312. {
  3313. instrNot->Remove();
  3314. // Skip lowering call to helper
  3315. return false;
  3316. }
  3317. // JMP $fallthru
  3318. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrNot->m_func);
  3319. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrNot->m_func);
  3320. instrNot->InsertBefore(instr);
  3321. // $helper:
  3322. // (caller generates helper sequence)
  3323. // $fallthru:
  3324. AssertMsg(labelHelper, "Should not be NULL");
  3325. instrNot->InsertBefore(labelHelper);
  3326. instrNot->InsertAfter(labelFallThru);
  3327. return true;
  3328. }
void
LowererMDArch::FinalLower()
{
    // Final machine-dependent lowering pass: walks every instruction backward
    // and rewrites the pseudo-ops that could not be resolved until the frame
    // size was known, and strips the dummy operands off CMOV instructions.
    int32 offset;
    FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, this->m_func->m_tailInstr, this->m_func->m_headInstr)
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::Ret:
            // Ret is a pseudo-op at this point; just drop it.
            instr->Remove();
            break;
        case Js::OpCode::Leave:
            Assert(this->m_func->DoOptimizeTry() && !this->m_func->IsLoopBodyInTry());
            this->lowererMD->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), true /*fromFinalLower*/);
            break;
        case Js::OpCode::BailOutStackRestore:
            // We don't know the frameSize at lower time...
            instr->m_opcode = Js::OpCode::LEA;
            // exclude the EBP and return address
            instr->GetSrc1()->AsIndirOpnd()->SetOffset(-(int)(this->m_func->frameSize) + 2 * MachPtr);
            break;
        case Js::OpCode::RestoreOutParam:
            Assert(instr->GetDst() != nullptr);
            Assert(instr->GetDst()->IsIndirOpnd());
            // Rebase the out-param slot offset against the now-known frame
            // size, excluding the saved frame pointer and return address
            // (2 machine pointers), then turn the pseudo-op into a real MOV.
            offset = instr->GetDst()->AsIndirOpnd()->GetOffset();
            offset -= this->m_func->frameSize;
            offset += 2 * sizeof(void*);
            instr->GetDst()->AsIndirOpnd()->SetOffset(offset, true);
            instr->m_opcode = Js::OpCode::MOV;
            break;
        case Js::OpCode::CMOVA:
        case Js::OpCode::CMOVAE:
        case Js::OpCode::CMOVB:
        case Js::OpCode::CMOVBE:
        case Js::OpCode::CMOVE:
        case Js::OpCode::CMOVG:
        case Js::OpCode::CMOVGE:
        case Js::OpCode::CMOVL:
        case Js::OpCode::CMOVLE:
        case Js::OpCode::CMOVNE:
        case Js::OpCode::CMOVNO:
        case Js::OpCode::CMOVNP:
        case Js::OpCode::CMOVNS:
        case Js::OpCode::CMOVO:
        case Js::OpCode::CMOVP:
        case Js::OpCode::CMOVS:
            // Get rid of fake src1.
            if (instr->GetSrc2())
            {
                // CMOV inserted before regalloc have a dummy src1 to simulate the fact that
                // CMOV is not a definite def of the dst.
                instr->SwapOpnds();
                instr->FreeSrc2();
            }
            break;
        }
    }
    NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
}
// This depends on the calling convention, so it is hard to factor out a common (machine-independent) implementation here.
  3389. IR::Opnd*
  3390. LowererMDArch::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
  3391. {
  3392. // x86:
  3393. // s25.i32 = LdLen_A s4.var
  3394. // s26.i32 = Ld_A s25.i32
  3395. // s25.i32 = Or_I4 s25.i32, 1 // For alignment
  3396. // $L2:
  3397. // s10.var = LdElemI_A [s4.var+s25.i32].var
  3398. // ArgOut_A_Dynamic s10.var
  3399. // s25.i32 = SUB_I4 s25.i32, 0x1
  3400. // JNE $L2
  3401. // $L3
  3402. GenerateFunctionObjectTest(callInstr, callInstr->GetSrc1()->AsRegOpnd(), false);
  3403. if (callInstr->m_func->IsInlinee())
  3404. {
  3405. return this->lowererMD->m_lowerer->GenerateArgOutForInlineeStackArgs(callInstr, stackArgsInstr);
  3406. }
  3407. Assert(stackArgsInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
  3408. Assert(callInstr->m_opcode == Js::OpCode::CallIDynamic);
  3409. Func *func = callInstr->m_func;
  3410. IR::RegOpnd* stackArgs = stackArgsInstr->GetSrc1()->AsRegOpnd();
  3411. IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, func);
  3412. IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, stackArgs, func);
  3413. ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); // LdLen_A works only on stack arguments
  3414. callInstr->InsertBefore(ldLen);
  3415. this->lowererMD->m_lowerer->GenerateFastRealStackArgumentsLdLen(ldLen);
  3416. IR::Instr* saveLenInstr = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyUint32, func), ldLenDstOpnd, func);
  3417. saveLenInstr->GetDst()->SetValueType(ValueType::GetTaggedInt());
  3418. callInstr->InsertBefore(saveLenInstr);
  3419. // Align frame
  3420. IR::Instr* orInstr = IR::Instr::New(Js::OpCode::OR, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func), this->m_func);
  3421. callInstr->InsertBefore(orInstr);
  3422. IR::LabelInstr* startLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
  3423. startLoop->m_isLoopTop = true;
  3424. Loop *loop = JitAnew(this->m_func->m_alloc, Loop, this->m_func->m_alloc, this->m_func);
  3425. startLoop->SetLoop(loop);
  3426. loop->SetLoopTopInstr(startLoop);
  3427. loop->regAlloc.liveOnBackEdgeSyms = JitAnew(func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
  3428. callInstr->InsertBefore(startLoop);
  3429. IR::IndirOpnd *nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
  3430. nthArgument->SetOffset(-1);
  3431. IR::RegOpnd* ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
  3432. IR::Instr* ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
  3433. callInstr->InsertBefore(ldElem);
  3434. this->lowererMD->m_lowerer->GenerateFastStackArgumentsLdElemI(ldElem);
  3435. IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
  3436. argout->SetSrc1(ldElemDstOpnd);
  3437. callInstr->InsertBefore(argout);
  3438. this->LoadDynamicArgument(argout);
  3439. IR::Instr *subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyUint32, func),func);
  3440. callInstr->InsertBefore(subInstr);
  3441. this->lowererMD->EmitInt4Instr(subInstr);
  3442. IR::BranchInstr *tailBranch = IR::BranchInstr::New(Js::OpCode::JNE, startLoop, func);
  3443. callInstr->InsertBefore(tailBranch);
  3444. loop->regAlloc.liveOnBackEdgeSyms->Set(ldLenDstOpnd->m_sym->m_id);
  3445. // return the length which will be used for callInfo generations & stack allocation
  3446. return saveLenInstr->GetDst()->AsRegOpnd();
  3447. }
  3448. IR::Instr *
  3449. LowererMDArch::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  3450. {
  3451. IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  3452. // Load the continuation address into the return register.
  3453. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, retReg, targetOpnd, this->m_func));
  3454. IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  3455. IR::Instr * retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  3456. retInstr->SetSrc1(intSrc);
  3457. retInstr->SetSrc2(retReg);
  3458. insertBeforeInstr->InsertBefore(retInstr);
  3459. // return the last instruction inserted
  3460. return retInstr;
  3461. }