// (Removed: page-scrape artifacts — file title and collapsed line-number gutter, not source text.)
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"
#include "LowererMDArch.h"
#include "Library/JavascriptGeneratorFunction.h"
// Opcode used to widen a 32-bit value to machine-register width. On 32-bit x86
// the machine word is already 32 bits, so a plain MOV suffices (no sign/zero
// extension instruction is needed on this architecture).
const Js::OpCode LowererMD::MDExtend32Opcode = Js::OpCode::MOV;
  9. BYTE
  10. LowererMDArch::GetDefaultIndirScale()
  11. {
  12. return IndirScale4;
  13. }
  14. RegNum
  15. LowererMDArch::GetRegShiftCount()
  16. {
  17. return RegECX;
  18. }
  19. RegNum
  20. LowererMDArch::GetRegReturn(IRType type)
  21. {
  22. return ( IRType_IsFloat(type) || IRType_IsSimd128(type) || IRType_IsInt64(type) ) ? RegNOREG : RegEAX;
  23. }
  24. RegNum
  25. LowererMDArch::GetRegReturnAsmJs(IRType type)
  26. {
  27. if (IRType_IsFloat(type) || IRType_IsSimd128(type))
  28. {
  29. return RegXMM0;
  30. }
  31. else
  32. {
  33. Assert(type == TyInt32 || type == TyInt64);
  34. return RegEAX;
  35. }
  36. }
  37. RegNum
  38. LowererMDArch::GetRegStackPointer()
  39. {
  40. return RegESP;
  41. }
  42. RegNum
  43. LowererMDArch::GetRegBlockPointer()
  44. {
  45. return RegEBP;
  46. }
  47. RegNum
  48. LowererMDArch::GetRegFramePointer()
  49. {
  50. return RegEBP;
  51. }
  52. RegNum
  53. LowererMDArch::GetRegChkStkParam()
  54. {
  55. return RegEAX;
  56. }
  57. RegNum
  58. LowererMDArch::GetRegIMulDestLower()
  59. {
  60. return RegEAX;
  61. }
  62. RegNum
  63. LowererMDArch::GetRegIMulHighDestLower()
  64. {
  65. return RegEDX;
  66. }
  67. RegNum
  68. LowererMDArch::GetRegArgI4(int32 argNum)
  69. {
  70. return RegNOREG;
  71. }
  72. RegNum
  73. LowererMDArch::GetRegArgR8(int32 argNum)
  74. {
  75. return RegNOREG;
  76. }
  77. Js::OpCode
  78. LowererMDArch::GetAssignOp(IRType type)
  79. {
  80. switch (type)
  81. {
  82. case TyFloat64:
  83. return Js::OpCode::MOVSD;
  84. case TyFloat32:
  85. return Js::OpCode::MOVSS;
  86. case TySimd128F4:
  87. case TySimd128I4:
  88. case TySimd128I8:
  89. case TySimd128I16:
  90. case TySimd128U4:
  91. case TySimd128U8:
  92. case TySimd128U16:
  93. case TySimd128B4:
  94. case TySimd128B8:
  95. case TySimd128B16:
  96. case TySimd128D2:
  97. case TySimd128I2:
  98. return Js::OpCode::MOVUPS;
  99. default:
  100. return Js::OpCode::MOV;
  101. }
  102. }
  103. void
  104. LowererMDArch::Init(LowererMD *lowererMD)
  105. {
  106. this->lowererMD = lowererMD;
  107. this->helperCallArgsCount = 0;
  108. }
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadInputParamPtr
///
///     Load the address of the start of the passed-in parameters, not
///     including the "this" parameter.
///
///----------------------------------------------------------------------------
  117. IR::Instr *
  118. LowererMDArch::LoadInputParamPtr(IR::Instr *instrInsert, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
  119. {
  120. if (this->m_func->GetJITFunctionBody()->IsCoroutine())
  121. {
  122. IR::RegOpnd * argPtrRegOpnd = Lowerer::LoadGeneratorArgsPtr(instrInsert);
  123. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(argPtrRegOpnd, 1 * MachPtr, TyMachPtr, this->m_func);
  124. IR::RegOpnd * dstOpnd = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachPtr, this->m_func);
  125. return Lowerer::InsertLea(dstOpnd, indirOpnd, instrInsert);
  126. }
  127. else
  128. {
  129. // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
  130. StackSym *paramSym = StackSym::New(TyVar, this->m_func);
  131. this->m_func->SetArgOffset(paramSym, 5 * MachPtr);
  132. return this->lowererMD->m_lowerer->InsertLoadStackAddress(paramSym, instrInsert, optionalDstOpnd);
  133. }
  134. }
// Lowers a "load stack-args pointer" instruction. When there may be fewer
// actuals than formals, also emits a slow path that carves space for the
// formals out of the callee frame. Returns the first instruction of the
// emitted sequence (so the caller can continue lowering from there).
IR::Instr *
LowererMDArch::LoadStackArgPtr(IR::Instr * instrArgPtr)
{
    // if (actual count >= formal count)
    //     dst = ebp + 5 * sizeof(Var) -- point to the first input parameter after "this"
    // else
    //     sub esp, (size of formals)  -- we'll copy the input params to the callee frame, since the caller frame
    //                                    doesn't have space for them all
    //     dst = esp + 3 * sizeof(var) -- point to the location of the first input param (after "this")
    //                                    within the area we just allocated on the callee frame
    IR::Instr * instrPrev = instrArgPtr;
    IR::LabelInstr * instrLabelExtra = nullptr;
    IR::Instr * instr;
    IR::Opnd * opnd;
    Js::ArgSlot formalsCount = this->m_func->GetInParamsCount();
    // Only need to check the number of actuals if there's at least 1 formal (plus "this")
    if (formalsCount > 1)
    {
        // CMP actualCount, formalsCount ; JB slow-path label
        instrPrev = this->lowererMD->LoadInputParamCount(instrArgPtr);
        IR::Opnd * opndActuals = instrPrev->GetDst();
        IR::Opnd * opndFormals =
            IR::IntConstOpnd::New(formalsCount, TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(opndActuals);
        instr->SetSrc2(opndFormals);
        instrArgPtr->InsertBefore(instr);
        instrLabelExtra = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        // JB: unsigned "actuals < formals" branch to the allocation path below.
        instr = IR::BranchInstr::New(Js::OpCode::JB, instrLabelExtra, this->m_func);
        instrArgPtr->InsertBefore(instr);
    }
    // Modify the original instruction to load the addr of the input parameters on the caller's frame.
    instr = LoadInputParamPtr(instrArgPtr, instrArgPtr->UnlinkDst()->AsRegOpnd());
    instrArgPtr->Remove();
    instrArgPtr = instr;
    if (instrLabelExtra)
    {
        // Thread the control flow: fast path JMPs over the slow path, which
        // sits between instrLabelExtra and instrLabelDone.
        IR::LabelInstr *instrLabelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, instrLabelDone, this->m_func);
        instrArgPtr->InsertAfter(instr);
        instr->InsertAfter(instrLabelExtra);
        instrLabelExtra->InsertAfter(instrLabelDone);
        // Allocate space on the callee's frame for a copy of the formals, plus the callee object pointer
        // and the callinfo.
        // Be sure to double-align the allocation.
        // REVIEW: Do we ever need to generate a chkstk call here?
        // NOTE(review): mixed signedness — int receives Math::Align<size_t>;
        // fine while formalsBytes is small, but worth confirming intent.
        int formalsBytes = (formalsCount + 2) * sizeof(Js::Var);
        formalsBytes = Math::Align<size_t>(formalsBytes, MachStackAlignment);
        // LEA esp, [esp - formalsBytes] : bump the stack without touching flags.
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
        opnd = IR::IndirOpnd::New(espOpnd, -formalsBytes, TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
        instrLabelDone->InsertBefore(instr);
        // Result is the pointer to the address where we'll store the first input param
        // (after "this") in the callee's frame.
        opnd = IR::IndirOpnd::New(espOpnd, 3 * sizeof(Js::Var), TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, instrArgPtr->GetDst(), opnd, this->m_func);
        instrLabelDone->InsertBefore(instr);
    }
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadHeapArguments
///
///     Load the heap-based arguments object
///
///----------------------------------------------------------------------------
// Lowers LdHeapArguments/LdLetHeapArguments. When stack-args optimization is
// enabled the instruction degenerates to storing null; otherwise it becomes a
// call to the LoadHeapArguments helper, whose seven arguments are pushed here
// in reverse order (s7 first). Returns the first instruction of the sequence.
IR::Instr *
LowererMDArch::LoadHeapArguments(IR::Instr *instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr * instrPrev = instrArgs->m_prev;
    if (func->IsStackArgsEnabled()) //both inlinee & inliner has stack args. We don't support other scenarios.
    {
        // The initial args slot value is zero. (TODO: it should be possible to dead-store the LdHeapArgs in this case.)
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = array of property ID's
        // s4 = local frame instance
        // s3 = address of first actual argument (after "this")
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArguments(s1, s2, s3, s4, s5, s6, s7)
        // s7 = formals are let decls
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(instrArgs->m_opcode == Js::OpCode::LdLetHeapArguments ? TRUE : FALSE, TyUint8, func));
        // s6 = memory context
        instrPrev = this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);
        // s5 = array of property ID's
        intptr_t formalsPropIdArray = instrArgs->m_func->GetJITFunctionBody()->GetFormalsPropIdArrayAddr();
        if (!formalsPropIdArray)
        {
            // No formals-property-id array for this function: pass null instead.
            formalsPropIdArray = instrArgs->m_func->GetScriptContextInfo()->GetNullAddr();
        }
        IR::Opnd * argArray = IR::AddrOpnd::New(formalsPropIdArray, IR::AddrOpndKindDynamicMisc, m_func);
        this->LoadHelperArgument(instrArgs, argArray);
        // s4 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);
        if (func->IsInlinee())
        {
            /*
             * s3 = address of first actual argument (after "this").
             * Stack looks like arg 1 ('this') <-- low address
             *                  ...
             *                  arg N
             *                  arguments object
             *                  function object
             *                  argc <-- frameStartSym
             */
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->m_lowerer->InsertLoadStackAddress(firstRealArgSlotSym, instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s2 = actual argument count (without counting "this").
            instr = IR::Instr::New(Js::OpCode::MOV,
                IR::RegOpnd::New(TyMachReg, func),
                IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
                func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());
            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                argObjSlotOpnd,
                instrArgs->GetDst(),
                func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s3 = address of first actual argument (after "this")
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s2 = actual argument count (without counting "this")
            instr = this->lowererMD->LoadInputParamCount(instrArgs, -1);
            IR::Opnd* opndInputParamCount = instr->GetDst();
            this->LoadHelperArgument(instrArgs, opndInputParamCount);
            // s1 = current function
            StackSym *paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd *srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            if (this->m_func->GetJITFunctionBody()->IsCoroutine())
            {
                // the function object for generator calls is a GeneratorVirtualScriptFunction object
                // and we need to pass the real JavascriptGeneratorFunction object so grab it instead
                IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
                Lowerer::InsertMove(tmpOpnd, srcOpnd, instrArgs);
                srcOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
            }
            this->LoadHelperArgument(instrArgs, srcOpnd);
            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }
        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArguments);
    }
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadHeapArgsCached
///
///     Load the heap-based arguments object using a cached scope
///
///----------------------------------------------------------------------------
// Lowers LdHeapArgsCached/LdLetHeapArgsCached (cached-scope variant). Mirrors
// LoadHeapArguments: a null store when stack-args optimization is enabled,
// otherwise a LoadHeapArgsCached helper call whose seven arguments are pushed
// in reverse order. Returns the first instruction of the emitted sequence.
IR::Instr *
LowererMDArch::LoadHeapArgsCached(IR::Instr *instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr *instrPrev = instrArgs->m_prev;
    if (instrArgs->m_func->IsStackArgsEnabled())
    {
        // The initial args slot value is zero. (TODO: it should be possible to dead-store the LdHeapArgs in this case.)
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = local frame instance
        // s4 = address of first actual argument (after "this")
        // s3 = formal argument count
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadArguments(s1, s2, s3, s4, s5, s6, s7)
        // s7 = formals are let decls
        IR::Opnd * formalsAreLetDecls = IR::IntConstOpnd::New((IntConstType)(instrArgs->m_opcode == Js::OpCode::LdLetHeapArgsCached), TyUint8, func);
        this->LoadHelperArgument(instrArgs, formalsAreLetDecls);
        // s6 = memory context
        this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);
        // s5 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);
        if (func->IsInlinee())
        {
            // s4 = address of first actual argument (after "this")
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->m_lowerer->InsertLoadStackAddress(firstRealArgSlotSym, instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s3 = formal argument count (without counting "this")
            uint32 formalsCount = func->GetJITFunctionBody()->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyMachReg, func));
            // s2 = actual argument count (without counting "this").
            instr = IR::Instr::New(Js::OpCode::MOV,
                IR::RegOpnd::New(TyMachReg, func),
                IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
                func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());
            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                argObjSlotOpnd,
                instrArgs->GetDst(),
                func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s4 = address of first actual argument (after "this")
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s3 = formal argument count (without counting "this")
            uint32 formalsCount = func->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyMachReg, func));
            // s2 = actual argument count (without counting "this")
            // DEC adjusts the loaded count in place to exclude "this".
            instr = this->lowererMD->LoadInputParamCount(instrArgs);
            instr = IR::Instr::New(Js::OpCode::DEC, instr->GetDst(), instr->GetDst(), func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function
            StackSym *paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd *srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            this->LoadHelperArgument(instrArgs, srcOpnd);
            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }
        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArgsCached);
    }
    return instrPrev;
}
  402. //
  403. // Load the parameter in the first argument slot
  404. //
  405. IR::Instr *
  406. LowererMDArch::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
  407. {
  408. // No need to do anything different for spread calls on x86 since we push args.
  409. IR::SymOpnd * argOpnd = IR::SymOpnd::New(this->m_func->m_symTable->GetArgSlotSym(1), TyVar, this->m_func);
  410. IR::Instr * argInstr = Lowerer::InsertMove(argOpnd, dst, instr);
  411. return argInstr;
  412. }
// Emit a runtime check that the call target is an object (not a tagged value)
// and raise JSERR_NeedFunction when it is not. When the JIT is in debug mode,
// continueAfterExLabel must be provided so execution can resume at a safe
// point (normally the debugger bailout check) if the exception is ignored.
void
LowererMDArch::GenerateFunctionObjectTest(IR::Instr * callInstr, IR::RegOpnd *functionObjOpnd, bool isHelper, IR::LabelInstr* continueAfterExLabel /* = nullptr */)
{
    AssertMsg(!m_func->IsJitInDebugMode() || continueAfterExLabel, "When jit is in debug mode, continueAfterExLabel must be provided otherwise continue after exception may cause AV.");
    if (!functionObjOpnd->IsNotTaggedValue())
    {
        IR::Instr * insertBeforeInstr = callInstr;
        // Need check and error if we are calling a tagged int.
        if (!functionObjOpnd->IsTaggedInt())
        {
            // Not provably a tagged int at JIT time: emit a dynamic object test
            // that skips the error path when the operand is a real object.
            // TEST s1, 1
            // JEQ $callLabel
            IR::LabelInstr * callLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func /*, isHelper*/);
            this->lowererMD->GenerateObjectTest(functionObjOpnd, callInstr, callLabel, true);
#if DBG
            // The object test should have created exactly one branch to callLabel;
            // tag it as a helper-to-non-helper branch for IR validation.
            int count = 0;
            FOREACH_SLIST_ENTRY(IR::BranchInstr *, branchInstr, &callLabel->labelRefs)
            {
                branchInstr->m_isHelperToNonHelperBranch = true;
                count++;
            } NEXT_SLIST_ENTRY;
            Assert(count == 1);
#endif
            callInstr->InsertBefore(callLabel);
            insertBeforeInstr = callLabel;
        }
        // Error path: the target is a tagged value, so it cannot be a function.
        lowererMD->m_lowerer->GenerateRuntimeError(insertBeforeInstr, JSERR_NeedFunction);
        if (continueAfterExLabel)
        {
            // Under debugger the RuntimeError (exception) can be ignored, generate branch right after RunTimeError instr
            // to jmp to a safe place (which would normally be debugger bailout check).
            IR::BranchInstr* continueAfterEx = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueAfterExLabel, this->m_func);
            insertBeforeInstr->InsertBefore(continueAfterEx);
        }
    }
}
// Emit, before callInstr, a loop that pushes the elements of a spread array
// as dynamic arguments. indexOpnd holds the element count on entry and is
// decremented to zero; arrayElementsStartOpnd points at the element storage.
void
LowererMDArch::LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    Func *const func = callInstr->m_func;
    // Align frame
    // (OR index, 1 — rounds the count up to the next odd value; presumably this
    // keeps the total pushed slots frame-aligned — TODO confirm against callers.)
    IR::Instr *orInstr = IR::Instr::New(Js::OpCode::OR, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func), this->m_func);
    callInstr->InsertBefore(orInstr);

    // Build the loop-top label and its Loop structure, and record the syms that
    // are live on the back edge so the register allocator keeps them alive.
    IR::LabelInstr *startLoopLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    startLoopLabel->m_isLoopTop = true;
    Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
    startLoopLabel->SetLoop(loop);
    loop->SetLoopTopInstr(startLoopLabel);
    loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
    loop->regAlloc.liveOnBackEdgeSyms->Set(indexOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(arrayElementsStartOpnd->m_sym->m_id);
    callInstr->InsertBefore(startLoopLabel);

    // index-- ; then push the element at [elementsStart + index * scale].
    this->lowererMD->m_lowerer->InsertSub(false, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt8, func), callInstr);
    IR::IndirOpnd *elemPtrOpnd = IR::IndirOpnd::New(arrayElementsStartOpnd, indexOpnd, GetDefaultIndirScale(), TyMachPtr, func);
    // Generate argout for n+2 arg (skipping function object + this)
    IR::Instr *argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(elemPtrOpnd);
    callInstr->InsertBefore(argout);
    this->lowererMD->LoadDynamicArgument(argout);

    // Loop back while index != 0.
    this->lowererMD->m_lowerer->InsertCompareBranch(indexOpnd,
        IR::IntConstOpnd::New(0, TyUint8, func),
        Js::OpCode::BrNeq_A,
        true,
        startLoopLabel,
        callInstr);
}
// Lower a dynamic call (argument count only known at runtime, e.g.
// Function.prototype.apply): push "this", push the callInfo (argsLength),
// emit the call, then restore ESP past the dynamically-sized argument area.
// Returns the callInfo argout instruction.
IR::Instr *
LowererMDArch::LowerCallIDynamic(IR::Instr * callInstr, IR::Instr*saveThisArgOutInstr, IR::Opnd *argsLength, ushort callFlags, IR::Instr * insertBeforeInstrForCFG)
{
    callInstr->InsertBefore(saveThisArgOutInstr); //Move this Argout next to call;
    this->LoadDynamicArgument(saveThisArgOutInstr);

    Func *func = callInstr->m_func;
    bool bIsInlinee = func->IsInlinee();
    if (bIsInlinee)
    {
        // For inlinees the argument count is a compile-time constant.
        Assert(argsLength->AsIntConstOpnd()->GetValue() == callInstr->m_func->actualCount);
    }
    else
    {
        Assert(argsLength->IsRegOpnd());
        /*callInfo*/
        // Add 1 to account for "this" in the callInfo count.
        callInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, argsLength, argsLength, IR::IntConstOpnd::New(1, TyUint32, this->m_func), this->m_func));
    }

    // Push the callInfo value itself as a dynamic argument.
    IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, this->m_func);
    argout->SetSrc1(argsLength);
    callInstr->InsertBefore(argout);
    this->LoadDynamicArgument(argout);

    // load native entry point from script function into eax
    AssertMsg(callInstr->GetSrc1()->IsRegOpnd() && callInstr->GetSrc1()->AsRegOpnd()->m_sym->IsStackSym(),
        "Expected call src to be stackSym");
    IR::RegOpnd * functionWrapOpnd = callInstr->UnlinkSrc1()->AsRegOpnd();
    GeneratePreCall(callInstr, functionWrapOpnd);
    LowerCall(callInstr, 0);

    //Restore stack back to original state.
    IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, RegESP, TyMachReg, this->m_func);
    if (bIsInlinee)
    {
        // +2 for callInfo & function object;
        // (actualCount & 1) accounts for the alignment padding slot.
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(espOpnd, (callInstr->m_func->actualCount + (callInstr->m_func->actualCount&1) + 2) * MachPtr, TyMachReg, this->m_func);
        callInstr->InsertAfter(IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, this->m_func));
    }
    else
    {
        IR::RegOpnd *argsLengthRegOpnd = argsLength->AsRegOpnd();
        //Account for callInfo & function object in argsLength
        IR::Instr * addInstr = IR::Instr::New(Js::OpCode::ADD, argsLengthRegOpnd, argsLengthRegOpnd, IR::IntConstOpnd::New(2, TyUint32, this->m_func), this->m_func);
        callInstr->InsertBefore(addInstr);
        IR::Instr *insertInstr = callInstr->m_next;

        // Align stack
        // Round argsLength up to the next even value (INC then AND ~1) so the
        // ESP restore matches the alignment padding done when pushing args.
        //
        // INC argLengthReg
        IR::Instr * incInstr = IR::Instr::New(Js::OpCode::INC, argsLengthRegOpnd, argsLengthRegOpnd, this->m_func);
        insertInstr->InsertBefore(incInstr);
        // AND argLengthReg, (~1)
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, argsLengthRegOpnd, argsLengthRegOpnd, IR::IntConstOpnd::New(~1, TyInt32, this->m_func, true), this->m_func);
        insertInstr->InsertBefore(andInstr);
        // LEA ESP, [ESP + argsLengthReg*4]
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(espOpnd, argsLengthRegOpnd, IndirScale4, TyMachReg, this->m_func);
        addInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, this->m_func);
        insertInstr->InsertBefore(addInstr);
    }
    return argout;
}
// Prepare a call instruction for lowering: load the callee's type, push the
// function object as the last argument, and rewrite the call source to the
// entry point loaded from the type (with a CFG check when enabled).
void
LowererMDArch::GeneratePreCall(IR::Instr * callInstr, IR::Opnd *functionObjOpnd)
{
    IR::RegOpnd* functionTypeRegOpnd = nullptr;

    // For calls to fixed functions we load the function's type directly from the known (hard-coded) function object address.
    // For other calls, we need to load it from the function object stored in a register operand.
    if (functionObjOpnd->IsAddrOpnd() && functionObjOpnd->AsAddrOpnd()->m_isFunction)
    {
        functionTypeRegOpnd = this->lowererMD->m_lowerer->GenerateFunctionTypeFromFixedFunctionObject(callInstr, functionObjOpnd);
    }
    else if (functionObjOpnd->IsRegOpnd())
    {
        AssertMsg(functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call target to be stackSym");
        functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        // functionTypeRegOpnd = MOV function->type
        IR::IndirOpnd* functionTypeIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
            Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionTypeIndirOpnd, this->m_func);
        callInstr->InsertBefore(instr);
    }
    else
    {
        AssertMsg(false, "Unexpected call target operand type.");
    }

    // Push function object
    this->LoadHelperArgument(callInstr, functionObjOpnd);

    // Call through type->entryPoint rather than the function object directly.
    int entryPointOffset = Js::Type::GetOffsetOfEntryPoint();
    IR::IndirOpnd* entryPointOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachPtr, this->m_func);
    callInstr->SetSrc1(entryPointOpnd);

    // Atom prefers "CALL reg" over "CALL [reg]"
    IR::Instr * hoistedCallSrcInstr = nullptr;
    hoistedCallSrcInstr = callInstr->HoistSrc1(Js::OpCode::MOV);

#if defined(_CONTROL_FLOW_GUARD)
    // Validate the hoisted call target before the indirect call.
    if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
    {
        this->lowererMD->GenerateCFGCheck(hoistedCallSrcInstr->GetDst(), callInstr);
    }
#endif
}
// Lower a CallI: lower the argument chain, verify the target is a function
// (unless it is a fixed function or a 'new' call, which were already checked),
// emit the call, and — in SimpleJit profiling mode — wrap it with call
// profiling. Returns the final (possibly profiling) call instruction.
IR::Instr *
LowererMDArch::LowerCallI(IR::Instr *callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    // We need to get the calculated CallInfo in SimpleJit because that doesn't include any changes for stack alignment
    IR::IntConstOpnd *callInfo;
    int32 argCount = this->LowerCallArgs(callInstr, callFlags, 1, &callInfo);

    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();

    // If this is a call for new, we already pass the function operand through NewScObject,
    // which checks if the function operand is a real function or not, don't need to add a check again
    // If this is a call to a fixed function, we've already verified that the target is, indeed, a function.
    if (callInstr->m_opcode != Js::OpCode::CallIFixed && !(callFlags & Js::CallFlags_New))
    {
        AssertMsg(functionObjOpnd->IsRegOpnd() && functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call src to be stackSym");
        IR::LabelInstr* continueAfterExLabel = Lowerer::InsertContinueAfterExceptionLabelForDebugger(m_func, callInstr, isHelper);
        GenerateFunctionObjectTest(callInstr, functionObjOpnd->AsRegOpnd(), isHelper, continueAfterExLabel);
    }

    // Can't assert until we remove unreachable code if we have proved that it is a tagged int.
    // Assert((callFlags & Js::CallFlags_New) || !functionWrapOpnd->IsTaggedInt());
    GeneratePreCall(callInstr, functionObjOpnd);

    IR::Opnd *const finalDst = callInstr->GetDst();
    IR::Instr* ret = this->LowerCall(callInstr, argCount);

    IR::AutoReuseOpnd autoReuseSavedFunctionObjOpnd;
    if (callInstr->IsJitProfilingInstr())
    {
        Assert(callInstr->m_func->IsSimpleJit());
        Assert(!CONFIG_FLAG(NewSimpleJit));

        if(finalDst &&
            finalDst->IsRegOpnd() &&
            functionObjOpnd->IsRegOpnd() &&
            finalDst->AsRegOpnd()->m_sym == functionObjOpnd->AsRegOpnd()->m_sym)
        {
            // The function object sym is going to be overwritten, so save it in a temp for profiling
            IR::RegOpnd *const savedFunctionObjOpnd = IR::RegOpnd::New(functionObjOpnd->GetType(), callInstr->m_func);
            autoReuseSavedFunctionObjOpnd.Initialize(savedFunctionObjOpnd, callInstr->m_func);
            Lowerer::InsertMove(savedFunctionObjOpnd, functionObjOpnd, callInstr->m_next);
            functionObjOpnd = savedFunctionObjOpnd;
        }

        auto instr = callInstr->AsJitProfilingInstr();
        ret = this->lowererMD->m_lowerer->GenerateCallProfiling(
            instr->profileId,
            instr->inlineCacheIndex,
            instr->GetDst(),
            functionObjOpnd,
            callInfo,
            instr->isProfiledReturnCall,
            callInstr,
            ret);
    }
    return ret;
}
  625. IR::Instr *
  626. LowererMDArch::LowerAsmJsCallE(IR::Instr *callInstr)
  627. {
  628. IR::IntConstOpnd *callInfo;
  629. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 1, &callInfo);
  630. IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
  631. GeneratePreCall(callInstr, functionObjOpnd);
  632. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  633. return ret;
  634. }
// Sink an int64 call destination into its low/high register pair: the low 32
// bits come back in EAX, the high 32 bits in EDX. Returns the final move of
// the high half.
IR::Instr *
LowererMDArch::LowerInt64CallDst(IR::Instr * callInstr)
{
    Assert(IRType_IsInt64(callInstr->GetDst()->GetType()));
    RegNum lowReturnReg = RegEAX;
    RegNum highReturnReg = RegEDX;

    IR::Instr * movInstr;
    Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(callInstr->GetDst());

    // Retype the call's dst to 32 bits and sink the low half out of EAX.
    callInstr->GetDst()->SetType(TyInt32);
    movInstr = callInstr->SinkDst(GetAssignOp(TyInt32), lowReturnReg);
    movInstr->UnlinkDst();
    movInstr->SetDst(dstPair.low);

    // Make edx alive as it contains the high bits for the int64 return value
    IR::RegOpnd* highReg = IR::RegOpnd::New(TyInt32, this->m_func);
    highReg->SetReg(highReturnReg);

    // todo:: Remove the NOP in peeps
    // The NOP defines highReg so the register allocator keeps EDX live until
    // the high half is copied out below.
    IR::Instr* nopInstr = IR::Instr::New(Js::OpCode::NOP, highReg, this->m_func);
    movInstr->InsertBefore(nopInstr);

    IR::Instr* mov2Instr = IR::Instr::New(GetAssignOp(TyInt32), dstPair.high, highReg, this->m_func);
    movInstr->InsertAfter(mov2Instr);
    return mov2Instr;
}
// Lower an asm.js internal call: lower the argout chain, push the function
// object, load the entry point through type->entryPointInfo->address, emit the
// CALL, and sink the return value into the proper return register(s).
IR::Instr *
LowererMDArch::LowerAsmJsCallI(IR::Instr * callInstr)
{
    IR::Instr * argInstr;
    int32 argCount = 0;

    // Lower args and look for StartCall
    argInstr = callInstr;
    IR::Opnd *src2 = argInstr->UnlinkSrc2();
    while (src2->IsSymOpnd())
    {
        IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(m_func);
        argInstr = argLinkSym->m_instrDef;
        // Mov each arg to it's argSlot
        src2 = argInstr->UnlinkSrc2();
        LowererMD::ChangeToAssign(argInstr);
        ++argCount;
    }
    // increment again for FunctionObject
    ++argCount;

    // The head of the chain is the StartCall.
    IR::RegOpnd * argLinkOpnd = src2->AsRegOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    IR::Instr * startCallInstr = argLinkSym->m_instrDef;
    Assert(startCallInstr->m_opcode == Js::OpCode::StartCall);
    Assert(startCallInstr->GetSrc1()->IsIntConstOpnd());
    int32 stackAlignment = LowerStartCallAsmJs(startCallInstr, startCallInstr, callInstr);

    // Track the worst-case argument-slot usage for frame layout.
    const uint32 argSlots = argCount + (stackAlignment / 4) + 1;
    m_func->m_argSlotsForFunctionsCalled = max(m_func->m_argSlotsForFunctionsCalled, argSlots);

    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
    // we will not have function object mem ref in the case of function table calls, so we cannot calculate the call address ahead of time
    Assert(functionObjOpnd->IsRegOpnd() && functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym());

    // Push function object
    IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, callInstr->m_func);
    pushInstr->SetSrc1(functionObjOpnd);
    callInstr->InsertBefore(pushInstr);

    // functionTypeReg = function->type
    IR::RegOpnd* functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    IR::IndirOpnd* functionInfoIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
    IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
    callInstr->InsertBefore(instr);

    // functionTypeReg = type->entryPointInfo
    functionInfoIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::ScriptFunctionType::GetEntryPointInfoOffset(), TyMachReg, m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
    callInstr->InsertBefore(instr);

    // call target = entryPointInfo->address
    uint32 entryPointOffset = Js::ProxyEntryPointInfo::GetAddressOffset();
    IR::Opnd * entryPointOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachReg, m_func);
    callInstr->SetSrc1(entryPointOpnd);

    // Atom prefers "CALL reg" over "CALL [reg]"
    IR::Instr * hoistedCallSrcInstr = callInstr->HoistSrc1(Js::OpCode::MOV);

#if defined(_CONTROL_FLOW_GUARD)
    if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
    {
        this->lowererMD->GenerateCFGCheck(hoistedCallSrcInstr->GetDst(), callInstr);
    }
#else
    Unused(hoistedCallSrcInstr);
#endif

    IR::Instr * retInstr = callInstr;
    callInstr->m_opcode = Js::OpCode::CALL;
    callInstr->m_func->SetHasCallsOnSelfAndParents();

    // Sink the destination into the ABI return register(s).
    if (callInstr->GetDst())
    {
        IRType dstType = callInstr->GetDst()->GetType();
        if (IRType_IsInt64(dstType))
        {
            // int64 results come back split across EDX:EAX.
            retInstr = LowerInt64CallDst(callInstr);
        }
        else
        {
            RegNum returnReg = GetRegReturnAsmJs(dstType);
            IR::Instr * movInstr;
            movInstr = callInstr->SinkDst(GetAssignOp(dstType), returnReg);
            retInstr = movInstr;
        }
    }
    return retInstr;
}
// Emit a WebAssembly heap bounds check for the access described by addrOpnd:
// if (index + access size + offset) overflows or exceeds the buffer length
// (instr's src2), raise WASMERR_ArrayIndexOutOfRange. Returns the label after
// which the actual load/store may proceed.
IR::Instr *
LowererMDArch::LowerWasmArrayBoundsCheck(IR::Instr * instr, IR::Opnd *addrOpnd)
{
    IR::IndirOpnd * indirOpnd = addrOpnd->AsIndirOpnd();
    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    uint32 offset = indirOpnd->GetOffset();
    IR::Opnd *arrayLenOpnd = instr->GetSrc2();

    // Constant part of the end address: access width + static offset.
    int64 constOffset = (int64)addrOpnd->GetSize() + (int64)offset;
    CompileAssert(Js::ArrayBuffer::MaxArrayBufferLength <= UINT32_MAX);
    IR::IntConstOpnd * constOffsetOpnd = IR::IntConstOpnd::New((uint32)constOffset, TyUint32, m_func);

    // Layout: [checks] -> helperLabel (error path) -> loadLabel -> doneLabel.
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);

    IR::Opnd *cmpOpnd;
    if (indexOpnd != nullptr)
    {
        // Compare index + memop access length and array buffer length, and generate RuntimeError if greater
        cmpOpnd = IR::RegOpnd::New(TyUint32, m_func);
        Lowerer::InsertAdd(true, cmpOpnd, indexOpnd, constOffsetOpnd, helperLabel);
        // JB: the add carried, i.e. index + size wrapped around — out of bounds.
        Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
    }
    else
    {
        // No dynamic index: the end offset is entirely constant.
        cmpOpnd = constOffsetOpnd;
    }

    // end > length (unsigned) => error path at helperLabel.
    lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, arrayLenOpnd, Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    lowererMD->m_lowerer->GenerateRuntimeError(loadLabel, WASMERR_ArrayIndexOutOfRange, IR::HelperOp_WebAssemblyRuntimeError);
    // Checks passed: jump over the error block to the load.
    Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);
    return doneLabel;
}
// Lower an atomic integer store. 32-bit-and-smaller stores use XCHG (which
// carries an implicit lock on x86); 64-bit stores call the AtomicStore64
// helper with the buffer address and the value.
void
LowererMDArch::LowerAtomicStore(IR::Opnd * dst, IR::Opnd * src1, IR::Instr * insertBeforeInstr)
{
    Assert(IRType_IsNativeInt(dst->GetType()));
    Assert(IRType_IsNativeInt(src1->GetType()));
    Func* func = insertBeforeInstr->m_func;
    // Move src1 to a register of the same type as dst
    IR::RegOpnd* tmpSrc = IR::RegOpnd::New(dst->GetType(), func);
    Lowerer::InsertMove(tmpSrc, src1, insertBeforeInstr);
    if (dst->IsInt64())
    {
        // todo:: Can do better implementation then InterlockedExchange64 with the following
        /*
        mov ebx, tmpSrc.low;
        mov ecx, tmpSrc.high;
        ;; Load old value first
        mov eax, [buffer];
        mov edx, [buffer+4];
        tryAgain:
        ;; CMPXCHG8B doc:
        ;; Compare EDX:EAX with m64. If equal, set ZF
        ;; and load ECX:EBX into m64. Else, clear ZF and
        ;; load m64 into EDX:EAX.
        lock CMPXCHG8B [buffer]
        jnz tryAgain
        ;; ZF was set, this means the old value hasn't changed between the load and the CMPXCHG8B
        ;; so we correctly stored our value atomically

        // NOTE: The IR below is a previous, non-working attempt at the inline
        // CMPXCHG8B lowering; it is kept as a reference for anyone revisiting
        // this todo.
        IR::RegOpnd* ecx = IR::RegOpnd::New(RegECX, TyMachReg, func);
        IR::RegOpnd* ebx = IR::RegOpnd::New(RegEBX, TyMachReg, func);
        IR::RegOpnd* eax = IR::RegOpnd::New(RegEAX, TyMachReg, func);
        IR::RegOpnd* edx = IR::RegOpnd::New(RegEDX, TyMachReg, func);
        auto dstPair = func->FindOrCreateInt64Pair(dst);
        auto srcPair = func->FindOrCreateInt64Pair(tmpSrc);
        Lowerer::InsertMove(ebx, srcPair.low, insertBeforeInstr);
        Lowerer::InsertMove(ecx, srcPair.high, insertBeforeInstr);
        Lowerer::InsertMove(eax, dstPair.low, insertBeforeInstr);
        Lowerer::InsertMove(edx, dstPair.high, insertBeforeInstr);
        IR::LabelInstr* startLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
        startLoop->m_isLoopTop = true;
        Loop *loop = JitAnew(this->m_func->m_alloc, Loop, this->m_func->m_alloc, this->m_func);
        startLoop->SetLoop(loop);
        loop->SetLoopTopInstr(startLoop);
        loop->regAlloc.liveOnBackEdgeSyms = JitAnew(func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
        loop->regAlloc.liveOnBackEdgeSyms->Set(ebx->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(ecx->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(eax->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(edx->m_sym->m_id);
        insertBeforeInstr->InsertBefore(startLoop);
        insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::CMPXCHG8B, nullptr, dstPair.low, func));
        insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, startLoop, func));
        */
        //////
        // Current implementation: call the AtomicStore64 helper with
        // (address of destination, int64 value).
        IR::RegOpnd* bufferAddress = IR::RegOpnd::New(TyMachReg, func);
        IR::Instr* lea = IR::Instr::New(Js::OpCode::LEA, bufferAddress, dst, func);
        insertBeforeInstr->InsertBefore(lea);

        LoadInt64HelperArgument(insertBeforeInstr, tmpSrc);
        LoadHelperArgument(insertBeforeInstr, bufferAddress);

        IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, func);
        insertBeforeInstr->InsertBefore(callInstr);
        lowererMD->ChangeToHelperCall(callInstr, IR::HelperAtomicStore64);
    }
    else
    {
        // Put tmpSrc as dst to make sure we know that register is modified
        IR::Instr* xchgInstr = IR::Instr::New(Js::OpCode::XCHG, tmpSrc, tmpSrc, dst, insertBeforeInstr->m_func);
        insertBeforeInstr->InsertBefore(xchgInstr);
    }
}
// Lower an atomic integer load. A 64-bit load uses LOCK CMPXCHG8B with all
// compare/exchange registers zeroed, which atomically reads the 8-byte value
// into EDX:EAX. Smaller loads emit a memory barrier helper call then a move.
void
LowererMDArch::LowerAtomicLoad(IR::Opnd * dst, IR::Opnd * src1, IR::Instr * insertBeforeInstr)
{
    Assert(IRType_IsNativeInt(dst->GetType()));
    Assert(IRType_IsNativeInt(src1->GetType()));
    Func* func = insertBeforeInstr->m_func;
    if (src1->IsInt64())
    {
        /*
        ;; Zero out all the relevant registers
        xor ebx, ebx;
        xor ecx, ecx;
        xor eax, eax;
        xor edx, edx;
        lock CMPXCHG8B [buffer]
        ;; The value in the buffer is in EDX:EAX
        */
        // CMPXCHG8B has fixed register operands, so pin all four.
        IR::RegOpnd* ecx = IR::RegOpnd::New(RegECX, TyMachReg, func);
        IR::RegOpnd* ebx = IR::RegOpnd::New(RegEBX, TyMachReg, func);
        IR::RegOpnd* eax = IR::RegOpnd::New(RegEAX, TyMachReg, func);
        IR::RegOpnd* edx = IR::RegOpnd::New(RegEDX, TyMachReg, func);
        IR::IntConstOpnd* zero = IR::IntConstOpnd::New(0, TyMachReg, func);
        Lowerer::InsertMove(ebx, zero, insertBeforeInstr);
        Lowerer::InsertMove(ecx, zero, insertBeforeInstr);
        Lowerer::InsertMove(eax, zero, insertBeforeInstr);
        Lowerer::InsertMove(edx, zero, insertBeforeInstr);

        // deps lists all registers the instruction reads; dsts the ones it defines.
        IR::ListOpnd* deps = IR::ListOpnd::New(func, eax, ebx, ecx, edx);
        IR::ListOpnd* dsts = IR::ListOpnd::New(func, eax, edx);
        IR::Instr* cmpxchg = IR::Instr::New(Js::OpCode::LOCKCMPXCHG8B, dsts, src1, deps, func);
        insertBeforeInstr->InsertBefore(cmpxchg);

        // Copy EDX:EAX out into the destination int64 pair.
        Int64RegPair dstPair = func->FindOrCreateInt64Pair(dst);
        Lowerer::InsertMove(dstPair.low, eax, insertBeforeInstr);
        Lowerer::InsertMove(dstPair.high, edx, insertBeforeInstr);
    }
    else
    {
        // <= 32-bit: a plain aligned load is atomic on x86; emit a barrier
        // helper call first, then the move.
        IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, func);
        insertBeforeInstr->InsertBefore(callInstr);
        lowererMD->ChangeToHelperCall(callInstr, IR::HelperMemoryBarrier);
        Lowerer::InsertMove(dst, src1, insertBeforeInstr);
    }
}
// Lower the bounds-check helper path for an asm.js typed-array load. Out of
// bounds: SIMD loads raise a range error; scalar loads yield NaN (float) or 0
// (int). Returns the label after which the actual load proceeds.
IR::Instr*
LowererMDArch::LowerAsmJsLdElemHelper(IR::Instr * instr, bool isSimdLoad /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Opnd * src1 = instr->UnlinkSrc1();
    IRType type = src1->GetType();

    // Layout: [checks] -> helperLabel (out-of-bounds path) -> loadLabel -> doneLabel.
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);

    IR::RegOpnd * indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();
    IR::Opnd * cmpOpnd;
    const uint8 dataWidth = instr->dataWidth;

    Assert(isSimdLoad == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);

    // Compare against the dynamic index when present, else the constant offset.
    if (indexOpnd)
    {
        cmpOpnd = indexOpnd;
    }
    else
    {
        cmpOpnd = IR::IntConstOpnd::New(src1->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
    }

    // if dataWidth != byte per element, we need to check end offset
    if (isSimdLoad && checkEndOffset)
    {
        IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
        // MOV tmp, cmpOnd
        Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
        // ADD tmp, dataWidth
        Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, tmp->GetType(), m_func, true), helperLabel);
        // JB helper (carry set => index + dataWidth overflowed)
        Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
        // CMP tmp, size
        // JG $helper
        lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    }
    else
    {
#ifdef ENABLE_WASM_SIMD
        if (m_func->GetJITFunctionBody()->IsWasmFunction() && src1->AsIndirOpnd()->GetOffset()) //WASM.SIMD
        {
            // Wasm access with a static offset: check index + offset, with
            // overflow detection, against the buffer size.
            IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
            Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
            // ADD tmp, offset
            Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)src1->AsIndirOpnd()->GetOffset(), tmp->GetType(), m_func), helperLabel);
            // JB helper
            Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
            lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
        }
        else
#endif
        {
            // index >= size (unsigned) => out of bounds.
            lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
        }
    }

    // Checks passed: jump over the out-of-bounds block to the load.
    Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);

    if (isSimdLoad)
    {
        lowererMD->m_lowerer->GenerateRuntimeError(loadLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
    }
    else
    {
        // Scalar out-of-bounds load yields NaN for floats, 0 for ints.
        if (IRType_IsFloat(type))
        {
            Lowerer::InsertMove(instr->UnlinkDst(), IR::FloatConstOpnd::New(Js::NumberConstants::NaN, type, m_func), loadLabel);
        }
        else
        {
            Lowerer::InsertMove(instr->UnlinkDst(), IR::IntConstOpnd::New(0, TyInt8, m_func), loadLabel);
        }
    }
    Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, loadLabel);
    return doneLabel;
}
// Lower the bounds-check helper path for an asm.js typed-array store. Out of
// bounds: SIMD stores raise a range error; scalar stores are skipped (branch
// past the store). Returns the label after which the actual store proceeds.
IR::Instr*
LowererMDArch::LowerAsmJsStElemHelper(IR::Instr * instr, bool isSimdStore /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Opnd * dst = instr->UnlinkDst();

    // Layout: [checks] -> helperLabel (out-of-bounds path) -> storeLabel -> doneLabel.
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * storeLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);

    IR::Opnd * cmpOpnd;
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;

    Assert(isSimdStore == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);

    // Compare against the dynamic index when present, else the constant offset.
    if (indexOpnd)
    {
        cmpOpnd = indexOpnd;
    }
    else
    {
        cmpOpnd = IR::IntConstOpnd::New(dst->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
    }

    if (isSimdStore && checkEndOffset)
    {
        IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
        // MOV tmp, cmpOnd
        Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
        // ADD tmp, dataWidth
        Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, tmp->GetType(), m_func, true), helperLabel);
        // JB helper (carry set => index + dataWidth overflowed)
        Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
        // CMP tmp, size
        // JG $helper
        lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    }
    else
    {
#ifdef ENABLE_WASM_SIMD
        if (m_func->GetJITFunctionBody()->IsWasmFunction() && dst->AsIndirOpnd()->GetOffset()) //WASM.SIMD
        {
            // Wasm access with a static offset: check index + offset, with
            // overflow detection, against the buffer size.
            IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
            Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
            // ADD tmp, offset
            Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dst->AsIndirOpnd()->GetOffset(), tmp->GetType(), m_func), helperLabel);
            // JB helper
            Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
            lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
        }
        else
#endif
        {
            // index >= size (unsigned) => out of bounds.
            lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
        }
    }

    // SIMD stores raise; scalar out-of-bounds stores are silently skipped.
    if (isSimdStore)
    {
        lowererMD->m_lowerer->GenerateRuntimeError(storeLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
    }

    Lowerer::InsertBranch(Js::OpCode::Br, storeLabel, helperLabel);
    Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, storeLabel);

    return doneLabel;
}
// Lowers the ArgOut chain feeding a call instruction.
// Walks the singly-linked chain of arg-slot syms from callInstr's src2 back
// to the StartCall, converting each ArgOut into a plain assign (store into
// its arg slot), then lowers the StartCall itself and pushes the callinfo
// constant as a helper argument.
// Returns the total number of machine arg slots consumed by this call
// (args + alignment padding + 1 callinfo slot + extraArgs); also folds that
// into m_argSlotsForFunctionsCalled.
int32
LowererMDArch::LowerCallArgs(IR::Instr *callInstr, ushort callFlags, Js::ArgSlot extraArgs, IR::IntConstOpnd **callInfoOpndRef)
{
    IR::Instr * argInstr;
    uint32 argCount = 0;

    // Lower args and look for StartCall
    argInstr = callInstr;
    IR::Opnd *src2 = argInstr->UnlinkSrc2();
    while (src2->IsSymOpnd())
    {
        // Each SymOpnd link refers to the arg-slot sym whose single def is the
        // previous ArgOut in the chain.
        IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(this->m_func);
        argInstr = argLinkSym->m_instrDef;

        // Mov each arg to it's argSlot
        src2 = argInstr->UnlinkSrc2();
        this->lowererMD->ChangeToAssign(argInstr);
        argCount++;
    }

    // The chain terminates in a RegOpnd whose sym is defined by the StartCall.
    IR::RegOpnd * argLinkOpnd = src2->AsRegOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");

    IR::Instr *startCallInstr = argLinkSym->m_instrDef;

    if (callInstr->m_opcode == Js::OpCode::NewScObject ||
        callInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        callInstr->m_opcode == Js::OpCode::NewScObjArray ||
        callInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
    {
        // These push an extra arg.
        argCount++;
    }

    AssertMsg(startCallInstr->m_opcode == Js::OpCode::StartCall || startCallInstr->m_opcode == Js::OpCode::LoweredStartCall, "Problem with arg chain.");
    AssertMsg(startCallInstr->GetArgOutCount(/*getInterpreterArgOutCount*/ false) == argCount, "ArgCount doesn't match StartCall count");
    //
    // Machine dependent lowering
    //

    // For cloned calls, insert the stack adjustment at the last ArgOut rather
    // than at the (shared) StartCall.
    IR::Instr * insertInstr;
    if (callInstr->IsCloned())
    {
        insertInstr = argInstr;
    }
    else
    {
        insertInstr = startCallInstr;
    }

    int32 stackAlignment;
    if (callInstr->m_opcode == Js::OpCode::AsmJsCallE)
    {
        stackAlignment = LowerStartCallAsmJs(startCallInstr, insertInstr, callInstr);
    }
    else
    {
        stackAlignment = LowerStartCall(startCallInstr, insertInstr);
    }

    startCallInstr->SetIsCloned(callInstr->IsCloned());

    // Push argCount
    IR::IntConstOpnd * argCountOpnd = Lowerer::MakeCallInfoConst(callFlags, argCount, m_func);
    if(callInfoOpndRef)
    {
        // Caller wants a reference to the callinfo constant; add a use so it
        // stays valid.
        argCountOpnd->Use(m_func);
        *callInfoOpndRef = argCountOpnd;
    }
    this->LoadHelperArgument(callInstr, argCountOpnd);

    uint32 argSlots;
    argSlots = argCount + (stackAlignment / 4) + 1 + extraArgs; // + 1 for call flags
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled, argSlots);

    return argSlots;
}
  1077. ///----------------------------------------------------------------------------
  1078. ///
  1079. /// LowererMDArch::LowerCall
  1080. ///
  1081. /// Machine dependent (x86) lowering for calls.
/// Adds an "ADD ESP, argCount*4" (emitted as "LEA ESP, [ESP + argCount*4]") if argCount is not 0.
  1083. ///
  1084. ///----------------------------------------------------------------------------
// Machine-dependent (x86) lowering of a call.
// Rewrites callInstr into a CALL, moves the return value out of the return
// location (x87 stack for floats, register pair for int64, return register
// otherwise), and restores ESP past the pushed arguments.
// Note: the regNum parameter is not referenced in this implementation.
// Returns the last instruction of the lowered sequence.
IR::Instr *
LowererMDArch::LowerCall(IR::Instr * callInstr, uint32 argCount, RegNum regNum)
{
    IR::Instr *retInstr = callInstr;
    callInstr->m_opcode = Js::OpCode::CALL;

    // This is required here due to calls created during lowering
    callInstr->m_func->SetHasCallsOnSelfAndParents();

    if (callInstr->GetDst())
    {
        IR::Opnd * dstOpnd = callInstr->GetDst();
        IRType dstType = dstOpnd->GetType();
        Js::OpCode assignOp = GetAssignOp(dstType);
        IR::Instr * movInstr = nullptr;
        RegNum reg = GetRegReturn(dstType);

        if (IRType_IsFloat(dstType))
        {
            // We should only generate this if sse2 is available
            AssertMsg(AutoSystemInfo::Data.SSE2Available(), "SSE2 not supported");
            AssertMsg(reg == RegNOREG, "No register should be assigned for float Reg");

            // We pop the Float X87 stack using FSTP for the return value of the CALL, instead of storing in XMM0 directly.
            //Before: oldDst = CALL xxx
            //After:
            // CALL xxx
            // newDstOpnd = FSTP
            // oldDst = MOVSD [newDstOpnd]
            IR::Instr * floatPopInstr = IR::Instr::New(Js::OpCode::FSTP, m_func);
            IR::Opnd * oldDst = callInstr->UnlinkDst();

            // Stack slot FSTP writes to and the MOVSD reloads from.
            StackSym * newDstStackSym = StackSym::New(dstType, this->m_func);
            Assert(dstOpnd->IsFloat());
            this->m_func->StackAllocate(newDstStackSym, TySize[dstType]);
            IR::SymOpnd * newDstOpnd = IR::SymOpnd::New(newDstStackSym, dstType, this->m_func);

            floatPopInstr->SetDst(newDstOpnd);
            callInstr->InsertAfter(floatPopInstr);

            movInstr = IR::Instr::New(assignOp, oldDst, newDstOpnd, this->m_func);
            floatPopInstr->InsertAfter(movInstr);
        }
        else if (IRType_IsInt64(dstType))
        {
            // 64-bit results are returned as a register pair; handled separately.
            retInstr = movInstr = LowerInt64CallDst(callInstr);
        }
        else
        {
            // Sink the original dst into a MOV from the return register.
            movInstr = callInstr->SinkDst(assignOp);
            callInstr->GetDst()->AsRegOpnd()->SetReg(reg);
            movInstr->GetSrc1()->AsRegOpnd()->SetReg(reg);
        }
        Assert(movInstr);
        retInstr = movInstr;
    }

    if (argCount)
    {
        // Pop the pushed arguments: LEA ESP, [ESP + argCount * MachPtr]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, RegESP, TyMachReg, this->m_func);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(espOpnd, argCount * MachPtr, TyMachReg, this->m_func);
        IR::Instr * addInstr = IR::Instr::New(Js::OpCode::LEA,
            espOpnd, indirOpnd, this->m_func);
        callInstr->InsertAfter(addInstr);
    }

    // Reset the running helper-arg counter for the next helper call.
    this->helperCallArgsCount = 0;

    return retInstr;
}
  1145. ///----------------------------------------------------------------------------
  1146. ///
  1147. /// LowererMDArch::LowerStartCall
  1148. ///
/// Lower StartCall to a "SUB ESP, argCount * 4" (emitted as an LEA, or a
/// chkstk call when the allocation exceeds a page)
  1150. ///
  1151. ///----------------------------------------------------------------------------
  1152. int32
  1153. LowererMDArch::LowerStartCall(IR::Instr * startCallInstr, IR::Instr* insertInstr)
  1154. {
  1155. AssertMsg(startCallInstr->GetSrc1()->IsIntConstOpnd(), "Bad src on StartCall");
  1156. IR::IntConstOpnd *sizeOpnd = startCallInstr->GetSrc1()->AsIntConstOpnd();
  1157. IntConstType sizeValue = sizeOpnd->GetValue();
  1158. // Maintain 8 byte alignment of the stack.
  1159. // We do this by adjusting the SUB for stackCall to make sure it maintains 8 byte alignment.
  1160. int32 stackAlignment = Math::Align<int32>(sizeValue*MachPtr, MachStackAlignment) - sizeValue*MachPtr;
  1161. if (stackAlignment != 0)
  1162. {
  1163. sizeValue += 1;
  1164. }
  1165. sizeValue *= MachPtr;
  1166. IR::Instr* newStartCall;
  1167. if ((uint32)sizeValue > AutoSystemInfo::PageSize) {
  1168. // Convert StartCall into a chkstk
  1169. // mov eax, sizeOpnd->m_value
  1170. // call _chkstk
  1171. IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, this->GetRegChkStkParam(), TyMachReg, this->m_func);
  1172. Lowerer::InsertMove(eaxOpnd, IR::IntConstOpnd::New(sizeValue, TyInt32, this->m_func, /*dontEncode*/true), insertInstr);
  1173. newStartCall = IR::Instr::New(Js::OpCode::Call, this->m_func);
  1174. newStartCall->SetSrc1(IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func));
  1175. insertInstr->InsertBefore(newStartCall);
  1176. this->LowerCall(newStartCall, 0);
  1177. } else {
  1178. // Convert StartCall into
  1179. // lea esp, [esp - sizeValue]
  1180. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
  1181. newStartCall = IR::Instr::New(Js::OpCode::LEA, espOpnd, IR::IndirOpnd::New(espOpnd, -sizeValue, TyMachReg, this->m_func), this->m_func);
  1182. insertInstr->InsertBefore(newStartCall);
  1183. }
  1184. newStartCall->SetByteCodeOffset(startCallInstr);
  1185. // Mark the start call as being lowered - this is required by the bailout encoding logic
  1186. startCallInstr->m_opcode = Js::OpCode::LoweredStartCall;
  1187. return stackAlignment;
  1188. }
// Lower a StartCall for an asm.js call (callInstr is not referenced here).
// Unlike LowerStartCall, the size comes from src2 and is treated as a byte
// count directly (no MachPtr scaling; alignment padding adds MachPtr bytes).
// Returns the alignment padding in bytes.
int32
LowererMDArch::LowerStartCallAsmJs(IR::Instr * startCallInstr, IR::Instr * insertInstr, IR::Instr * callInstr)
{
    AssertMsg(startCallInstr->GetSrc1()->IsIntConstOpnd(), "Bad src on StartCall");
    AssertMsg(startCallInstr->GetSrc2()->IsIntConstOpnd(), "Bad src on StartCall");
    IR::IntConstOpnd * sizeOpnd = startCallInstr->GetSrc2()->AsIntConstOpnd();

    IntConstType sizeValue = sizeOpnd->GetValue();

    // Maintain 8 byte alignment of the stack.
    // We do this by adjusting the SUB for stackCall to make sure it maintains 8 byte alignment.
    int32 stackAlignment = Math::Align<int32>(sizeValue, MachStackAlignment) - sizeValue;
    if (stackAlignment != 0)
    {
        sizeValue += MachPtr;
    }

    IR::Instr* newStartCall;
    if ((uint32)sizeValue > AutoSystemInfo::PageSize) {
        // Convert StartCall into a chkstk
        // mov eax, sizeOpnd->m_value
        // call _chkstk
        IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, GetRegChkStkParam(), TyMachReg, m_func);
        Lowerer::InsertMove(eaxOpnd, IR::IntConstOpnd::New(sizeValue, TyInt32, m_func, /*dontEncode*/true), insertInstr);

        newStartCall = IR::Instr::New(Js::OpCode::Call, m_func);
        newStartCall->SetSrc1(IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, m_func));
        insertInstr->InsertBefore(newStartCall);
        LowerCall(newStartCall, 0);
    }
    else {
        // Convert StartCall into
        // lea esp, [esp - sizeValue]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, m_func);
        newStartCall = IR::Instr::New(Js::OpCode::LEA, espOpnd, IR::IndirOpnd::New(espOpnd, -sizeValue, TyMachReg, m_func), m_func);
        insertInstr->InsertBefore(newStartCall);
    }
    newStartCall->SetByteCodeOffset(startCallInstr);

    // Mark the start call as being lowered - this is required by the bailout encoding logic
    startCallInstr->m_opcode = Js::OpCode::LoweredStartCall;

    return stackAlignment;
}
  1227. ///----------------------------------------------------------------------------
  1228. ///
  1229. /// LowererMDArch::LoadHelperArgument
  1230. ///
/// Insert a PUSH of the argument before the call.
  1232. ///
  1233. ///----------------------------------------------------------------------------
  1234. IR::Instr *
  1235. LowererMDArch::LoadHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  1236. {
  1237. IR::Instr * pushInstr;
  1238. pushInstr = IR::Instr::New(Js::OpCode::PUSH, instr->m_func);
  1239. if(TySize[opndArg->GetType()] < TySize[TyMachReg])
  1240. {
  1241. Assert(!opndArg->IsMemoryOpnd()); // if it's a memory opnd, it would need to be loaded into a register first
  1242. opndArg = opndArg->UseWithNewType(TyMachReg, instr->m_func);
  1243. }
  1244. pushInstr->SetSrc1(opndArg);
  1245. instr->InsertBefore(pushInstr);
  1246. this->helperCallArgsCount++;
  1247. AssertMsg(helperCallArgsCount <= LowererMDArch::MaxArgumentsToHelper, "The # of arguments to the helper is too big.");
  1248. return pushInstr;
  1249. }
  1250. IR::Instr *
  1251. LowererMDArch::LoadDynamicArgument(IR::Instr * instr, uint argNumber /*ignore for x86*/)
  1252. {
  1253. //Convert to push instruction.
  1254. instr->m_opcode = Js::OpCode::PUSH;
  1255. return instr;
  1256. }
  1257. IR::Instr *
  1258. LowererMDArch::LoadInt64HelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1259. {
  1260. Int64RegPair argPair = m_func->FindOrCreateInt64Pair(opndArg);
  1261. LoadHelperArgument(instrInsert, argPair.high);
  1262. return LoadHelperArgument(instrInsert, argPair.low);
  1263. }
  1264. IR::Instr *
  1265. LowererMDArch::LoadDoubleHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1266. {
  1267. IR::Instr * instrPrev;
  1268. IR::Instr * instr;
  1269. IR::Opnd * opnd;
  1270. IR::Opnd * float64Opnd;
  1271. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
  1272. opnd = IR::IndirOpnd::New(espOpnd, -8, TyMachReg, this->m_func);
  1273. instrPrev = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
  1274. instrInsert->InsertBefore(instrPrev);
  1275. opnd = IR::IndirOpnd::New(espOpnd, (int32)0, TyFloat64, this->m_func);
  1276. if (opndArg->GetType() == TyFloat32)
  1277. {
  1278. float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
  1279. instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, opndArg, this->m_func);
  1280. instrInsert->InsertBefore(instr);
  1281. }
  1282. else
  1283. {
  1284. float64Opnd = opndArg;
  1285. }
  1286. instr = IR::Instr::New(Js::OpCode::MOVSD, opnd, float64Opnd, this->m_func);
  1287. instrInsert->InsertBefore(instr);
  1288. LowererMD::Legalize(instr);
  1289. return instrPrev;
  1290. }
  1291. IR::Instr *
  1292. LowererMDArch::LoadFloatHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1293. {
  1294. IR::Instr * instrPrev;
  1295. IR::Instr * instr;
  1296. IR::Opnd * opnd;
  1297. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
  1298. opnd = IR::IndirOpnd::New(espOpnd, -4, TyMachReg, this->m_func);
  1299. instrPrev = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
  1300. instrInsert->InsertBefore(instrPrev);
  1301. opnd = IR::IndirOpnd::New(espOpnd, (int32)0, TyFloat32, this->m_func);
  1302. instr = IR::Instr::New(Js::OpCode::MOVSS, opnd, opndArg, this->m_func);
  1303. instrInsert->InsertBefore(instr);
  1304. LowererMD::Legalize(instr);
  1305. return instrPrev;
  1306. }
  1307. ///----------------------------------------------------------------------------
  1308. ///
  1309. /// LowererMDArch::LowerEntryInstr
  1310. ///
  1311. /// Emit prolog.
  1312. ///
  1313. ///----------------------------------------------------------------------------
// Emit the x86 prolog.
// NOTE: every instruction below is inserted with entryInstr->InsertAfter(...),
// so each newly inserted instruction lands immediately after the entry label
// and thus *before* everything inserted earlier. The prolog is therefore
// built back-to-front: PUSH EBP / MOV EBP, ESP are inserted last so they
// execute first at runtime.
IR::Instr *
LowererMDArch::LowerEntryInstr(IR::EntryInstr * entryInstr)
{
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    // Optional debug-mode check: call a helper that validates stack alignment.
    if (Js::Configuration::Global.flags.IsEnabled(Js::CheckAlignmentFlag))
    {
        IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
        callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrFunc_CheckAlignment, this->m_func));
        entryInstr->InsertAfter(callInstr);
        this->LowerCall(callInstr, 0, RegEAX);
    }
#endif

    int32 bytesOnStack = MachRegInt+MachRegInt; // Account for return address+push EBP...

    // PUSH used callee-saved registers
    for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg+1))
    {
        if (LinearScan::IsCalleeSaved(reg) && (this->m_func->m_regsUsed.Test(reg)))
        {
            IR::RegOpnd * regOpnd = IR::RegOpnd::New(nullptr, reg, TyMachReg, this->m_func);
            IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
            pushInstr->SetSrc1(regOpnd);
            entryInstr->InsertAfter(pushInstr);
            bytesOnStack += MachRegInt;
        }
    }

    // Allocate frame

    IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, this->GetRegBlockPointer(), TyMachReg, this->m_func);
    IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);

    // Dedicated argument slot is already included in the m_localStackHeight (see Func ctor)
    // Allocate the inlined arg out stack in the locals. Allocate an additional slot so that
    // we can unconditionally clear the argc slot of the next frame.
    this->m_func->m_localStackHeight += m_func->GetMaxInlineeArgOutSize() + MachPtr;
    bytesOnStack += this->m_func->m_localStackHeight;

    int32 alignment = Math::Align<int32>(bytesOnStack, MachStackAlignment) - bytesOnStack;

    // Make sure this frame allocation maintains 8-byte alignment. Our point of reference is the return address
    this->m_func->m_localStackHeight += alignment;
    bytesOnStack += alignment;
    Assert(Math::Align<int32>(bytesOnStack, MachStackAlignment) == bytesOnStack);

    Assert(this->m_func->hasBailout || this->bailOutStackRestoreLabel == nullptr);
    this->m_func->frameSize = bytesOnStack;

    if (this->m_func->HasInlinee())
    {
        // Publish the frame height and zero the first inlined-arg slot so
        // the next frame's argc slot starts out cleared.
        this->m_func->GetJITOutput()->SetFrameHeight(this->m_func->m_localStackHeight);

        StackSym *sym = this->m_func->m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
        sym->m_isInlinedArgSlot = true;
        sym->m_offset = 0;
        IR::Opnd *dst = IR::SymOpnd::New(sym, TyMachReg, this->m_func);
        entryInstr->InsertAfter(IR::Instr::New(Js::OpCode::MOV,
            dst,
            IR::AddrOpnd::NewNull(this->m_func),
            this->m_func));
    }

    if (this->m_func->m_localStackHeight != 0)
    {
        int32 stackSize = this->m_func->m_localStackHeight;
        if (this->m_func->HasArgumentSlot())
        {
            // We separately push the stack argument slot below
            stackSize -= MachPtr;
        }

        if (this->m_func->m_localStackHeight <= PAGESIZE)
        {
            // Generate LEA ESP, [esp - stackSize]   // Atom prefers LEA for address computations
            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(espOpnd, -stackSize, TyMachReg, this->m_func);
            IR::Instr * subInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, indirOpnd, this->m_func);
            entryInstr->InsertAfter(subInstr);
        }
        else
        {
            // Generate chkstk call (probes guard pages for allocations > one page).
            // Remember: InsertAfter ordering means the MOV of the size into the
            // chkstk parameter register (inserted at entryInstr->m_next below)
            // executes before the call.
            IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, this->GetRegChkStkParam(), TyMachReg, this->m_func);
            IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, eaxOpnd,
                IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func), this->m_func);
            entryInstr->InsertAfter(callInstr);
            this->LowerCall(callInstr, 0, RegECX);

            IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(stackSize, TyMachReg, this->m_func);
            Lowerer::InsertMove(eaxOpnd, stackSizeOpnd, entryInstr->m_next);
        }
    }

    // Zero-initialize dedicated arguments slot
    if (this->m_func->HasArgumentSlot())
    {
        IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        pushInstr->SetSrc1(IR::IntConstOpnd::New(0, TyMachPtr, this->m_func));
        entryInstr->InsertAfter(pushInstr);
    }

    // Probe the stack for the whole frame plus the worst-case outgoing args
    // plus the JIT's reserved headroom.
    size_t frameSize = bytesOnStack + ((this->m_func->m_argSlotsForFunctionsCalled + 1) * MachPtr) + Js::Constants::MinStackJIT;
    this->GeneratePrologueStackProbe(entryInstr, frameSize);

    // Generate MOV EBP, ESP / PUSH EBP (inserted last => executed first).
    IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, ebpOpnd, espOpnd, this->m_func);
    entryInstr->InsertAfter(movInstr);

    // Generate PUSH EBP
    IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
    pushInstr->SetSrc1(ebpOpnd);
    entryInstr->InsertAfter(pushInstr);

    return entryInstr;
}
void
LowererMDArch::GeneratePrologueStackProbe(IR::Instr *entryInstr, size_t frameSize)
{
    //
    // Generate a stack overflow check. This can be as simple as a cmp esp, const
    // because this function is guaranteed to be called on its base thread only.
    // If the check fails call ThreadContext::ProbeCurrentStack which will check again and throw if needed.
    //
    //       cmp  esp, ThreadContext::scriptStackLimit + frameSize
    //       jg   done
    //       push frameSize
    //       call ThreadContext::ProbeCurrentStack
    // For thread-agile thread context
    //       mov  eax, [ThreadContext::stackLimitForCurrentThread]
    //       add  eax, frameSize
    //       cmp  esp, eax
    //       jg   done
    //       push frameSize
    //       call ThreadContext::ProbeCurrentStack
    // done:
    //
    // For thread context with script interrupt enabled:
    //       mov  eax, [ThreadContext::stackLimitForCurrentThread]
    //       add  eax, frameSize
    //       jo   $helper
    //       cmp  esp, eax
    //       jg   done
    // $helper:
    //       push frameSize
    //       call ThreadContext::ProbeCurrentStack
    // done:
    //

    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    // All instructions are inserted before insertInstr, i.e. right after the entry.
    IR::Instr *insertInstr = entryInstr->m_next;
    IR::Instr *instr;
    IR::Opnd *stackLimitOpnd;
    bool doInterruptProbe = m_func->GetJITFunctionBody()->DoInterruptProbe();

    if (doInterruptProbe || !m_func->GetThreadContextInfo()->IsThreadBound())
    {
        // Load the current stack limit from the ThreadContext, then increment this value by the size of the
        // current frame. This is the value we'll compare against below.

        stackLimitOpnd = IR::RegOpnd::New(nullptr, RegEAX, TyMachReg, this->m_func);
        intptr_t pLimit = m_func->GetThreadContextInfo()->GetThreadStackLimitAddr();
        IR::MemRefOpnd * memOpnd = IR::MemRefOpnd::New(pLimit, TyMachReg, this->m_func);
        Lowerer::InsertMove(stackLimitOpnd, memOpnd, insertInstr);

        instr = IR::Instr::New(Js::OpCode::ADD, stackLimitOpnd, stackLimitOpnd,
            IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        if (doInterruptProbe)
        {
            // If this add overflows, then we need to call out to the helper.
            // (The interrupt mechanism can set the limit to a sentinel that overflows.)
            instr = IR::BranchInstr::New(Js::OpCode::JO, helperLabel, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }
    else
    {
        // The incremented stack limit is a compile-time constant.

        size_t scriptStackLimit = (size_t)m_func->GetThreadContextInfo()->GetScriptStackLimit();
        stackLimitOpnd = IR::IntConstOpnd::New((frameSize + scriptStackLimit), TyMachReg, this->m_func);
    }

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
    if (!IS_FAULTINJECT_STACK_PROBE_ON) // Do stack check fastpath only if not doing StackProbe fault injection
    {
        // Fast path: cmp esp, limit / jg done.
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func));
        instr->SetSrc2(stackLimitOpnd);
        insertInstr->InsertBefore(instr);

        instr = IR::BranchInstr::New(Js::OpCode::JGT, doneLabel, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    // Slow path: call the probe helper.
    insertInstr->InsertBefore(helperLabel);

    // Make sure we have zero where we expect to find the stack nested func pointer relative to EBP.
    LoadHelperArgument(insertInstr, IR::IntConstOpnd::New(0, TyMachReg, m_func));
    LoadHelperArgument(insertInstr, IR::IntConstOpnd::New(0, TyMachReg, m_func));

    // Load the arguments to the probe helper and do the call.
    lowererMD->m_lowerer->LoadScriptContext(insertInstr);
    this->lowererMD->LoadHelperArgument(
        insertInstr, IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func));

    instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProbeCurrentStack2, this->m_func));
    insertInstr->InsertBefore(instr);
    this->LowerCall(instr, 0, RegEAX);

    insertInstr->InsertBefore(doneLabel);
    // Pad with random instructions to vary the function layout (security mitigation).
    Security::InsertRandomFunctionPad(doneLabel);
}
  1496. ///----------------------------------------------------------------------------
  1497. ///
  1498. /// LowererMDArch::LowerExitInstr
  1499. ///
  1500. /// Emit epilog.
  1501. ///
  1502. ///----------------------------------------------------------------------------
  1503. IR::Instr *
  1504. LowererMDArch::LowerExitInstr(IR::ExitInstr * exitInstr)
  1505. {
  1506. exitInstr = LowerExitInstrCommon(exitInstr);
  1507. // Insert RET
  1508. IR::IntConstOpnd * intSrc = IR::IntConstOpnd::New(0, TyMachReg, this->m_func);
  1509. IR::RegOpnd *eaxReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1510. IR::Instr *retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  1511. retInstr->SetSrc1(intSrc);
  1512. retInstr->SetSrc2(eaxReg);
  1513. exitInstr->InsertBefore(retInstr);
  1514. return exitInstr;
  1515. }
  1516. IR::Instr *
  1517. LowererMDArch::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
  1518. {
  1519. exitInstr = LowerExitInstrCommon(exitInstr);
  1520. // get asm.js return type
  1521. IR::IntConstOpnd* intSrc = nullptr;
  1522. if (m_func->IsLoopBody())
  1523. {
  1524. // Insert RET
  1525. intSrc = IR::IntConstOpnd::New(0, TyMachReg, this->m_func);
  1526. }
  1527. else
  1528. {
  1529. // Generate RET
  1530. int32 alignedSize = Math::Align<int32>(m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetArgByteSize(), MachStackAlignment);
  1531. intSrc = IR::IntConstOpnd::New(alignedSize + MachPtr, TyMachReg, m_func);
  1532. }
  1533. IR::Instr *retInstr = IR::Instr::New(Js::OpCode::RET, m_func);
  1534. retInstr->SetSrc1(intSrc);
  1535. exitInstr->InsertBefore(retInstr);
  1536. return exitInstr;
  1537. }
  1538. IR::ExitInstr *
  1539. LowererMDArch::LowerExitInstrCommon(IR::ExitInstr * exitInstr)
  1540. {
  1541. IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, GetRegBlockPointer(), TyMachReg, m_func);
  1542. IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, m_func);
  1543. // POP used callee-saved registers
  1544. for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg + 1))
  1545. {
  1546. if (LinearScan::IsCalleeSaved(reg) && (m_func->m_regsUsed.Test(reg)))
  1547. {
  1548. IR::RegOpnd * regOpnd = IR::RegOpnd::New(nullptr, reg, TyMachReg, m_func);
  1549. IR::Instr * popInstr = IR::Instr::New(Js::OpCode::POP, regOpnd, m_func);
  1550. exitInstr->InsertBefore(popInstr);
  1551. }
  1552. }
  1553. // Restore frame
  1554. // Generate MOV ESP, EBP
  1555. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, espOpnd, ebpOpnd, m_func);
  1556. exitInstr->InsertBefore(movInstr);
  1557. // Generate POP EBP
  1558. IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::POP, ebpOpnd, m_func);
  1559. exitInstr->InsertBefore(pushInstr);
  1560. return exitInstr;
  1561. }
// Lower a 64-bit assign on x86 by splitting it into a pair of 32-bit moves
// (low half via the instruction itself, high half via an inserted move).
// Special cases:
//  - stores narrower than 8 bytes skip the high-half store entirely;
//  - loads narrower than 8 bytes zero-extend (unsigned) or sign-extend
//    (signed, via SAR 31) into the high half instead of loading it.
// Returns the instruction preceding the lowered sequence, or the original
// instruction unchanged when the dst/src shapes don't match.
IR::Instr *
LowererMDArch::ChangeToAssignInt64(IR::Instr * instr)
{
    IR::Opnd* dst = instr->UnlinkDst();
    IR::Opnd* src1 = instr->UnlinkSrc1();
    Func* m_func = instr->m_func;
    if (dst && (dst->IsRegOpnd() || dst->IsSymOpnd() || dst->IsIndirOpnd()) && src1)
    {
        int dstSize = dst->GetSize();
        int srcSize = src1->GetSize();
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);
        Int64RegPair src1Pair = m_func->FindOrCreateInt64Pair(src1);

        // Reuse the original instruction for the low half.
        instr->SetSrc1(src1Pair.low);
        instr->SetDst(dstPair.low);
        LowererMD::ChangeToAssignNoBarrierCheck(instr); // No WriteBarrier for assigning int64 on x86
        IR::Instr * insertBeforeInstr = instr->m_next;

        // Do not store to memory if we wanted less than 8 bytes
        const bool canAssignHigh = !dst->IsIndirOpnd() || dstSize == 8;
        const bool isLoadFromWordMem = src1->IsIndirOpnd() && srcSize < 8;
        if (canAssignHigh)
        {
            if (!isLoadFromWordMem)
            {
                // Normal case, assign source's high bits to dst's high bits
                Lowerer::InsertMove(dstPair.high, src1Pair.high, insertBeforeInstr, /*generateWriteBarrier*/false);
            }
            else
            {
                // Do not load from memory if we wanted less than 8 bytes
                src1Pair.high->Free(m_func);
                if (IRType_IsUnsignedInt(src1->GetType()))
                {
                    // If this is an unsigned assign from memory, we can simply set the high bits to 0
                    Lowerer::InsertMove(dstPair.high, IR::IntConstOpnd::New(0, TyInt32, m_func), insertBeforeInstr, /*generateWriteBarrier*/false);
                }
                else
                {
                    // If this is a signed assign from memory, we need to extend the sign
                    // Copy the low half into the high half, then SAR by 31 to
                    // replicate the sign bit across the high word.
                    IR::Instr* highExtendInstr = Lowerer::InsertMove(dstPair.high, dstPair.low, insertBeforeInstr, /*generateWriteBarrier*/false);
                    highExtendInstr = IR::Instr::New(Js::OpCode::SAR, dstPair.high, dstPair.high, IR::IntConstOpnd::New(31, TyInt32, m_func), m_func);
                    insertBeforeInstr->InsertBefore(highExtendInstr);
                }
            }
        }
        return instr->m_prev;
    }
    return instr;
}
// Lower a 64-bit arithmetic/logic instruction on x86.
// Branches are delegated to LowerInt64Branch. Bitwise ops and add/sub are
// expanded inline into a pair of 32-bit ops (the high op chained through the
// carry/borrow flag: ADD/ADC, SUB/SBB). Everything else (shifts, rotates,
// clz/ctz/popcnt, mul, div, rem) is lowered to a helper call taking the
// operands as 64-bit helper arguments.
void
LowererMDArch::EmitInt64Instr(IR::Instr *instr)
{
    if (instr->IsBranchInstr())
    {
        LowerInt64Branch(instr);
        return;
    }

    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd* src2 = instr->GetSrc2();
    Assert(!dst || dst->IsInt64());
    Assert(!src1 || src1->IsInt64());
    Assert(!src2 || src2->IsInt64());

    // Replace instr with a call to the given helper, passing src1 (and src2
    // if present) as 64-bit pushed arguments. Args are pushed src2-first so
    // src1 ends up closest to the top of stack.
    const auto LowerToHelper = [&](IR::JnHelperMethod helper) {
        if (src2)
        {
            LoadInt64HelperArgument(instr, src2);
        }
        Assert(src1);
        LoadInt64HelperArgument(instr, src1);

        IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, dst, this->m_func);
        instr->InsertBefore(callInstr);
        lowererMD->ChangeToHelperCall(callInstr, helper);
        instr->Remove();
        return callInstr;
    };
    Js::OpCode lowOpCode, highOpCode;
    switch (instr->m_opcode)
    {
    case Js::OpCode::Xor_A:
    case Js::OpCode::Xor_I4:
        lowOpCode = Js::OpCode::XOR;
        highOpCode = Js::OpCode::XOR;
        goto binopCommon;
    case Js::OpCode::Or_A:
    case Js::OpCode::Or_I4:
        lowOpCode = Js::OpCode::OR;
        highOpCode = Js::OpCode::OR;
        goto binopCommon;
    case Js::OpCode::And_A:
    case Js::OpCode::And_I4:
        lowOpCode = Js::OpCode::AND;
        highOpCode = Js::OpCode::AND;
        goto binopCommon;
    case Js::OpCode::Add_A:
    case Js::OpCode::Add_I4:
        // Low halves added with ADD; high halves with ADC to propagate carry.
        lowOpCode = Js::OpCode::ADD;
        highOpCode = Js::OpCode::ADC;
        goto binopCommon;
    case Js::OpCode::Sub_A:
    case Js::OpCode::Sub_I4:
        // Low halves subtracted with SUB; high halves with SBB to propagate borrow.
        lowOpCode = Js::OpCode::SUB;
        highOpCode = Js::OpCode::SBB;
binopCommon:
        {
            // Emit the low-half op first, then retarget the original instr to
            // the high halves. The high op must immediately consume the flags
            // produced by the low op (ADC/SBB).
            Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);
            Int64RegPair src1Pair = m_func->FindOrCreateInt64Pair(src1);
            Int64RegPair src2Pair = m_func->FindOrCreateInt64Pair(src2);
            IR::Instr* lowInstr = IR::Instr::New(lowOpCode, dstPair.low, src1Pair.low, src2Pair.low, m_func);
            instr->InsertBefore(lowInstr);
            LowererMD::Legalize(lowInstr);

            instr->ReplaceDst(dstPair.high);
            instr->ReplaceSrc1(src1Pair.high);
            instr->ReplaceSrc2(src2Pair.high);
            instr->m_opcode = highOpCode;
            LowererMD::Legalize(instr);
            break;
        }
    case Js::OpCode::ShrU_A:
    case Js::OpCode::ShrU_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64ShrU);
        break;
    case Js::OpCode::Shr_A:
    case Js::OpCode::Shr_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Shr);
        break;
    case Js::OpCode::Shl_A:
    case Js::OpCode::Shl_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Shl);
        break;
    case Js::OpCode::Rol_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Rol);
        break;
    case Js::OpCode::Ror_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Ror);
        break;
    case Js::OpCode::InlineMathClz:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Clz);
        break;
    case Js::OpCode::Ctz:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Ctz);
        break;
    case Js::OpCode::PopCnt:
        instr = LowerToHelper(IR::HelperPopCnt64);
        break;
    case Js::OpCode::Mul_A:
    case Js::OpCode::Mul_I4:
        instr = LowerToHelper(IR::HelperDirectMath_Int64Mul);
        break;
    // Div/rem helpers additionally take the script context (e.g. to raise
    // runtime errors such as division by zero).
    case Js::OpCode::DivU_I4:
        this->lowererMD->m_lowerer->LoadScriptContext(instr);
        instr = LowerToHelper(IR::HelperDirectMath_Int64DivU);
        break;
    case Js::OpCode::Div_A:
    case Js::OpCode::Div_I4:
        this->lowererMD->m_lowerer->LoadScriptContext(instr);
        instr = LowerToHelper(IR::HelperDirectMath_Int64DivS);
        break;
    case Js::OpCode::RemU_I4:
        this->lowererMD->m_lowerer->LoadScriptContext(instr);
        instr = LowerToHelper(IR::HelperDirectMath_Int64RemU);
        break;
    case Js::OpCode::Rem_A:
    case Js::OpCode::Rem_I4:
        this->lowererMD->m_lowerer->LoadScriptContext(instr);
        instr = LowerToHelper(IR::HelperDirectMath_Int64RemS);
        break;
    default:
        AssertMsg(UNREACHED, "Int64 opcode not supported");
    }
}
// Lowers a 64-bit conditional branch on x86 (no 64-bit GPRs) by splitting each
// operand into a 32-bit high/low register pair and comparing the halves
// separately: the high halves decide the signed/unsigned relation, and the low
// halves (always compared unsigned) break the tie when the high halves are equal.
// A missing src2 means "compare against the 64-bit constant 0".
void LowererMDArch::LowerInt64Branch(IR::Instr *instr)
{
    AssertOrFailFast(instr->IsBranchInstr());
    IR::BranchInstr* branchInstr = instr->AsBranchInstr();
    Assert(branchInstr->IsConditional());
    // destination label
    IR::LabelInstr* jmpLabel = branchInstr->GetTarget();
    // Label to use when we know the condition is false after checking only the high bits
    IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    branchInstr->InsertAfter(doneLabel);
    IR::Opnd* src1 = instr->UnlinkSrc1();
    // BrTrue/BrFalse come in with a single source; synthesize a zero second operand.
    IR::Opnd* src2 = instr->GetSrc2() ? instr->UnlinkSrc2() : IR::Int64ConstOpnd::New(0, TyInt64, this->m_func);
    Assert(src1 && src1->IsInt64());
    Assert(src2 && src2->IsInt64());
    Int64RegPair src1Pair = m_func->FindOrCreateInt64Pair(src1);
    Int64RegPair src2Pair = m_func->FindOrCreateInt64Pair(src2);
    // Jump to doneLabel (condition known false) when the preceding CMP found inequality.
    const auto insertJNE = [&]()
    {
        IR::Instr* newInstr = IR::BranchInstr::New(Js::OpCode::JNE, doneLabel, m_func);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
    };
    // CMP the high 32 bits and branch to `label` on `jumpOp`.
    const auto cmpHighAndJump = [&](Js::OpCode jumpOp, IR::LabelInstr* label)
    {
        IR::Instr* newInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        newInstr->SetSrc1(src1Pair.high);
        newInstr->SetSrc2(src2Pair.high);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        newInstr = IR::BranchInstr::New(jumpOp, label, this->m_func);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
    };
    // CMP the low 32 bits and retarget the original branch instruction itself
    // to the machine-level jump `jumpOp` (its target label is unchanged).
    const auto cmpLowAndJump = [&](Js::OpCode jumpOp)
    {
        IR::Instr* newInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src2Pair.low);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        branchInstr->m_opcode = jumpOp;
    };
    const auto cmpInt64Common = [&](Js::OpCode cmpHighJmpOp, Js::OpCode cmpLowJmpOp)
    {
        // CMP src1.high, src2.high
        // JCC target
        // JNE done     ;; not equal means it's inverse of JCC, do not change in case cmp opnd are swapped
        //              ;; Fallthrough src1.high == src2.high
        // CMP src1.low, src2.low
        // JCC target   ;; Must do unsigned comparison on low bits
        //done:
        cmpHighAndJump(cmpHighJmpOp, jmpLabel);
        insertJNE();
        cmpLowAndJump(cmpLowJmpOp);
    };
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrTrue_A:
    case Js::OpCode::BrTrue_I4:
    {
        // For BrTrue, we only need to check the low bits
        // TEST src1.low, src1.low
        // JNE target
        IR::Instr* newInstr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src1Pair.low);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        // If src1 is not 0, jump to destination
        branchInstr->m_opcode = Js::OpCode::JNE;
        // Don't need the doneLabel for this case
        doneLabel->Remove();
        break;
    }
    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrFalse_I4:
    {
        // For BrFalse, we only need to check the low bits
        // TEST src1.low, src1.low
        // JEQ target
        IR::Instr* newInstr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src1Pair.low);
        branchInstr->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        // If src1 is 0, jump to destination
        branchInstr->m_opcode = Js::OpCode::JEQ;
        // Don't need the doneLabel for this case
        doneLabel->Remove();
        break;
    }
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrEq_I4:
        // CMP src1.high, src2.high
        // JNE done
        // CMP src1.low, src2.low
        // JEQ target
        //done:
        cmpHighAndJump(Js::OpCode::JNE, doneLabel);
        cmpLowAndJump(Js::OpCode::JEQ);
        break;
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNeq_I4:
        // Any half differing proves inequality, so both comparisons jump straight to target.
        // CMP src1.high, src2.high
        // JNE target
        // CMP src1.low, src2.low
        // JNE target
        cmpHighAndJump(Js::OpCode::JNE, jmpLabel);
        cmpLowAndJump(Js::OpCode::JNE);
        // Don't need the doneLabel for this case
        doneLabel->Remove();
        break;
    // Unsigned relations: high-half compare uses the unsigned condition codes.
    case Js::OpCode::BrUnGt_I4: cmpInt64Common(Js::OpCode::JA, Js::OpCode::JA); break;
    case Js::OpCode::BrUnGe_I4: cmpInt64Common(Js::OpCode::JA, Js::OpCode::JAE); break;
    case Js::OpCode::BrUnLt_I4: cmpInt64Common(Js::OpCode::JB, Js::OpCode::JB); break;
    case Js::OpCode::BrUnLe_I4: cmpInt64Common(Js::OpCode::JB, Js::OpCode::JBE); break;
    // Signed relations: signed condition on the high half, unsigned on the low half
    // (the low 32 bits of a two's-complement 64-bit value compare unsigned).
    case Js::OpCode::BrGt_A: // Fall through
    case Js::OpCode::BrGt_I4: cmpInt64Common(Js::OpCode::JGT, Js::OpCode::JA); break;
    case Js::OpCode::BrGe_A: // Fall through
    case Js::OpCode::BrGe_I4: cmpInt64Common(Js::OpCode::JGT, Js::OpCode::JAE); break;
    case Js::OpCode::BrLt_A: // Fall through
    case Js::OpCode::BrLt_I4: cmpInt64Common(Js::OpCode::JLT, Js::OpCode::JB); break;
    case Js::OpCode::BrLe_A: // Fall through
    case Js::OpCode::BrLe_I4: cmpInt64Common(Js::OpCode::JLT, Js::OpCode::JBE); break;
    default:
        AssertMsg(UNREACHED, "Int64 branch opcode not supported");
        branchInstr->m_opcode = Js::OpCode::Nop;
    }
}
// Lowers a machine-independent 32-bit integer IR instruction to the
// corresponding x86 machine opcode(s). Most opcodes map 1:1; division and
// branches expand to short sequences. The instruction is legalized in place
// unless a case returns early (div/branch paths legalize or finish themselves).
void
LowererMDArch::EmitInt4Instr(IR::Instr *instr)
{
    IR::Instr *newInstr;
    IR::Opnd *src1, *src2;
    IR::RegOpnd *regEDX;
    switch(instr->m_opcode)
    {
    case Js::OpCode::Neg_I4:
        instr->m_opcode = Js::OpCode::NEG;
        break;
    case Js::OpCode::Not_I4:
        instr->m_opcode = Js::OpCode::NOT;
        break;
    case Js::OpCode::Add_I4:
        LowererMD::ChangeToAdd(instr, false /* needFlags */);
        break;
    case Js::OpCode::Sub_I4:
        LowererMD::ChangeToSub(instr, false /* needFlags */);
        break;
    case Js::OpCode::Mul_I4:
        instr->m_opcode = Js::OpCode::IMUL2;
        break;
    // x86 DIV/IDIV produce the quotient in EAX and the remainder in EDX,
    // so the destination is pinned to the appropriate register first.
    case Js::OpCode::DivU_I4:
    case Js::OpCode::Div_I4:
        instr->SinkDst(Js::OpCode::MOV, RegEAX);
        goto idiv_common;
    case Js::OpCode::RemU_I4:
    case Js::OpCode::Rem_I4:
        instr->SinkDst(Js::OpCode::MOV, RegEDX);
idiv_common:
        // Unsigned types take DIV; signed take IDIV.
        if (instr->GetSrc1()->IsUInt32())
        {
            Assert(instr->GetSrc2()->IsUInt32());
            Assert(instr->m_opcode == Js::OpCode::RemU_I4 || instr->m_opcode == Js::OpCode::DivU_I4);
            instr->m_opcode = Js::OpCode::DIV;
        }
        else
        {
            instr->m_opcode = Js::OpCode::IDIV;
        }
        // Dividend must be in EDX:EAX.
        instr->HoistSrc1(Js::OpCode::MOV, RegEAX);
        regEDX = IR::RegOpnd::New(TyInt32, instr->m_func);
        regEDX->SetReg(RegEDX);
        if (instr->GetSrc1()->IsUInt32())
        {
            // we need to ensure that register allocator doesn't muck about with edx
            instr->HoistSrc2(Js::OpCode::MOV, RegECX);
            // Unsigned division zero-extends: EDX = 0.
            newInstr = IR::Instr::New(Js::OpCode::Ld_I4, regEDX, IR::IntConstOpnd::New(0, TyInt32, instr->m_func), instr->m_func);
            instr->InsertBefore(newInstr);
            LowererMD::ChangeToAssign(newInstr);
            // NOP ensures that the EDX = Ld_I4 0 doesn't get deadstored, will be removed in peeps
            instr->InsertBefore(IR::Instr::New(Js::OpCode::NOP, regEDX, regEDX, instr->m_func));
        }
        else
        {
            // DIV/IDIV cannot take an immediate divisor.
            if (instr->GetSrc2()->IsImmediateOpnd())
            {
                instr->HoistSrc2(Js::OpCode::MOV);
            }
            // Signed division sign-extends EAX into EDX via CDQ.
            instr->InsertBefore(IR::Instr::New(Js::OpCode::CDQ, regEDX, instr->m_func));
        }
        return;
    case Js::OpCode::Or_I4:
        instr->m_opcode = Js::OpCode::OR;
        break;
    case Js::OpCode::Xor_I4:
        instr->m_opcode = Js::OpCode::XOR;
        break;
    case Js::OpCode::And_I4:
        instr->m_opcode = Js::OpCode::AND;
        break;
    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
    case Js::OpCode::Rol_I4:
    case Js::OpCode::Ror_I4:
        LowererMD::ChangeToShift(instr, false /* needFlags */);
        break;
    // Single-source branches: TEST the value against itself, then jump on Z flag.
    case Js::OpCode::BrTrue_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br1_Common;
    case Js::OpCode::BrFalse_I4:
        instr->m_opcode = Js::OpCode::JEQ;
br1_Common:
        src1 = instr->UnlinkSrc1();
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        return;
    // Two-source branches: CMP the operands, then jump on the matching condition
    // (JA/JAE/JB/JBE for unsigned, JGT/JGE/JLT/JLE for signed).
    case Js::OpCode::BrEq_I4:
        instr->m_opcode = Js::OpCode::JEQ;
        goto br2_Common;
    case Js::OpCode::BrNeq_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br2_Common;
    case Js::OpCode::BrUnGt_I4:
        instr->m_opcode = Js::OpCode::JA;
        goto br2_Common;
    case Js::OpCode::BrUnGe_I4:
        instr->m_opcode = Js::OpCode::JAE;
        goto br2_Common;
    case Js::OpCode::BrUnLe_I4:
        instr->m_opcode = Js::OpCode::JBE;
        goto br2_Common;
    case Js::OpCode::BrUnLt_I4:
        instr->m_opcode = Js::OpCode::JB;
        goto br2_Common;
    case Js::OpCode::BrGt_I4:
        instr->m_opcode = Js::OpCode::JGT;
        goto br2_Common;
    case Js::OpCode::BrGe_I4:
        instr->m_opcode = Js::OpCode::JGE;
        goto br2_Common;
    case Js::OpCode::BrLe_I4:
        instr->m_opcode = Js::OpCode::JLE;
        goto br2_Common;
    case Js::OpCode::BrLt_I4:
        instr->m_opcode = Js::OpCode::JLT;
br2_Common:
        src1 = instr->UnlinkSrc1();
        src2 = instr->UnlinkSrc2();
        newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src2);
        return;
    default:
        AssertMsg(UNREACHED, "Un-implemented int4 opcode");
    }
    LowererMD::Legalize(instr);
}
// Converts a 32-bit integer into a tagged var, with an inline fast path:
// shift the value left by VarTag_Shift and set the tag bit (INC). If the
// shift overflows (JO) — or, for uint32 sources, the value has the high bit
// set (JB) and thus doesn't fit a tagged int — fall back to EmitLoadVarNoCheck,
// which boxes the value. instrLoad itself is removed; its src/dst are reused.
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    //  s2 = MOV src1
    //  s2 = SHL s2, Js::VarTag_Shift  -- restore the var tag on the result
    //       JO  $ToVar
    //       JB  $ToVar     [isFromUint32]
    //  s2 = INC s2
    // dst = MOV s2
    //       JMP $done
    //$ToVar:
    //       EmitLoadVarNoCheck
    //$Done:
    AssertMsg(instrLoad->GetSrc1()->IsRegOpnd(), "Should be regOpnd");
    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelToVar = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    IR::Instr *instr;
    if (src1->IsTaggedInt())
    {
        // Known tagged int: no overflow checks needed.
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        // Known not an int: skip the fast path entirely.
        isNotInt = true;
    }
    if (!isNotInt)
    {
        // s2 = MOV s1
        // Operate on the low 32 bits so SHL sets OF meaningfully.
        IR::Opnd * opnd32src1 = src1->UseWithNewType(TyInt32, this->m_func);
        IR::RegOpnd * opndReg2 = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::Opnd * opnd32Reg2 = opndReg2->UseWithNewType(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opnd32Reg2, opnd32src1, this->m_func);
        instrLoad->InsertBefore(instr);
        // s2 = SHL s2, Js::VarTag_Shift  -- restore the var tag on the result
        instr = IR::Instr::New(Js::OpCode::SHL, opnd32Reg2, opnd32Reg2,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
            this->m_func);
        instrLoad->InsertBefore(instr);
        if (!isInt)
        {
            // JO $ToVar -- the shift lost the sign bit; value doesn't fit a tagged int.
            labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr = IR::BranchInstr::New(Js::OpCode::JO, labelToVar, this->m_func);
            instrLoad->InsertBefore(instr);
            if (isFromUint32)
            {
                // JB $ToVar [isFromUint32] -- shifted-out bit set means the uint32
                // is too large for a (signed) tagged int.
                instr = IR::BranchInstr::New(Js::OpCode::JB, labelToVar, this->m_func);
                instrLoad->InsertBefore(instr);
            }
        }
        // s2 = INC s2 -- set the atom tag bit.
        instr = IR::Instr::New(Js::OpCode::INC, opndReg2, opndReg2, this->m_func);
        instrLoad->InsertBefore(instr);
        // dst = MOV s2
        instr = IR::Instr::New(Js::OpCode::MOV, instrLoad->GetDst(), opndReg2, this->m_func);
        instrLoad->InsertBefore(instr);
        if (!isInt)
        {
            // JMP $done -- skip the boxing helper path.
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
            instrLoad->InsertBefore(instr);
        }
    }
    if (!isInt)
    {
        //$ToVar:
        if (labelToVar)
        {
            instrLoad->InsertBefore(labelToVar);
        }
        // Slow path: box the number into a heap-allocated var.
        this->lowererMD->EmitLoadVarNoCheck(instrLoad->GetDst()->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper || labelToVar != nullptr);
    }
    //$Done:
    if (labelDone)
    {
        instrLoad->InsertAfter(labelDone);
    }
    instrLoad->Remove();
}
  2079. void
  2080. LowererMDArch::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2081. {
  2082. // We should only generate this if sse2 is available
  2083. Assert(AutoSystemInfo::Data.SSE2Available());
  2084. Assert(dst->IsRegOpnd() && dst->IsFloat());
  2085. Assert(src->IsRegOpnd() && (src->GetType() == TyInt32 || src->GetType() == TyUint32));
  2086. instrInsert->InsertBefore(IR::Instr::New(dst->IsFloat64() ? Js::OpCode::CVTSI2SD : Js::OpCode::CVTSI2SS, dst, src, this->m_func));
  2087. }
// Converts an unsigned 32-bit integer to a float. x86 SSE2 only has a
// *signed* int-to-double conversion, so the value is first converted as if
// signed; if the source's sign bit was set, the signed result is off by
// exactly 2^32, which is corrected by adding a constant fetched from a
// two-entry double table (indexed 0/1 by the sign bit) owned by the thread
// context. A float32 destination converts through a float64 temporary.
void
LowererMDArch::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
    IR::Opnd* origDst = nullptr;
    if (dst->IsFloat32())
    {
        // Do the arithmetic in double precision; narrow to float32 at the end.
        origDst = dst;
        dst = IR::RegOpnd::New(TyFloat64, this->m_func);
    }
    // dst = (double)(int32)src  -- wrong by 2^32 when the sign bit is set.
    this->lowererMD->EmitIntToFloat(dst, src, instrInsert);
    // highestBit = src >> 31  -- 1 iff the value exceeds INT32_MAX.
    IR::RegOpnd * highestBitOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, highestBitOpnd, src, this->m_func);
    instrInsert->InsertBefore(instr);
    instr = IR::Instr::New(Js::OpCode::SHR, highestBitOpnd, highestBitOpnd,
        IR::IntConstOpnd::New(31, TyInt8, this->m_func, true), this->m_func);
    instrInsert->InsertBefore(instr);
    // TODO: Encode indir with base as address opnd instead
    IR::RegOpnd * baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, baseOpnd, IR::AddrOpnd::New(m_func->GetThreadContextInfo()->GetUIntConvertConstAddr(),
        IR::AddrOpndKindDynamicMisc, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);
    // dst += table[highestBit]  -- table entries are 8-byte doubles (scale 8);
    // entry 0 adds nothing, entry 1 adds the 2^32 correction.
    instr = IR::Instr::New(Js::OpCode::ADDSD, dst, dst, IR::IndirOpnd::New(baseOpnd,
        highestBitOpnd, IndirScale8, TyFloat64, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);
    if (origDst)
    {
        // Narrow the double temporary to the requested float32 destination.
        instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSD2SS, origDst, dst, this->m_func));
    }
}
  2119. void
  2120. LowererMDArch::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2121. {
  2122. Assert(dst->IsRegOpnd() && dst->IsInt64());
  2123. Assert(src->IsInt32());
  2124. Func* func = instrInsert->m_func;
  2125. Int64RegPair dstPair = func->FindOrCreateInt64Pair(dst);
  2126. IR::RegOpnd *regEAX = IR::RegOpnd::New(TyMachPtr, func);
  2127. regEAX->SetReg(RegEAX);
  2128. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, regEAX, src, func));
  2129. IR::RegOpnd *regEDX = IR::RegOpnd::New(TyMachPtr, func);
  2130. regEDX->SetReg(RegEDX);
  2131. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CDQ, regEDX, func));
  2132. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dstPair.low, regEAX, func));
  2133. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dstPair.high, regEDX, func));
  2134. }
  2135. void
  2136. LowererMDArch::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2137. {
  2138. Assert(dst->IsRegOpnd() && dst->IsInt64());
  2139. Assert(src->IsUInt32());
  2140. Func* func = instrInsert->m_func;
  2141. Int64RegPair dstPair = func->FindOrCreateInt64Pair(dst);
  2142. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dstPair.high, IR::IntConstOpnd::New(0, TyInt32, func), func));
  2143. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dstPair.low, src, func));
  2144. }
  2145. void
  2146. LowererMDArch::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2147. {
  2148. Assert(dst->IsRegOpnd() && dst->IsInt32());
  2149. Assert(src->IsInt64());
  2150. Func* func = instrInsert->m_func;
  2151. Int64RegPair srcPair = func->FindOrCreateInt64Pair(src);
  2152. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, srcPair.low, func));
  2153. }
// Converts a var-typed source to an int32. Emits an inline tagged-int fast
// path (untag via SAR), an optional float-to-int fast path when the value is
// likely a float/untagged int, and a helper (or bailout) path for everything
// else. Returns true when the caller must generate a bailout instead of a
// helper call (int-only bailout kinds); returns false otherwise.
//
// conversionFromObjectAllowed: helper may run full ToInt32 (with side effects);
//   otherwise the object-free variant is used.
// bailOutOnHelper/labelBailOut: branch to labelBailOut rather than calling a
//   helper; for known-non-int sources this forces a rejit instead.
bool
LowererMDArch::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    // if(doShiftFirst)
    // {
    //     r1 = MOV src1
    //     r1 = SAR r1, VarTag_Shift  (move last-shifted bit into CF)
    //          JAE (CF == 0) $helper or $float
    // }
    // else
    // {
    //          TEST src1, AtomTag
    //          JEQ $helper or $float
    //     r1 = MOV src1
    //     r1 = SAR r1, VarTag_Shift
    // }
    //    dst = MOV r1
    //          JMP $Done
    // $float:
    //    dst = ConvertToFloat(src1, $helper)
    // $Helper
    //    dst = ToInt32(src1)
    // $Done
    AssertMsg(instrLoad->GetSrc1()->IsRegOpnd(), "Should be regOpnd");
    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelHelper = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    IR::LabelInstr* labelFloat = nullptr;
    IR::Instr *instr;
    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    const ValueType src1ValueType(src1->GetValueType());
    // faster to shift and check flags if it's likely tagged
    const bool doShiftFirst = src1ValueType.IsLikelyTaggedInt();
    // Inline float->int conversion is profitable for likely-float values, but
    // not when an int-only bailout kind must see the unconverted value.
    const bool doFloatToIntFastPath =
        (src1ValueType.IsLikelyFloat() || src1ValueType.IsLikelyUntaggedInt()) &&
        !(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger)) &&
        AutoSystemInfo::Data.SSE2Available();
    IR::RegOpnd * r1 = nullptr;
    if(doShiftFirst)
    {
        // r1 = MOV src1
        r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
        r1->SetValueType(src1->GetValueType());
        instr = IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func);
        instrLoad->InsertBefore(instr);
    }
    if (isNotInt)
    {
        // Known to be non-integer. If we are required to bail out on helper call, just re-jit.
        if (!doFloatToIntFastPath && bailOutOnHelper)
        {
            if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
            {
                // Array access helper call removal is already off for some reason. Prevent trying to rejit again
                // because it won't help and the same thing will happen again. Just abort jitting this function.
                if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
                {
                    Output::Print(_u("    Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
                    Output::Flush();
                }
                throw Js::OperationAbortedException();
            }
            throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
        }
    }
    else
    {
        // It could be an integer in this case
        if(doShiftFirst)
        {
            // r1 = SAR r1, VarTag_Shift (move last-shifted bit into CF)
            Assert(r1);
            instr = IR::Instr::New(Js::OpCode::SAR, r1, r1,
                IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrLoad->m_func), instrLoad->m_func);
            instrLoad->InsertBefore(instr);
        }
        // We do not know for sure it is an integer - add a Smint test
        if (!isInt)
        {
            if(doFloatToIntFastPath)
            {
                labelFloat = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
            }
            else
            {
                labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            }
            if(doShiftFirst)
            {
                // JAE (CF == 0) $helper or $float
                // The SAR above shifted the tag bit into CF; CF clear means no tag.
                instrLoad->InsertBefore(
                    IR::BranchInstr::New(Js::OpCode::JAE, labelFloat ? labelFloat : labelHelper, this->m_func));
            }
            else
            {
                // TEST src1, AtomTag
                // JEQ $helper or $float
                this->lowererMD->GenerateSmIntTest(src1, instrLoad, labelFloat ? labelFloat : labelHelper);
            }
        }
        if(!doShiftFirst)
        {
            if(src1->IsEqual(instrLoad->GetDst()))
            {
                // Go ahead and change src1, since it was already confirmed that we won't bail out or go to helper where src1
                // may be used
                r1 = src1;
            }
            else
            {
                // r1 = MOV src1
                Assert(!r1);
                r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
                r1->SetValueType(src1->GetValueType());
                instr = IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func);
                instrLoad->InsertBefore(instr);
            }
            // r1 = SAR r1, VarTag_Shift -- strip the tag to recover the int value.
            Assert(r1);
            instr = IR::Instr::New(Js::OpCode::SAR, r1, r1,
                IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrLoad->m_func), instrLoad->m_func);
            instrLoad->InsertBefore(instr);
        }
        // dst = MOV r1
        Assert(r1);
        instr = IR::Instr::New(Js::OpCode::MOV, instrLoad->GetDst(), r1, instrLoad->m_func);
        instrLoad->InsertBefore(instr);
        if (!isInt)
        {
            // JMP $Done -- fast path finished; skip float/helper paths.
            labelDone = instrLoad->GetOrCreateContinueLabel();
            instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
            instrLoad->InsertBefore(instr);
        }
    }
    // if it is not an int - we need to convert.
    if (!isInt)
    {
        if(doFloatToIntFastPath)
        {
            if(labelFloat)
            {
                instrLoad->InsertBefore(labelFloat);
            }
            if(!labelHelper)
            {
                labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            }
            if(!labelDone)
            {
                labelDone = instrLoad->GetOrCreateContinueLabel();
            }
            // Verify the var really is a JavascriptNumber, then convert its
            // double payload directly; non-convertible values go to $helper.
            this->lowererMD->GenerateFloatTest(src1, instrLoad, labelHelper, instrLoad->HasBailOutInfo());
            IR::Opnd* floatOpnd = IR::IndirOpnd::New(src1, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
            this->lowererMD->ConvertFloatToInt32(instrLoad->GetDst(), floatOpnd, labelHelper, labelDone, instrLoad);
        }
        // $Helper
        //    dst = ToInt32(r1)
        // $Done
        if (labelHelper)
        {
            instrLoad->InsertBefore(labelHelper);
        }
        if(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger))
        {
            // Avoid bailout if we have a JavascriptNumber whose value is a signed 32-bit integer
            lowererMD->m_lowerer->LoadInt32FromUntaggedVar(instrLoad);
            // Need to bail out instead of calling a helper
            return true;
        }
        if (bailOutOnHelper)
        {
            Assert(labelBailOut);
            lowererMD->m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrLoad);
            instrLoad->Remove();
        }
        else if (conversionFromObjectAllowed)
        {
            lowererMD->m_lowerer->LowerUnaryHelperMem(instrLoad, IR::HelperConv_ToInt32);
        }
        else
        {
            lowererMD->m_lowerer->LowerUnaryHelperMemWithBoolReference(instrLoad, IR::HelperConv_ToInt32_NoObjects, true /*useBoolForBailout*/);
        }
    }
    else
    {
        // Known tagged int: the fast path fully handled the load.
        instrLoad->Remove();
    }
    return false;
}
// Loads a var into an XMM register as a double, with runtime checks:
// a tagged int is untagged and converted with CVTSI2SD; otherwise the value
// is verified to be a JavascriptNumber (jumping to labelHelper if not) and
// its double payload is loaded with MOVSD. Control reaches labelInline with
// opndFloat holding the value. Returns the first instruction emitted so the
// caller can anchor further insertions before the sequence.
IR::Instr *
LowererMDArch::LoadCheckedFloat(
    IR::RegOpnd *opndOrig,
    IR::RegOpnd *opndFloat,
    IR::LabelInstr *labelInline,
    IR::LabelInstr *labelHelper,
    IR::Instr *instrInsert,
    const bool checkForNullInLoopBody)
{
    // Load one floating-point var into an XMM register, inserting checks to see if it's really a float:
    //     TEST src, 1
    //     JNE $non-int
    // t0 = MOV src                        // convert a tagged int to float
    // t0 = SAR t0, 1
    // flt = CVTSI2SD t0
    //     JMP $labelInline
    // $non-int
    //     CMP [src], JavascriptNumber::`vtable'
    //     JNE $labelHelper
    // flt = MOVSD [t0 + offset(value)]
    IR::Opnd * opnd;
    IR::Instr * instr;
    // TEST src, AtomTag -- probe the tagged-int bit.
    IR::Instr * instrFirst = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instrFirst->SetSrc1(opndOrig);
    instrFirst->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func));
    instrInsert->InsertBefore(instrFirst);
    // JEQ $labelVar -- tag bit clear means it's not a tagged int.
    IR::LabelInstr * labelVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelVar, this->m_func);
    instrInsert->InsertBefore(instr);
    if (opndOrig->GetValueType().IsLikelyFloat())
    {
        // Make this path helper if value is likely a float
        instrInsert->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true));
    }
    // Tagged-int path: untag (SAR) and convert the int to a double.
    opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opnd, opndOrig, this->m_func);
    instrInsert->InsertBefore(instr);
    instr = IR::Instr::New(
        Js::OpCode::SAR, opnd, opnd, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);
    instr = IR::Instr::New(Js::OpCode::CVTSI2SD, opndFloat, opnd, this->m_func);
    instrInsert->InsertBefore(instr);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelInline, this->m_func);
    instrInsert->InsertBefore(instr);
    // Non-int path: confirm it's a JavascriptNumber, then load the raw double.
    instrInsert->InsertBefore(labelVar);
    lowererMD->GenerateFloatTest(opndOrig, instrInsert, labelHelper, checkForNullInLoopBody);
    opnd = IR::IndirOpnd::New(opndOrig, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVSD, opndFloat, opnd, this->m_func);
    instrInsert->InsertBefore(instr);
    return instrFirst;
}
// Returns the label a bailout should jump to on exit. When the bailout has
// outstanding out-param pushes (startCallCount != 0), the returned label is a
// lazily-created, function-wide landing pad that restores ESP from an
// EBP-relative slot before falling into the normal exit; otherwise the
// original exitTargetInstr is returned unchanged.
IR::LabelInstr *
LowererMDArch::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
{
    IR::Instr * exitPrevInstr = exitTargetInstr->m_prev;
    // On x86 we push and pop the out param area, but the start call can be moved passed the bailout instruction
    // which we don't keep track of. There isn't a flow based pass after lowerer,
    // So we don't know how much stack we need to pop. Instead, generate a landing area to restore the stack
    // Via EBP, the prolog/epilog phase will fix up the size from EBP we need to restore to ESP before the epilog
    if (bailOutInfo->startCallCount != 0)
    {
        // The landing pad is shared: create it only on first use.
        if (this->bailOutStackRestoreLabel == nullptr)
        {
            if (exitPrevInstr->HasFallThrough())
            {
                // Branch around the stack reload so fall-through code doesn't
                // execute the restore.
                IR::BranchInstr * branchToExit = IR::BranchInstr::New(Js::OpCode::JMP, exitTargetInstr, this->m_func);
                exitPrevInstr->InsertAfter(branchToExit);
                exitPrevInstr = branchToExit;
            }
            this->bailOutStackRestoreLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            IR::RegOpnd * ebpOpnd = IR::RegOpnd::New(nullptr, RegEBP, TyMachReg, this->m_func);
            IR::RegOpnd * espOpnd = IR::RegOpnd::New(nullptr, RegESP, TyMachReg, this->m_func);
            // -4 for now, fix up in prolog/epilog phase
            IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(ebpOpnd, (size_t)-4, TyMachReg, this->m_func);
            // Lower this after register allocation, once we know the frame size.
            IR::Instr *bailOutStackRestoreInstr = IR::Instr::New(Js::OpCode::BailOutStackRestore, espOpnd, indirOpnd, this->m_func);
            exitPrevInstr->InsertAfter(bailOutStackRestoreInstr);
            exitPrevInstr->InsertAfter(this->bailOutStackRestoreLabel);
        }
        // Jump to the stack restore label instead
        exitTargetInstr = this->bailOutStackRestoreLabel;
    }
    return exitTargetInstr;
}
  2438. ///----------------------------------------------------------------------------
  2439. ///
  2440. /// LowererMDArch::GenerateFastShiftLeft
  2441. ///
  2442. ///----------------------------------------------------------------------------
// Emits an inline fast path for Shl when both operands may be tagged ints
// (Int31). Always returns true: the caller still generates the $helper slow
// path that the emitted tag test jumps to on failure.
bool
LowererMDArch::GenerateFastShiftLeft(IR::Instr * instrShift)
{
    // Given:
    //
    // dst = Shl src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    //       s1 = MOV src1
    //       s1 = SAR s1, Js::VarTag_Shift  -- Remove the var tag from the value to be shifted
    //       s2 = MOV src2
    //       s2 = SAR s2, Js::VarTag_Shift  -- extract the real shift amount from the var
    //       s1 = SHL s1, s2                -- do the inline shift
    //       s3 = MOV s1
    //       s3 = SHL s3, Js::VarTag_Shift  -- restore the var tag on the result
    //            JO $ToVar
    //       s3 = INC s3
    //      dst = MOV s3
    //            JMP $fallthru
    //$ToVar:
    //            PUSH scriptContext
    //            PUSH s1
    //      dst = ToVar()
    //            JMP $fallthru
    // $helper:
    //            (caller generates helper call)
    // $fallthru:
    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru;
    IR::Instr * instr;
    IR::RegOpnd * opndReg1;
    IR::RegOpnd * opndReg2;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;

    opndSrc1 = instrShift->GetSrc1();
    opndSrc2 = instrShift->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Shl instruction");

    // Not tagged ints? If either operand is *known* not to be a tagged int,
    // emit nothing and let the caller generate the helper call alone.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // Tagged ints?
    // Note: both the src2 constant check and the src1 tagged check are only
    // performed when src2 itself is known to be a tagged int.
    bool isTaggedInts = false;
    bool src2IsIntConst = false;
    IntConstType s2Value = 0;
    if (opndSrc2->IsRegOpnd())
    {
        if (opndSrc2->AsRegOpnd()->IsTaggedInt())
        {
            if (opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst())
            {
                src2IsIntConst = true;
                s2Value = opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue();
                // ECMAScript uses only the low 5 bits of the shift count.
                s2Value = (s2Value & 0x1F);
            }
            if (opndSrc1->IsTaggedInt())
            {
                isTaggedInts = true;
            }
        }
    }
    else
    {
        AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
            "Expect src2 of shift right to be reg or Var.");
        src2IsIntConst = true;
        s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
        s2Value = (s2Value & 0x1F);
        if (opndSrc1->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        lowererMD->GenerateSmIntPairTest(instrShift, opndSrc1, opndSrc2, labelHelper);
    }

    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // s1 = SAR s1, Js::VarTag_Shift  -- untag the value to be shifted
    //
    // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
    //
    IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
    instr = IR::Instr::New(
        Js::OpCode::SAR, opnd32Reg1, opnd32Reg1,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
    instrShift->InsertBefore(instr);

    IR::Opnd *countOpnd;
    if (src2IsIntConst)
    {
        // Constant shift amount: emit it as an immediate, no s2 register needed.
        countOpnd = IR::IntConstOpnd::New(s2Value, TyMachReg, instrShift->m_func);
    }
    else
    {
        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
        // Shift count needs to be in ECX
        opndReg2->SetReg(this->GetRegShiftCount());
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, instrShift->m_func);
        instrShift->InsertBefore(instr);

        // s2 = SAR s2, Js::VarTag_Shift  -- extract the real shift amount from the var
        //
        // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
        //
        IR::Opnd * opnd32Reg2 = opndReg2->UseWithNewType(TyInt32, instrShift->m_func);
        instr = IR::Instr::New(
            Js::OpCode::SAR, opnd32Reg2, opnd32Reg2,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
        instrShift->InsertBefore(instr);
        countOpnd = opndReg2;
    }

    // s1 = SHL s1, s2  -- do the inline shift
    //
    // Ecmascript spec says we only need mask the shift amount by 0x1F. But intel uses 0x3F
    // for 64 operands. Hence using 32 bits. opnd32Reg1 is already refined. reusing that.
    //
    instr = IR::Instr::New(Js::OpCode::SHL, opnd32Reg1, opnd32Reg1, countOpnd, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // s3 = MOV s1  -- keep the untagged result (s1) live for the $ToVar path
    IR::RegOpnd * opndReg3 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
    IR::Opnd * opnd32Reg3 = opndReg3->UseWithNewType(TyInt32, instrShift->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opnd32Reg3, opnd32Reg1, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // s3 = SHL s3, Js::VarTag_Shift -- restore the var tag on the result
    //
    // Ecmascript spec says we only need mask the shift amount by 0x1F. But intel uses 0x3F
    // for 64 operands. Hence using 32 bits. opnd32Reg1 is already refined. reusing that.
    //
    instr = IR::Instr::New(
        Js::OpCode::SHL, opnd32Reg3, opnd32Reg3,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
    instrShift->InsertBefore(instr);

    // JO $ToVar -- the re-tagging shift overflowed, i.e. the result does not
    // fit in a tagged int; box it via the ToVar helper instead.
    IR::LabelInstr *labelToVar = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelToVar, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // s3 = INC s3  -- set the tag bit on the shifted result
    instr = IR::Instr::New(Js::OpCode::INC, opndReg3, opndReg3, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // dst = MOV s3
    instr = IR::Instr::New(Js::OpCode::MOV, instrShift->GetDst(), opndReg3, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrShift->m_func);
    instrShift->InsertBefore(instr);

    //$ToVar:
    instrShift->InsertBefore(labelToVar);

    IR::JnHelperMethod helperMethod;
    IR::Opnd *dst;
    dst = instrShift->GetDst();
    if (instrShift->dstIsTempNumber)
    {
        // Result may live in a stack-allocated temp number: pass the temp's
        // address as an additional helper argument and use the in-place helper.
        IR::Opnd *tempOpnd;
        helperMethod = IR::HelperOp_Int32ToAtomInPlace;
        Assert(dst->IsRegOpnd());
        StackSym * tempNumberSym = lowererMD->GetLowerer()->GetTempNumberSym(dst, instrShift->dstIsTempNumberTransferred);
        IR::Instr *load = lowererMD->m_lowerer->InsertLoadStackAddress(tempNumberSym, instrShift);
        tempOpnd = load->GetDst();
        this->LoadHelperArgument(instrShift, tempOpnd);
    }
    else
    {
        helperMethod = IR::HelperOp_Int32ToAtom;
    }

    // PUSH scriptContext
    this->lowererMD->m_lowerer->LoadScriptContext(instrShift);

    // PUSH s1  -- note: the *untagged* shift result (s1), not the tagged s3
    this->LoadHelperArgument(instrShift, opndReg1);

    // dst = ToVar()
    instr = IR::Instr::New(Js::OpCode::Call, dst,
        IR::HelperCallOpnd::New(helperMethod, instrShift->m_func), instrShift->m_func);
    instrShift->InsertBefore(instr);
    this->LowerCall(instr, 0);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrShift->InsertBefore(labelHelper);
    instrShift->InsertAfter(labelFallThru);
    return true;
}
  2639. ///----------------------------------------------------------------------------
  2640. ///
  2641. /// LowererMDArch::GenerateFastShiftRight
  2642. ///
  2643. ///----------------------------------------------------------------------------
// Emits the full inline lowering for Shr_A / ShrU_A (signed / unsigned right
// shift). Unlike GenerateFastShiftLeft this lowers the instruction completely
// (calling the conversion helpers inline where an operand is not a tagged
// int) and returns false so the caller skips the generic helper call.
bool
LowererMDArch::GenerateFastShiftRight(IR::Instr * instrShift)
{
    // Given:
    //
    // dst = Shr/Sar src1, src2
    //
    // Generate:
    //
    //       s1 = MOV src1
    //            TEST s1, 1
    //            JEQ $S1ToInt
    //       s1 = SAR s1, VarTag_Shift  -- extract the real shift amount from the var
    //            JMP $src2
    //$S1ToInt:
    //            PUSH scriptContext
    //            PUSH s1
    //       s1 = ToInt32()/ToUInt32
    //$src2:
    //            Load s2
    //            TEST s2, 1
    //            JEQ $S2ToUInt
    //       s2 = SAR s2, VarTag_Shift  -- extract the real shift amount from the var
    //            JMP $Shr
    //$S2ToUInt:
    //            PUSH scriptContext
    //            PUSH s2
    //       s2 = ToUInt32()
    //$Shr:
    //       s1 = SHR/SAR s1, s2       -- do the inline shift
    //       s3 = MOV s1
    //      ECX = MOV s2
    //       s3 = SHL s3, ECX          -- To tagInt
    //            JO $ToVar
    //            JS $ToVar
    //       s3 = INC s3
    //            JMP $done
    //$ToVar:
    //            EmitLoadVarNoCheck
    //$Done:
    //      dst = MOV s3
    IR::LabelInstr * labelS1ToInt = nullptr;
    IR::LabelInstr * labelSrc2 = nullptr;
    IR::LabelInstr * labelS2ToUInt = nullptr;
    IR::LabelInstr * labelShr = nullptr;
    IR::LabelInstr * labelToVar = nullptr;
    IR::LabelInstr * labelDone = nullptr;
    IR::Instr * instr;
    IR::RegOpnd * opndReg1;
    IR::RegOpnd * opndReg2;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;
    bool src1IsInt = false;          // src1 statically known to be a tagged int
    bool src1IsNotInt = false;       // src1 statically known NOT to be a tagged int
    bool src2IsInt = false;
    bool src2IsIntConst = false;     // src2 is a compile-time constant shift count
    bool src2IsNotInt = false;
    bool resultIsTaggedInt = false;  // result proven to fit in a tagged int (skip overflow checks)
    bool isUnsignedShift = (instrShift->m_opcode == Js::OpCode::ShrU_A);

    opndSrc1 = instrShift->UnlinkSrc1();
    opndSrc2 = instrShift->UnlinkSrc2();
    // NOTE(review): the message says "Shl" but this is the right-shift path --
    // looks like a copy/paste from GenerateFastShiftLeft.
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Shl instruction");

    if (instrShift->HasBailOutInfo())
    {
        // Split out the implicit-call bailout check so the inline helper calls
        // below are not covered by it.
        IR::Instr * bailOutInstr = this->lowererMD->m_lowerer->SplitBailOnImplicitCall(instrShift);
        this->lowererMD->m_lowerer->LowerBailOnEqualOrNotEqual(bailOutInstr);
    }

    AssertMsg(opndSrc1->IsRegOpnd(), "We expect this to be a regOpnd");
    opndReg1 = opndSrc1->AsRegOpnd();
    src1IsInt = opndReg1->IsTaggedInt();
    if (src1IsInt && !isUnsignedShift)
    {
        // A signed shift of a tagged int always stays in tagged-int range.
        // -1 >>> 0 != taggedInt... (so this does NOT hold for unsigned shifts)
        resultIsTaggedInt = true;
    }
    src1IsNotInt = opndReg1->IsNotInt();

    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, instrShift->m_func);
    instrShift->InsertBefore(instr);

    IR::Opnd *dst = instrShift->GetDst();
    AssertMsg(dst->IsRegOpnd(), "We expect this to be a regOpnd");

    IntConstType s2Value = 0;
    if (opndSrc2->IsRegOpnd())
    {
        opndReg2 = opndSrc2->AsRegOpnd();
        src2IsInt = opndReg2->IsTaggedInt();
        src2IsIntConst = opndReg2->m_sym->IsTaggableIntConst();
        src2IsNotInt = opndReg2->IsNotInt();
    }
    else
    {
        AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
            "Expect src2 of shift right to be reg or Var.");
        src2IsInt = src2IsIntConst = true;
        opndReg2 = nullptr;
    }

    if (isUnsignedShift)
    {
        // We use the src2IsIntConst to combine the tag shifting with the actual shift.
        // The tag shift however needs to be a signed shift...
        src2IsIntConst = false;
        if (opndSrc2->IsAddrOpnd())
        {
            // Materialize the constant into a register since the combined-shift
            // optimization is off for unsigned shifts.
            instr = Lowerer::InsertMove(
                IR::RegOpnd::New(opndSrc2->GetType(), instrShift->m_func),
                opndSrc2, instrShift);
            opndSrc2 = instr->GetDst();
            opndReg2 = opndSrc2->AsRegOpnd();
        }
    }

    if (src2IsIntConst)
    {
        if (opndSrc2->IsRegOpnd())
        {
            AnalysisAssert(opndReg2);
            s2Value = opndReg2->m_sym->GetIntConstValue();
        }
        else
        {
            s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
        }
        // ECMAScript uses only the low 5 bits of the shift count.
        s2Value = (s2Value & 0x1F);
        if (s2Value >= Js::VarTag_Shift)
        {
            // Shifting right by at least the tag width guarantees the result
            // fits in a tagged int.
            resultIsTaggedInt = true;
            if ((unsigned)(s2Value + Js::VarTag_Shift) > 0x1f)
            {
                // Can't combine the SHR with the AtomTag shift if we overflow...
                s2Value = 0;
                src2IsIntConst = false;
            }
        }
    }

    if (!src1IsNotInt)
    {
        if (!src1IsInt)
        {
            // TEST s1, AtomTag -- is src1 a tagged int?
            instr = IR::Instr::New(Js::OpCode::TEST, instrShift->m_func);
            instr->SetSrc1(opndReg1);
            instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrShift->m_func));
            instrShift->InsertBefore(instr);

            // JEQ $S1ToInt -- tag bit clear: convert via helper
            labelS1ToInt = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
            instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelS1ToInt, instrShift->m_func);
            instrShift->InsertBefore(instr);
        }

        // s1 = SAR s1, VarTag_Shift -- untag src1; when src2 is a usable
        // constant (s2Value != 0) the actual shift is folded in here too.
        //
        // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
        //
        IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
        instr = IR::Instr::New(Js::OpCode::SAR, opnd32Reg1, opnd32Reg1,
            IR::IntConstOpnd::New(Js::VarTag_Shift + s2Value, TyInt8, instrShift->m_func), instrShift->m_func);
        instrShift->InsertBefore(instr);

        // JMP $src2
        labelSrc2 = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelSrc2, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }

    if (!src1IsInt)
    {
        if (labelS1ToInt)
        {
            //$S1ToInt:
            instrShift->InsertBefore(labelS1ToInt);
        }

        // PUSH scriptContext
        this->lowererMD->m_lowerer->LoadScriptContext(instrShift);

        // PUSH s1
        this->LoadHelperArgument(instrShift, opndReg1);

        // s1 = ToInt32()/ToUint32
        instr = IR::Instr::New(Js::OpCode::Call, opndReg1,
            IR::HelperCallOpnd::New((isUnsignedShift ? IR::HelperConv_ToUInt32_Full : IR::HelperConv_ToInt32_Full), instrShift->m_func),
            instrShift->m_func);
        instrShift->InsertBefore(instr);
        this->LowerCall(instr, 0);

        if (src2IsIntConst && s2Value != 0)
        {
            // The tagged-int path folded the constant shift into its SAR, so
            // the helper path must apply the same shift explicitly here.
            // s1 = SHR/SAR s1, s2 -- do the inline shift
            //
            // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
            //
            IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
            instr = IR::Instr::New(isUnsignedShift ? Js::OpCode::SHR : Js::OpCode::SAR,
                opnd32Reg1, opnd32Reg1, IR::IntConstOpnd::New(s2Value, TyInt8, instrShift->m_func), instrShift->m_func);
            instrShift->InsertBefore(instr);
        }
    }

    //$src2:
    if (labelSrc2)
    {
        instrShift->InsertBefore(labelSrc2);
    }

    if (!src2IsIntConst)
    {
        // Load s2
        opndReg2 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }

    if (!src2IsNotInt)
    {
        if (!src2IsInt)
        {
            // TEST s2, AtomTag -- is src2 a tagged int?
            instr = IR::Instr::New(Js::OpCode::TEST, instrShift->m_func);
            instr->SetSrc1(opndReg2);
            instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrShift->m_func));
            instrShift->InsertBefore(instr);

            // JEQ $S2ToUInt -- tag bit clear: convert via helper
            labelS2ToUInt = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
            instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelS2ToUInt, instrShift->m_func);
            instrShift->InsertBefore(instr);
        }

        if (!src2IsIntConst)
        {
            // s2 = SAR s2, VarTag_Shift -- extract the real shift amount from the var
            //
            // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
            //
            IR::Opnd * opnd32Reg2 = opndReg2->UseWithNewType(TyInt32, instrShift->m_func);
            instr = IR::Instr::New(Js::OpCode::SAR, opnd32Reg2, opnd32Reg2,
                IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
            instrShift->InsertBefore(instr);
        }

        // JMP $shr
        labelShr = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelShr, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }

    if (!src2IsInt)
    {
        if (labelS2ToUInt)
        {
            //$S2ToUInt:
            instrShift->InsertBefore(labelS2ToUInt);
        }

        // PUSH scriptContext
        this->lowererMD->m_lowerer->LoadScriptContext(instrShift);

        // PUSH s2
        this->LoadHelperArgument(instrShift, opndReg2);

        // s2 = ToUInt32()
        instr = IR::Instr::New(Js::OpCode::Call, opndReg2,
            IR::HelperCallOpnd::New(IR::HelperConv_ToUInt32_Full, instrShift->m_func), instrShift->m_func);
        instrShift->InsertBefore(instr);
        this->LowerCall(instr, 0);
    }

    //$Shr:
    if (labelShr)
    {
        instrShift->InsertBefore(labelShr);
    }

    if (!src2IsIntConst)
    {
        // s1 = SHR/SAR s1, s2 -- do the inline shift
        // (the constant case already folded the shift into the untagging SAR above)
        //
        // Sign of the operand matters to SAR. Hence it need to operate on Int32 only
        //
        IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
        // Variable shift count must live in ECX.
        IR::RegOpnd * opnd32Ecx = IR::RegOpnd::New(TyInt32, this->m_func);
        opnd32Ecx->SetReg(this->GetRegShiftCount());
        instr = IR::Instr::New(Js::OpCode::MOV, opnd32Ecx, opndReg2, this->m_func);
        instrShift->InsertBefore(instr);
        instr = IR::Instr::New(isUnsignedShift ? Js::OpCode::SHR : Js::OpCode::SAR,
            opnd32Reg1, opnd32Reg1, opnd32Ecx, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }

    // s3 = MOV s1 -- keep the untagged result (s1) live for the $ToVar path
    IR::Opnd * opnd32Reg1 = opndReg1->UseWithNewType(TyInt32, instrShift->m_func);
    IR::RegOpnd * opndReg3 = IR::RegOpnd::New(TyMachReg, instrShift->m_func);
    IR::Opnd * opnd32Reg3 = opndReg3->UseWithNewType(TyInt32, instrShift->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opnd32Reg3, opnd32Reg1, instrShift->m_func);
    instrShift->InsertBefore(instr);

    // s3 = SHL s3, VarTag_Shift -- To tagInt
    //
    // Ecmascript spec says we only need mask the shift amount by 0x1F. But intel uses 0x3F
    // for 64 operands. Hence using 32 bits.
    //
    instr = IR::Instr::New(Js::OpCode::SHL, opnd32Reg3, opnd32Reg3,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrShift->m_func), instrShift->m_func);
    instrShift->InsertBefore(instr);

    if (!resultIsTaggedInt)
    {
        // JO $ToVar -- tag shift overflowed: result doesn't fit in a tagged int
        labelToVar = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JO, labelToVar, instrShift->m_func);
        instrShift->InsertBefore(instr);
        if (isUnsignedShift)
        {
            // JS $ToVar -- unsigned result with the sign bit set would read as
            // negative in a tagged int, so box it instead.
            instr = IR::BranchInstr::New(Js::OpCode::JSB, labelToVar, instrShift->m_func);
            instrShift->InsertBefore(instr);
        }
    }

    // s3 = INC s3 -- set the tag bit on the result
    instr = IR::Instr::New(Js::OpCode::INC, opndReg3, opndReg3, instrShift->m_func);
    instrShift->InsertBefore(instr);

    if (!src1IsInt || !src2IsInt || !resultIsTaggedInt)
    {
        // JMP $done
        labelDone = IR::LabelInstr::New(Js::OpCode::Label, instrShift->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, instrShift->m_func);
        instrShift->InsertBefore(instr);
    }

    if (!resultIsTaggedInt)
    {
        //$ToVar:
        instrShift->InsertBefore(labelToVar);
        // Box the untagged value in s1 into s3 (float/number path).
        this->lowererMD->EmitLoadVarNoCheck(opndReg3, opndReg1, instrShift, isUnsignedShift, true);
    }

    if (labelDone)
    {
        //$Done:
        instrShift->InsertBefore(labelDone);
    }

    // dst = MOV s3 -- reuse the original instruction as the final move
    instrShift->m_opcode = Js::OpCode::MOV;
    instrShift->SetSrc1(opndReg3);

    // Skip lowering call to helper
    return false;
}
// No inline fast path is generated for Div/Rem on this architecture; this is
// a stub that emits nothing. Returning false here signals that no fast path
// was produced -- NOTE(review): this return convention differs from the
// GenerateFastShift*/And/Or/Xor methods above (where true means "caller must
// still emit the helper"); confirm against the call site.
bool
LowererMDArch::GenerateFastDivAndRem(IR::Instr* instrDiv, IR::LabelInstr* bailOutLabel)
{
    return false;
}
  2972. ///----------------------------------------------------------------------------
  2973. ///
  2974. /// LowererMDArch::GenerateFastAnd
  2975. ///
  2976. ///----------------------------------------------------------------------------
// Emits the inline fast path for AND of two tagged ints. ANDing two vars
// leaves the tag bit set only if both inputs had it set, so the tagged-int
// test can be performed on the *result*, after the AND -- no up-front pair
// test is needed. Returns true when the caller must still generate the
// helper sequence; false when the fast path fully lowered the instruction.
bool
LowererMDArch::GenerateFastAnd(IR::Instr * instrAnd)
{
    // Given:
    //
    // dst = And src1, src2
    //
    // Generate:
    //
    // s1 = MOV src1
    // s1 = AND s1, src2  -- try an inline add
    //      TEST s1, 1    -- if both opnds are ints, the int tag will be set in the result
    //      JEQ $helper
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper sequence)
    // $fallthru:
    IR::Instr * instr;
    IR::LabelInstr * labelHelper=nullptr;
    IR::LabelInstr * labelFallThru;
    IR::Opnd * opndReg;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;

    opndSrc1 = instrAnd->GetSrc1();
    opndSrc2 = instrAnd->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on And instruction");

    // Not tagged ints? If either operand is known not to be a tagged int,
    // emit nothing and let the caller generate the helper call alone.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // Tagged ints?
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyMachReg, instrAnd->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, instrAnd->m_func);
    instrAnd->InsertBefore(instr);

    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst())
    {
        // Fold a constant src2 into an immediate tagged-var address operand.
        Js::Var value = Js::TaggedInt::ToVarUnchecked(opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue());
        opndSrc2 = IR::AddrOpnd::New(value, IR::AddrOpndKindConstantVar, instrAnd->m_func);
    }

    // s1 = AND s1, src2
    instr = IR::Instr::New(Js::OpCode::AND, opndReg, opndReg, opndSrc2, instrAnd->m_func);
    instrAnd->InsertBefore(instr);

    if (!isTaggedInts)
    {
        // TEST s1, 1 -- the tag bit survives the AND only if both srcs had it
        instr = IR::Instr::New(Js::OpCode::TEST, instrAnd->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrAnd->m_func));
        instrAnd->InsertBefore(instr);

        // JEQ $helper -- tag bit clear (ZF set): at least one src was not a tagged int
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrAnd->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, instrAnd->m_func);
        instrAnd->InsertBefore(instr);
    }

    // dst = MOV s1
    if (isTaggedInts)
    {
        // Reuse the existing instruction
        instrAnd->m_opcode = Js::OpCode::MOV;
        instrAnd->ReplaceSrc1(opndReg);
        instrAnd->FreeSrc2();

        // Skip lowering call to helper
        return false;
    }
    instr = IR::Instr::New(Js::OpCode::MOV, instrAnd->GetDst(), opndReg, instrAnd->m_func);
    instrAnd->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrAnd->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrAnd->m_func);
    instrAnd->InsertBefore(instr);

    // $helper:
    //      (caller generates helper sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrAnd->InsertBefore(labelHelper);
    instrAnd->InsertAfter(labelFallThru);
    return true;
}
  3070. ///----------------------------------------------------------------------------
  3071. ///
  3072. /// LowererMDArch::GenerateFastOr
  3073. ///
  3074. ///----------------------------------------------------------------------------
  3075. bool
  3076. LowererMDArch::GenerateFastOr(IR::Instr * instrOr)
  3077. {
  3078. // Given:
  3079. //
  3080. // dst = Or src1, src2
  3081. //
  3082. // Generate:
  3083. //
  3084. // (If not 2 Int31's, jump to $helper.)
  3085. //
  3086. // s1 = MOV src1
  3087. // s1 = OR s1, src2 -- try an inline OR
  3088. // dst = MOV s1
  3089. // JMP $fallthru
  3090. // $helper:
  3091. // (caller generates helper sequence)
  3092. // $fallthru:
  3093. IR::Instr * instr;
  3094. IR::LabelInstr * labelHelper=nullptr;
  3095. IR::LabelInstr * labelFallThru;
  3096. IR::Opnd * opndReg;
  3097. IR::Opnd * opndSrc1;
  3098. IR::Opnd * opndSrc2;
  3099. opndSrc1 = instrOr->GetSrc1();
  3100. opndSrc2 = instrOr->GetSrc2();
  3101. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Or instruction");
  3102. // Not tagged ints?
  3103. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  3104. {
  3105. return true;
  3106. }
  3107. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  3108. {
  3109. return true;
  3110. }
  3111. // Tagged ints?
  3112. bool isTaggedInts = false;
  3113. if (opndSrc1->IsTaggedInt())
  3114. {
  3115. if (opndSrc2->IsTaggedInt())
  3116. {
  3117. isTaggedInts = true;
  3118. }
  3119. }
  3120. if (!isTaggedInts)
  3121. {
  3122. // (If not 2 Int31's, jump to $helper.)
  3123. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrOr->m_func, true);
  3124. lowererMD->GenerateSmIntPairTest(instrOr, opndSrc1, opndSrc2, labelHelper);
  3125. }
  3126. // s1 = MOV src1
  3127. opndReg = IR::RegOpnd::New(TyMachReg, instrOr->m_func);
  3128. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, instrOr->m_func);
  3129. instrOr->InsertBefore(instr);
  3130. // s1 = OR s1, src2
  3131. instr = IR::Instr::New(Js::OpCode::OR, opndReg, opndReg, opndSrc2, instrOr->m_func);
  3132. instrOr->InsertBefore(instr);
  3133. // dst = MOV s1
  3134. if (isTaggedInts)
  3135. {
  3136. // Reuse the existing instruction
  3137. instrOr->m_opcode = Js::OpCode::MOV;
  3138. instrOr->ReplaceSrc1(opndReg);
  3139. instrOr->FreeSrc2();
  3140. // Skip lowering call to helper
  3141. return false;
  3142. }
  3143. instr = IR::Instr::New(Js::OpCode::MOV, instrOr->GetDst(), opndReg, instrOr->m_func);
  3144. instrOr->InsertBefore(instr);
  3145. // JMP $fallthru
  3146. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrOr->m_func);
  3147. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrOr->m_func);
  3148. instrOr->InsertBefore(instr);
  3149. // $helper:
  3150. // (caller generates helper sequence)
  3151. // $fallthru:
  3152. AssertMsg(labelHelper, "Should not be NULL");
  3153. instrOr->InsertBefore(labelHelper);
  3154. instrOr->InsertAfter(labelFallThru);
  3155. return true;
  3156. }
  3157. ///----------------------------------------------------------------------------
  3158. ///
/// LowererMDArch::GenerateFastXor
  3160. ///
  3161. ///----------------------------------------------------------------------------
  3162. bool
  3163. LowererMDArch::GenerateFastXor(IR::Instr * instrXor)
  3164. {
  3165. // Given:
  3166. //
  3167. // dst = Xor src1, src2
  3168. //
  3169. // Generate:
  3170. //
  3171. // (If not 2 Int31's, jump to $helper.)
  3172. //
  3173. // s1 = MOV src1
  3174. // s1 = XOR s1, src2 -- try an inline XOR
  3175. // s1 = INC s1
  3176. // dst = MOV s1
  3177. // JMP $fallthru
  3178. // $helper:
  3179. // (caller generates helper sequence)
  3180. // $fallthru:
  3181. IR::Instr * instr;
  3182. IR::LabelInstr * labelHelper=nullptr;
  3183. IR::LabelInstr * labelFallThru;
  3184. IR::Opnd * opndReg;
  3185. IR::Opnd * opndSrc1;
  3186. IR::Opnd * opndSrc2;
  3187. opndSrc1 = instrXor->GetSrc1();
  3188. opndSrc2 = instrXor->GetSrc2();
  3189. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Xor instruction");
  3190. // Not tagged ints?
  3191. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  3192. {
  3193. return true;
  3194. }
  3195. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  3196. {
  3197. return true;
  3198. }
  3199. // Tagged ints?
  3200. bool isTaggedInts = false;
  3201. if (opndSrc1->IsTaggedInt())
  3202. {
  3203. if (opndSrc2->IsTaggedInt())
  3204. {
  3205. isTaggedInts = true;
  3206. }
  3207. }
  3208. if (!isTaggedInts)
  3209. {
  3210. // (If not 2 Int31's, jump to $helper.)
  3211. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrXor->m_func, true);
  3212. lowererMD->GenerateSmIntPairTest(instrXor, opndSrc1, opndSrc2, labelHelper);
  3213. }
  3214. // s1 = MOV src1
  3215. opndReg = IR::RegOpnd::New(TyMachReg, instrXor->m_func);
  3216. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, instrXor->m_func);
  3217. instrXor->InsertBefore(instr);
  3218. // s1 = XOR s1, src2
  3219. instr = IR::Instr::New(Js::OpCode::XOR, opndReg, opndReg, opndSrc2, instrXor->m_func);
  3220. instrXor->InsertBefore(instr);
  3221. // s1 = INC s1
  3222. instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, instrXor->m_func);
  3223. instrXor->InsertBefore(instr);
  3224. // dst = MOV s1
  3225. if (isTaggedInts)
  3226. {
  3227. // Reuse the existing instruction
  3228. instrXor->m_opcode = Js::OpCode::MOV;
  3229. instrXor->ReplaceSrc1(opndReg);
  3230. instrXor->FreeSrc2();
  3231. // Skip lowering call to helper
  3232. return false;
  3233. }
  3234. instr = IR::Instr::New(Js::OpCode::MOV, instrXor->GetDst(), opndReg, instrXor->m_func);
  3235. instrXor->InsertBefore(instr);
  3236. // JMP $fallthru
  3237. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrXor->m_func);
  3238. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrXor->m_func);
  3239. instrXor->InsertBefore(instr);
  3240. // $helper:
  3241. // (caller generates helper sequence)
  3242. // $fallthru:
  3243. AssertMsg(labelHelper, "Should not be NULL");
  3244. instrXor->InsertBefore(labelHelper);
  3245. instrXor->InsertAfter(labelFallThru);
  3246. return true;
  3247. }
  3248. //----------------------------------------------------------------------------
  3249. //
// LowererMDArch::GenerateFastNot
  3251. //
  3252. //----------------------------------------------------------------------------
// Emits the inline fast path for bitwise NOT of a tagged int. Constant inputs
// are folded at JIT time. Returns true when the caller must still generate
// the helper call; false when the instruction was fully lowered here.
bool
LowererMDArch::GenerateFastNot(IR::Instr * instrNot)
{
    // Given:
    //
    // dst = Not src
    //
    // Generate:
    //
    //       TEST src, 1  -- test for int src
    //       JEQ $helper
    // dst = MOV src
    // dst = NOT dst      -- do an inline NOT
    // dst = INC dst      -- restore the var tag on the result (!1 becomes 0, INC to get 1 again)
    //       JMP $fallthru
    // $helper:
    //       (caller generates helper call)
    // $fallthru:
    IR::Instr * instr;
    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru = nullptr;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndDst;

    opndSrc1 = instrNot->GetSrc1();
    AssertMsg(opndSrc1, "Expected src opnd on Not instruction");

    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->IsIntConst())
    {
        // Constant input: fold ~value at JIT time and turn the instruction
        // into a plain assignment of the resulting number.
        IntConstType value = opndSrc1->AsRegOpnd()->m_sym->GetIntConstValue();
        value = ~value;
        // The folded assign cannot trigger a bailout, so drop the bailout info.
        instrNot->ClearBailOutInfo();
        instrNot->FreeSrc1();
        instrNot->SetSrc1(IR::AddrOpnd::NewFromNumber(value, instrNot->m_func));
        instrNot = this->lowererMD->ChangeToAssign(instrNot);

        // Skip lowering call to helper
        return false;
    }

    bool isInt = (opndSrc1->IsTaggedInt());

    if (!isInt)
    {
        // TEST src1, AtomTag -- is src a tagged int?
        instr = IR::Instr::New(Js::OpCode::TEST, instrNot->m_func);
        instr->SetSrc1(opndSrc1);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, instrNot->m_func));
        instrNot->InsertBefore(instr);

        // JEQ $helper -- tag bit clear: not a tagged int
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrNot->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, instrNot->m_func);
        instrNot->InsertBefore(instr);
    }

    // dst = MOV src
    opndDst = instrNot->GetDst();
    instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndSrc1, instrNot->m_func);
    instrNot->InsertBefore(instr);

    // dst = NOT dst
    instr = IR::Instr::New(Js::OpCode::NOT, opndDst, opndDst, instrNot->m_func);
    instrNot->InsertBefore(instr);

    // dst = INC dst -- NOT also flipped the tag bit to 0; INC restores it
    instr = IR::Instr::New(Js::OpCode::INC, opndDst, opndDst, instrNot->m_func);
    instrNot->InsertBefore(instr);

    if (isInt)
    {
        // Src proven tagged: the sequence above IS the full lowering; the
        // original instruction is no longer needed.
        instrNot->Remove();

        // Skip lowering call to helper
        return false;
    }

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, instrNot->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrNot->m_func);
    instrNot->InsertBefore(instr);

    // $helper:
    //      (caller generates helper sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrNot->InsertBefore(labelHelper);
    instrNot->InsertAfter(labelFallThru);
    return true;
}
  3330. void
  3331. LowererMDArch::FinalLower()
  3332. {
  3333. int32 offset;
  3334. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, this->m_func->m_tailInstr, this->m_func->m_headInstr)
  3335. {
  3336. switch (instr->m_opcode)
  3337. {
  3338. case Js::OpCode::Ret:
  3339. instr->Remove();
  3340. break;
  3341. case Js::OpCode::Leave:
  3342. Assert(this->m_func->DoOptimizeTry() && !this->m_func->IsLoopBodyInTry());
  3343. this->lowererMD->m_lowerer->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), true /*fromFinalLower*/);
  3344. break;
  3345. case Js::OpCode::BailOutStackRestore:
  3346. // We don't know the frameSize at lower time...
  3347. instr->m_opcode = Js::OpCode::LEA;
  3348. // exclude the EBP and return address
  3349. instr->GetSrc1()->AsIndirOpnd()->SetOffset(-(int)(this->m_func->frameSize) + 2 * MachPtr);
  3350. break;
  3351. case Js::OpCode::RestoreOutParam:
  3352. Assert(instr->GetDst() != nullptr);
  3353. Assert(instr->GetDst()->IsIndirOpnd());
  3354. offset = instr->GetDst()->AsIndirOpnd()->GetOffset();
  3355. offset -= this->m_func->frameSize;
  3356. offset += 2 * sizeof(void*);
  3357. instr->GetDst()->AsIndirOpnd()->SetOffset(offset, true);
  3358. instr->m_opcode = Js::OpCode::MOV;
  3359. break;
  3360. case Js::OpCode::CMOVA:
  3361. case Js::OpCode::CMOVAE:
  3362. case Js::OpCode::CMOVB:
  3363. case Js::OpCode::CMOVBE:
  3364. case Js::OpCode::CMOVE:
  3365. case Js::OpCode::CMOVG:
  3366. case Js::OpCode::CMOVGE:
  3367. case Js::OpCode::CMOVL:
  3368. case Js::OpCode::CMOVLE:
  3369. case Js::OpCode::CMOVNE:
  3370. case Js::OpCode::CMOVNO:
  3371. case Js::OpCode::CMOVNP:
  3372. case Js::OpCode::CMOVNS:
  3373. case Js::OpCode::CMOVO:
  3374. case Js::OpCode::CMOVP:
  3375. case Js::OpCode::CMOVS:
  3376. // Get rid of fake src1.
  3377. if (instr->GetSrc2())
  3378. {
  3379. // CMOV inserted before regalloc have a dummy src1 to simulate the fact that
  3380. // CMOV is not a definite def of the dst.
  3381. instr->SwapOpnds();
  3382. instr->FreeSrc2();
  3383. }
  3384. break;
  3385. case Js::OpCode::LOCKCMPXCHG8B:
  3386. case Js::OpCode::CMPXCHG8B:
  3387. // Get rid of the deps and srcs
  3388. instr->FreeDst();
  3389. instr->FreeSrc2();
  3390. break;
  3391. }
  3392. }
  3393. NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
  3394. }
// This depends on the calling convention, so it is hard to factor out a common (machine-independent) implementation here.
IR::Opnd*
LowererMDArch::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    // Emits a loop that pushes every element of the stack-args object as an
    // ArgOut for a dynamic call, walking from the last argument down to the
    // first. Returns a register holding the (unaligned) argument count, which
    // the caller uses for callInfo generation and stack allocation.
    //
    // x86:
    // s25.i32 = LdLen_A s4.var
    // s26.i32 = Ld_A s25.i32
    // s25.i32 = Or_I4 s25.i32, 1 // For alignment
    // $L2:
    // s10.var = LdElemI_A [s4.var+s25.i32].var
    // ArgOut_A_Dynamic s10.var
    // s25.i32 = SUB_I4 s25.i32, 0x1
    // JNE $L2
    // $L3

    // Guard against a non-function callee before expanding the args.
    GenerateFunctionObjectTest(callInstr, callInstr->GetSrc1()->AsRegOpnd(), false);

    // Inlinee stack args live in a different location; delegate to the shared lowerer.
    if (callInstr->m_func->IsInlinee())
    {
        return this->lowererMD->m_lowerer->GenerateArgOutForInlineeStackArgs(callInstr, stackArgsInstr);
    }
    Assert(stackArgsInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamic);

    Func *func = callInstr->m_func;
    IR::RegOpnd* stackArgs = stackArgsInstr->GetSrc1()->AsRegOpnd();

    // Load the argument count (s25 above).
    IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, func);
    const IR::AutoReuseOpnd autoReuseLdLenDstOpnd(ldLenDstOpnd, func);
    IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, stackArgs, func);
    ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); // LdLen_A works only on stack arguments
    callInstr->InsertBefore(ldLen);
    this->lowererMD->m_lowerer->GenerateFastRealStackArgumentsLdLen(ldLen);

    // Save the raw length (s26 above) before it is OR'ed for alignment; this is the value returned.
    IR::Instr* saveLenInstr = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyUint32, func), ldLenDstOpnd, func);
    saveLenInstr->GetDst()->SetValueType(ValueType::GetTaggedInt());
    callInstr->InsertBefore(saveLenInstr);

    // Align frame
    IR::Instr* orInstr = IR::Instr::New(Js::OpCode::OR, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func), this->m_func);
    callInstr->InsertBefore(orInstr);

    // Build the loop head ($L2) and register it so downstream passes (regalloc)
    // know ldLenDstOpnd is live across the back edge.
    IR::LabelInstr* startLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
    startLoop->m_isLoopTop = true;
    Loop *loop = JitAnew(this->m_func->m_alloc, Loop, this->m_func->m_alloc, this->m_func);
    startLoop->SetLoop(loop);
    loop->SetLoopTopInstr(startLoop);
    loop->regAlloc.liveOnBackEdgeSyms = JitAnew(func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
    callInstr->InsertBefore(startLoop);

    // Load the n-th argument: [stackArgs + index], offset -1 because the index
    // is one past the element being loaded.
    IR::IndirOpnd *nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
    nthArgument->SetOffset(-1);
    IR::RegOpnd* ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
    const IR::AutoReuseOpnd autoReuseldElemDstOpnd(ldElemDstOpnd, func);
    IR::Instr* ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
    callInstr->InsertBefore(ldElem);
    this->lowererMD->m_lowerer->GenerateFastStackArgumentsLdElemI(ldElem);

    // Push the loaded element as an ArgOut for the dynamic call.
    IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(ldElemDstOpnd);
    callInstr->InsertBefore(argout);
    this->LoadDynamicArgument(argout);

    // Decrement the index and loop back while it is non-zero.
    IR::Instr *subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyUint32, func),func);
    callInstr->InsertBefore(subInstr);
    this->lowererMD->EmitInt4Instr(subInstr);
    IR::BranchInstr *tailBranch = IR::BranchInstr::New(Js::OpCode::JNE, startLoop, func);
    callInstr->InsertBefore(tailBranch);

    // The loop index must survive the back edge.
    loop->regAlloc.liveOnBackEdgeSyms->Set(ldLenDstOpnd->m_sym->m_id);

    // return the length which will be used for callInfo generations & stack allocation
    return saveLenInstr->GetDst()->AsRegOpnd();
}
  3457. IR::Instr *
  3458. LowererMDArch::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  3459. {
  3460. IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  3461. // Load the continuation address into the return register.
  3462. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, retReg, targetOpnd, this->m_func));
  3463. IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  3464. IR::Instr * retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  3465. retInstr->SetSrc1(intSrc);
  3466. retInstr->SetSrc2(retReg);
  3467. insertBeforeInstr->InsertBefore(retInstr);
  3468. // return the last instruction inserted
  3469. return retInstr;
  3470. }
  3471. IR::BranchInstr*
  3472. LowererMDArch::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr)
  3473. {
  3474. Assert(compareSrc->IsFloat64() && missingItemOpnd->IsUInt32());
  3475. IR::Opnd * compareSrcUint32Opnd = IR::RegOpnd::New(TyUint32, m_func);
  3476. // Missing item NaN have a different bit pattern from k_Nan, but is a NaN nonetheless. Given that, it is sufficient
  3477. // to compare just the top 32 bits
  3478. //
  3479. // IF sse4.1 available
  3480. // mov xmm0, compareSrc
  3481. // pextrd ecx, xmm0, 1 <-- ecx will containg xmm0[63:32] after this
  3482. // cmp missingItemOpnd, ecx
  3483. // jcc target
  3484. //
  3485. // ELSE
  3486. // mov xmm0, compareSrc
  3487. // shufps xmm0, xmm0, (3 << 6 | 2 << 4 | 1 << 2 | 1) <-- xmm0[31:0] will contain compareSrc[63:32] after this
  3488. // movd ecx, xmm0
  3489. // cmp missingItemOpnd, ecx
  3490. // jcc $target
  3491. IR::RegOpnd* tmpDoubleRegOpnd = IR::RegOpnd::New(TyFloat64, m_func);
  3492. if (AutoSystemInfo::Data.SSE4_1Available())
  3493. {
  3494. if (compareSrc->IsIndirOpnd())
  3495. {
  3496. Lowerer::InsertMove(tmpDoubleRegOpnd, compareSrc, insertBeforeInstr);
  3497. }
  3498. else
  3499. {
  3500. tmpDoubleRegOpnd = compareSrc->AsRegOpnd();
  3501. }
  3502. Lowerer::InsertAndLegalize(IR::Instr::New(Js::OpCode::PEXTRD, compareSrcUint32Opnd, tmpDoubleRegOpnd, IR::IntConstOpnd::New(1, TyInt8, m_func, true), m_func), insertBeforeInstr);
  3503. }
  3504. else
  3505. {
  3506. Lowerer::InsertMove(tmpDoubleRegOpnd, compareSrc, insertBeforeInstr);
  3507. Lowerer::InsertAndLegalize(IR::Instr::New(Js::OpCode::SHUFPS, tmpDoubleRegOpnd, tmpDoubleRegOpnd, IR::IntConstOpnd::New(3 << 6 | 2 << 4 | 1 << 2 | 1, TyInt8, m_func, true), m_func), insertBeforeInstr);
  3508. Lowerer::InsertAndLegalize(IR::Instr::New(Js::OpCode::MOVD, compareSrcUint32Opnd, tmpDoubleRegOpnd, m_func), insertBeforeInstr);
  3509. }
  3510. return this->lowererMD->m_lowerer->InsertCompareBranch(missingItemOpnd, compareSrcUint32Opnd, opcode, target, insertBeforeInstr);
  3511. }