LowererMDArch.cpp 111 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941294229432944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040304130423043304430453046304730483049305030513052305330543055305630573058305930603061306230633064306530663067306830693070307130723073307430753076307730783079308030813082308330843085308630873088308930903091309230933094309530963097309830993100310131023103310431053106310731083109311031113112311331143115311631173118311931203121312231233124312531263127312831293130313131323133313431353136313731383139314031413142314331443145314631473148314931503151315231533154315531563157315831593160316131623163316431653166316731683169317031713172317331743175
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "LowererMDArch.h"
  7. #include "Library/JavascriptGeneratorFunction.h"
// On amd64, sign-extending a 32-bit value into a 64-bit register uses MOVSXD.
const Js::OpCode LowererMD::MDExtend32Opcode = Js::OpCode::MOVSXD;
// Per-register IR types; the table itself is defined elsewhere in the backend.
extern const IRType RegTypes[RegNumCount];
// Default scale factor for indirect operands: 8 bytes, matching the amd64
// machine-pointer size.
BYTE
LowererMDArch::GetDefaultIndirScale()
{
    return IndirScale8;
}
// Variable shift counts must be placed in RCX (the CL subregister) on amd64.
RegNum
LowererMDArch::GetRegShiftCount()
{
    return RegRCX;
}
  20. RegNum
  21. LowererMDArch::GetRegReturn(IRType type)
  22. {
  23. return ( IRType_IsFloat(type) || IRType_IsSimd128(type) ) ? RegXMM0 : RegRAX;
  24. }
  25. RegNum
  26. LowererMDArch::GetRegReturnAsmJs(IRType type)
  27. {
  28. if (IRType_IsFloat(type))
  29. {
  30. return RegXMM0;
  31. }
  32. else if (IRType_IsSimd128(type))
  33. {
  34. return RegXMM0;
  35. }
  36. else
  37. {
  38. return RegRAX;
  39. }
  40. }
// Stack pointer register.
RegNum
LowererMDArch::GetRegStackPointer()
{
    return RegRSP;
}
// Block (frame) pointer register.
RegNum
LowererMDArch::GetRegBlockPointer()
{
    return RegRBP;
}
// Frame pointer register (same as the block pointer on amd64).
RegNum
LowererMDArch::GetRegFramePointer()
{
    return RegRBP;
}
// Register that carries the size parameter for the chkstk probe.
RegNum
LowererMDArch::GetRegChkStkParam()
{
    return RegRAX;
}
// Low half of a widening multiply result (RDX:RAX).
RegNum
LowererMDArch::GetRegIMulDestLower()
{
    return RegRAX;
}
// High half of a widening multiply result (RDX:RAX).
RegNum
LowererMDArch::GetRegIMulHighDestLower()
{
    return RegRDX;
}
// Integer argument register by position — not assigned yet on this target.
RegNum
LowererMDArch::GetRegArgI4(int32 argNum)
{
    // TODO: decide on registers to use for int
    return RegNOREG;
}
// Floating-point argument register by position — not assigned yet on this target.
RegNum
LowererMDArch::GetRegArgR8(int32 argNum)
{
    // TODO: decide on registers to use for double
    return RegNOREG;
}
  83. Js::OpCode
  84. LowererMDArch::GetAssignOp(IRType type)
  85. {
  86. switch (type)
  87. {
  88. case TyFloat64:
  89. return Js::OpCode::MOVSD;
  90. case TyFloat32:
  91. return Js::OpCode::MOVSS;
  92. case TySimd128F4:
  93. case TySimd128I4:
  94. case TySimd128I8:
  95. case TySimd128I16:
  96. case TySimd128U4:
  97. case TySimd128U8:
  98. case TySimd128U16:
  99. case TySimd128B4:
  100. case TySimd128B8:
  101. case TySimd128B16:
  102. case TySimd128D2:
  103. return Js::OpCode::MOVUPS;
  104. default:
  105. return Js::OpCode::MOV;
  106. }
  107. }
// One-time setup: link back to the owning LowererMD and reset the pending
// helper-call argument counter.
void
LowererMDArch::Init(LowererMD *lowererMD)
{
    this->lowererMD = lowererMD;
    this->helperCallArgsCount = 0;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadInputParamPtr
///
///     Load the address of the start of the passed-in parameters not including
///     the "this" parameter.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadInputParamPtr(IR::Instr *instrInsert, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
{
    if (this->m_func->GetJITFunctionBody()->IsCoroutine())
    {
        // Generator/async frames keep their arguments in a separate block:
        // load that block's pointer and LEA one slot past it to skip "this".
        IR::RegOpnd * argPtrRegOpnd = Lowerer::LoadGeneratorArgsPtr(instrInsert);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(argPtrRegOpnd, 1 * MachPtr, TyMachPtr, this->m_func);
        IR::RegOpnd * dstOpnd = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachPtr, this->m_func);
        return Lowerer::InsertLea(dstOpnd, indirOpnd, instrInsert);
    }
    else
    {
        // Stack layout in MachPtr-sized slots relative to the frame pointer:
        // 0 = frame chain, 1 = return address, 2 = function object,
        // 3 = callInfo/arg count, 4 = "this", 5 = first actual argument —
        // hence the 5 * MachPtr offset below.
        StackSym *paramSym = StackSym::New(TyMachReg, this->m_func);
        this->m_func->SetArgOffset(paramSym, 5 * MachPtr);
        IR::Instr *instr = this->lowererMD->LoadStackAddress(paramSym, optionalDstOpnd);
        instrInsert->InsertBefore(instr);
        return instr;
    }
}
IR::Instr *
LowererMDArch::LoadStackArgPtr(IR::Instr * instrArgPtr)
{
    // Lower a "load args pointer" instruction by computing the address of the
    // first actual argument (past "this") into instrArgPtr's dst, then
    // removing the original instruction.
    // NOTE: This code is sufficient for the apply-args optimization, but not for StackArguments,
    // if and when that is enabled.
    // dst = LEA &[rbp + "this" offset + sizeof(var)]
    IR::Instr * instr = LoadInputParamPtr(instrArgPtr, instrArgPtr->UnlinkDst()->AsRegOpnd());
    instrArgPtr->Remove();
    // Return the instruction before the one just inserted so the caller's
    // lowering loop resumes at the right point.
    return instr->m_prev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadHeapArgsCached
///
///     Lower the cached-scope "load heap arguments" instruction: either null
///     out the dst (when the stack-args optimization removes the heap object)
///     or build the argument list for the HelperOp_LoadHeapArgsCached call.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadHeapArgsCached(IR::Instr *instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr *instrPrev = instrArgs->m_prev;
    if (instrArgs->m_func->IsStackArgsEnabled())
    {
        // Stack-args optimization: no heap arguments object is created; the
        // instruction degenerates to "dst = null".
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // Helper arguments are pushed in reverse (s7 first, s1 last):
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = local frame instance
        // s4 = address of first actual argument (after "this")
        // s3 = formal argument count
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadArguments(s1, s2, s3, s4, s5, s6, s7)
        // s7 = formals are let decls
        IR::Opnd * formalsAreLetDecls = IR::IntConstOpnd::New((IntConstType)(instrArgs->m_opcode == Js::OpCode::LdLetHeapArgsCached), TyUint8, func);
        this->LoadHelperArgument(instrArgs, formalsAreLetDecls);
        // s6 = memory context
        this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);
        // s5 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);
        if (func->IsInlinee())
        {
            // s4 = address of first actual argument (after "this").
            // Bump the inlinee argv slot by one machine pointer to skip "this".
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->LoadStackAddress(firstRealArgSlotSym);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s3 = formal argument count (without counting "this").
            uint32 formalsCount = func->GetJITFunctionBody()->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyUint32, func));
            // s2 = actual argument count (without counting "this").
            // For an inlinee the count is a compile-time constant.
            instr = IR::Instr::New(Js::OpCode::MOV,
                IR::RegOpnd::New(TyMachReg, func),
                IR::IntConstOpnd::New(func->actualCount - 1, TyMachReg, func),
                func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());
            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                argObjSlotOpnd,
                instrArgs->GetDst(),
                func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s4 = address of first actual argument (after "this")
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s3 = formal argument count (without counting "this")
            // NOTE(review): uses func->GetInParamsCount() and TyInt32 here,
            // while the inlinee path uses GetJITFunctionBody()->GetInParamsCount()
            // and TyUint32 — presumably equivalent; confirm.
            uint32 formalsCount = func->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyInt32, func));
            // s2 = actual argument count (without counting "this"):
            // load the runtime count, then decrement it by one.
            instr = this->lowererMD->LoadInputParamCount(instrArgs);
            instr = IR::Instr::New(Js::OpCode::DEC, instr->GetDst(), instr->GetDst(), func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function: slot 2 of the incoming stack frame.
            StackSym *paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            this->LoadHelperArgument(instrArgs, srcOpnd);
            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }
        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArgsCached);
    }
    return instrPrev;
}
  243. ///----------------------------------------------------------------------------
  244. ///
  245. /// LowererMDArch::LoadHeapArguments
  246. ///
  247. /// Load the arguments object
  248. /// NOTE: The same caveat regarding arguments passed on the stack applies here
  249. /// as in LoadInputParamCount above.
  250. ///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadHeapArguments(IR::Instr *instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    // NOTE(review): instrPrev is reassigned to the LoadScriptContext result
    // below, unlike LoadHeapArgsCached which always returns the original
    // m_prev — confirm this difference is intentional.
    IR::Instr *instrPrev = instrArgs->m_prev;
    if (func->IsStackArgsEnabled())
    {
        // Stack-args optimization: no heap arguments object is created; the
        // instruction degenerates to "dst = null".
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // Helper arguments are pushed in reverse (s7 first, s1 last):
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = array of property ID's
        // s4 = local frame instance
        // s3 = address of first actual argument (after "this")
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArguments(s1, s2, s3, s4, s5, s6, s7)
        // s7 = formals are let decls
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(instrArgs->m_opcode == Js::OpCode::LdLetHeapArguments ? TRUE : FALSE, TyUint8, func));
        // s6 = memory context
        instrPrev = this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);
        // s5 = array of property ID's; fall back to the null address when the
        // function body has no formals property-id array.
        intptr_t formalsPropIdArray = instrArgs->m_func->GetJITFunctionBody()->GetFormalsPropIdArrayAddr();
        if (!formalsPropIdArray)
        {
            formalsPropIdArray = instrArgs->m_func->GetScriptContextInfo()->GetNullAddr();
        }
        IR::Opnd * argArray = IR::AddrOpnd::New(formalsPropIdArray, IR::AddrOpndKindDynamicMisc, m_func);
        this->LoadHelperArgument(instrArgs, argArray);
        // s4 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);
        if (func->IsInlinee())
        {
            // s3 = address of first actual argument (after "this").
            // Bump the inlinee argv slot by one machine pointer to skip "this".
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->LoadStackAddress(firstRealArgSlotSym);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s2 = actual argument count (without counting "this") — a
            // compile-time constant for an inlinee.
            instr = IR::Instr::New(Js::OpCode::MOV,
                IR::RegOpnd::New(TyUint32, func),
                IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
                func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());
            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                argObjSlotOpnd,
                instrArgs->GetDst(),
                func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s3 = address of first actual argument (after "this")
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s2 = actual argument count (without counting "this"):
            // LoadInputParamCount with a -1 adjustment.
            instr = this->lowererMD->LoadInputParamCount(instrArgs, -1);
            IR::Opnd * opndInputParamCount = instr->GetDst();
            this->LoadHelperArgument(instrArgs, opndInputParamCount);
            // s1 = current function: slot 2 of the incoming stack frame.
            StackSym * paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            if (this->m_func->GetJITFunctionBody()->IsCoroutine())
            {
                // the function object for generator calls is a GeneratorVirtualScriptFunction object
                // and we need to pass the real JavascriptGeneratorFunction object so grab it instead
                IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
                LowererMD::CreateAssign(tmpOpnd, srcOpnd, instrArgs);
                srcOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
            }
            this->LoadHelperArgument(instrArgs, srcOpnd);
            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }
        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArguments);
    }
    return instrPrev;
}
  349. //
  350. // Load the parameter in the first argument slot
  351. //
  352. IR::Instr *
  353. LowererMDArch::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
  354. {
  355. // Spread moves down the argument slot by one.
  356. IR::Opnd * argOpnd = this->GetArgSlotOpnd(3 + extraArgs);
  357. IR::Instr * argInstr = LowererMD::CreateAssign(argOpnd, dst, instr);
  358. return argInstr;
  359. }
// Map a 1-based argument position to the register that carries it, using the
// REG_INT_ARG / REG_XMM_ARG tables expanded from RegList.h (Index in those
// tables is 0-based, hence the +1 in the case labels). Returns RegNOREG when
// the position is beyond the register-argument count, i.e. the argument is
// passed on the stack.
inline static RegNum GetRegFromArgPosition(const bool isFloatArg, const uint16 argPosition)
{
    RegNum reg = RegNOREG;
    if (!isFloatArg && argPosition <= IntArgRegsCount)
    {
        switch (argPosition)
        {
#define REG_INT_ARG(Index, Name) \
        case ((Index) + 1): \
            reg = Reg ## Name; \
            break;
#include "RegList.h"
        default:
            Assume(UNREACHED);
        }
    }
    else if (isFloatArg && argPosition <= XmmArgRegsCount)
    {
        switch (argPosition)
        {
#define REG_XMM_ARG(Index, Name) \
        case ((Index) + 1): \
            reg = Reg ## Name; \
            break;
#include "RegList.h"
        default:
            Assume(UNREACHED);
        }
    }
    return reg;
}
// Walk the ArgOut chain hanging off callInstr, turn each ArgOut into a move
// into its machine argument slot, emit the callInfo constant, and lower the
// StartCall. Returns the total number of argument slots used (including the
// callInfo slot and any extraParams).
int32
LowererMDArch::LowerCallArgs(IR::Instr *callInstr, ushort callFlags, Js::ArgSlot extraParams, IR::IntConstOpnd **callInfoOpndRef /* = nullptr */)
{
    AssertMsg(this->helperCallArgsCount == 0, "We don't support nested helper calls yet");
    // Slot 1 is reserved for callInfo, so every argument shifts down by one.
    const Js::ArgSlot argOffset = 1;
    uint32 argCount = 0;
    // Lower args and look for StartCall
    IR::Instr * argInstr = callInstr;
    IR::Instr * cfgInsertLoc = callInstr->GetPrevRealInstr();
    // Follow the src2 links: each ArgOut's src2 points at the previous ArgOut
    // (a SymOpnd) until the chain terminates at StartCall (a RegOpnd).
    IR::Opnd *src2 = argInstr->UnlinkSrc2();
    while (src2->IsSymOpnd())
    {
        IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(this->m_func);
        argInstr = argLinkSym->m_instrDef;
        src2 = argInstr->UnlinkSrc2();
        this->lowererMD->ChangeToAssign(argInstr);
        // Mov each arg to its argSlot
        Js::ArgSlot argPosition = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
        Js::ArgSlot index = argOffset + argPosition;
        // ArgSlot arithmetic can wrap; treat overflow as out-of-memory.
        if(index < argPosition)
        {
            Js::Throw::OutOfMemory();
        }
        index += extraParams;
        if(index < extraParams)
        {
            Js::Throw::OutOfMemory();
        }
        IR::Opnd * dstOpnd = this->GetArgSlotOpnd(index, argLinkSym);
        argInstr->ReplaceDst(dstOpnd);
        cfgInsertLoc = argInstr->GetPrevRealInstr();
        // The arg sym isn't assigned a constant directly anymore
        // TODO: We can just move the instruction down next to the call if it is just a constant assignment
        // but AMD64 doesn't have the MOV mem,imm64 encoding, and we have no code to detect if the value can fit
        // into imm32 and hoist the src if it is not.
        argLinkSym->m_isConst = false;
        argLinkSym->m_isIntConst = false;
        argLinkSym->m_isTaggableIntConst = false;
        // Move the (now plain assign) instruction to sit just before the call.
        argInstr->Unlink();
        callInstr->InsertBefore(argInstr);
        argCount++;
    }
    // The chain terminator: a RegOpnd defined by StartCall.
    IR::RegOpnd * argLinkOpnd = src2->AsRegOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    if (callInstr->m_opcode == Js::OpCode::NewScObject ||
        callInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        callInstr->m_opcode == Js::OpCode::NewScObjectLiteral ||
        callInstr->m_opcode == Js::OpCode::NewScObjArray ||
        callInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
    {
        // These push an extra arg.
        argCount++;
    }
    AssertMsg(startCallInstr->m_opcode == Js::OpCode::StartCall ||
        startCallInstr->m_opcode == Js::OpCode::LoweredStartCall,
        "Problem with arg chain.");
    AssertMsg(startCallInstr->GetArgOutCount(/*getInterpreterArgOutCount*/ false) == argCount ||
        m_func->GetJITFunctionBody()->IsAsmJsMode(),
        "ArgCount doesn't match StartCall count");
    //
    // Machine dependent lowering
    //
    if (callInstr->m_opcode != Js::OpCode::AsmJsCallI)
    {
        // Push argCount (the callInfo constant) into slot 1.
        IR::IntConstOpnd *argCountOpnd = Lowerer::MakeCallInfoConst(callFlags, argCount, m_func);
        if (callInfoOpndRef)
        {
            argCountOpnd->Use(m_func);
            *callInfoOpndRef = argCountOpnd;
        }
        Lowerer::InsertMove(this->GetArgSlotOpnd(1 + extraParams), argCountOpnd, callInstr);
    }
    startCallInstr = this->LowerStartCall(startCallInstr);
    const uint32 argSlots = argCount + 1 + extraParams; // + 1 for call flags
    // Track the widest out-arg area any call in this function needs.
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled, argSlots);
    if (m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
        GeneratePreCall(callInstr, functionObjOpnd, cfgInsertLoc->GetNextRealInstr());
    }
    return argSlots;
}
  479. void
  480. LowererMDArch::SetMaxArgSlots(Js::ArgSlot actualCount /*including this*/)
  481. {
  482. Js::ArgSlot offset = 3;//For function object & callInfo & this
  483. if (this->m_func->m_argSlotsForFunctionsCalled < (uint32) (actualCount + offset))
  484. {
  485. this->m_func->m_argSlotsForFunctionsCalled = (uint32)(actualCount + offset);
  486. }
  487. return;
  488. }
  489. void
  490. LowererMDArch::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed)
  491. {
  492. IRType type = TyVar;
  493. if (isZeroed)
  494. {
  495. if (value == 0)
  496. {
  497. // Recycler memory are zero initialized
  498. return;
  499. }
  500. type = value <= UINT_MAX ?
  501. (value <= USHORT_MAX ?
  502. (value <= UCHAR_MAX ? TyUint8 : TyUint16) :
  503. TyUint32) :
  504. type;
  505. }
  506. Func * func = this->m_func;
  507. lowererMD->GetLowerer()->InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  508. }
// Lowers a dynamic call: homes the "this" argout next to the call, stores the
// argument count into the callInfo slot (slot 2), loads the call target via
// GeneratePreCall, and finally lowers the CALL itself.
IR::Instr *
LowererMDArch::LowerCallIDynamic(IR::Instr *callInstr, IR::Instr*saveThisArgOutInstr, IR::Opnd *argsLength, ushort callFlags, IR::Instr * insertBeforeInstrForCFG)
{
    callInstr->InsertBefore(saveThisArgOutInstr); //Move this Argout next to call;
    this->LoadDynamicArgument(saveThisArgOutInstr, 3); //this pointer is the 3rd argument
    /*callInfo*/
    if (callInstr->m_func->IsInlinee())
    {
        // For inlinees the actual count is a compile-time constant.
        Assert(argsLength->AsIntConstOpnd()->GetValue() == callInstr->m_func->actualCount);
        this->SetMaxArgSlots((Js::ArgSlot)callInstr->m_func->actualCount);
    }
    else
    {
        // Runtime count: add 1 for "this", and reserve the maximum possible
        // inlinee argout area since the count is unknown at JIT time.
        callInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, argsLength, argsLength, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), this->m_func));
        this->SetMaxArgSlots(Js::InlineeCallInfo::MaxInlineeArgoutCount);
    }
    // Arg slot 2 holds the callInfo (the adjusted argument count).
    callInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, this->GetArgSlotOpnd(2), argsLength, this->m_func));
    IR::Opnd *funcObjOpnd = callInstr->UnlinkSrc1();
    GeneratePreCall(callInstr, funcObjOpnd, insertBeforeInstrForCFG);
    // Normally for dynamic calls we move 4 args to registers and push remaining
    // args onto stack (Windows convention, and unchanged on xplat). We need to
    // manually home 4 args. inlinees lower differently and follow platform ABI.
    // So we need to manually home actualArgsCount + 2 args (function, callInfo).
    const uint32 homeArgs = callInstr->m_func->IsInlinee() ?
        callInstr->m_func->actualCount + 2 : 4;
    LowerCall(callInstr, homeArgs);
    return callInstr;
}
// Emits a guard verifying the call target is a real object (not a tagged
// value); if the test fails, JSERR_NeedFunction is raised. Skipped entirely
// when the operand is already known not to be a tagged value.
void
LowererMDArch::GenerateFunctionObjectTest(IR::Instr * callInstr, IR::RegOpnd *functionObjOpnd, bool isHelper, IR::LabelInstr* continueAfterExLabel /* = nullptr */)
{
    AssertMsg(!m_func->IsJitInDebugMode() || continueAfterExLabel, "When jit is in debug mode, continueAfterExLabel must be provided otherwise continue after exception may cause AV.");
    IR::RegOpnd *functionObjRegOpnd = functionObjOpnd->AsRegOpnd();
    IR::Instr * insertBeforeInstr = callInstr;
    // Need check and error if we are calling a tagged int.
    if (!functionObjRegOpnd->IsNotTaggedValue())
    {
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if (this->lowererMD->GenerateObjectTest(functionObjRegOpnd, callInstr, helperLabel))
        {
            // Resulting layout before the call:
            //   JMP $call ; $helper: <runtime error> ; $call: <the call>
            IR::LabelInstr * callLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            IR::Instr* instr = IR::BranchInstr::New(Js::OpCode::JMP, callLabel, this->m_func);
            callInstr->InsertBefore(instr);
            callInstr->InsertBefore(helperLabel);
            callInstr->InsertBefore(callLabel);
            insertBeforeInstr = callLabel;
            lowererMD->m_lowerer->GenerateRuntimeError(insertBeforeInstr, JSERR_NeedFunction);
            if (continueAfterExLabel)
            {
                // Under debugger the RuntimeError (exception) can be ignored, generate branch to jmp to safe place
                // (which would normally be debugger bailout check).
                IR::BranchInstr* continueAfterEx = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueAfterExLabel, this->m_func);
                insertBeforeInstr->InsertBefore(continueAfterEx);
            }
        }
    }
}
// Loads the call target's entry point into a register, optionally emits a CFG
// check on it, makes it the CALL's src1, and homes the function object into
// argument slot 1. CFG-related instructions are inserted before
// insertBeforeInstrForCFGCheck (defaults to the call itself).
void
LowererMDArch::GeneratePreCall(IR::Instr * callInstr, IR::Opnd *functionObjOpnd, IR::Instr * insertBeforeInstrForCFGCheck)
{
    if (insertBeforeInstrForCFGCheck == nullptr)
    {
        insertBeforeInstrForCFGCheck = callInstr;
    }
    IR::RegOpnd * functionTypeRegOpnd = nullptr;
    IR::IndirOpnd * entryPointIndirOpnd = nullptr;
    if (callInstr->m_opcode == Js::OpCode::AsmJsCallI)
    {
        // asm.js internal call: walk function->type->entryPointInfo->address.
        functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
        IR::IndirOpnd* functionInfoIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
        IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
        insertBeforeInstrForCFGCheck->InsertBefore(instr);
        functionInfoIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::ScriptFunctionType::GetEntryPointInfoOffset(), TyMachReg, m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
        insertBeforeInstrForCFGCheck->InsertBefore(instr);
        uint32 entryPointOffset = Js::ProxyEntryPointInfo::GetAddressOffset();
        entryPointIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachReg, m_func);
    }
    else
    {
        // For calls to fixed functions we load the function's type directly from the known (hard-coded) function object address.
        // For other calls, we need to load it from the function object stored in a register operand.
        if (functionObjOpnd->IsAddrOpnd() && functionObjOpnd->AsAddrOpnd()->m_isFunction)
        {
            functionTypeRegOpnd = this->lowererMD->m_lowerer->GenerateFunctionTypeFromFixedFunctionObject(insertBeforeInstrForCFGCheck, functionObjOpnd);
        }
        else if (functionObjOpnd->IsRegOpnd())
        {
            AssertMsg(functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call target to be a stack symbol.");
            functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
            // functionTypeRegOpnd(RAX) = MOV function->type
            {
                IR::IndirOpnd * functionTypeIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
                    Js::DynamicObject::GetOffsetOfType(), TyMachReg, m_func);
                IR::Instr * mov = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionTypeIndirOpnd, m_func);
                insertBeforeInstrForCFGCheck->InsertBefore(mov);
            }
        }
        else
        {
            AnalysisAssertMsg(false, "Unexpected call target operand type.");
        }
        // entryPointRegOpnd(RAX) = MOV type->entryPoint
        entryPointIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::Type::GetOffsetOfEntryPoint(), TyMachPtr, m_func);
    }
    // Reuse the type register to hold the loaded entry point.
    IR::RegOpnd *entryPointRegOpnd = functionTypeRegOpnd;
    entryPointRegOpnd->m_isCallArg = true;
    IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, entryPointRegOpnd, entryPointIndirOpnd, m_func);
    insertBeforeInstrForCFGCheck->InsertBefore(mov);
    // entryPointRegOpnd(RAX) = CALL entryPointRegOpnd(RAX)
    callInstr->SetSrc1(entryPointRegOpnd);
#if defined(_CONTROL_FLOW_GUARD)
    // verify that the call target is valid (CFG Check)
    if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
    {
        this->lowererMD->GenerateCFGCheck(entryPointRegOpnd, insertBeforeInstrForCFGCheck);
    }
#endif
    // Setup the first call argument - pointer to the function being called.
    IR::Instr * instrMovArg1 = IR::Instr::New(Js::OpCode::MOV, GetArgSlotOpnd(1), functionObjOpnd, m_func);
    callInstr->InsertBefore(instrMovArg1);
}
// Lowers a JavaScript CallI: optionally guards the target with a
// function-object test, loads the entry point (GeneratePreCall), lowers the
// argument chain, lowers the CALL, and finally hooks up SimpleJit call
// profiling when requested.
IR::Instr *
LowererMDArch::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    AssertMsg(this->helperCallArgsCount == 0, "We don't support nested helper calls yet");
    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
    IR::Instr * insertBeforeInstrForCFGCheck = callInstr;
    // If this is a call for new, we already pass the function operand through NewScObject,
    // which checks if the function operand is a real function or not, don't need to add a check again
    // If this is a call to a fixed function, we've already verified that the target is, indeed, a function.
    if (callInstr->m_opcode != Js::OpCode::CallIFixed && !(callFlags & Js::CallFlags_New))
    {
        Assert(functionObjOpnd->IsRegOpnd());
        IR::LabelInstr* continueAfterExLabel = Lowerer::InsertContinueAfterExceptionLabelForDebugger(m_func, callInstr, isHelper);
        GenerateFunctionObjectTest(callInstr, functionObjOpnd->AsRegOpnd(), isHelper, continueAfterExLabel);
    }
    else if (insertBeforeInstrForCFG != nullptr)
    {
        RegNum dstReg = insertBeforeInstrForCFG->GetDst()->AsRegOpnd()->GetReg();
        AssertMsg(dstReg == RegArg2 || dstReg == RegArg3, "NewScObject should insert the first Argument in RegArg2/RegArg3 only based on Spread call or not.");
        insertBeforeInstrForCFGCheck = insertBeforeInstrForCFG;
    }
    GeneratePreCall(callInstr, functionObjOpnd, insertBeforeInstrForCFGCheck);
    // We need to get the calculated CallInfo in SimpleJit because that doesn't include any changes for stack alignment
    IR::IntConstOpnd *callInfo = nullptr;
    int32 argCount = LowerCallArgs(callInstr, callFlags, 1, &callInfo);
    // Capture dst before LowerCall sinks it into the return register.
    IR::Opnd *const finalDst = callInstr->GetDst();
    // x64 keeps track of argCount for us, so pass just an arbitrary value there
    IR::Instr* ret = this->LowerCall(callInstr, argCount);
    IR::AutoReuseOpnd autoReuseSavedFunctionObjOpnd;
    if (callInstr->IsJitProfilingInstr())
    {
        Assert(callInstr->m_func->IsSimpleJit());
        Assert(!CONFIG_FLAG(NewSimpleJit));
        if(finalDst &&
            finalDst->IsRegOpnd() &&
            functionObjOpnd->IsRegOpnd() &&
            finalDst->AsRegOpnd()->m_sym == functionObjOpnd->AsRegOpnd()->m_sym)
        {
            // The function object sym is going to be overwritten, so save it in a temp for profiling
            IR::RegOpnd *const savedFunctionObjOpnd = IR::RegOpnd::New(functionObjOpnd->GetType(), callInstr->m_func);
            autoReuseSavedFunctionObjOpnd.Initialize(savedFunctionObjOpnd, callInstr->m_func);
            Lowerer::InsertMove(savedFunctionObjOpnd, functionObjOpnd, callInstr->m_next);
            functionObjOpnd = savedFunctionObjOpnd;
        }
        auto instr = callInstr->AsJitProfilingInstr();
        ret = this->lowererMD->m_lowerer->GenerateCallProfiling(
            instr->profileId,
            instr->inlineCacheIndex,
            instr->GetDst(),
            functionObjOpnd,
            callInfo,
            instr->isProfiledReturnCall,
            callInstr,
            ret);
    }
    return ret;
}
// Not implemented on amd64; asserts unconditionally if reached.
IR::Instr *
LowererMDArch::LowerCallPut(IR::Instr *callInstr)
{
    // Note: what we have to do here is call a helper with the Jscript calling convention,
    // so we need to factor the lowering of arguments out of the CallI expansion.
    AssertMsg(FALSE, "TODO: LowerCallPut not implemented");
    return nullptr;
}
  696. static inline IRType ExtendHelperArg(IRType type)
  697. {
  698. #ifdef __clang__
  699. // clang expects caller to extend arg size to int
  700. switch (type)
  701. {
  702. case TyInt8:
  703. case TyInt16:
  704. return TyInt32;
  705. case TyUint8:
  706. case TyUint16:
  707. return TyUint32;
  708. }
  709. #endif
  710. return type;
  711. }
// Finalizes a call instruction: sinks the dst into the return register,
// assigns any queued helper arguments to their ABI positions (with separate
// Sys V int/xmm indexing and manual register homing on xplat), and
// materializes 64-bit helper addresses into a register since CALL cannot
// encode them directly. On Windows x64 'argCount' is unused; on xplat it
// sizes the manual homing area.
IR::Instr *
LowererMDArch::LowerCall(IR::Instr * callInstr, uint32 argCount)
{
    UNREFERENCED_PARAMETER(argCount);
    IR::Instr *retInstr = callInstr;
    callInstr->m_opcode = Js::OpCode::CALL;
    // This is required here due to calls create during lowering
    callInstr->m_func->SetHasCalls();
    if (callInstr->GetDst())
    {
        IR::Opnd * dstOpnd;
        this->lowererMD->ForceDstToReg(callInstr);
        dstOpnd = callInstr->GetDst();
        IRType dstType = dstOpnd->GetType();
        Js::OpCode assignOp = GetAssignOp(dstType);
        if (callInstr->GetSrc1()->IsHelperCallOpnd())
        {
            // Truncate the result of a conversion to 32-bit int, because the C++ code doesn't.
            IR::HelperCallOpnd *helperOpnd = callInstr->GetSrc1()->AsHelperCallOpnd();
            if (helperOpnd->m_fnHelper == IR::HelperConv_ToInt32 ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToInt32_Full ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToInt32Core ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32 ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32_Full ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32Core)
            {
                assignOp = Js::OpCode::MOV_TRUNC;
            }
        }
        // "dst = CALL ..." becomes "retReg = CALL ...; dst = MOV retReg".
        IR::Instr * movInstr = callInstr->SinkDst(assignOp);
        RegNum reg = GetRegReturn(dstType);
        callInstr->GetDst()->AsRegOpnd()->SetReg(reg);
        movInstr->GetSrc1()->AsRegOpnd()->SetReg(reg);
        retInstr = movInstr;
    }
    //
    // assign the arguments to appropriate positions
    //
    AssertMsg(this->helperCallArgsCount >= 0, "Fatal. helper call arguments ought to be positive");
    AssertMsg(this->helperCallArgsCount < MaxArgumentsToHelper && MaxArgumentsToHelper < 255, "Too many helper call arguments");
    uint16 argsLeft = static_cast<uint16>(this->helperCallArgsCount);
    // Sys V x64 ABI assigns int and xmm arg registers separately.
    // e.g. args: int, double, int, double, int, double
    // Windows: int0, xmm1, int2, xmm3, stack, stack
    // Sys V: int0, xmm0, int1, xmm1, int2, xmm2
#ifdef _WIN32
#define _V_ARG_INDEX(index) index
#else
    // Precompute per-argument ABI positions for Sys V: floats consume xmm
    // slots, ints consume integer-register slots, overflow goes to the stack.
    uint16 _vindex[MaxArgumentsToHelper];
    {
        uint16 intIndex = 1, doubleIndex = 1, stackIndex = IntArgRegsCount + 1;
        for (int i = 0; i < this->helperCallArgsCount; i++)
        {
            IR::Opnd * helperSrc = this->helperCallArgs[this->helperCallArgsCount - 1 - i];
            IRType type = helperSrc->GetType();
            if (IRType_IsFloat(type) || IRType_IsSimd128(type))
            {
                if (doubleIndex <= XmmArgRegsCount)
                {
                    _vindex[i] = doubleIndex++;
                }
                else
                {
                    _vindex[i] = stackIndex++;
                }
            }
            else
            {
                if (intIndex <= IntArgRegsCount)
                {
                    _vindex[i] = intIndex++;
                }
                else
                {
                    _vindex[i] = stackIndex++;
                }
            }
        }
    }
#define _V_ARG_INDEX(index) _vindex[(index) - 1]
#endif
    // xplat NOTE: Lower often loads "known args" with LoadHelperArgument() and
    // variadic JS runtime args with LowerCallArgs(). So the full args length is
    // this->helperCallArgsCount + argCount
    // "argCount > 0" indicates we have variadic JS runtime args and needs to
    // manually home registers on xplat.
    const bool shouldHomeParams = argCount > 0;
    while (argsLeft > 0)
    {
        // helperCallArgs was filled in push order; walk it back to front.
        IR::Opnd * helperSrc = this->helperCallArgs[this->helperCallArgsCount - argsLeft];
        uint16 index = _V_ARG_INDEX(argsLeft);
        StackSym * helperSym = m_func->m_symTable->GetArgSlotSym(index);
        helperSym->m_type = ExtendHelperArg(helperSrc->GetType());
        Lowerer::InsertMove(
            this->GetArgSlotOpnd(index, helperSym, /*isHelper*/!shouldHomeParams),
            helperSrc,
            callInstr, false);
        --argsLeft;
    }
#ifndef _WIN32
    // Manually home args
    if (shouldHomeParams)
    {
        const int callArgCount = this->helperCallArgsCount + static_cast<int>(argCount);
        int argRegs = min(callArgCount, static_cast<int>(XmmArgRegsCount));
        for (int i = argRegs; i > 0; i--)
        {
            IRType type = this->xplatCallArgs.args[i];
            bool isFloatArg = this->xplatCallArgs.IsFloat(i);
            // Positions above the integer register count only exist for floats.
            if ( i > IntArgRegsCount && !isFloatArg ) continue;
            StackSym * sym = this->m_func->m_symTable->GetArgSlotSym(static_cast<uint16>(i));
            RegNum reg = GetRegFromArgPosition(isFloatArg, i);
            IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
            regOpnd->m_isCallArg = true;
            Lowerer::InsertMove(
                IR::SymOpnd::New(sym, type, this->m_func),
                regOpnd,
                callInstr, false);
        }
    }
    this->xplatCallArgs.Reset();
#endif // !_WIN32
    //
    // load the address into a register because we cannot directly access 64 bit constants
    // in CALL instruction. Non helper call methods will already be accessed indirectly.
    //
    // Skip this for bailout calls. The register allocator will lower that as appropriate, without affecting spill choices.
    //
    // Also skip this for relocatable helper calls. These will be turned into indirect
    // calls in lower.
    if (callInstr->GetSrc1()->IsHelperCallOpnd() && !callInstr->HasBailOutInfo())
    {
        IR::RegOpnd *targetOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr,m_func), RegRAX, TyMachPtr, this->m_func);
        IR::Instr *movInstr = IR::Instr::New(Js::OpCode::MOV, targetOpnd, callInstr->GetSrc1(), this->m_func);
        targetOpnd->m_isCallArg = true;
        callInstr->UnlinkSrc1();
        callInstr->SetSrc1(targetOpnd);
        callInstr->InsertBefore(movInstr);
    }
    //
    // Reset the call
    //
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled , (uint32)this->helperCallArgsCount);
    this->helperCallArgsCount = 0;
    return retInstr;
}
//
// Returns the opnd where the corresponding argument would have been stored. On amd64,
// the first 4 arguments go in registers and the rest are on stack.
//
IR::Opnd *
LowererMDArch::GetArgSlotOpnd(uint16 index, StackSym * argSym, bool isHelper /*= false*/)
{
    Assert(index != 0);
    uint16 argPosition = index;
#ifdef ENABLE_SIMDJS
    // Without SIMD the index is the Var offset and is also the argument index. Since each arg = 1 Var.
    // With SIMD, args are of variable length and we need to use the argument position in the args list.
    if (m_func->IsSIMDEnabled() &&
        m_func->GetJITFunctionBody()->IsAsmJsMode() &&
        argSym != nullptr &&
        argSym->m_argPosition != 0)
    {
        argPosition = (uint16)argSym->m_argPosition;
    }
#endif
    IR::Opnd *argSlotOpnd = nullptr;
    if (argSym != nullptr)
    {
        // Pin the sym to the (1-based) slot's stack offset.
        argSym->m_offset = (index - 1) * MachPtr;
        argSym->m_allocated = true;
    }
    IRType type = argSym ? argSym->GetType() : TyMachReg;
    const bool isFloatArg = IRType_IsFloat(type) || IRType_IsSimd128(type);
    RegNum reg = GetRegFromArgPosition(isFloatArg, argPosition);
#ifndef _WIN32
    // Record float positions so LowerCall can home the right register class.
    if (isFloatArg && argPosition <= XmmArgRegsCount)
    {
        this->xplatCallArgs.SetFloat(argPosition);
    }
#endif
    if (reg != RegNOREG)
    {
        // This argument position maps to a register.
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(argSym, reg, type, m_func);
        regOpnd->m_isCallArg = true;
        argSlotOpnd = regOpnd;
    }
    else
    {
        // This argument position lives on the stack.
        if (argSym == nullptr)
        {
            argSym = this->m_func->m_symTable->GetArgSlotSym(index);
        }
#ifndef _WIN32
        // helper does not home args, adjust stack offset
        if (isHelper)
        {
            const uint16 argIndex = index - IntArgRegsCount;
            argSym->m_offset = (argIndex - 1) * MachPtr;
        }
#endif
        argSlotOpnd = IR::SymOpnd::New(argSym, type, this->m_func);
    }
    return argSlotOpnd;
}
  917. IR::Instr *
  918. LowererMDArch::LowerAsmJsCallE(IR::Instr *callInstr)
  919. {
  920. IR::IntConstOpnd *callInfo = nullptr;
  921. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 1, &callInfo);
  922. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  923. return ret;
  924. }
  925. IR::Instr *
  926. LowererMDArch::LowerAsmJsCallI(IR::Instr * callInstr)
  927. {
  928. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 0);
  929. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  930. return ret;
  931. }
// Emits the explicit bounds check for a wasm heap access: throws
// WASMERR_ArrayIndexOutOfRange when (index + access size + constant offset)
// exceeds the buffer length held in src2. Returns the label after which the
// actual memory access should be lowered.
IR::Instr *
LowererMDArch::LowerWasmMemOp(IR::Instr * instr, IR::Opnd *addrOpnd)
{
#if ENABLE_FAST_ARRAYBUFFER
    if (CONFIG_FLAG(WasmFastArray))
    {
        // Fast-array configuration skips the explicit bounds check.
        return instr;
    }
#endif
    Assert(instr->GetSrc2());
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
    // Find array buffer length
    IR::IndirOpnd * indirOpnd = addrOpnd->AsIndirOpnd();
    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    uint32 offset = indirOpnd->GetOffset();
    IR::Opnd *arrayLenOpnd = instr->GetSrc2();
    // End of the access relative to the index: access size + constant offset.
    IR::Int64ConstOpnd * constOffsetOpnd = IR::Int64ConstOpnd::New((int64)addrOpnd->GetSize() + (int64)offset, TyInt64, m_func);
    IR::Opnd *cmpOpnd;
    if (indexOpnd != nullptr)
    {
        // Compare index + memop access length and array buffer length, and generate RuntimeError if greater
        cmpOpnd = IR::RegOpnd::New(TyInt64, m_func);
        Lowerer::InsertAdd(true, cmpOpnd, indexOpnd, constOffsetOpnd, helperLabel);
    }
    else
    {
        cmpOpnd = constOffsetOpnd;
    }
    lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, arrayLenOpnd, Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    lowererMD->m_lowerer->GenerateThrow(IR::IntConstOpnd::New(WASMERR_ArrayIndexOutOfRange, TyInt32, m_func), loadLabel);
    Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);
    return doneLabel;
}
// Emits the bounds check for an asm.js typed-array load. Out-of-bounds SIMD
// loads raise a range error; out-of-bounds scalar loads produce NaN (float
// types) or 0 (int types). Returns the instruction after which lowering of
// the actual load should continue.
IR::Instr*
LowererMDArch::LowerAsmJsLdElemHelper(IR::Instr * instr, bool isSimdLoad /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Instr* done;
    IR::Opnd * src1 = instr->UnlinkSrc1();
    IRType type = src1->GetType();
    IR::RegOpnd * indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;
    Assert(isSimdLoad == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
#ifdef _WIN32
    // For x64, bound checks are required only for SIMD loads.
    if (isSimdLoad)
#else
    // xplat: Always do bound check. We don't support out-of-bound access violation recovery.
    if (true)
#endif
    {
        IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
        IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
        IR::Opnd *cmpOpnd;
        if (indexOpnd)
        {
            cmpOpnd = indexOpnd;
        }
        else
        {
            // No index register: the constant offset itself is the index.
            cmpOpnd = IR::IntConstOpnd::New(src1->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
        }
        // if dataWidth != byte per element, we need to check end offset
        if (checkEndOffset)
        {
            IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
            // MOV tmp, cmpOpnd
            Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
            // ADD tmp, dataWidth
            Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, tmp->GetType(), m_func, true), helperLabel);
            // JB helper -- unsigned wrap on the add means out of range
            Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
            // CMP tmp, size
            // JG $helper
            lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
        }
        else
        {
            lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
        }
        Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);
        if (isSimdLoad)
        {
            lowererMD->m_lowerer->GenerateRuntimeError(loadLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
        }
        else
        {
            // Scalar out-of-bounds load: NaN for floats, 0 for ints.
            if (IRType_IsFloat(type))
            {
                Lowerer::InsertMove(instr->UnlinkDst(), IR::FloatConstOpnd::New(Js::NumberConstants::NaN, type, m_func), loadLabel);
            }
            else
            {
                Lowerer::InsertMove(instr->UnlinkDst(), IR::IntConstOpnd::New(0, TyInt8, m_func), loadLabel);
            }
        }
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, loadLabel);
        done = doneLabel;
    }
    else
    {
        Assert(!instr->GetSrc2());
        done = instr;
    }
    return done;
}
// Emits the bounds check for an asm.js typed-array store. Out-of-bounds SIMD
// stores raise a range error; out-of-bounds scalar stores are silently
// skipped (the store path branches straight to done). Returns the instruction
// after which lowering of the actual store should continue.
IR::Instr*
LowererMDArch::LowerAsmJsStElemHelper(IR::Instr * instr, bool isSimdStore /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Instr* done;
    IR::Opnd * dst = instr->UnlinkDst();
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;
    Assert(isSimdStore == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
#ifdef _WIN32
    // For x64, bound checks are required only for SIMD stores.
    if (isSimdStore)
#else
    // xplat: Always do bound check. We don't support out-of-bound access violation recovery.
    if (true)
#endif
    {
        IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
        IR::LabelInstr * storeLabel = Lowerer::InsertLabel(false, instr);
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
        IR::Opnd * cmpOpnd;
        if (indexOpnd)
        {
            cmpOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
        }
        else
        {
            // No index register: the constant offset itself is the index.
            cmpOpnd = IR::IntConstOpnd::New(dst->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
        }
        // if dataWidth != byte per element, we need to check end offset
        if (checkEndOffset)
        {
            IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
            // MOV tmp, cmpOpnd
            Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
            // ADD tmp, dataWidth
            Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, tmp->GetType(), m_func, true), helperLabel);
            // JB helper -- unsigned wrap on the add means out of range
            Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
            // CMP tmp, size
            // JG $helper
            lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
        }
        else
        {
            lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
        }
        Lowerer::InsertBranch(Js::OpCode::Br, storeLabel, helperLabel);
        if (isSimdStore)
        {
            lowererMD->m_lowerer->GenerateRuntimeError(storeLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
        }
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, storeLabel);
        done = doneLabel;
    }
    else
    {
        Assert(!instr->GetSrc2());
        done = instr;
    }
    return done;
}
  1101. ///----------------------------------------------------------------------------
  1102. ///
  1103. /// LowererMDArch::LowerStartCall
  1104. ///
  1105. ///
  1106. ///----------------------------------------------------------------------------
  1107. IR::Instr *
  1108. LowererMDArch::LowerStartCall(IR::Instr * startCallInstr)
  1109. {
  1110. startCallInstr->m_opcode = Js::OpCode::LoweredStartCall;
  1111. return startCallInstr;
  1112. }
  1113. IR::Instr *
  1114. LowererMDArch::LoadInt64HelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1115. {
  1116. return LoadHelperArgument(instrInsert, opndArg);
  1117. }
  1118. ///----------------------------------------------------------------------------
  1119. ///
  1120. /// LowererMDArch::LoadHelperArgument
  1121. ///
  1122. /// Assign register or push on stack as per AMD64 calling convention
  1123. ///
  1124. ///----------------------------------------------------------------------------
  1125. IR::Instr *
  1126. LowererMDArch::LoadHelperArgument(IR::Instr *instr, IR::Opnd *opndArg)
  1127. {
  1128. IR::Opnd *destOpnd;
  1129. IR::Instr *instrToReturn;
  1130. if(opndArg->IsImmediateOpnd())
  1131. {
  1132. destOpnd = opndArg;
  1133. instrToReturn = instr;
  1134. }
  1135. else
  1136. {
  1137. destOpnd = IR::RegOpnd::New(opndArg->GetType(), this->m_func);
  1138. instrToReturn = instr->m_prev;
  1139. Lowerer::InsertMove(destOpnd, opndArg, instr, false);
  1140. instrToReturn = instrToReturn->m_next;
  1141. }
  1142. helperCallArgs[helperCallArgsCount++] = destOpnd;
  1143. AssertMsg(helperCallArgsCount < LowererMDArch::MaxArgumentsToHelper,
  1144. "We do not yet support any no. of arguments to the helper");
  1145. return instrToReturn;
  1146. }
  1147. IR::Instr *
  1148. LowererMDArch::LoadDynamicArgument(IR::Instr *instr, uint argNumber)
  1149. {
  1150. Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  1151. Assert(instr->GetSrc2() == nullptr);
  1152. instr->m_opcode = Js::OpCode::MOV;
  1153. IR::Opnd* dst = GetArgSlotOpnd((Js::ArgSlot) argNumber);
  1154. instr->SetDst(dst);
  1155. if (!dst->IsRegOpnd())
  1156. {
  1157. //TODO: Move it to legalizer.
  1158. IR::RegOpnd *tempOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);
  1159. instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, tempOpnd, instr->GetSrc1(), instr->m_func));
  1160. instr->ReplaceSrc1(tempOpnd);
  1161. }
  1162. return instr;
  1163. }
// Stores the nth actual at [rsp + (index + 3) * scale]: src2 holds the actual
// index, and the +3 skips the function object, callInfo and "this" slots.
IR::Instr *
LowererMDArch::LoadDynamicArgumentUsingLength(IR::Instr *instr)
{
    Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
    IR::RegOpnd* src2 = instr->UnlinkSrc2()->AsRegOpnd();
    // Copy the index into a scratch register so it can be adjusted.
    IR::Instr*mov = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyMachReg, this->m_func), src2, this->m_func);
    instr->InsertBefore(mov);
    //We need store nth actuals, so stack location is after function object, callinfo & this pointer
    instr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, mov->GetDst(), mov->GetDst(), IR::IntConstOpnd::New(3, TyMachReg, this->m_func), this->m_func));
    IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
    // [rsp + adjustedIndex << defaultScale] is the target actual slot.
    IR::IndirOpnd *actualsLocation = IR::IndirOpnd::New(stackPointer, mov->GetDst()->AsRegOpnd(), GetDefaultIndirScale(), TyMachReg, this->m_func);
    instr->SetDst(actualsLocation);
    instr->m_opcode = Js::OpCode::MOV;
    return instr;
}
  1179. IR::Instr *
  1180. LowererMDArch::LoadDoubleHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1181. {
  1182. IR::Opnd * float64Opnd;
  1183. if (opndArg->GetType() == TyFloat32)
  1184. {
  1185. float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
  1186. IR::Instr * instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, opndArg, this->m_func);
  1187. instrInsert->InsertBefore(instr);
  1188. }
  1189. else
  1190. {
  1191. float64Opnd = opndArg;
  1192. }
  1193. Assert(opndArg->IsFloat());
  1194. return LoadHelperArgument(instrInsert, opndArg);
  1195. }
  1196. IR::Instr *
  1197. LowererMDArch::LoadFloatHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1198. {
  1199. Assert(opndArg->IsFloat32());
  1200. return LoadHelperArgument(instrInsert, opndArg);
  1201. }
//
// Emits the code to allocate 'size' amount of space on stack. for values smaller than PAGE_SIZE
// this will just emit sub rsp,size otherwise calls _chkstk.
//
void
LowererMDArch::GenerateStackAllocation(IR::Instr *instr, uint32 size)
{
    Assert(size > 0);
    IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
    //review: size should fit in 32bits
    IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(size, TyMachReg, this->m_func);
    if (size <= PAGESIZE)
    {
        // Generate SUB RSP, stackSize
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB,
            rspOpnd, rspOpnd, stackSizeOpnd, this->m_func);
        instr->InsertAfter(subInstr);
    }
    else
    {
        // Generate _chkstk call
        //
        // REVIEW: Call to helper functions assume the address of the variable to be present in
        // RAX. But _chkstk method accepts argument in RAX. Hence handling this one manually.
        // fix this later when CALLHELPER leaved dependency on RAX.
        //
        // NOTE: every InsertAfter below inserts immediately after 'instr', so
        // the final emitted order is the reverse of the insertion order:
        //     MOV rax, size ; MOV rcx, _chkstk ; CALL rcx ; SUB rsp, size
        IR::RegOpnd *raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
        IR::RegOpnd *rcxOpnd = IR::RegOpnd::New(nullptr, RegRCX, TyMachReg, this->m_func);
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, rspOpnd, rspOpnd, stackSizeOpnd, this->m_func);
        instr->InsertAfter(subInstr);
        // Leave off the src until we've calculated it below.
        IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, raxOpnd, rcxOpnd, this->m_func);
        instr->InsertAfter(callInstr);
        this->LowerCall(callInstr, 0);
        {
            // Load the helper address into RCX for the indirect call.
            IR::Instr *movHelperAddrInstr = IR::Instr::New(
                Js::OpCode::MOV,
                rcxOpnd,
                IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func),
                this->m_func);
            instr->InsertAfter(movHelperAddrInstr);
        }
        // RAX carries the requested size into _chkstk.
        LowererMD::CreateAssign(raxOpnd, stackSizeOpnd, instr->m_next);
    }
}
  1247. void
  1248. LowererMDArch::MovArgFromReg2Stack(IR::Instr * instr, RegNum reg, uint16 slotNumber, IRType type)
  1249. {
  1250. StackSym * slotSym = this->m_func->m_symTable->GetArgSlotSym(slotNumber + 1);
  1251. slotSym->m_type = type;
  1252. IR::SymOpnd * dst = IR::SymOpnd::New(slotSym, type, this->m_func);
  1253. IR::RegOpnd * src = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
  1254. IR::Instr * movInstr = IR::Instr::New(GetAssignOp(type), dst, src, this->m_func);
  1255. instr->InsertAfter(movInstr);
  1256. }
///----------------------------------------------------------------------------
///
/// LowererMDArch::LowerEntryInstr
///
/// Emit prolog.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LowerEntryInstr(IR::EntryInstr * entryInstr)
{
    /*
     * push rbp
     * mov rbp, rsp
     * sub rsp, localVariablesHeight + floatCalleeSavedRegsSize
     * movsdx qword ptr [rsp + 0], xmm6 ------\
     * movsdx qword ptr [rsp + 8], xmm7       |
     * ...                                    |
     * movsdx qword ptr [rsp + (N * 8)], xmmN |- Callee saved registers.
     * push rsi                               |
     * ...                                    |
     * push rbx ------------------------------/
     * sub rsp, ArgumentsBacking
     */
    // NOTE: prolog instructions are inserted with InsertAfter(entryInstr), so
    // the final emitted order is the reverse of the insertion order below.
    uint savedRegSize = 0;
    IR::Instr *firstPrologInstr = nullptr;
    IR::Instr *lastPrologInstr = nullptr;

    // PUSH used callee-saved registers.
    IR::Instr *secondInstr = entryInstr->m_next;
    AssertMsg(secondInstr, "Instruction chain broken.");

    IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
    unsigned xmmOffset = 0;

    // PDATA doesn't seem to like two consecutive "SUB RSP, size" instructions. Temporarily save and
    // restore RBX always so that the pattern doesn't occur in the prolog.
    for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg + 1))
    {
        if (LinearScan::IsCalleeSaved(reg) && (this->m_func->HasTry() || this->m_func->m_regsUsed.Test(reg)))
        {
            IRType type = RegTypes[reg];
            IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
            if (type == TyFloat64)
            {
                // xmm callee-saved registers are stored to stack slots at
                // increasing 16-byte offsets above rsp; each save is recorded
                // for the unwind info.
                IR::Instr *saveInstr = IR::Instr::New(Js::OpCode::MOVAPS,
                                                      IR::IndirOpnd::New(stackPointer,
                                                                         xmmOffset,
                                                                         type,
                                                                         this->m_func),
                                                      regOpnd,
                                                      this->m_func);
                xmmOffset += (MachDouble * 2);
                entryInstr->InsertAfter(saveInstr);
                m_func->m_prologEncoder.RecordXmmRegSave();
            }
            else
            {
                Assert(type == TyInt64);
                IR::Instr *pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
                pushInstr->SetSrc1(regOpnd);
                entryInstr->InsertAfter(pushInstr);
                m_func->m_prologEncoder.RecordNonVolRegSave();
                savedRegSize += MachPtr;
            }
        }
    }

    //
    // Now that we know the exact stack size, lets fix it for alignment
    // The stack on entry would be aligned. VC++ recommends that the stack
    // should always be 16 byte aligned.
    //
    uint32 argSlotsForFunctionsCalled = this->m_func->m_argSlotsForFunctionsCalled;

    if (Lowerer::IsArgSaveRequired(this->m_func))
    {
        // Reserve at least enough out-arg slots to home the integer argument
        // registers.
        if (argSlotsForFunctionsCalled < IntArgRegsCount)
            argSlotsForFunctionsCalled = IntArgRegsCount;
    }
    else
    {
        argSlotsForFunctionsCalled = 0;
    }

    uint32 stackArgsSize = MachPtr * (argSlotsForFunctionsCalled + 1);
    this->m_func->m_localStackHeight = Math::Align<int32>(this->m_func->m_localStackHeight, 8);

    // Allocate the inlined arg out stack in the locals. Allocate an additional slot so that
    // we can unconditionally clear the first slot past the current frame.
    this->m_func->m_localStackHeight += ((this->m_func->GetMaxInlineeArgOutCount() + 1) * MachPtr);

    uint32 stackLocalsSize = this->m_func->m_localStackHeight;
    if(xmmOffset != 0)
    {
        // Xmm registers need to be saved to 16-byte-aligned addresses. The stack locals size is aligned here and the total
        // size will be aligned below, which guarantees that the offset from rsp will be 16-byte-aligned.
        stackLocalsSize = ::Math::Align(stackLocalsSize + xmmOffset, static_cast<uint32>(MachDouble * 2));
    }

    uint32 totalStackSize = stackLocalsSize +
                            stackArgsSize +
                            savedRegSize;
    AssertMsg(0 == (totalStackSize % 8), "Stack should always be 8 byte aligned");
    uint32 alignmentPadding = (totalStackSize % 16) ? MachPtr : 0;
    stackArgsSize += alignmentPadding;
    Assert(
        xmmOffset == 0 ||
        ::Math::Align(stackArgsSize + savedRegSize, static_cast<uint32>(MachDouble * 2)) == stackArgsSize + savedRegSize);
    totalStackSize += alignmentPadding;
    if(totalStackSize > (1u << 20)) // 1 MB
    {
        // Total stack size is > 1 MB, let's just bail. There are things that need to be changed to allow using large stack
        // sizes, for instance in the unwind info, the offset to saved xmm registers can be (1 MB - 16) at most for the op-code
        // we're currently using (UWOP_SAVE_XMM128). To support larger offsets, we need to use a FAR version of the op-code.
        throw Js::OperationAbortedException();
    }

    if (this->m_func->GetMaxInlineeArgOutCount())
    {
        this->m_func->GetJITOutput()->SetFrameHeight(this->m_func->m_localStackHeight);
    }

    //
    // This is the last instruction so should have been emitted before, register saves.
    // But we did not have 'savedRegSize' by then. So we saved secondInstr. We now insert w.r.t that
    // instruction.
    //
    this->m_func->SetArgsSize(stackArgsSize);
    this->m_func->SetSavedRegSize(savedRegSize);
    this->m_func->SetSpillSize(stackLocalsSize);

    if (secondInstr == entryInstr->m_next)
    {
        // There is no register save at all, just combine the stack allocation
        uint combineStackAllocationSize = stackArgsSize + stackLocalsSize;
        this->GenerateStackAllocation(secondInstr->m_prev, combineStackAllocationSize);
        m_func->m_prologEncoder.RecordAlloca(combineStackAllocationSize);
    }
    else
    {
        // Out-arg area is carved out after (i.e. below) the register saves;
        // the locals are carved out before them (right after entryInstr).
        this->GenerateStackAllocation(secondInstr->m_prev, stackArgsSize);
        m_func->m_prologEncoder.RecordAlloca(stackArgsSize);

        // Allocate frame.
        if (stackLocalsSize)
        {
            this->GenerateStackAllocation(entryInstr, stackLocalsSize);
            m_func->m_prologEncoder.RecordAlloca(stackLocalsSize);
        }
    }

    lastPrologInstr = secondInstr->m_prev;
    Assert(lastPrologInstr != entryInstr);

    // Zero-initialize dedicated arguments slot.
    IR::Instr *movRax0 = nullptr;
    IR::Opnd *raxOpnd = nullptr;
    if ((this->m_func->HasArgumentSlot() &&
         (this->m_func->IsStackArgsEnabled() ||
          this->m_func->IsJitInDebugMode() ||
          // disabling apply inlining leads to explicit load from the zero-inited slot
          this->m_func->GetJITFunctionBody()->IsInlineApplyDisabled()))
#ifdef BAILOUT_INJECTION
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutFlag)
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryLineFlag)
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryByteCodeFlag)
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutByteCodeFlag)
#endif
        )
    {
        // TODO: Support mov [rbp - n], IMM64
        // XOR eax, eax gives a zero to store; reused below for the inlinee
        // argc clear as well.
        raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyUint32, this->m_func);
        movRax0 = IR::Instr::New(Js::OpCode::XOR, raxOpnd, raxOpnd, raxOpnd, this->m_func);
        secondInstr->m_prev->InsertAfter(movRax0);

        IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
        IR::Instr *movNullInstr = IR::Instr::New(Js::OpCode::MOV, opnd, raxOpnd->UseWithNewType(TyMachReg, this->m_func), this->m_func);
        secondInstr->m_prev->InsertAfter(movNullInstr);
    }

    // Zero initialize the first inlinee frames argc.
    if (this->m_func->GetMaxInlineeArgOutCount())
    {
        if(!movRax0)
        {
            raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyUint32, this->m_func);
            movRax0 = IR::Instr::New(Js::OpCode::XOR, raxOpnd, raxOpnd, raxOpnd, this->m_func);
            secondInstr->m_prev->InsertAfter(movRax0);
        }

        StackSym *sym = this->m_func->m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
        sym->m_isInlinedArgSlot = true;
        sym->m_offset = 0;
        IR::Opnd *dst = IR::SymOpnd::New(sym, 0, TyMachReg, this->m_func);
        secondInstr->m_prev->InsertAfter(IR::Instr::New(Js::OpCode::MOV,
                                                        dst,
                                                        raxOpnd->UseWithNewType(TyMachReg, this->m_func),
                                                        this->m_func));
    }

    // Generate MOV RBP, RSP
    IR::RegOpnd * rbpOpnd = IR::RegOpnd::New(nullptr, RegRBP, TyMachReg, this->m_func);
    IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);

    IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, rbpOpnd, rspOpnd, this->m_func);
    entryInstr->InsertAfter(movInstr);

    // Generate PUSH RBP — inserted last, so it is emitted first.
    IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
    pushInstr->SetSrc1(rbpOpnd);
    entryInstr->InsertAfter(pushInstr);
    m_func->m_prologEncoder.RecordNonVolRegSave();
    firstPrologInstr = pushInstr;

    //
    // Insert pragmas that tell the prolog encoder the extent of the prolog.
    //
    firstPrologInstr->InsertBefore(IR::PragmaInstr::New(Js::OpCode::PrologStart, 0, m_func));
    lastPrologInstr->InsertAfter(IR::PragmaInstr::New(Js::OpCode::PrologEnd, 0, m_func));

#ifdef _WIN32 // home registers
    //
    // Now store all the arguments in the register in the stack slots
    //
    if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody())
    {
        // asm.js entry: slot 1 homes RCX (the first integer arg); value args
        // start at slot 2 and come in RDX/R8/R9 or XMM1..XMM3 by type.
        // NOTE(review): the 3-arg calls rely on the default 'type' parameter
        // declared in the header — presumably TyMachReg; confirm there.
        uint16 offset = 2;
        this->MovArgFromReg2Stack(entryInstr, RegRCX, 1);
        for (uint16 i = 0; i < m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetArgCount() && i < 3; i++)
        {
            switch (m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetArgType(i))
            {
            case Js::AsmJsVarType::Int:
                this->MovArgFromReg2Stack(entryInstr, i == 0 ? RegRDX : i == 1 ? RegR8 : RegR9, offset, TyInt32);
                offset++;
                break;
            case Js::AsmJsVarType::Int64:
                this->MovArgFromReg2Stack(entryInstr, i == 0 ? RegRDX : i == 1 ? RegR8 : RegR9, offset, TyInt64);
                offset++;
                break;
            case Js::AsmJsVarType::Float:
                // registers we need are contiguous, so calculate it from XMM1
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TyFloat32);
                offset++;
                break;
            case Js::AsmJsVarType::Double:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TyFloat64);
                offset++;
                break;
            // SIMD values occupy two slots each (offset += 2).
            case Js::AsmJsVarType::Float32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128F4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Int32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Int16x8:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I8);
                offset += 2;
                break;
            case Js::AsmJsVarType::Int8x16:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I16);
                offset += 2;
                break;
            case Js::AsmJsVarType::Uint32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128U4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Uint16x8:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128U8);
                offset += 2;
                break;
            case Js::AsmJsVarType::Uint8x16:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128U16);
                offset += 2;
                break;
            case Js::AsmJsVarType::Bool32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128B4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Bool16x8:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128B8);
                offset += 2;
                break;
            case Js::AsmJsVarType::Bool8x16:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128B16);
                offset += 2;
                break;
            case Js::AsmJsVarType::Float64x2:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128D2);
                offset += 2;
                break;
            default:
                Assume(UNREACHED);
            }
        }
    }
    else if (argSlotsForFunctionsCalled)
    {
        // Regular JS entry: home the four Windows x64 integer arg registers.
        this->MovArgFromReg2Stack(entryInstr, RegRCX, 1);
        this->MovArgFromReg2Stack(entryInstr, RegRDX, 2);
        this->MovArgFromReg2Stack(entryInstr, RegR8, 3);
        this->MovArgFromReg2Stack(entryInstr, RegR9, 4);
    }
#endif // _WIN32

    IntConstType frameSize = Js::Constants::MinStackJIT + stackArgsSize + stackLocalsSize + savedRegSize;
    this->GeneratePrologueStackProbe(entryInstr, frameSize);

    return entryInstr;
}
void
LowererMDArch::GeneratePrologueStackProbe(IR::Instr *entryInstr, IntConstType frameSize)
{
    //
    // Generate a stack overflow check. Since ProbeCurrentStack throws an exception it needs
    // an unwindable stack. Should we need to call ProbeCurrentStack, instead of creating a new frame here,
    // we make it appear like our caller directly called ProbeCurrentStack.
    //
    // For thread-bound thread context
    //     MOV  rax, ThreadContext::scriptStackLimit + frameSize
    //     CMP  rsp, rax
    //     JG   $done
    //     MOV  rax, ThreadContext::ProbeCurrentStack
    //     MOV  rcx, frameSize
    //     MOV  rdx, scriptContext
    //     JMP  rax
    // $done:
    //
    // For thread-agile thread context
    //     MOV  rax, [ThreadContext::scriptStackLimit]
    //     ADD  rax, frameSize
    //     CMP  rsp, rax
    //     JG   $done
    //     MOV  rax, ThreadContext::ProbeCurrentStack
    //     MOV  rcx, frameSize
    //     MOV  rdx, scriptContext
    //     JMP  rax
    // $done:
    //
    // For thread context with script interrupt enabled
    //     MOV  rax, [ThreadContext::scriptStackLimit]
    //     ADD  rax, frameSize
    //     JO   $helper
    //     CMP  rsp, rax
    //     JG   $done
    // $helper:
    //     MOV  rax, ThreadContext::ProbeCurrentStack
    //     MOV  rcx, frameSize
    //     MOV  rdx, scriptContext
    //     JMP  rax
    // $done:
    //

    // Do not insert stack probe for leaf functions which have low stack footprint
    if (this->m_func->IsTrueLeaf() &&
        frameSize - Js::Constants::MinStackJIT < Js::Constants::MaxStackSizeForNoProbe)
    {
        return;
    }

    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *insertInstr = entryInstr->m_next;
    IR::Instr *instr;
    IR::Opnd *stackLimitOpnd;
    bool doInterruptProbe = m_func->GetJITFunctionBody()->DoInterruptProbe();

    // MOV rax, ThreadContext::scriptStackLimit + frameSize
    stackLimitOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
    if (doInterruptProbe || !m_func->GetThreadContextInfo()->IsThreadBound())
    {
        // Load the current stack limit from the ThreadContext and add the current frame size.
        {
            intptr_t pLimit = m_func->GetThreadContextInfo()->GetThreadStackLimitAddr();
            IR::RegOpnd *baseOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
            this->lowererMD->CreateAssign(baseOpnd, IR::AddrOpnd::New(pLimit, IR::AddrOpndKindDynamicMisc, this->m_func), insertInstr);
            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, 0, TyMachReg, this->m_func);
            this->lowererMD->CreateAssign(stackLimitOpnd, indirOpnd, insertInstr);
        }

        instr = IR::Instr::New(Js::OpCode::ADD, stackLimitOpnd, stackLimitOpnd,
                               IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        if (doInterruptProbe)
        {
            // If the add overflows, call the probe helper.
            instr = IR::BranchInstr::New(Js::OpCode::JO, helperLabel, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }
    else
    {
        // Thread-bound context: the limit is a compile-time constant, so fold
        // the addition now.
        // TODO: michhol, check this math
        size_t scriptStackLimit = m_func->GetThreadContextInfo()->GetScriptStackLimit();
        this->lowererMD->CreateAssign(stackLimitOpnd, IR::IntConstOpnd::New((frameSize + scriptStackLimit), TyMachReg, this->m_func), insertInstr);
    }

    // CMP rsp, rax
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, m_func));
    instr->SetSrc2(stackLimitOpnd);
    insertInstr->InsertBefore(instr);

    IR::LabelInstr * doneLabel = nullptr;
    if (!PHASE_OFF(Js::LayoutPhase, this->m_func))
    {
        // With layout enabled, the helper block is moved to the end of the
        // function: branch to it on failure and fall through on success.
        // JLE $helper
        instr = IR::BranchInstr::New(Js::OpCode::JLE, helperLabel, m_func);
        insertInstr->InsertBefore(instr);
        Security::InsertRandomFunctionPad(insertInstr);

        // This is generated after layout. Generate the block at the end of the function manually
        insertInstr = IR::PragmaInstr::New(Js::OpCode::StatementBoundary, Js::Constants::NoStatementIndex, m_func);

        this->m_func->m_tailInstr->InsertAfter(insertInstr);
        this->m_func->m_tailInstr = insertInstr;
    }
    else
    {
        doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        // JGT $done
        instr = IR::BranchInstr::New(Js::OpCode::JGT, doneLabel, m_func);
        insertInstr->InsertBefore(instr);
    }

    insertInstr->InsertBefore(helperLabel);

    IR::RegOpnd *target;
    {
        // MOV RegArg1, scriptContext
        this->lowererMD->CreateAssign(
            IR::RegOpnd::New(nullptr, RegArg1, TyMachReg, m_func),
            this->lowererMD->m_lowerer->LoadScriptContextOpnd(insertInstr), insertInstr);

        // MOV RegArg0, frameSize
        this->lowererMD->CreateAssign(
            IR::RegOpnd::New(nullptr, RegArg0, TyMachReg, this->m_func),
            IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), insertInstr);

        // MOV rax, ThreadContext::ProbeCurrentStack
        target = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, m_func);
        this->lowererMD->CreateAssign(target, IR::HelperCallOpnd::New(IR::HelperProbeCurrentStack, m_func), insertInstr);
    }

    // JMP rax — tail-jump so the probe helper appears to be called by our caller.
    instr = IR::MultiBranchInstr::New(Js::OpCode::JMP, target, m_func);
    insertInstr->InsertBefore(instr);

    if (doneLabel)
    {
        // $done:
        insertInstr->InsertBefore(doneLabel);
        Security::InsertRandomFunctionPad(doneLabel);
    }
}
  1674. ///----------------------------------------------------------------------------
  1675. ///
  1676. /// LowererMDArch::LowerExitInstr
  1677. ///
  1678. /// Emit epilog.
  1679. ///
  1680. ///----------------------------------------------------------------------------
  1681. IR::Instr *
  1682. LowererMDArch::LowerExitInstr(IR::ExitInstr * exitInstr)
  1683. {
  1684. uint32 savedRegSize = 0;
  1685. // POP used callee-saved registers
  1686. IR::Instr * exitPrevInstr = exitInstr->m_prev;
  1687. AssertMsg(exitPrevInstr, "Can a function have only 1 instr ? Or is the instr chain broken");
  1688. IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
  1689. unsigned xmmOffset = 0;
  1690. for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg+1))
  1691. {
  1692. if (LinearScan::IsCalleeSaved(reg) && (this->m_func->HasTry() || this->m_func->m_regsUsed.Test(reg)))
  1693. {
  1694. IRType type = RegTypes[reg];
  1695. IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
  1696. if (type == TyFloat64)
  1697. {
  1698. IR::Instr *restoreInstr = IR::Instr::New(Js::OpCode::MOVAPS,
  1699. regOpnd,
  1700. IR::IndirOpnd::New(stackPointer,
  1701. xmmOffset,
  1702. type,
  1703. this->m_func),
  1704. this->m_func);
  1705. xmmOffset += (MachDouble * 2);
  1706. exitInstr->InsertBefore(restoreInstr);
  1707. }
  1708. else
  1709. {
  1710. Assert(type == TyInt64);
  1711. IR::Instr *popInstr = IR::Instr::New(Js::OpCode::POP, regOpnd, this->m_func);
  1712. exitInstr->InsertBefore(popInstr);
  1713. savedRegSize += MachPtr;
  1714. }
  1715. }
  1716. }
  1717. Assert(savedRegSize == (uint)this->m_func->GetSavedRegSize());
  1718. // Generate ADD RSP, argsStackSize before the register restore (if there are any)
  1719. uint32 stackArgsSize = this->m_func->GetArgsSize();
  1720. Assert(stackArgsSize);
  1721. if (savedRegSize || xmmOffset)
  1722. {
  1723. IR::IntConstOpnd *stackSizeOpnd = IR::IntConstOpnd::New(stackArgsSize, TyMachReg, this->m_func);
  1724. IR::Instr *addInstr = IR::Instr::New(Js::OpCode::ADD, stackPointer, stackPointer, stackSizeOpnd, this->m_func);
  1725. exitPrevInstr->InsertAfter(addInstr);
  1726. }
  1727. //
  1728. // useful register operands
  1729. //
  1730. IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
  1731. IR::RegOpnd * rbpOpnd = IR::RegOpnd::New(nullptr, RegRBP, TyMachReg, this->m_func);
  1732. // Restore frame
  1733. // Generate MOV RSP, RBP
  1734. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, rspOpnd, rbpOpnd, this->m_func);
  1735. exitInstr->InsertBefore(movInstr);
  1736. // Generate POP RBP
  1737. IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::POP, rbpOpnd, this->m_func);
  1738. exitInstr->InsertBefore(pushInstr);
  1739. // Insert RET
  1740. IR::IntConstOpnd * intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  1741. IR::RegOpnd *retReg = nullptr;
  1742. if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody())
  1743. {
  1744. switch (m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetRetType())
  1745. {
  1746. case Js::AsmJsRetType::Double:
  1747. case Js::AsmJsRetType::Float:
  1748. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TyMachDouble), TyMachDouble, this->m_func);
  1749. break;
  1750. case Js::AsmJsRetType::Int32x4:
  1751. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I4), TySimd128I4, this->m_func);
  1752. break;
  1753. case Js::AsmJsRetType::Int16x8:
  1754. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I8), TySimd128I8, this->m_func);
  1755. break;
  1756. case Js::AsmJsRetType::Int8x16:
  1757. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I16), TySimd128U16, this->m_func);
  1758. break;
  1759. case Js::AsmJsRetType::Uint32x4:
  1760. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128U4), TySimd128U4, this->m_func);
  1761. break;
  1762. case Js::AsmJsRetType::Uint16x8:
  1763. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128U8), TySimd128U8, this->m_func);
  1764. break;
  1765. case Js::AsmJsRetType::Uint8x16:
  1766. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128U16), TySimd128U16, this->m_func);
  1767. break;
  1768. case Js::AsmJsRetType::Bool32x4:
  1769. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128B4), TySimd128B4, this->m_func);
  1770. break;
  1771. case Js::AsmJsRetType::Bool16x8:
  1772. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128B8), TySimd128B8, this->m_func);
  1773. break;
  1774. case Js::AsmJsRetType::Bool8x16:
  1775. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128B16), TySimd128B16, this->m_func);
  1776. break;
  1777. case Js::AsmJsRetType::Float32x4:
  1778. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128F4), TySimd128F4, this->m_func);
  1779. break;
  1780. case Js::AsmJsRetType::Float64x2:
  1781. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128D2), TySimd128D2, this->m_func);
  1782. break;
  1783. case Js::AsmJsRetType::Int64:
  1784. case Js::AsmJsRetType::Signed:
  1785. case Js::AsmJsRetType::Void:
  1786. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1787. break;
  1788. default:
  1789. Assume(UNREACHED);
  1790. }
  1791. }
  1792. else
  1793. {
  1794. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1795. }
  1796. // Generate RET
  1797. IR::Instr * retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  1798. retInstr->SetSrc1(intSrc);
  1799. retInstr->SetSrc2(retReg);
  1800. exitInstr->InsertBefore(retInstr);
  1801. retInstr->m_opcode = Js::OpCode::RET;
  1802. return exitInstr;
  1803. }
  1804. IR::Instr *
  1805. LowererMDArch::LowerEntryInstrAsmJs(IR::EntryInstr * entryInstr)
  1806. {
  1807. // prologue is almost identical on x64, except for loading args
  1808. return LowerEntryInstr(entryInstr);
  1809. }
  1810. IR::Instr *
  1811. LowererMDArch::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
  1812. {
  1813. // epilogue is almost identical on x64, except for return register
  1814. return LowerExitInstr(exitInstr);
  1815. }
  1816. IR::Instr *
  1817. LowererMDArch::LowerInt64Assign(IR::Instr * instr)
  1818. {
  1819. this->lowererMD->ChangeToAssign(instr);
  1820. return instr;
  1821. }
// Lowers a machine-independent 32/64-bit integer opcode (Add_I4, Div_I4,
// BrEq_I4, ...) to the corresponding x64 machine opcode, rewriting 'instr'
// in place and inserting any extra instructions it needs (CDQ/CQO, CMP/TEST,
// register pinning for IDIV). When 'signExtend' is set, the 32-bit result is
// additionally sign-extended to the full machine register with MOVSXD.
void
LowererMDArch::EmitInt4Instr(IR::Instr *instr, bool signExtend /* = false */)
{
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();
    IR::Instr *newInstr = nullptr;
    IR::RegOpnd *regEDX;

    bool legalize = false;
    bool isInt64Instr = instr->AreAllOpndInt64();
    if (!isInt64Instr)
    {
        // Normalize 32-bit operands: anything not already unsigned is retyped
        // to TyInt32.
        if (dst && !dst->IsUInt32())
        {
            dst->SetType(TyInt32);
        }
        if (!src1->IsUInt32())
        {
            src1->SetType(TyInt32);
        }
        if (src2 && !src2->IsUInt32())
        {
            src2->SetType(TyInt32);
        }
    }
    else
    {
        // 64-bit forms always go through legalization.
        legalize = true;
    }

    switch (instr->m_opcode)
    {
    case Js::OpCode::Neg_I4:
        instr->m_opcode = Js::OpCode::NEG;
        break;

    case Js::OpCode::Not_I4:
        instr->m_opcode = Js::OpCode::NOT;
        break;

    case Js::OpCode::Add_I4:
        LowererMD::ChangeToAdd(instr, false /* needFlags */);
        legalize = true;
        break;

    case Js::OpCode::Sub_I4:
        LowererMD::ChangeToSub(instr, false /* needFlags */);
        legalize = true;
        break;

    case Js::OpCode::Mul_I4:
        instr->m_opcode = Js::OpCode::IMUL2;
        legalize = true;
        break;

    // x64 DIV/IDIV produce the quotient in RAX and the remainder in RDX:
    // sink the dst into the matching register, then share the setup code.
    case Js::OpCode::DivU_I4:
    case Js::OpCode::Div_I4:
        instr->SinkDst(Js::OpCode::MOV, RegRAX);
        goto idiv_common;
    case Js::OpCode::RemU_I4:
    case Js::OpCode::Rem_I4:
        instr->SinkDst(Js::OpCode::MOV, RegRDX);
idiv_common:
        {
            bool isUnsigned = instr->GetSrc1()->IsUnsigned();
            if (isUnsigned)
            {
                Assert(instr->GetSrc2()->IsUnsigned());
                Assert(instr->m_opcode == Js::OpCode::RemU_I4 || instr->m_opcode == Js::OpCode::DivU_I4);
                instr->m_opcode = Js::OpCode::DIV;
            }
            else
            {
                instr->m_opcode = Js::OpCode::IDIV;
            }
            // Dividend goes in RAX.
            instr->HoistSrc1(Js::OpCode::MOV, RegRAX);

            regEDX = IR::RegOpnd::New(src1->GetType(), instr->m_func);
            regEDX->SetReg(RegRDX);
            if (isUnsigned)
            {
                // Unsigned divide: RDX must be zero (no sign extension).
                // we need to ensure that register allocator doesn't muck about with rdx
                instr->HoistSrc2(Js::OpCode::MOV, RegRCX);

                newInstr = IR::Instr::New(Js::OpCode::Ld_I4, regEDX, IR::IntConstOpnd::New(0, src1->GetType(), instr->m_func), instr->m_func);
                instr->InsertBefore(newInstr);
                LowererMD::ChangeToAssign(newInstr);
                // NOP ensures that the EDX = Ld_I4 0 doesn't get deadstored, will be removed in peeps
                instr->InsertBefore(IR::Instr::New(Js::OpCode::NOP, regEDX, regEDX, instr->m_func));
            }
            else
            {
                // Signed divide: the divisor cannot be an immediate, and RDX
                // must hold the sign extension of RAX (CDQ for 32-bit,
                // CQO for 64-bit).
                if (instr->GetSrc2()->IsImmediateOpnd())
                {
                    instr->HoistSrc2(Js::OpCode::MOV);
                }
                instr->InsertBefore(IR::Instr::New(isInt64Instr ? Js::OpCode::CQO : Js::OpCode::CDQ, regEDX, instr->m_func));
            }
            return;
        }

    case Js::OpCode::Or_I4:
        instr->m_opcode = Js::OpCode::OR;
        break;

    case Js::OpCode::Xor_I4:
        instr->m_opcode = Js::OpCode::XOR;
        break;

    case Js::OpCode::And_I4:
        instr->m_opcode = Js::OpCode::AND;
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
    case Js::OpCode::Rol_I4:
    case Js::OpCode::Ror_I4:
        LowererMD::ChangeToShift(instr, false /* needFlags */);
        legalize = true;
        break;

    // One-operand branches become TEST src1, src1 followed by JNE/JEQ.
    case Js::OpCode::BrTrue_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br1_Common;
    case Js::OpCode::BrFalse_I4:
        instr->m_opcode = Js::OpCode::JEQ;
br1_Common:
        src1 = instr->UnlinkSrc1();
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        return;

    // Two-operand branches become CMP src1, src2 followed by the matching
    // signed (JGT/JGE/...) or unsigned (JA/JAE/...) conditional jump.
    case Js::OpCode::BrEq_I4:
        instr->m_opcode = Js::OpCode::JEQ;
        goto br2_Common;
    case Js::OpCode::BrNeq_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br2_Common;
    case Js::OpCode::BrUnGt_I4:
        instr->m_opcode = Js::OpCode::JA;
        goto br2_Common;
    case Js::OpCode::BrUnGe_I4:
        instr->m_opcode = Js::OpCode::JAE;
        goto br2_Common;
    case Js::OpCode::BrUnLe_I4:
        instr->m_opcode = Js::OpCode::JBE;
        goto br2_Common;
    case Js::OpCode::BrUnLt_I4:
        instr->m_opcode = Js::OpCode::JB;
        goto br2_Common;
    case Js::OpCode::BrGt_I4:
        instr->m_opcode = Js::OpCode::JGT;
        goto br2_Common;
    case Js::OpCode::BrGe_I4:
        instr->m_opcode = Js::OpCode::JGE;
        goto br2_Common;
    case Js::OpCode::BrLe_I4:
        instr->m_opcode = Js::OpCode::JLE;
        goto br2_Common;
    case Js::OpCode::BrLt_I4:
        instr->m_opcode = Js::OpCode::JLT;
br2_Common:
        src1 = instr->UnlinkSrc1();
        src2 = instr->UnlinkSrc2();
        newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src2);
        return;

    default:
        AssertMsg(UNREACHED, "Un-implemented int4 opcode");
    }

    if (signExtend)
    {
        // Widen the 32-bit result to the full machine register.
        Assert(instr->GetDst());
        IR::Opnd *dst64 = instr->GetDst()->Copy(instr->m_func);
        dst64->SetType(TyMachReg);
        instr->InsertAfter(IR::Instr::New(Js::OpCode::MOVSXD, dst64, instr->GetDst(), instr->m_func));
    }

    if(legalize)
    {
        LowererMD::Legalize(instr);
    }
    else
    {
        // OpEq's
        LowererMD::MakeDstEquSrc1(instr);
    }
}
  2000. #if !FLOATVAR
// Lower a load of an int32 (or uint32 when isFromUint32) into a Var-typed
// destination (non-FLOATVAR build: small ints are tagged inline by shifting
// and setting the tag bit; anything untaggable goes to the boxing helper).
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    //    e1 = MOV e_src1
    //    e1 = SHL e1, Js::VarTag_Shift
    //         JO  $ToVar
    //         JB  $ToVar     [isFromUint32]
    //    e1 = INC e1
    // r_dst = MOVSXD e1
    //         JMP $done
    // $ToVar:
    //         EmitLoadVarNoCheck
    // $Done:

    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetDst()->GetType() == TyVar);

    bool isInt = false;
    bool isNotInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *toVar = nullptr;
    IR::LabelInstr *done = nullptr;

    // TODO: Fix bad lowering. We shouldn't get TyVars here.
    // Assert(instrLoad->GetSrc1()->GetType() == TyInt32);
    src1->SetType(TyInt32);

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }

    if (!isNotInt)
    {
        // Inline tagging path; reached whenever the value may be taggable.

        // e1 = MOV e_src1
        IR::RegOpnd *e1 = IR::RegOpnd::New(TyInt32, m_func);
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV, e1, instrLoad->GetSrc1(), m_func));

        // e1 = SHL e1, Js::VarTag_Shift
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::SHL,
            e1,
            e1,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, m_func), m_func));

        if (!isInt)
        {
            // JO $ToVar -- the shift overflowed, so the value can't be tagged inline.
            toVar = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, toVar, m_func));

            if (isFromUint32)
            {
                // JB $ToVar [isFromUint32] -- high bit set: too large for a
                // tagged (signed) int when interpreted as uint32.
                instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JB, toVar, this->m_func));
            }
        }

        // e1 = INC e1 -- set the low tag bit.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::INC, e1, e1, m_func));

        // dst = MOVSXD e1 -- sign-extend the tagged 32-bit value into the 64-bit Var.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOVSXD, dst, e1, m_func));

        if (!isInt)
        {
            // JMP $done -- skip the helper path.
            done = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
        }
    }

    IR::Instr *insertInstr = instrLoad;  // NOTE(review): appears unused here.
    if (!isInt)
    {
        // $toVar:
        if (toVar)
        {
            instrLoad->InsertBefore(toVar);
        }

        // ToVar() -- boxing helper; marked as helper code when the inline fast
        // path was also emitted (toVar != nullptr).
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper || toVar != nullptr);
    }
    if (done)
    {
        instrLoad->InsertAfter(done);
    }
    instrLoad->Remove();
}
  2082. #else
// FLOATVAR build: an int32 can always be converted to a Var inline by setting
// the tag bit above the 32-bit payload; only a uint32 that doesn't fit in
// int32 (sign bit set) needs the boxing helper.
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    //    e1 = MOV_TRUNC e_src1
    //         CMP e1, 0            [uint32]
    //         JLT $Helper          [uint32] -- overflows?
    //         BTS r1, VarTag_Shift
    //         MOV r_dst, r1
    //         JMP $done            [uint32]
    // $helper                      [uint32]
    //         EmitLoadVarNoCheck
    // $done                        [uint32]

    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetDst()->GetType() == TyVar);

    bool isInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelHelper = nullptr;

    // TODO: Fix bad lowering. We shouldn't get TyVars here.
    // Assert(instrLoad->GetSrc1()->GetType() == TyInt32);
    src1->SetType(TyInt32);

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        // Known not to be an int: go straight to the boxing helper.
        // ToVar()
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper);
        return;
    }

    IR::RegOpnd *r1 = IR::RegOpnd::New(TyVar, m_func);

    // e1 = MOV_TRUNC e_src1
    // (Use MOV_TRUNC here as we rely on the register copy to clear the upper 32 bits.)
    IR::RegOpnd *e1 = r1->Copy(m_func)->AsRegOpnd();
    e1->SetType(TyInt32);
    instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC,
        e1,
        src1,
        m_func));

    if (!isInt && isFromUint32)
    {
        // A uint32 with the sign bit set can't be represented as a signed int32
        // payload, so route it to the helper.
        // CMP e1, 0
        IR::Instr *instr = IR::Instr::New(Js::OpCode::CMP, m_func);
        instr->SetSrc1(e1);
        instr->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, m_func));
        instrLoad->InsertBefore(instr);

        Assert(!labelHelper);
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);

        // JLT $helper
        instr = IR::BranchInstr::New(Js::OpCode::JLT, labelHelper, m_func);
        instrLoad->InsertBefore(instr);
    }

    // The previous operation clears the top 32 bits.
    // BTS r1, VarTag_Shift
    this->lowererMD->GenerateInt32ToVarConversion(r1, instrLoad);

    // REVIEW: We need r1 only if we could generate sn = Ld_A_I4 sn. i.e. the destination and
    // source are the same.
    // r_dst = MOV r1
    instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV,
        dst,
        r1,
        m_func));

    if (labelHelper)
    {
        Assert(isFromUint32);

        // JMP $done
        IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, labelDone, m_func));

        // $helper
        instrLoad->InsertBefore(labelHelper);
        // ToVar()
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, true);
        // $done
        instrLoad->InsertBefore(labelDone);
    }
    instrLoad->Remove();
}
  2161. #endif
  2162. void
  2163. LowererMDArch::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2164. {
  2165. Assert(dst->IsRegOpnd() && dst->IsFloat());
  2166. Assert(src->IsRegOpnd() && src->IsInt32());
  2167. if (dst->IsFloat64())
  2168. {
  2169. // Use MOVD to make sure we sign extended the 32-bit src
  2170. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOVD, dst, src, this->m_func));
  2171. // Convert to float
  2172. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTDQ2PD, dst, dst, this->m_func));
  2173. }
  2174. else
  2175. {
  2176. Assert(dst->IsFloat32());
  2177. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSI2SS, dst, src, this->m_func));
  2178. }
  2179. }
// Widen a signed int32 to int64 before instrInsert.
void
LowererMDArch::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    Assert(dst->IsRegOpnd() && dst->IsInt64());
    Assert(src->IsInt32());
    // InsertMove emits the move appropriate to the operand types
    // (presumably sign-extending for int32 -> int64 -- confirm in Lowerer).
    Lowerer::InsertMove(dst, src, instrInsert);
}
// Widen an unsigned int32 to int64 before instrInsert.
void
LowererMDArch::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    Assert(dst->IsRegOpnd() && dst->IsInt64());
    Assert(src->IsUInt32());
    // InsertMove emits the move appropriate to the operand types
    // (presumably zero-extending for uint32 -> int64 -- confirm in Lowerer).
    Lowerer::InsertMove(dst, src, instrInsert);
}
  2194. void
  2195. LowererMDArch::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2196. {
  2197. Assert(dst->IsRegOpnd() && dst->IsInt32());
  2198. Assert(src->IsInt64());
  2199. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC, dst, src, instrInsert->m_func));
  2200. }
  2201. void
  2202. LowererMDArch::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2203. {
  2204. Assert(dst->IsRegOpnd() && dst->IsFloat());
  2205. Assert(src->IsRegOpnd() && (src->IsInt32() || src->IsUInt32()));
  2206. // MOV tempReg.i32, src - make sure the top bits are 0
  2207. IR::RegOpnd * tempReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2208. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC, tempReg, src, this->m_func));
  2209. // CVTSI2SD dst, tempReg.i64 (Use the tempreg as if it is 64 bit without sign extension)
  2210. instrInsert->InsertBefore(IR::Instr::New(dst->IsFloat64() ? Js::OpCode::CVTSI2SD : Js::OpCode::CVTSI2SS, dst,
  2211. tempReg->UseWithNewType(TyInt64, this->m_func), this->m_func));
  2212. }
// Lower a Var -> int32 conversion. Emits an inline untag fast path for tagged
// ints, an optional inline float-to-int fast path (when profile data says the
// value is likely a float/untagged int and the bailout kind permits it), and a
// fallback that is either a bailout branch, a Conv_ToInt32 helper call, or a
// rejit/abort throw.
// Returns true when the caller must emit a bailout instead of the helper call
// (BailOutIntOnly / BailOutExpectingInteger); false otherwise.
bool
LowererMDArch::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    //
    //    r1 = MOV src1
    // rtest = MOV src1
    //         SHR rtest, AtomTag_Shift
    //         CMP rtest, 1
    //         JNE $helper or $float
    // r_dst = MOV_TRUNC e_src1
    //         JMP $done
    // $float:
    //   dst = ConvertToFloat(r1, $helper)
    // $helper:
    // r_dst = ToInt32()
    //
    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetSrc1()->GetType() == TyVar);

    // TODO: Fix bad lowering. We shouldn't see TyVars here.
    // Assert(instrLoad->GetDst()->GetType() == TyInt32);

    bool isInt = false;
    bool isNotInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *helper = nullptr;
    IR::LabelInstr *labelFloat = nullptr;
    IR::LabelInstr *done = nullptr;

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    if (src1->IsEqual(instrLoad->GetDst()) == false)
    {
        // r1 = MOV src1
        // Copy src1 so the original Var survives the in-place untagging below.
        IR::RegOpnd *r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
        r1->SetValueType(src1->GetValueType());
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func));
        src1 = r1;
    }

    const ValueType src1ValueType(src1->GetValueType());
    // Inline the float-to-int conversion only when floats are likely and the
    // bailout kind doesn't require bailing on every non-int.
    const bool doFloatToIntFastPath =
        (src1ValueType.IsLikelyFloat() || src1ValueType.IsLikelyUntaggedInt()) &&
        !(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger));

    if (isNotInt)
    {
        // Known to be non-integer. If we are required to bail out on helper call, just re-jit.
        if (!doFloatToIntFastPath && bailOutOnHelper)
        {
            if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
            {
                // Array access helper call removal is already off for some reason. Prevent trying to rejit again
                // because it won't help and the same thing will happen again. Just abort jitting this function.
                if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
                {
                    Output::Print(_u(" Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
                    Output::Flush();
                }
                throw Js::OperationAbortedException();
            }

            throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
        }
    }
    else
    {
        // It could be an integer in this case.
        if (!isInt)
        {
            // Not provably an int: emit the tagged-int test, branching to the
            // float fast path when enabled, else to the helper.
            if(doFloatToIntFastPath)
            {
                labelFloat = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, false);
            }
            else
            {
                helper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }

            this->lowererMD->GenerateSmIntTest(src1, instrLoad, labelFloat ? labelFloat : helper);
        }

        IR::RegOpnd *src132 = src1->UseWithNewType(TyInt32, instrLoad->m_func)->AsRegOpnd();

#if !INT32VAR
        // src1 = SAR src1, VarTag_Shift
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::SAR,
            src132,
            src132,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrLoad->m_func),
            instrLoad->m_func));

        // r_dst = MOV src1
        // This is only a MOV (and not a MOVSXD) because we do a signed shift right, but we'll copy
        // all 64 bits.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV,
            dst->UseWithNewType(TyMachReg, instrLoad->m_func),
            src1,
            instrLoad->m_func));
#else
        // INT32VAR: the int payload is the low 32 bits; a truncating MOV extracts it.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC,
            dst->UseWithNewType(TyInt32, instrLoad->m_func),
            src132,
            instrLoad->m_func));
#endif

        if (!isInt)
        {
            // JMP $done
            done = instrLoad->GetOrCreateContinueLabel();
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
        }
    }

    if (!isInt)
    {
        if(doFloatToIntFastPath)
        {
            if(labelFloat)
            {
                instrLoad->InsertBefore(labelFloat);
            }
            if(!helper)
            {
                helper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }
            if(!done)
            {
                done = instrLoad->GetOrCreateContinueLabel();
            }
#if FLOATVAR
            IR::RegOpnd* floatOpnd = this->lowererMD->CheckFloatAndUntag(src1, instrLoad, helper);
#else
            // Boxed float: the double lives in the JavascriptNumber object.
            this->lowererMD->GenerateFloatTest(src1, instrLoad, helper, instrLoad->HasBailOutInfo());
            IR::IndirOpnd* floatOpnd = IR::IndirOpnd::New(src1, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
#endif
            this->lowererMD->ConvertFloatToInt32(instrLoad->GetDst(), floatOpnd, helper, done, instrLoad);
        }

        // $helper:
        if (helper)
        {
            instrLoad->InsertBefore(helper);
        }
        if(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger))
        {
            // Avoid bailout if we have a JavascriptNumber whose value is a signed 32-bit integer
            lowererMD->m_lowerer->LoadInt32FromUntaggedVar(instrLoad);

            // Need to bail out instead of calling a helper
            return true;
        }

        if (bailOutOnHelper)
        {
            Assert(labelBailOut);
            lowererMD->m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrLoad);
            instrLoad->Remove();
        }
        else if (conversionFromObjectAllowed)
        {
            lowererMD->m_lowerer->LowerUnaryHelperMem(instrLoad, IR::HelperConv_ToInt32);
        }
        else
        {
            lowererMD->m_lowerer->LowerUnaryHelperMemWithBoolReference(instrLoad, IR::HelperConv_ToInt32_NoObjects, true /*useBoolForBailout*/);
        }
    }
    else
    {
        // Known tagged int: the inline path above fully handled the load.
        instrLoad->Remove();
    }

    return false;
}
// Load a Var that should hold a number into the float register opndFloat.
// Emits an inline tagged-int path (CVTSI2SD of the 32-bit payload) and a
// tagged-float path (XOR off FloatTag_Value, MOVD the raw bits); any other
// value branches to labelHelper. Returns the first instruction emitted (from
// the int test) so the caller can anchor further processing there.
IR::Instr *
LowererMDArch::LoadCheckedFloat(IR::RegOpnd *opndOrig, IR::RegOpnd *opndFloat, IR::LabelInstr *labelInline, IR::LabelInstr *labelHelper, IR::Instr *instrInsert, const bool checkForNullInLoopBody)
{
    //
    //   if (TaggedInt::Is(opndOrig))
    //       opndFloat = CVTSI2SD opndOrig_32
    //                   JMP $labelInline
    //   else
    //                   JMP $labelOpndIsNotInt
    //
    //   $labelOpndIsNotInt:
    //   if (TaggedFloat::Is(opndOrig))
    //       s2        = MOV opndOrig
    //       s2        = XOR FloatTag_Value
    //       opndFloat = MOVD s2
    //   else
    //                   JMP $labelHelper
    //
    //   $labelInline:
    //
    IR::Instr *instrFirst = nullptr;

    IR::LabelInstr *labelOpndIsNotInt = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    lowererMD->GenerateSmIntTest(opndOrig, instrInsert, labelOpndIsNotInt, &instrFirst);

    if (opndOrig->GetValueType().IsLikelyFloat())
    {
        // Make this path helper if value is likely a float
        instrInsert->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true));
    }

    // Tagged int: convert the 32-bit payload directly to a double.
    IR::Opnd *opndOrig_32 = opndOrig->UseWithNewType(TyInt32, this->m_func);
    IR::Instr *cvtsi2sd = IR::Instr::New(Js::OpCode::CVTSI2SD, opndFloat, opndOrig_32, this->m_func);
    instrInsert->InsertBefore(cvtsi2sd);

    IR::Instr *jmpInline = IR::BranchInstr::New(Js::OpCode::JMP, labelInline, this->m_func);
    instrInsert->InsertBefore(jmpInline);

    instrInsert->InsertBefore(labelOpndIsNotInt);

    // Not a tagged int: verify it is a tagged float, else go to the helper.
    lowererMD->GenerateFloatTest(opndOrig, instrInsert, labelHelper, checkForNullInLoopBody);

    // Untag: XOR with FloatTag_Value strips the tag per the tagged-float scheme.
    IR::RegOpnd *s2 = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, s2, opndOrig, this->m_func);
    instrInsert->InsertBefore(mov);

    IR::Instr *xorTag = IR::Instr::New(Js::OpCode::XOR,
        s2,
        s2,
        IR::IntConstOpnd::New(Js::FloatTag_Value,
            TyMachReg,
            this->m_func,
            /* dontEncode = */ true),
        this->m_func);
    instrInsert->InsertBefore(xorTag);
    LowererMD::Legalize(xorTag);

    // Move the raw double bits into the XMM destination.
    IR::Instr *movFloat = IR::Instr::New(Js::OpCode::MOVD, opndFloat, s2, this->m_func);
    instrInsert->InsertBefore(movFloat);

    return instrFirst;
}
// amd64 needs no extra stack-restore code on bailout, so the exit target
// label is returned unchanged.
IR::LabelInstr *
LowererMDArch::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
{
    return exitTargetInstr;
}
// No amd64-specific fast path for And; always returns true.
// NOTE(review): the meaning of the bool return is defined by the caller in
// the shared lowerer -- confirm there.
bool LowererMDArch::GenerateFastAnd(IR::Instr * instrAnd)
{
    return true;
}
// No amd64-specific fast path for Xor; always returns true (see caller for
// the return-value contract).
bool LowererMDArch::GenerateFastXor(IR::Instr * instrXor)
{
    return true;
}
// No amd64-specific fast path for Or; always returns true (see caller for
// the return-value contract).
bool LowererMDArch::GenerateFastOr(IR::Instr * instrOr)
{
    return true;
}
// No amd64-specific fast path for Not; always returns true (see caller for
// the return-value contract).
bool LowererMDArch::GenerateFastNot(IR::Instr * instrNot)
{
    return true;
}
// No amd64-specific fast path for shift-left; always returns true (see
// caller for the return-value contract).
bool LowererMDArch::GenerateFastShiftLeft(IR::Instr * instrShift)
{
    return true;
}
  2456. bool LowererMDArch::GenerateFastShiftRight(IR::Instr * instrShift)
  2457. {
  2458. // Given:
  2459. //
  2460. // dst = Shr/ShrU src1, src2
  2461. //
  2462. // Generate:
  2463. //
  2464. // (If not 2 Int31's, jump to $helper.)
  2465. // s1 = MOV src1
  2466. //RCX = MOV src2
  2467. // TEST RCX, 0x1F [unsigned only] // Bail if unsigned and not shifting,
  2468. // JEQ $helper [unsigned only] // as we may not end up with a taggable int
  2469. // s1 = SAR/SHR s1, RCX
  2470. // BTS s1, VarTag_Shift
  2471. //dst = MOV s1
  2472. // JMP $fallthru
  2473. // $helper:
  2474. // (caller generates helper call)
  2475. // $fallthru:
  2476. IR::Instr * instr;
  2477. IR::LabelInstr * labelHelper;
  2478. IR::LabelInstr * labelFallThru;
  2479. IR::Opnd * opndReg;
  2480. IR::Opnd * opndSrc1;
  2481. IR::Opnd * opndSrc2;
  2482. Assert(instrShift->m_opcode == Js::OpCode::ShrU_A || instrShift->m_opcode == Js::OpCode::Shr_A);
  2483. bool isUnsigned = (instrShift->m_opcode == Js::OpCode::ShrU_A);
  2484. opndSrc1 = instrShift->GetSrc1();
  2485. opndSrc2 = instrShift->GetSrc2();
  2486. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");
  2487. // Not int?
  2488. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  2489. {
  2490. return true;
  2491. }
  2492. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  2493. {
  2494. return true;
  2495. }
  2496. // Tagged ints?
  2497. bool isTaggedInts = false;
  2498. if (opndSrc1->IsTaggedInt())
  2499. {
  2500. if (opndSrc2->IsTaggedInt())
  2501. {
  2502. isTaggedInts = true;
  2503. }
  2504. }
  2505. IntConstType s2Value = 0;
  2506. bool src2IsIntConst = false;
  2507. if (isUnsigned)
  2508. {
  2509. if (opndSrc2->IsRegOpnd())
  2510. {
  2511. src2IsIntConst = opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst();
  2512. if (src2IsIntConst)
  2513. {
  2514. s2Value = opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue();
  2515. }
  2516. }
  2517. else
  2518. {
  2519. AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
  2520. "Expect src2 of shift right to be reg or Var.");
  2521. src2IsIntConst = true;
  2522. s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
  2523. }
  2524. // 32-bit Shifts only uses the bottom 5 bits.
  2525. s2Value &= 0x1F;
  2526. // Unsigned shift by 0 could yield a value not encodable as a tagged int.
  2527. if (isUnsigned && src2IsIntConst && s2Value == 0)
  2528. {
  2529. return true;
  2530. }
  2531. }
  2532. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  2533. if (!isTaggedInts)
  2534. {
  2535. // (If not 2 Int31's, jump to $helper.)
  2536. this->lowererMD->GenerateSmIntPairTest(instrShift, opndSrc1, opndSrc2, labelHelper);
  2537. }
  2538. opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
  2539. if (src2IsIntConst)
  2540. {
  2541. opndSrc2 = IR::IntConstOpnd::New(s2Value, TyInt32, this->m_func);
  2542. }
  2543. else
  2544. {
  2545. // RCX = MOV src2
  2546. opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
  2547. opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2548. opndReg->AsRegOpnd()->SetReg(this->GetRegShiftCount());
  2549. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc2, this->m_func);
  2550. instrShift->InsertBefore(instr);
  2551. opndSrc2 = opndReg;
  2552. }
  2553. if (!src2IsIntConst && isUnsigned)
  2554. {
  2555. // TEST RCX, 0x1F [unsigned only] // Bail if unsigned and not shifting,
  2556. instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  2557. instr->SetSrc1(opndSrc2);
  2558. instr->SetSrc2(IR::IntConstOpnd::New(0x1F, TyInt32, this->m_func));
  2559. instrShift->InsertBefore(instr);
  2560. // JEQ $helper [unsigned only] // as we may not end up with a taggable int
  2561. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  2562. instrShift->InsertBefore(instr);
  2563. }
  2564. // s1 = MOV src1
  2565. opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2566. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
  2567. instrShift->InsertBefore(instr);
  2568. // s1 = SAR/SHR s1, RCX
  2569. instr = IR::Instr::New(isUnsigned ? Js::OpCode::SHR : Js::OpCode::SAR, opndReg, opndReg, opndSrc2, this->m_func);
  2570. instrShift->InsertBefore(instr);
  2571. //
  2572. // Convert TyInt32 operand, back to TyMachPtr type.
  2573. //
  2574. if(TyMachReg != opndReg->GetType())
  2575. {
  2576. opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
  2577. }
  2578. // BTS s1, VarTag_Shift
  2579. this->lowererMD->GenerateInt32ToVarConversion(opndReg, instrShift);
  2580. // dst = MOV s1
  2581. instr = IR::Instr::New(Js::OpCode::MOV, instrShift->GetDst(), opndReg, this->m_func);
  2582. instrShift->InsertBefore(instr);
  2583. // JMP $fallthru
  2584. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  2585. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
  2586. instrShift->InsertBefore(instr);
  2587. // $helper:
  2588. // (caller generates helper call)
  2589. // $fallthru:
  2590. instrShift->InsertBefore(labelHelper);
  2591. instrShift->InsertAfter(labelFallThru);
  2592. return true;
  2593. }
// Final machine-dependent lowering pass for amd64: walks every instruction
// backwards and expands/cleans up the remaining pseudo-ops before encoding.
void
LowererMDArch::FinalLower()
{
    IR::IntConstOpnd *intOpnd;

    FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, this->m_func->m_tailInstr, this->m_func->m_headInstr)
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::Ret:
            // Remove the Ret pseudo-op; nothing is emitted for it here.
            instr->Remove();
            break;

        case Js::OpCode::LdArgSize:
            // Materialize the function's argument-area size as an immediate MOV.
            // Only emitted for functions with try regions.
            Assert(this->m_func->HasTry());
            instr->m_opcode = Js::OpCode::MOV;
            intOpnd = IR::IntConstOpnd::New(this->m_func->GetArgsSize(), TyUint32, this->m_func);
            instr->SetSrc1(intOpnd);
            LowererMD::Legalize(instr);
            break;

        case Js::OpCode::LdSpillSize:
            // Materialize the function's spill-area size as an immediate MOV.
            Assert(this->m_func->HasTry());
            instr->m_opcode = Js::OpCode::MOV;
            intOpnd = IR::IntConstOpnd::New(this->m_func->GetSpillSize(), TyUint32, this->m_func);
            instr->SetSrc1(intOpnd);
            LowererMD::Legalize(instr);
            break;

        case Js::OpCode::Leave:
            Assert(this->m_func->DoOptimizeTry() && !this->m_func->IsLoopBodyInTry());
            instrPrev = this->lowererMD->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), true /*fromFinalLower*/);
            break;

        case Js::OpCode::CMOVA:
        case Js::OpCode::CMOVAE:
        case Js::OpCode::CMOVB:
        case Js::OpCode::CMOVBE:
        case Js::OpCode::CMOVE:
        case Js::OpCode::CMOVG:
        case Js::OpCode::CMOVGE:
        case Js::OpCode::CMOVL:
        case Js::OpCode::CMOVLE:
        case Js::OpCode::CMOVNE:
        case Js::OpCode::CMOVNO:
        case Js::OpCode::CMOVNP:
        case Js::OpCode::CMOVNS:
        case Js::OpCode::CMOVO:
        case Js::OpCode::CMOVP:
        case Js::OpCode::CMOVS:
            // Get rid of fake src1.
            if (instr->GetSrc2())
            {
                // CMOVs inserted before regalloc have a dummy src1 to simulate the fact that
                // CMOV is not a definite def of the dst. Move the real source into
                // the src1 slot and drop the dummy (now in src2).
                instr->SwapOpnds();
                instr->FreeSrc2();
            }
            break;
        }
    } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
}
// Delegates to the shared (machine-independent) lowering in Lowerer.
IR::Opnd*
LowererMDArch::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    return this->lowererMD->m_lowerer->GenerateArgOutForStackArgs(callInstr, stackArgsInstr);
}
// Delegates to the shared register-based spread-argout loop lowering.
void
LowererMDArch::LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    this->lowererMD->m_lowerer->LowerInlineSpreadArgOutLoopUsingRegisters(callInstr, indexOpnd, arrayElementsStartOpnd);
}
  2661. IR::Instr *
  2662. LowererMDArch::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  2663. {
  2664. IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  2665. // Load the continuation address into the return register.
  2666. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, retReg, targetOpnd, this->m_func));
  2667. // MOV REG_EH_SPILL_SIZE, spillSize
  2668. IR::Instr *movSpillSize = IR::Instr::New(Js::OpCode::LdSpillSize,
  2669. IR::RegOpnd::New(nullptr, REG_EH_SPILL_SIZE, TyMachReg, m_func),
  2670. m_func);
  2671. insertBeforeInstr->InsertBefore(movSpillSize);
  2672. // MOV REG_EH_ARGS_SIZE, argsSize
  2673. IR::Instr *movArgsSize = IR::Instr::New(Js::OpCode::LdArgSize,
  2674. IR::RegOpnd::New(nullptr, REG_EH_ARGS_SIZE, TyMachReg, m_func),
  2675. m_func);
  2676. insertBeforeInstr->InsertBefore(movArgsSize);
  2677. // MOV REG_EH_TARGET, amd64_ReturnFromCallWithFakeFrame
  2678. // PUSH REG_EH_TARGET
  2679. // RET
  2680. IR::Opnd *endCallWithFakeFrame = endCallWithFakeFrame =
  2681. IR::RegOpnd::New(nullptr, REG_EH_TARGET, TyMachReg, m_func);
  2682. IR::Instr *movTarget = IR::Instr::New(Js::OpCode::MOV,
  2683. endCallWithFakeFrame,
  2684. IR::HelperCallOpnd::New(IR::HelperOp_ReturnFromCallWithFakeFrame, m_func),
  2685. m_func);
  2686. insertBeforeInstr->InsertBefore(movTarget);
  2687. IR::Instr *push = IR::Instr::New(Js::OpCode::PUSH, m_func);
  2688. push->SetSrc1(endCallWithFakeFrame);
  2689. insertBeforeInstr->InsertBefore(push);
  2690. #if 0
  2691. // TODO: This block gets deleted if we emit a JMP instead of a RET.
  2692. IR::BranchInstr *jmp = IR::BranchInstr::New(Js::OpCode::JMP,
  2693. nullptr,
  2694. targetOpnd,
  2695. m_func);
  2696. leaveInstr->InsertBefore(jmp);
  2697. #endif
  2698. IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  2699. IR::Instr * retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  2700. retInstr->SetSrc1(intSrc);
  2701. retInstr->SetSrc2(retReg);
  2702. insertBeforeInstr->InsertBefore(retInstr);
  2703. // return the last instruction inserted
  2704. return retInstr;
  2705. }