/* valgrind.h */
/* -*- c -*-
   ----------------------------------------------------------------

   Notice that the following BSD-style license applies to this one
   file (valgrind.h) only.  The rest of Valgrind is licensed under the
   terms of the GNU General Public License, version 2, unless
   otherwise indicated.  See the COPYING file in the source
   distribution for details.

   ----------------------------------------------------------------

   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2000-2010 Julian Seward.  All rights reserved.

   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions
   are met:

   1. Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.

   2. The origin of this software must not be misrepresented; you must
      not claim that you wrote the original software.  If you use this
      software in a product, an acknowledgment in the product
      documentation would be appreciated but is not required.

   3. Altered source versions must be plainly marked as such, and must
      not be misrepresented as being the original software.

   4. The name of the author may not be used to endorse or promote
      products derived from this software without specific prior
      written permission.

   THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
   OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
   WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
   DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
   DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
   GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
   WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

   ----------------------------------------------------------------

   Notice that the above BSD-style license applies to this one file
   (valgrind.h) only.  The entire rest of Valgrind is licensed under
   the terms of the GNU General Public License, version 2.  See the
   COPYING file in the source distribution for details.

   ----------------------------------------------------------------
*/
/* This file is for inclusion into client (your!) code.

   You can use these macros to manipulate and query Valgrind's
   execution inside your own programs.

   The resulting executables will still run without Valgrind, just a
   little bit more slowly than they otherwise would, but otherwise
   unchanged.  When not running on valgrind, each client request
   consumes very few (eg. 7) instructions, so the resulting
   performance loss is negligible unless you plan to execute client
   requests millions of times per second.  Nevertheless, if that is
   still a problem, you can compile with the NVALGRIND symbol defined
   (gcc -DNVALGRIND) so that client requests are not even compiled
   in.  */
#ifndef __VALGRIND_H
#define __VALGRIND_H

/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

   #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
       && (__VALGRIND_MAJOR__ > 3 \
           || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    6

/* Needed by the function-wrapping machinery later in this header
   (variadic call helpers). */
#include <stdarg.h>
/* Nb: this file might be included in a file compiled with -ansi.  So
   we can't use C++ style "//" comments nor the "asm" keyword (instead
   use "__asm__"). */

/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
  84. #undef PLAT_ppc64_aix5
  85. #undef PLAT_ppc32_aix5
  86. #undef PLAT_x86_darwin
  87. #undef PLAT_amd64_darwin
  88. #undef PLAT_x86_win32
  89. #undef PLAT_x86_linux
  90. #undef PLAT_amd64_linux
  91. #undef PLAT_ppc32_linux
  92. #undef PLAT_ppc64_linux
  93. #undef PLAT_arm_linux
  94. #if defined(_AIX) && defined(__64BIT__)
  95. # define PLAT_ppc64_aix5 1
  96. #elif defined(_AIX) && !defined(__64BIT__)
  97. # define PLAT_ppc32_aix5 1
  98. #elif defined(__APPLE__) && defined(__i386__)
  99. # define PLAT_x86_darwin 1
  100. #elif defined(__APPLE__) && defined(__x86_64__)
  101. # define PLAT_amd64_darwin 1
  102. #elif defined(__MINGW32__) || defined(__CYGWIN32__) || defined(_WIN32) && defined(_M_IX86)
  103. # define PLAT_x86_win32 1
  104. #elif defined(__linux__) && defined(__i386__)
  105. # define PLAT_x86_linux 1
  106. #elif defined(__linux__) && defined(__x86_64__)
  107. # define PLAT_amd64_linux 1
  108. #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
  109. # define PLAT_ppc32_linux 1
  110. #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
  111. # define PLAT_ppc64_linux 1
  112. #elif defined(__linux__) && defined(__arm__)
  113. # define PLAT_arm_linux 1
  114. #else
  115. /* If we're not compiling for our target platform, don't generate
  116. any inline asms. */
  117. # if !defined(NVALGRIND)
  118. # define NVALGRIND 1
  119. # endif
  120. #endif
/* ------------------------------------------------------------------ */
/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS.  There is nothing */
/* in here of use to end-users -- skip to the next section.           */
/* ------------------------------------------------------------------ */

#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()).  In this mode a client request degenerates to a plain
   assignment of the default value; note that the request code and the
   five argument parameters are never expanded, so their side effects
   (if any) are not evaluated. */
#define VALGRIND_DO_CLIENT_REQUEST(                               \
        _zzq_rlval, _zzq_default, _zzq_request,                   \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
   {                                                              \
      (_zzq_rlval) = (_zzq_default);                              \
   }

#else  /* ! NVALGRIND */
/* The following defines the magic code sequences which the JITter
   spots and handles magically.  Don't look too closely at them as
   they will rot your brain.

   The assembly code sequences for all architectures is in this one
   file.  This is because this file must be stand-alone, and we don't
   want to have multiple files.

   For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
   value gets put in the return slot, so that everything works when
   this is executed not under Valgrind.  Args are passed in a memory
   block, and so there's no intrinsic limit to the number that could
   be passed, but it's currently five.

   The macro args are:
      _zzq_rlval    result lvalue
      _zzq_default  default value (result returned when running on
                    real CPU)
      _zzq_request  request code
      _zzq_arg1..5  request params

   The other two macros are used to support function wrapping, and are
   a lot simpler.  VALGRIND_GET_NR_CONTEXT returns the value of the
   guest's NRADDR pseudo-register and whatever other information is
   needed to safely run the call original from the wrapper: on
   ppc64-linux, the R2 value at the divert point is also needed.  This
   information is abstracted into a user-visible type, OrigFn.

   VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
   guest, but guarantees that the branch instruction will not be
   redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
   branch-and-link-to-r11.  VALGRIND_CALL_NOREDIR is just text, not a
   complete inline asm, since it needs to be combined with more magic
   inline asm stuff to be useful.
*/
/* ------------------------- x86-{linux,darwin} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))

/* Context needed to call the original function from a wrapper; on
   x86 only the guest's NRADDR pseudo-register is required. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Magic preamble: four rotate-lefts of %edi whose shift counts sum to
   64, a multiple of 32, so %edi is left unchanged.  On a real CPU the
   sequence is a (slow) no-op; Valgrind's JIT recognises it and treats
   the instruction that follows as a request marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3,  %%edi ; roll $13, %%edi\n\t"      \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

/* Pass the request code and args to Valgrind via a 6-word block whose
   address sits in %eax; the default value is preloaded into %edx
   ("0" ties it to the output operand) so that on a real CPU, where
   "xchgl %ebx,%ebx" is a no-op, the default falls out unchanged. */
#define VALGRIND_DO_CLIENT_REQUEST(                               \
        _zzq_rlval, _zzq_default, _zzq_request,                   \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  { volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_rlval = _zzq_result;                                     \
  }

/* Fill *(_zzq_rlval), an OrigFn, with the guest's NRADDR.  Under
   Valgrind the "xchgl %ecx,%ecx" marker causes %eax to be loaded with
   the address; on a real CPU the sequence is a no-op, so __addr gets
   whatever happened to be in %eax. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment (not a complete inline asm): preamble plus the
   "xchgl %edx,%edx" marker, which Valgrind executes as a call through
   %EAX that is guaranteed not to be redirected. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"
#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

/* Context needed to call the original function from a wrapper; on
   x86 only the guest's NRADDR pseudo-register is required. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-asm spelling of the same four-rotate %edi preamble used
   in the GCC x86 variant: shift counts sum to 64 (a multiple of 32),
   so edi is preserved and the sequence is a no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
   __asm rol edi, 3  __asm rol edi, 13                            \
   __asm rol edi, 29 __asm rol edi, 19

/* As for the GCC variant: args go to Valgrind via a 6-element block
   addressed by eax; the default value is preloaded into edx so that
   on a real CPU (where "xchg ebx,ebx" is a no-op) the default is
   returned.  Args are uintptr_t here rather than unsigned int. */
#define VALGRIND_DO_CLIENT_REQUEST(                               \
        _zzq_rlval, _zzq_default, _zzq_request,                   \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  { volatile uintptr_t _zzq_args[6];                              \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (uintptr_t)(_zzq_request);                     \
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);                        \
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);                        \
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);                        \
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);                        \
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);                        \
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default  \
            __SPECIAL_INSTRUCTION_PREAMBLE                        \
            /* %EDX = client_request ( %EAX ) */                  \
            __asm xchg ebx,ebx                                    \
            __asm mov _zzq_result, edx                            \
    }                                                             \
    _zzq_rlval = _zzq_result;                                     \
  }

/* Fill *(_zzq_rlval), an OrigFn, with the guest's NRADDR via the
   "xchg ecx,ecx" marker (a no-op on a real CPU). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            /* %EAX = guest_NRADDR */                             \
            __asm xchg ecx,ecx                                    \
            __asm mov __addr, eax                                 \
    }                                                             \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Deliberately unusable: expands to the bare token ERROR, so any
   attempt to use it under MSVC fails to compile -- there is no MSVC
   implementation of the no-redirect call here. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
/* ------------------------ amd64-{linux,darwin} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin)

/* Context needed to call the original function from a wrapper; on
   amd64 only the guest's NRADDR pseudo-register is required. */
typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Magic preamble: four rotate-lefts of %rdi whose shift counts sum to
   128, a multiple of 64, so %rdi is left unchanged.  On a real CPU
   the sequence is a no-op; Valgrind's JIT recognises it and treats
   the instruction that follows as a request marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3,  %%rdi ; rolq $13, %%rdi\n\t"      \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* 64-bit analogue of the x86 request: args in a 6-qword block whose
   address is in %rax, default preloaded into %rdx ("0" ties it to the
   output) so a real CPU -- where "xchgq %rbx,%rbx" is a no-op --
   returns the default unchanged. */
#define VALGRIND_DO_CLIENT_REQUEST(                               \
        _zzq_rlval, _zzq_default, _zzq_request,                   \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  { volatile unsigned long long int _zzq_args[6];                 \
    volatile unsigned long long int _zzq_result;                  \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_rlval = _zzq_result;                                     \
  }

/* Fill *(_zzq_rlval), an OrigFn, with the guest's NRADDR.  Under
   Valgrind the "xchgq %rcx,%rcx" marker loads %rax with the address;
   on a real CPU the sequence is a no-op, so __addr gets whatever was
   already in %rax. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long long int __addr;                       \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment (not a complete inline asm): preamble plus the
   "xchgq %rdx,%rdx" marker, which Valgrind executes as a call through
   %RAX that is guaranteed not to be redirected. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"
#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
  305. /* ------------------------ ppc32-linux ------------------------ */
  306. #if defined(PLAT_ppc32_linux)
/* Context of a not-to-be-redirected function: on ppc32-linux just
   its address. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;
/* Marker sequence of four rlwinm ops on r0 that the Valgrind JIT
   recognises as introducing a special instruction. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
   "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
/* Issue a client request: default goes to r3, arg-block pointer to
   r4; "or 1,1,1" is the request marker; result comes back in r3.
   r3/r4 are set up with explicit mr's, hence the clobbers. */
#define VALGRIND_DO_CLIENT_REQUEST( \
        _zzq_rlval, _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 \
  { unsigned int _zzq_args[6]; \
    unsigned int _zzq_result; \
    unsigned int* _zzq_ptr; \
    _zzq_args[0] = (unsigned int)(_zzq_request); \
    _zzq_args[1] = (unsigned int)(_zzq_arg1); \
    _zzq_args[2] = (unsigned int)(_zzq_arg2); \
    _zzq_args[3] = (unsigned int)(_zzq_arg3); \
    _zzq_args[4] = (unsigned int)(_zzq_arg4); \
    _zzq_args[5] = (unsigned int)(_zzq_arg5); \
    _zzq_ptr = _zzq_args; \
    __asm__ volatile("mr 3,%1\n\t" /*default*/ \
                     "mr 4,%2\n\t" /*ptr*/ \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = client_request ( %R4 ) */ \
                     "or 1,1,1\n\t" \
                     "mr %0,3" /*result*/ \
                     : "=b" (_zzq_result) \
                     : "b" (_zzq_default), "b" (_zzq_ptr) \
                     : "cc", "memory", "r3", "r4"); \
    _zzq_rlval = _zzq_result; \
  }
/* Fill in an OrigFn: "or 2,2,2" asks the JIT for guest_NRADDR,
   delivered in r3. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    unsigned int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR */ \
                     "or 2,2,2\n\t" \
                     "mr %0,3" \
                     : "=b" (__addr) \
                     : \
                     : "cc", "memory", "r3" \
                    ); \
    _zzq_orig->nraddr = __addr; \
  }
/* Asm-string fragment: branch-and-link to *%R11 with no redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
   __SPECIAL_INSTRUCTION_PREAMBLE \
   /* branch-and-link-to-noredir *%R11 */ \
   "or 3,3,3\n\t"
  357. #endif /* PLAT_ppc32_linux */
  358. /* ------------------------ ppc64-linux ------------------------ */
  359. #if defined(PLAT_ppc64_linux)
/* Context of a not-to-be-redirected function: on ppc64-linux its
   address plus the TOC pointer (r2) it needs. */
typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2; /* what tocptr do we need? */
   }
   OrigFn;
/* Four rotates of r0 totalling 3+13+61+51 = 128 bits — two full
   64-bit revolutions, preserving r0: the marker sequence the
   Valgrind JIT recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
   "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
/* Issue a client request.  Unlike the ppc32 version, the operands
   are pinned to r3/r4 with register-asm variables, so no mr shuffles
   or r3/r4 clobbers are needed; "0" ties the default to the result
   register, so natively the default falls straight through. */
#define VALGRIND_DO_CLIENT_REQUEST( \
        _zzq_rlval, _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 \
  { unsigned long long int _zzq_args[6]; \
    register unsigned long long int _zzq_result __asm__("r3"); \
    register unsigned long long int* _zzq_ptr __asm__("r4"); \
    _zzq_args[0] = (unsigned long long int)(_zzq_request); \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
    _zzq_ptr = _zzq_args; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = client_request ( %R4 ) */ \
                     "or 1,1,1" \
                     : "=r" (_zzq_result) \
                     : "0" (_zzq_default), "r" (_zzq_ptr) \
                     : "cc", "memory"); \
    _zzq_rlval = _zzq_result; \
  }
/* Fill in an OrigFn with two requests: "or 2,2,2" yields
   guest_NRADDR, then "or 4,4,4" yields guest_NRADDR_GPR2 (the TOC
   pointer), both delivered in r3. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    register unsigned long long int __addr __asm__("r3"); \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR */ \
                     "or 2,2,2" \
                     : "=r" (__addr) \
                     : \
                     : "cc", "memory" \
                    ); \
    _zzq_orig->nraddr = __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR_GPR2 */ \
                     "or 4,4,4" \
                     : "=r" (__addr) \
                     : \
                     : "cc", "memory" \
                    ); \
    _zzq_orig->r2 = __addr; \
  }
/* Asm-string fragment: branch-and-link to *%R11 with no redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
   __SPECIAL_INSTRUCTION_PREAMBLE \
   /* branch-and-link-to-noredir *%R11 */ \
   "or 3,3,3\n\t"
  415. #endif /* PLAT_ppc64_linux */
  416. /* ------------------------- arm-linux ------------------------- */
  417. #if defined(PLAT_arm_linux)
/* Context of a not-to-be-redirected function: on arm-linux just its
   address. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;
/* Four rotates of r12 totalling 3+13+29+19 = 64 bits — two full
   32-bit revolutions, preserving r12: the marker sequence the
   Valgrind JIT recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
   "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
/* Issue a client request: default to r3, arg-block pointer to r4;
   "orr r10,r10,r10" is the request marker; result read back from r3.
   r3/r4 are loaded explicitly, hence the clobbers. */
#define VALGRIND_DO_CLIENT_REQUEST( \
        _zzq_rlval, _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 \
  { volatile unsigned int _zzq_args[6]; \
    volatile unsigned int _zzq_result; \
    _zzq_args[0] = (unsigned int)(_zzq_request); \
    _zzq_args[1] = (unsigned int)(_zzq_arg1); \
    _zzq_args[2] = (unsigned int)(_zzq_arg2); \
    _zzq_args[3] = (unsigned int)(_zzq_arg3); \
    _zzq_args[4] = (unsigned int)(_zzq_arg4); \
    _zzq_args[5] = (unsigned int)(_zzq_arg5); \
    __asm__ volatile("mov r3, %1\n\t" /*default*/ \
                     "mov r4, %2\n\t" /*ptr*/ \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* R3 = client_request ( R4 ) */ \
                     "orr r10, r10, r10\n\t" \
                     "mov %0, r3" /*result*/ \
                     : "=r" (_zzq_result) \
                     : "r" (_zzq_default), "r" (&_zzq_args[0]) \
                     : "cc","memory", "r3", "r4"); \
    _zzq_rlval = _zzq_result; \
  }
/* Fill in an OrigFn: "orr r11,r11,r11" asks the JIT for
   guest_NRADDR, delivered in r3. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    unsigned int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* R3 = guest_NRADDR */ \
                     "orr r11, r11, r11\n\t" \
                     "mov %0, r3" \
                     : "=r" (__addr) \
                     : \
                     : "cc", "memory", "r3" \
                    ); \
    _zzq_orig->nraddr = __addr; \
  }
/* Asm-string fragment: branch-and-link to *%R4 with no redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
   __SPECIAL_INSTRUCTION_PREAMBLE \
   /* branch-and-link-to-noredir *%R4 */ \
   "orr r12, r12, r12\n\t"
  466. #endif /* PLAT_arm_linux */
  467. /* ------------------------ ppc32-aix5 ------------------------- */
  468. #if defined(PLAT_ppc32_aix5)
/* Context of a not-to-be-redirected function: on ppc32-aix5 its
   address plus the TOC pointer (r2) it needs. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
      unsigned int r2; /* what tocptr do we need? */
   }
   OrigFn;
/* Marker sequence of four rlwinm ops on r0 that the Valgrind JIT
   recognises as introducing a special instruction. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
   "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
/* Issue a client request.  Unlike the linux variant, the default is
   passed in the arg block itself, as slot 6: "lwz 3, 24(4)" loads it
   into r3 (offset 24 = 6 words of 4 bytes) before the request. */
#define VALGRIND_DO_CLIENT_REQUEST( \
        _zzq_rlval, _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 \
  { unsigned int _zzq_args[7]; \
    register unsigned int _zzq_result; \
    register unsigned int* _zzq_ptr; \
    _zzq_args[0] = (unsigned int)(_zzq_request); \
    _zzq_args[1] = (unsigned int)(_zzq_arg1); \
    _zzq_args[2] = (unsigned int)(_zzq_arg2); \
    _zzq_args[3] = (unsigned int)(_zzq_arg3); \
    _zzq_args[4] = (unsigned int)(_zzq_arg4); \
    _zzq_args[5] = (unsigned int)(_zzq_arg5); \
    _zzq_args[6] = (unsigned int)(_zzq_default); \
    _zzq_ptr = _zzq_args; \
    __asm__ volatile("mr 4,%1\n\t" \
                     "lwz 3, 24(4)\n\t" \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = client_request ( %R4 ) */ \
                     "or 1,1,1\n\t" \
                     "mr %0,3" \
                     : "=b" (_zzq_result) \
                     : "b" (_zzq_ptr) \
                     : "r3", "r4", "cc", "memory"); \
    _zzq_rlval = _zzq_result; \
  }
/* Fill in an OrigFn with two requests: "or 2,2,2" yields
   guest_NRADDR, then "or 4,4,4" yields guest_NRADDR_GPR2 (the TOC
   pointer), both delivered in r3. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    register unsigned int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR */ \
                     "or 2,2,2\n\t" \
                     "mr %0,3" \
                     : "=b" (__addr) \
                     : \
                     : "r3", "cc", "memory" \
                    ); \
    _zzq_orig->nraddr = __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR_GPR2 */ \
                     "or 4,4,4\n\t" \
                     "mr %0,3" \
                     : "=b" (__addr) \
                     : \
                     : "r3", "cc", "memory" \
                    ); \
    _zzq_orig->r2 = __addr; \
  }
/* Asm-string fragment: branch-and-link to *%R11 with no redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
   __SPECIAL_INSTRUCTION_PREAMBLE \
   /* branch-and-link-to-noredir *%R11 */ \
   "or 3,3,3\n\t"
  530. #endif /* PLAT_ppc32_aix5 */
  531. /* ------------------------ ppc64-aix5 ------------------------- */
  532. #if defined(PLAT_ppc64_aix5)
/* Context of a not-to-be-redirected function: on ppc64-aix5 its
   address plus the TOC pointer (r2) it needs. */
typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2; /* what tocptr do we need? */
   }
   OrigFn;
/* Four rotates of r0 totalling 3+13+61+51 = 128 bits — two full
   64-bit revolutions, preserving r0: the marker sequence the
   Valgrind JIT recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
   "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
   "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
/* Issue a client request.  The default is passed in the arg block
   itself, as slot 6: "ld 3, 48(4)" loads it into r3 (offset 48 =
   6 words of 8 bytes) before the request.  NB: the odd spelling
   "unsigned int long long" is the same type as
   "unsigned long long int" (C allows specifiers in any order). */
#define VALGRIND_DO_CLIENT_REQUEST( \
        _zzq_rlval, _zzq_default, _zzq_request, \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 \
  { unsigned long long int _zzq_args[7]; \
    register unsigned long long int _zzq_result; \
    register unsigned long long int* _zzq_ptr; \
    _zzq_args[0] = (unsigned int long long)(_zzq_request); \
    _zzq_args[1] = (unsigned int long long)(_zzq_arg1); \
    _zzq_args[2] = (unsigned int long long)(_zzq_arg2); \
    _zzq_args[3] = (unsigned int long long)(_zzq_arg3); \
    _zzq_args[4] = (unsigned int long long)(_zzq_arg4); \
    _zzq_args[5] = (unsigned int long long)(_zzq_arg5); \
    _zzq_args[6] = (unsigned int long long)(_zzq_default); \
    _zzq_ptr = _zzq_args; \
    __asm__ volatile("mr 4,%1\n\t" \
                     "ld 3, 48(4)\n\t" \
                     __SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = client_request ( %R4 ) */ \
                     "or 1,1,1\n\t" \
                     "mr %0,3" \
                     : "=b" (_zzq_result) \
                     : "b" (_zzq_ptr) \
                     : "r3", "r4", "cc", "memory"); \
    _zzq_rlval = _zzq_result; \
  }
/* Fill in an OrigFn with two requests: "or 2,2,2" yields
   guest_NRADDR, then "or 4,4,4" yields guest_NRADDR_GPR2 (the TOC
   pointer), both delivered in r3. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
    register unsigned long long int __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR */ \
                     "or 2,2,2\n\t" \
                     "mr %0,3" \
                     : "=b" (__addr) \
                     : \
                     : "r3", "cc", "memory" \
                    ); \
    _zzq_orig->nraddr = __addr; \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
                     /* %R3 = guest_NRADDR_GPR2 */ \
                     "or 4,4,4\n\t" \
                     "mr %0,3" \
                     : "=b" (__addr) \
                     : \
                     : "r3", "cc", "memory" \
                    ); \
    _zzq_orig->r2 = __addr; \
  }
/* Asm-string fragment: branch-and-link to *%R11 with no redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
   __SPECIAL_INSTRUCTION_PREAMBLE \
   /* branch-and-link-to-noredir *%R11 */ \
   "or 3,3,3\n\t"
  594. #endif /* PLAT_ppc64_aix5 */
  595. /* Insert assembly code for other platforms here... */
  596. #endif /* NVALGRIND */
  597. /* ------------------------------------------------------------------ */
  598. /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
  599. /* ugly. It's the least-worst tradeoff I can think of. */
  600. /* ------------------------------------------------------------------ */
  601. /* This section defines magic (a.k.a appalling-hack) macros for doing
  602. guaranteed-no-redirection macros, so as to get from function
  603. wrappers to the functions they are wrapping. The whole point is to
  604. construct standard call sequences, but to do the call itself with a
  605. special no-redirect call pseudo-instruction that the JIT
  606. understands and handles specially. This section is long and
  607. repetitious, and I can't see a way to make it shorter.
  608. The naming scheme is as follows:
  609. CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
  610. 'W' stands for "word" and 'v' for "void". Hence there are
  611. different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
  612. and for each, the possibility of returning a word-typed result, or
  613. no result.
  614. */
/* Use these to write the name of your wrapper.  NOTE: duplicates
   VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. */
/* Use an extra level of macroisation so as to ensure the soname/fnname
   args are fully macro-expanded before pasting them together. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
/* Wrapper name for a soname/fnname pair in the "ZU" (unencoded
   soname) form. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
   VG_CONCAT4(_vgwZU_,soname,_,fnname)
/* Wrapper name for a soname/fnname pair in the "ZZ" (Z-encoded
   soname) form. */
#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
   VG_CONCAT4(_vgwZZ_,soname,_,fnname)
/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
  629. /* Derivatives of the main macros below, for calling functions
  630. returning void. */
  631. #define CALL_FN_v_v(fnptr) \
  632. do { volatile unsigned long _junk; \
  633. CALL_FN_W_v(_junk,fnptr); } while (0)
  634. #define CALL_FN_v_W(fnptr, arg1) \
  635. do { volatile unsigned long _junk; \
  636. CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
  637. #define CALL_FN_v_WW(fnptr, arg1,arg2) \
  638. do { volatile unsigned long _junk; \
  639. CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
  640. #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
  641. do { volatile unsigned long _junk; \
  642. CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
  643. #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
  644. do { volatile unsigned long _junk; \
  645. CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
  646. #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
  647. do { volatile unsigned long _junk; \
  648. CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
  649. #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
  650. do { volatile unsigned long _junk; \
  651. CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
  652. #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
  653. do { volatile unsigned long _junk; \
  654. CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
  655. /* ------------------------- x86-{linux,darwin} ---------------- */
  656. #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
/* These regs are trashed by the hidden call.  No need to mention eax:
   it is the asm's own in/out operand, so gcc can already see that —
   and listing it as a clobber too makes gcc bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
   long) == 4. */
/* Zero-arg call: load the target address from _argvec[0] into %eax
   and perform the no-redirect call; result comes back in %eax. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* N-arg calls, N = 1..7.  Common pattern: copy the target address and
   args into _argvec[], drop %esp by enough padding that padding +
   pushed args is a multiple of 16 (NOTE(review): apparently to keep
   %esp 16-aligned at the call — confirm against the Darwin ABI), push
   argN..arg1 right-to-left, load the target into %eax, do the
   no-redirect call, then restore %esp with a single add.  In every
   macro below, sub-amount + 4*N == the final add, so %esp is restored
   exactly. */
/* 1 arg: 12 pad + 4 pushed = 16 popped. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         "subl $12, %%esp\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $16, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 2 args: 8 pad + 8 pushed = 16 popped. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         "subl $8, %%esp\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $16, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 3 args: 4 pad + 12 pushed = 16 popped. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         "subl $4, %%esp\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $16, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 4 args: no pad, 16 pushed = 16 popped. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $16, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 5 args: 12 pad + 20 pushed = 32 popped. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         "subl $12, %%esp\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $32, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 6 args: 8 pad + 24 pushed = 32 popped. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         "subl $8, %%esp\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $32, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 7 args: 4 pad + 28 pushed = 32 popped. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         "subl $4, %%esp\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $32, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* N-arg calls, N = 8..12: same pattern as the smaller arities —
   padding + 4*N pushed bytes always equals the final add, so %esp is
   restored exactly. */
/* 8 args: no pad, 32 pushed = 32 popped. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         "pushl 32(%%eax)\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $32, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 9 args: 12 pad + 36 pushed = 48 popped. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         "subl $12, %%esp\n\t" \
         "pushl 36(%%eax)\n\t" \
         "pushl 32(%%eax)\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $48, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 10 args: 8 pad + 40 pushed = 48 popped. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         "subl $8, %%esp\n\t" \
         "pushl 40(%%eax)\n\t" \
         "pushl 36(%%eax)\n\t" \
         "pushl 32(%%eax)\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $48, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 11 args: 4 pad + 44 pushed = 48 popped. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         "subl $4, %%esp\n\t" \
         "pushl 44(%%eax)\n\t" \
         "pushl 40(%%eax)\n\t" \
         "pushl 36(%%eax)\n\t" \
         "pushl 32(%%eax)\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $48, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 12 args: no pad, 48 pushed = 48 popped. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         "pushl 48(%%eax)\n\t" \
         "pushl 44(%%eax)\n\t" \
         "pushl 40(%%eax)\n\t" \
         "pushl 36(%%eax)\n\t" \
         "pushl 32(%%eax)\n\t" \
         "pushl 28(%%eax)\n\t" \
         "pushl 24(%%eax)\n\t" \
         "pushl 20(%%eax)\n\t" \
         "pushl 16(%%eax)\n\t" \
         "pushl 12(%%eax)\n\t" \
         "pushl 8(%%eax)\n\t" \
         "pushl 4(%%eax)\n\t" \
         "movl (%%eax), %%eax\n\t" /* target->%eax */ \
         VALGRIND_CALL_NOREDIR_EAX \
         "addl $48, %%esp\n" \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
  1042. #endif /* PLAT_x86_linux || PLAT_x86_darwin */
  1043. /* ------------------------ amd64-{linux,darwin} --------------- */
  1044. #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
/* These regs are trashed by the hidden call.  "rax" is left commented
   out — presumably for the same reason as on x86 above (it is the
   asm's own operand, so gcc already knows). */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
                            "rdi", "r8", "r9", "r10", "r11"
  1049. /* This is all pretty complex. It's so as to make stack unwinding
  1050. work reliably. See bug 243270. The basic problem is the sub and
  1051. add of 128 of %rsp in all of the following macros. If gcc believes
  1052. the CFA is in %rsp, then unwinding may fail, because what's at the
  1053. CFA is not what gcc "expected" when it constructs the CFIs for the
  1054. places where the macros are instantiated.
  1055. But we can't just add a CFI annotation to increase the CFA offset
  1056. by 128, to match the sub of 128 from %rsp, because we don't know
  1057. whether gcc has chosen %rsp as the CFA at that point, or whether it
  1058. has chosen some other register (eg, %rbp). In the latter case,
  1059. adding a CFI annotation to change the CFA offset is simply wrong.
  1060. So the solution is to get hold of the CFA using
  1061. __builtin_dwarf_cfa(), put it in a known register, and add a
  1062. CFI annotation to say what the register is. We choose %rbp for
  1063. this (perhaps perversely), because:
  1064. (1) %rbp is already subject to unwinding. If a new register was
  1065. chosen then the unwinder would have to unwind it in all stack
  1066. traces, which is expensive, and
  1067. (2) %rbp is already subject to precise exception updates in the
  1068. JIT. If a new register was chosen, we'd have to have precise
  1069. exceptions for it too, which reduces performance of the
  1070. generated code.
  1071. However .. one extra complication. We can't just whack the result
  1072. of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
  1073. list of trashed registers at the end of the inline assembly
  1074. fragments; gcc won't allow %rbp to appear in that list. Hence
  1075. instead we need to stash %rbp in %r15 for the duration of the asm,
  1076. and say that %r15 is trashed instead. gcc seems happy to go with
  1077. that.
  1078. Oh .. and this all needs to be conditionalised so that it is
  1079. unchanged from before this commit, when compiled with older gccs
  1080. that don't support __builtin_dwarf_cfa. Furthermore, since
  1081. this header file is freestanding, it has to be independent of
  1082. config.h, and so the following conditionalisation cannot depend on
  1083. configure time checks.
  1084. Although it's not clear from
  1085. 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
  1086. this expression excludes Darwin.
  1087. .cfi directives in Darwin assembly appear to be completely
  1088. different and I haven't investigated how they work.
  1089. For even more entertainment value, note we have to use the
  1090. completely undocumented __builtin_dwarf_cfa(), which appears to
  1091. really compute the CFA, whereas __builtin_frame_address(0) claims
  1092. to but actually doesn't. See
  1093. https://bugs.kde.org/show_bug.cgi?id=243270#c47
  1094. */
  1095. #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
  1096. # define __FRAME_POINTER \
  1097. ,"r"(__builtin_dwarf_cfa())
  1098. # define VALGRIND_CFI_PROLOGUE \
  1099. "movq %%rbp, %%r15\n\t" \
  1100. "movq %2, %%rbp\n\t" \
  1101. ".cfi_remember_state\n\t" \
  1102. ".cfi_def_cfa rbp, 0\n\t"
  1103. # define VALGRIND_CFI_EPILOGUE \
  1104. "movq %%r15, %%rbp\n\t" \
  1105. ".cfi_restore_state\n\t"
  1106. #else
  1107. # define __FRAME_POINTER
  1108. # define VALGRIND_CFI_PROLOGUE
  1109. # define VALGRIND_CFI_EPILOGUE
  1110. #endif
/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
   long) == 8. */

/* NB 9 Sept 07.  There is a nasty kludge here in all these CALL_FN_
   macros.  In order not to trash the stack redzone, we need to drop
   %rsp by 128 before the hidden call, and restore afterwards.  The
   nastyness is that it is only by luck that the stack still appears
   to be unwindable during the hidden call - since then the behaviour
   of any routine using this macro does not match what the CFI data
   says.  Sigh.

   Why is this important?  Imagine that a wrapper has a stack
   allocated local, and passes to the hidden call, a pointer to it.
   Because gcc does not know about the hidden call, it may allocate
   that local in the redzone.  Unfortunately the hidden call may then
   trash it before it comes to use it.  So we must step clear of the
   redzone, for the duration of the hidden call, to make it safe.

   Probably the same problem afflicts the other redzone-style ABIs too
   (ppc64-linux, ppc32-aix5, ppc64-aix5); but for those, the stack is
   self describing (none of this CFI nonsense) so at least messing
   with the stack pointer doesn't give a danger of non-unwindable
   stack. */
  1131. #define CALL_FN_W_v(lval, orig) \
  1132. do { \
  1133. volatile OrigFn _orig = (orig); \
  1134. volatile unsigned long _argvec[1]; \
  1135. volatile unsigned long _res; \
  1136. _argvec[0] = (unsigned long)_orig.nraddr; \
  1137. __asm__ volatile( \
  1138. VALGRIND_CFI_PROLOGUE \
  1139. "subq $128,%%rsp\n\t" \
  1140. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1141. VALGRIND_CALL_NOREDIR_RAX \
  1142. "addq $128,%%rsp\n\t" \
  1143. VALGRIND_CFI_EPILOGUE \
  1144. : /*out*/ "=a" (_res) \
  1145. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1146. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1147. ); \
  1148. lval = (__typeof__(lval)) _res; \
  1149. } while (0)
  1150. #define CALL_FN_W_W(lval, orig, arg1) \
  1151. do { \
  1152. volatile OrigFn _orig = (orig); \
  1153. volatile unsigned long _argvec[2]; \
  1154. volatile unsigned long _res; \
  1155. _argvec[0] = (unsigned long)_orig.nraddr; \
  1156. _argvec[1] = (unsigned long)(arg1); \
  1157. __asm__ volatile( \
  1158. VALGRIND_CFI_PROLOGUE \
  1159. "subq $128,%%rsp\n\t" \
  1160. "movq 8(%%rax), %%rdi\n\t" \
  1161. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1162. VALGRIND_CALL_NOREDIR_RAX \
  1163. "addq $128,%%rsp\n\t" \
  1164. VALGRIND_CFI_EPILOGUE \
  1165. : /*out*/ "=a" (_res) \
  1166. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1167. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1168. ); \
  1169. lval = (__typeof__(lval)) _res; \
  1170. } while (0)
  1171. #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
  1172. do { \
  1173. volatile OrigFn _orig = (orig); \
  1174. volatile unsigned long _argvec[3]; \
  1175. volatile unsigned long _res; \
  1176. _argvec[0] = (unsigned long)_orig.nraddr; \
  1177. _argvec[1] = (unsigned long)(arg1); \
  1178. _argvec[2] = (unsigned long)(arg2); \
  1179. __asm__ volatile( \
  1180. VALGRIND_CFI_PROLOGUE \
  1181. "subq $128,%%rsp\n\t" \
  1182. "movq 16(%%rax), %%rsi\n\t" \
  1183. "movq 8(%%rax), %%rdi\n\t" \
  1184. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1185. VALGRIND_CALL_NOREDIR_RAX \
  1186. "addq $128,%%rsp\n\t" \
  1187. VALGRIND_CFI_EPILOGUE \
  1188. : /*out*/ "=a" (_res) \
  1189. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1190. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1191. ); \
  1192. lval = (__typeof__(lval)) _res; \
  1193. } while (0)
  1194. #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
  1195. do { \
  1196. volatile OrigFn _orig = (orig); \
  1197. volatile unsigned long _argvec[4]; \
  1198. volatile unsigned long _res; \
  1199. _argvec[0] = (unsigned long)_orig.nraddr; \
  1200. _argvec[1] = (unsigned long)(arg1); \
  1201. _argvec[2] = (unsigned long)(arg2); \
  1202. _argvec[3] = (unsigned long)(arg3); \
  1203. __asm__ volatile( \
  1204. VALGRIND_CFI_PROLOGUE \
  1205. "subq $128,%%rsp\n\t" \
  1206. "movq 24(%%rax), %%rdx\n\t" \
  1207. "movq 16(%%rax), %%rsi\n\t" \
  1208. "movq 8(%%rax), %%rdi\n\t" \
  1209. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1210. VALGRIND_CALL_NOREDIR_RAX \
  1211. "addq $128,%%rsp\n\t" \
  1212. VALGRIND_CFI_EPILOGUE \
  1213. : /*out*/ "=a" (_res) \
  1214. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1215. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1216. ); \
  1217. lval = (__typeof__(lval)) _res; \
  1218. } while (0)
  1219. #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
  1220. do { \
  1221. volatile OrigFn _orig = (orig); \
  1222. volatile unsigned long _argvec[5]; \
  1223. volatile unsigned long _res; \
  1224. _argvec[0] = (unsigned long)_orig.nraddr; \
  1225. _argvec[1] = (unsigned long)(arg1); \
  1226. _argvec[2] = (unsigned long)(arg2); \
  1227. _argvec[3] = (unsigned long)(arg3); \
  1228. _argvec[4] = (unsigned long)(arg4); \
  1229. __asm__ volatile( \
  1230. VALGRIND_CFI_PROLOGUE \
  1231. "subq $128,%%rsp\n\t" \
  1232. "movq 32(%%rax), %%rcx\n\t" \
  1233. "movq 24(%%rax), %%rdx\n\t" \
  1234. "movq 16(%%rax), %%rsi\n\t" \
  1235. "movq 8(%%rax), %%rdi\n\t" \
  1236. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1237. VALGRIND_CALL_NOREDIR_RAX \
  1238. "addq $128,%%rsp\n\t" \
  1239. VALGRIND_CFI_EPILOGUE \
  1240. : /*out*/ "=a" (_res) \
  1241. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1242. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1243. ); \
  1244. lval = (__typeof__(lval)) _res; \
  1245. } while (0)
  1246. #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
  1247. do { \
  1248. volatile OrigFn _orig = (orig); \
  1249. volatile unsigned long _argvec[6]; \
  1250. volatile unsigned long _res; \
  1251. _argvec[0] = (unsigned long)_orig.nraddr; \
  1252. _argvec[1] = (unsigned long)(arg1); \
  1253. _argvec[2] = (unsigned long)(arg2); \
  1254. _argvec[3] = (unsigned long)(arg3); \
  1255. _argvec[4] = (unsigned long)(arg4); \
  1256. _argvec[5] = (unsigned long)(arg5); \
  1257. __asm__ volatile( \
  1258. VALGRIND_CFI_PROLOGUE \
  1259. "subq $128,%%rsp\n\t" \
  1260. "movq 40(%%rax), %%r8\n\t" \
  1261. "movq 32(%%rax), %%rcx\n\t" \
  1262. "movq 24(%%rax), %%rdx\n\t" \
  1263. "movq 16(%%rax), %%rsi\n\t" \
  1264. "movq 8(%%rax), %%rdi\n\t" \
  1265. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1266. VALGRIND_CALL_NOREDIR_RAX \
  1267. "addq $128,%%rsp\n\t" \
  1268. VALGRIND_CFI_EPILOGUE \
  1269. : /*out*/ "=a" (_res) \
  1270. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1271. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1272. ); \
  1273. lval = (__typeof__(lval)) _res; \
  1274. } while (0)
  1275. #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
  1276. do { \
  1277. volatile OrigFn _orig = (orig); \
  1278. volatile unsigned long _argvec[7]; \
  1279. volatile unsigned long _res; \
  1280. _argvec[0] = (unsigned long)_orig.nraddr; \
  1281. _argvec[1] = (unsigned long)(arg1); \
  1282. _argvec[2] = (unsigned long)(arg2); \
  1283. _argvec[3] = (unsigned long)(arg3); \
  1284. _argvec[4] = (unsigned long)(arg4); \
  1285. _argvec[5] = (unsigned long)(arg5); \
  1286. _argvec[6] = (unsigned long)(arg6); \
  1287. __asm__ volatile( \
  1288. VALGRIND_CFI_PROLOGUE \
  1289. "subq $128,%%rsp\n\t" \
  1290. "movq 48(%%rax), %%r9\n\t" \
  1291. "movq 40(%%rax), %%r8\n\t" \
  1292. "movq 32(%%rax), %%rcx\n\t" \
  1293. "movq 24(%%rax), %%rdx\n\t" \
  1294. "movq 16(%%rax), %%rsi\n\t" \
  1295. "movq 8(%%rax), %%rdi\n\t" \
  1296. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1297. VALGRIND_CALL_NOREDIR_RAX \
  1298. "addq $128,%%rsp\n\t" \
  1299. VALGRIND_CFI_EPILOGUE \
  1300. : /*out*/ "=a" (_res) \
  1301. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1302. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1303. ); \
  1304. lval = (__typeof__(lval)) _res; \
  1305. } while (0)
  1306. #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1307. arg7) \
  1308. do { \
  1309. volatile OrigFn _orig = (orig); \
  1310. volatile unsigned long _argvec[8]; \
  1311. volatile unsigned long _res; \
  1312. _argvec[0] = (unsigned long)_orig.nraddr; \
  1313. _argvec[1] = (unsigned long)(arg1); \
  1314. _argvec[2] = (unsigned long)(arg2); \
  1315. _argvec[3] = (unsigned long)(arg3); \
  1316. _argvec[4] = (unsigned long)(arg4); \
  1317. _argvec[5] = (unsigned long)(arg5); \
  1318. _argvec[6] = (unsigned long)(arg6); \
  1319. _argvec[7] = (unsigned long)(arg7); \
  1320. __asm__ volatile( \
  1321. VALGRIND_CFI_PROLOGUE \
  1322. "subq $136,%%rsp\n\t" \
  1323. "pushq 56(%%rax)\n\t" \
  1324. "movq 48(%%rax), %%r9\n\t" \
  1325. "movq 40(%%rax), %%r8\n\t" \
  1326. "movq 32(%%rax), %%rcx\n\t" \
  1327. "movq 24(%%rax), %%rdx\n\t" \
  1328. "movq 16(%%rax), %%rsi\n\t" \
  1329. "movq 8(%%rax), %%rdi\n\t" \
  1330. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1331. VALGRIND_CALL_NOREDIR_RAX \
  1332. "addq $8, %%rsp\n" \
  1333. "addq $136,%%rsp\n\t" \
  1334. VALGRIND_CFI_EPILOGUE \
  1335. : /*out*/ "=a" (_res) \
  1336. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1337. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1338. ); \
  1339. lval = (__typeof__(lval)) _res; \
  1340. } while (0)
  1341. #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1342. arg7,arg8) \
  1343. do { \
  1344. volatile OrigFn _orig = (orig); \
  1345. volatile unsigned long _argvec[9]; \
  1346. volatile unsigned long _res; \
  1347. _argvec[0] = (unsigned long)_orig.nraddr; \
  1348. _argvec[1] = (unsigned long)(arg1); \
  1349. _argvec[2] = (unsigned long)(arg2); \
  1350. _argvec[3] = (unsigned long)(arg3); \
  1351. _argvec[4] = (unsigned long)(arg4); \
  1352. _argvec[5] = (unsigned long)(arg5); \
  1353. _argvec[6] = (unsigned long)(arg6); \
  1354. _argvec[7] = (unsigned long)(arg7); \
  1355. _argvec[8] = (unsigned long)(arg8); \
  1356. __asm__ volatile( \
  1357. VALGRIND_CFI_PROLOGUE \
  1358. "subq $128,%%rsp\n\t" \
  1359. "pushq 64(%%rax)\n\t" \
  1360. "pushq 56(%%rax)\n\t" \
  1361. "movq 48(%%rax), %%r9\n\t" \
  1362. "movq 40(%%rax), %%r8\n\t" \
  1363. "movq 32(%%rax), %%rcx\n\t" \
  1364. "movq 24(%%rax), %%rdx\n\t" \
  1365. "movq 16(%%rax), %%rsi\n\t" \
  1366. "movq 8(%%rax), %%rdi\n\t" \
  1367. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1368. VALGRIND_CALL_NOREDIR_RAX \
  1369. "addq $16, %%rsp\n" \
  1370. "addq $128,%%rsp\n\t" \
  1371. VALGRIND_CFI_EPILOGUE \
  1372. : /*out*/ "=a" (_res) \
  1373. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1374. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1375. ); \
  1376. lval = (__typeof__(lval)) _res; \
  1377. } while (0)
  1378. #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1379. arg7,arg8,arg9) \
  1380. do { \
  1381. volatile OrigFn _orig = (orig); \
  1382. volatile unsigned long _argvec[10]; \
  1383. volatile unsigned long _res; \
  1384. _argvec[0] = (unsigned long)_orig.nraddr; \
  1385. _argvec[1] = (unsigned long)(arg1); \
  1386. _argvec[2] = (unsigned long)(arg2); \
  1387. _argvec[3] = (unsigned long)(arg3); \
  1388. _argvec[4] = (unsigned long)(arg4); \
  1389. _argvec[5] = (unsigned long)(arg5); \
  1390. _argvec[6] = (unsigned long)(arg6); \
  1391. _argvec[7] = (unsigned long)(arg7); \
  1392. _argvec[8] = (unsigned long)(arg8); \
  1393. _argvec[9] = (unsigned long)(arg9); \
  1394. __asm__ volatile( \
  1395. VALGRIND_CFI_PROLOGUE \
  1396. "subq $136,%%rsp\n\t" \
  1397. "pushq 72(%%rax)\n\t" \
  1398. "pushq 64(%%rax)\n\t" \
  1399. "pushq 56(%%rax)\n\t" \
  1400. "movq 48(%%rax), %%r9\n\t" \
  1401. "movq 40(%%rax), %%r8\n\t" \
  1402. "movq 32(%%rax), %%rcx\n\t" \
  1403. "movq 24(%%rax), %%rdx\n\t" \
  1404. "movq 16(%%rax), %%rsi\n\t" \
  1405. "movq 8(%%rax), %%rdi\n\t" \
  1406. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1407. VALGRIND_CALL_NOREDIR_RAX \
  1408. "addq $24, %%rsp\n" \
  1409. "addq $136,%%rsp\n\t" \
  1410. VALGRIND_CFI_EPILOGUE \
  1411. : /*out*/ "=a" (_res) \
  1412. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1413. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1414. ); \
  1415. lval = (__typeof__(lval)) _res; \
  1416. } while (0)
  1417. #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1418. arg7,arg8,arg9,arg10) \
  1419. do { \
  1420. volatile OrigFn _orig = (orig); \
  1421. volatile unsigned long _argvec[11]; \
  1422. volatile unsigned long _res; \
  1423. _argvec[0] = (unsigned long)_orig.nraddr; \
  1424. _argvec[1] = (unsigned long)(arg1); \
  1425. _argvec[2] = (unsigned long)(arg2); \
  1426. _argvec[3] = (unsigned long)(arg3); \
  1427. _argvec[4] = (unsigned long)(arg4); \
  1428. _argvec[5] = (unsigned long)(arg5); \
  1429. _argvec[6] = (unsigned long)(arg6); \
  1430. _argvec[7] = (unsigned long)(arg7); \
  1431. _argvec[8] = (unsigned long)(arg8); \
  1432. _argvec[9] = (unsigned long)(arg9); \
  1433. _argvec[10] = (unsigned long)(arg10); \
  1434. __asm__ volatile( \
  1435. VALGRIND_CFI_PROLOGUE \
  1436. "subq $128,%%rsp\n\t" \
  1437. "pushq 80(%%rax)\n\t" \
  1438. "pushq 72(%%rax)\n\t" \
  1439. "pushq 64(%%rax)\n\t" \
  1440. "pushq 56(%%rax)\n\t" \
  1441. "movq 48(%%rax), %%r9\n\t" \
  1442. "movq 40(%%rax), %%r8\n\t" \
  1443. "movq 32(%%rax), %%rcx\n\t" \
  1444. "movq 24(%%rax), %%rdx\n\t" \
  1445. "movq 16(%%rax), %%rsi\n\t" \
  1446. "movq 8(%%rax), %%rdi\n\t" \
  1447. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1448. VALGRIND_CALL_NOREDIR_RAX \
  1449. "addq $32, %%rsp\n" \
  1450. "addq $128,%%rsp\n\t" \
  1451. VALGRIND_CFI_EPILOGUE \
  1452. : /*out*/ "=a" (_res) \
  1453. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1454. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1455. ); \
  1456. lval = (__typeof__(lval)) _res; \
  1457. } while (0)
  1458. #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1459. arg7,arg8,arg9,arg10,arg11) \
  1460. do { \
  1461. volatile OrigFn _orig = (orig); \
  1462. volatile unsigned long _argvec[12]; \
  1463. volatile unsigned long _res; \
  1464. _argvec[0] = (unsigned long)_orig.nraddr; \
  1465. _argvec[1] = (unsigned long)(arg1); \
  1466. _argvec[2] = (unsigned long)(arg2); \
  1467. _argvec[3] = (unsigned long)(arg3); \
  1468. _argvec[4] = (unsigned long)(arg4); \
  1469. _argvec[5] = (unsigned long)(arg5); \
  1470. _argvec[6] = (unsigned long)(arg6); \
  1471. _argvec[7] = (unsigned long)(arg7); \
  1472. _argvec[8] = (unsigned long)(arg8); \
  1473. _argvec[9] = (unsigned long)(arg9); \
  1474. _argvec[10] = (unsigned long)(arg10); \
  1475. _argvec[11] = (unsigned long)(arg11); \
  1476. __asm__ volatile( \
  1477. VALGRIND_CFI_PROLOGUE \
  1478. "subq $136,%%rsp\n\t" \
  1479. "pushq 88(%%rax)\n\t" \
  1480. "pushq 80(%%rax)\n\t" \
  1481. "pushq 72(%%rax)\n\t" \
  1482. "pushq 64(%%rax)\n\t" \
  1483. "pushq 56(%%rax)\n\t" \
  1484. "movq 48(%%rax), %%r9\n\t" \
  1485. "movq 40(%%rax), %%r8\n\t" \
  1486. "movq 32(%%rax), %%rcx\n\t" \
  1487. "movq 24(%%rax), %%rdx\n\t" \
  1488. "movq 16(%%rax), %%rsi\n\t" \
  1489. "movq 8(%%rax), %%rdi\n\t" \
  1490. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1491. VALGRIND_CALL_NOREDIR_RAX \
  1492. "addq $40, %%rsp\n" \
  1493. "addq $136,%%rsp\n\t" \
  1494. VALGRIND_CFI_EPILOGUE \
  1495. : /*out*/ "=a" (_res) \
  1496. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1497. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1498. ); \
  1499. lval = (__typeof__(lval)) _res; \
  1500. } while (0)
  1501. #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1502. arg7,arg8,arg9,arg10,arg11,arg12) \
  1503. do { \
  1504. volatile OrigFn _orig = (orig); \
  1505. volatile unsigned long _argvec[13]; \
  1506. volatile unsigned long _res; \
  1507. _argvec[0] = (unsigned long)_orig.nraddr; \
  1508. _argvec[1] = (unsigned long)(arg1); \
  1509. _argvec[2] = (unsigned long)(arg2); \
  1510. _argvec[3] = (unsigned long)(arg3); \
  1511. _argvec[4] = (unsigned long)(arg4); \
  1512. _argvec[5] = (unsigned long)(arg5); \
  1513. _argvec[6] = (unsigned long)(arg6); \
  1514. _argvec[7] = (unsigned long)(arg7); \
  1515. _argvec[8] = (unsigned long)(arg8); \
  1516. _argvec[9] = (unsigned long)(arg9); \
  1517. _argvec[10] = (unsigned long)(arg10); \
  1518. _argvec[11] = (unsigned long)(arg11); \
  1519. _argvec[12] = (unsigned long)(arg12); \
  1520. __asm__ volatile( \
  1521. VALGRIND_CFI_PROLOGUE \
  1522. "subq $128,%%rsp\n\t" \
  1523. "pushq 96(%%rax)\n\t" \
  1524. "pushq 88(%%rax)\n\t" \
  1525. "pushq 80(%%rax)\n\t" \
  1526. "pushq 72(%%rax)\n\t" \
  1527. "pushq 64(%%rax)\n\t" \
  1528. "pushq 56(%%rax)\n\t" \
  1529. "movq 48(%%rax), %%r9\n\t" \
  1530. "movq 40(%%rax), %%r8\n\t" \
  1531. "movq 32(%%rax), %%rcx\n\t" \
  1532. "movq 24(%%rax), %%rdx\n\t" \
  1533. "movq 16(%%rax), %%rsi\n\t" \
  1534. "movq 8(%%rax), %%rdi\n\t" \
  1535. "movq (%%rax), %%rax\n\t" /* target->%rax */ \
  1536. VALGRIND_CALL_NOREDIR_RAX \
  1537. "addq $48, %%rsp\n" \
  1538. "addq $128,%%rsp\n\t" \
  1539. VALGRIND_CFI_EPILOGUE \
  1540. : /*out*/ "=a" (_res) \
  1541. : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
  1542. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
  1543. ); \
  1544. lval = (__typeof__(lval)) _res; \
  1545. } while (0)
  1546. #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
/* ------------------------ ppc32-linux ------------------------ */
  1548. #if defined(PLAT_ppc32_linux)
/* This is useful for finding out about the on-stack stuff:

   extern int f9  ( int,int,int,int,int,int,int,int,int );
   extern int f10 ( int,int,int,int,int,int,int,int,int,int );
   extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
   extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );

   int g9 ( void ) {
      return f9(11,22,33,44,55,66,77,88,99);
   }
   int g10 ( void ) {
      return f10(11,22,33,44,55,66,77,88,99,110);
   }
   int g11 ( void ) {
      return f11(11,22,33,44,55,66,77,88,99,110,121);
   }
   int g12 ( void ) {
      return f12(11,22,33,44,55,66,77,88,99,110,121,132);
   }
*/

/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
  1568. /* These regs are trashed by the hidden call. */
  1569. #define __CALLER_SAVED_REGS \
  1570. "lr", "ctr", "xer", \
  1571. "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
  1572. "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
  1573. "r11", "r12", "r13"
/* These CALL_FN_ macros assume that on ppc32-linux,
   sizeof(unsigned long) == 4. */
  1576. #define CALL_FN_W_v(lval, orig) \
  1577. do { \
  1578. volatile OrigFn _orig = (orig); \
  1579. volatile unsigned long _argvec[1]; \
  1580. volatile unsigned long _res; \
  1581. _argvec[0] = (unsigned long)_orig.nraddr; \
  1582. __asm__ volatile( \
  1583. "mr 11,%1\n\t" \
  1584. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1585. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1586. "mr %0,3" \
  1587. : /*out*/ "=r" (_res) \
  1588. : /*in*/ "r" (&_argvec[0]) \
  1589. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1590. ); \
  1591. lval = (__typeof__(lval)) _res; \
  1592. } while (0)
  1593. #define CALL_FN_W_W(lval, orig, arg1) \
  1594. do { \
  1595. volatile OrigFn _orig = (orig); \
  1596. volatile unsigned long _argvec[2]; \
  1597. volatile unsigned long _res; \
  1598. _argvec[0] = (unsigned long)_orig.nraddr; \
  1599. _argvec[1] = (unsigned long)arg1; \
  1600. __asm__ volatile( \
  1601. "mr 11,%1\n\t" \
  1602. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1603. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1604. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1605. "mr %0,3" \
  1606. : /*out*/ "=r" (_res) \
  1607. : /*in*/ "r" (&_argvec[0]) \
  1608. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1609. ); \
  1610. lval = (__typeof__(lval)) _res; \
  1611. } while (0)
  1612. #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
  1613. do { \
  1614. volatile OrigFn _orig = (orig); \
  1615. volatile unsigned long _argvec[3]; \
  1616. volatile unsigned long _res; \
  1617. _argvec[0] = (unsigned long)_orig.nraddr; \
  1618. _argvec[1] = (unsigned long)arg1; \
  1619. _argvec[2] = (unsigned long)arg2; \
  1620. __asm__ volatile( \
  1621. "mr 11,%1\n\t" \
  1622. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1623. "lwz 4,8(11)\n\t" \
  1624. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1625. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1626. "mr %0,3" \
  1627. : /*out*/ "=r" (_res) \
  1628. : /*in*/ "r" (&_argvec[0]) \
  1629. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1630. ); \
  1631. lval = (__typeof__(lval)) _res; \
  1632. } while (0)
  1633. #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
  1634. do { \
  1635. volatile OrigFn _orig = (orig); \
  1636. volatile unsigned long _argvec[4]; \
  1637. volatile unsigned long _res; \
  1638. _argvec[0] = (unsigned long)_orig.nraddr; \
  1639. _argvec[1] = (unsigned long)arg1; \
  1640. _argvec[2] = (unsigned long)arg2; \
  1641. _argvec[3] = (unsigned long)arg3; \
  1642. __asm__ volatile( \
  1643. "mr 11,%1\n\t" \
  1644. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1645. "lwz 4,8(11)\n\t" \
  1646. "lwz 5,12(11)\n\t" \
  1647. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1648. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1649. "mr %0,3" \
  1650. : /*out*/ "=r" (_res) \
  1651. : /*in*/ "r" (&_argvec[0]) \
  1652. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1653. ); \
  1654. lval = (__typeof__(lval)) _res; \
  1655. } while (0)
  1656. #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
  1657. do { \
  1658. volatile OrigFn _orig = (orig); \
  1659. volatile unsigned long _argvec[5]; \
  1660. volatile unsigned long _res; \
  1661. _argvec[0] = (unsigned long)_orig.nraddr; \
  1662. _argvec[1] = (unsigned long)arg1; \
  1663. _argvec[2] = (unsigned long)arg2; \
  1664. _argvec[3] = (unsigned long)arg3; \
  1665. _argvec[4] = (unsigned long)arg4; \
  1666. __asm__ volatile( \
  1667. "mr 11,%1\n\t" \
  1668. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1669. "lwz 4,8(11)\n\t" \
  1670. "lwz 5,12(11)\n\t" \
  1671. "lwz 6,16(11)\n\t" /* arg4->r6 */ \
  1672. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1673. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1674. "mr %0,3" \
  1675. : /*out*/ "=r" (_res) \
  1676. : /*in*/ "r" (&_argvec[0]) \
  1677. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1678. ); \
  1679. lval = (__typeof__(lval)) _res; \
  1680. } while (0)
  1681. #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
  1682. do { \
  1683. volatile OrigFn _orig = (orig); \
  1684. volatile unsigned long _argvec[6]; \
  1685. volatile unsigned long _res; \
  1686. _argvec[0] = (unsigned long)_orig.nraddr; \
  1687. _argvec[1] = (unsigned long)arg1; \
  1688. _argvec[2] = (unsigned long)arg2; \
  1689. _argvec[3] = (unsigned long)arg3; \
  1690. _argvec[4] = (unsigned long)arg4; \
  1691. _argvec[5] = (unsigned long)arg5; \
  1692. __asm__ volatile( \
  1693. "mr 11,%1\n\t" \
  1694. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1695. "lwz 4,8(11)\n\t" \
  1696. "lwz 5,12(11)\n\t" \
  1697. "lwz 6,16(11)\n\t" /* arg4->r6 */ \
  1698. "lwz 7,20(11)\n\t" \
  1699. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1700. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1701. "mr %0,3" \
  1702. : /*out*/ "=r" (_res) \
  1703. : /*in*/ "r" (&_argvec[0]) \
  1704. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1705. ); \
  1706. lval = (__typeof__(lval)) _res; \
  1707. } while (0)
  1708. #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
  1709. do { \
  1710. volatile OrigFn _orig = (orig); \
  1711. volatile unsigned long _argvec[7]; \
  1712. volatile unsigned long _res; \
  1713. _argvec[0] = (unsigned long)_orig.nraddr; \
  1714. _argvec[1] = (unsigned long)arg1; \
  1715. _argvec[2] = (unsigned long)arg2; \
  1716. _argvec[3] = (unsigned long)arg3; \
  1717. _argvec[4] = (unsigned long)arg4; \
  1718. _argvec[5] = (unsigned long)arg5; \
  1719. _argvec[6] = (unsigned long)arg6; \
  1720. __asm__ volatile( \
  1721. "mr 11,%1\n\t" \
  1722. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1723. "lwz 4,8(11)\n\t" \
  1724. "lwz 5,12(11)\n\t" \
  1725. "lwz 6,16(11)\n\t" /* arg4->r6 */ \
  1726. "lwz 7,20(11)\n\t" \
  1727. "lwz 8,24(11)\n\t" \
  1728. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1729. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1730. "mr %0,3" \
  1731. : /*out*/ "=r" (_res) \
  1732. : /*in*/ "r" (&_argvec[0]) \
  1733. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1734. ); \
  1735. lval = (__typeof__(lval)) _res; \
  1736. } while (0)
  1737. #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1738. arg7) \
  1739. do { \
  1740. volatile OrigFn _orig = (orig); \
  1741. volatile unsigned long _argvec[8]; \
  1742. volatile unsigned long _res; \
  1743. _argvec[0] = (unsigned long)_orig.nraddr; \
  1744. _argvec[1] = (unsigned long)arg1; \
  1745. _argvec[2] = (unsigned long)arg2; \
  1746. _argvec[3] = (unsigned long)arg3; \
  1747. _argvec[4] = (unsigned long)arg4; \
  1748. _argvec[5] = (unsigned long)arg5; \
  1749. _argvec[6] = (unsigned long)arg6; \
  1750. _argvec[7] = (unsigned long)arg7; \
  1751. __asm__ volatile( \
  1752. "mr 11,%1\n\t" \
  1753. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1754. "lwz 4,8(11)\n\t" \
  1755. "lwz 5,12(11)\n\t" \
  1756. "lwz 6,16(11)\n\t" /* arg4->r6 */ \
  1757. "lwz 7,20(11)\n\t" \
  1758. "lwz 8,24(11)\n\t" \
  1759. "lwz 9,28(11)\n\t" \
  1760. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1761. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1762. "mr %0,3" \
  1763. : /*out*/ "=r" (_res) \
  1764. : /*in*/ "r" (&_argvec[0]) \
  1765. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1766. ); \
  1767. lval = (__typeof__(lval)) _res; \
  1768. } while (0)
  1769. #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1770. arg7,arg8) \
  1771. do { \
  1772. volatile OrigFn _orig = (orig); \
  1773. volatile unsigned long _argvec[9]; \
  1774. volatile unsigned long _res; \
  1775. _argvec[0] = (unsigned long)_orig.nraddr; \
  1776. _argvec[1] = (unsigned long)arg1; \
  1777. _argvec[2] = (unsigned long)arg2; \
  1778. _argvec[3] = (unsigned long)arg3; \
  1779. _argvec[4] = (unsigned long)arg4; \
  1780. _argvec[5] = (unsigned long)arg5; \
  1781. _argvec[6] = (unsigned long)arg6; \
  1782. _argvec[7] = (unsigned long)arg7; \
  1783. _argvec[8] = (unsigned long)arg8; \
  1784. __asm__ volatile( \
  1785. "mr 11,%1\n\t" \
  1786. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1787. "lwz 4,8(11)\n\t" \
  1788. "lwz 5,12(11)\n\t" \
  1789. "lwz 6,16(11)\n\t" /* arg4->r6 */ \
  1790. "lwz 7,20(11)\n\t" \
  1791. "lwz 8,24(11)\n\t" \
  1792. "lwz 9,28(11)\n\t" \
  1793. "lwz 10,32(11)\n\t" /* arg8->r10 */ \
  1794. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1795. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1796. "mr %0,3" \
  1797. : /*out*/ "=r" (_res) \
  1798. : /*in*/ "r" (&_argvec[0]) \
  1799. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1800. ); \
  1801. lval = (__typeof__(lval)) _res; \
  1802. } while (0)
  1803. #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1804. arg7,arg8,arg9) \
  1805. do { \
  1806. volatile OrigFn _orig = (orig); \
  1807. volatile unsigned long _argvec[10]; \
  1808. volatile unsigned long _res; \
  1809. _argvec[0] = (unsigned long)_orig.nraddr; \
  1810. _argvec[1] = (unsigned long)arg1; \
  1811. _argvec[2] = (unsigned long)arg2; \
  1812. _argvec[3] = (unsigned long)arg3; \
  1813. _argvec[4] = (unsigned long)arg4; \
  1814. _argvec[5] = (unsigned long)arg5; \
  1815. _argvec[6] = (unsigned long)arg6; \
  1816. _argvec[7] = (unsigned long)arg7; \
  1817. _argvec[8] = (unsigned long)arg8; \
  1818. _argvec[9] = (unsigned long)arg9; \
  1819. __asm__ volatile( \
  1820. "mr 11,%1\n\t" \
  1821. "addi 1,1,-16\n\t" \
  1822. /* arg9 */ \
  1823. "lwz 3,36(11)\n\t" \
  1824. "stw 3,8(1)\n\t" \
  1825. /* args1-8 */ \
  1826. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1827. "lwz 4,8(11)\n\t" \
  1828. "lwz 5,12(11)\n\t" \
  1829. "lwz 6,16(11)\n\t" /* arg4->r6 */ \
  1830. "lwz 7,20(11)\n\t" \
  1831. "lwz 8,24(11)\n\t" \
  1832. "lwz 9,28(11)\n\t" \
  1833. "lwz 10,32(11)\n\t" /* arg8->r10 */ \
  1834. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1835. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1836. "addi 1,1,16\n\t" \
  1837. "mr %0,3" \
  1838. : /*out*/ "=r" (_res) \
  1839. : /*in*/ "r" (&_argvec[0]) \
  1840. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1841. ); \
  1842. lval = (__typeof__(lval)) _res; \
  1843. } while (0)
  1844. #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1845. arg7,arg8,arg9,arg10) \
  1846. do { \
  1847. volatile OrigFn _orig = (orig); \
  1848. volatile unsigned long _argvec[11]; \
  1849. volatile unsigned long _res; \
  1850. _argvec[0] = (unsigned long)_orig.nraddr; \
  1851. _argvec[1] = (unsigned long)arg1; \
  1852. _argvec[2] = (unsigned long)arg2; \
  1853. _argvec[3] = (unsigned long)arg3; \
  1854. _argvec[4] = (unsigned long)arg4; \
  1855. _argvec[5] = (unsigned long)arg5; \
  1856. _argvec[6] = (unsigned long)arg6; \
  1857. _argvec[7] = (unsigned long)arg7; \
  1858. _argvec[8] = (unsigned long)arg8; \
  1859. _argvec[9] = (unsigned long)arg9; \
  1860. _argvec[10] = (unsigned long)arg10; \
  1861. __asm__ volatile( \
  1862. "mr 11,%1\n\t" \
  1863. "addi 1,1,-16\n\t" \
  1864. /* arg10 */ \
  1865. "lwz 3,40(11)\n\t" \
  1866. "stw 3,12(1)\n\t" \
  1867. /* arg9 */ \
  1868. "lwz 3,36(11)\n\t" \
  1869. "stw 3,8(1)\n\t" \
  1870. /* args1-8 */ \
  1871. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1872. "lwz 4,8(11)\n\t" \
  1873. "lwz 5,12(11)\n\t" \
  1874. "lwz 6,16(11)\n\t" /* arg4->r6 */ \
  1875. "lwz 7,20(11)\n\t" \
  1876. "lwz 8,24(11)\n\t" \
  1877. "lwz 9,28(11)\n\t" \
  1878. "lwz 10,32(11)\n\t" /* arg8->r10 */ \
  1879. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1880. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1881. "addi 1,1,16\n\t" \
  1882. "mr %0,3" \
  1883. : /*out*/ "=r" (_res) \
  1884. : /*in*/ "r" (&_argvec[0]) \
  1885. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1886. ); \
  1887. lval = (__typeof__(lval)) _res; \
  1888. } while (0)
  /* ppc32: invoke an 11-argument function.  Same scheme as
     CALL_FN_W_10W, but a 32-byte frame is opened so args 9-11 fit in
     the stack parameter area (offsets 8, 12, 16 from the new r1). */
  1889. #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1890. arg7,arg8,arg9,arg10,arg11) \
  1891. do { \
  1892. volatile OrigFn _orig = (orig); \
  1893. volatile unsigned long _argvec[12]; \
  1894. volatile unsigned long _res; \
  1895. _argvec[0] = (unsigned long)_orig.nraddr; \
  1896. _argvec[1] = (unsigned long)arg1; \
  1897. _argvec[2] = (unsigned long)arg2; \
  1898. _argvec[3] = (unsigned long)arg3; \
  1899. _argvec[4] = (unsigned long)arg4; \
  1900. _argvec[5] = (unsigned long)arg5; \
  1901. _argvec[6] = (unsigned long)arg6; \
  1902. _argvec[7] = (unsigned long)arg7; \
  1903. _argvec[8] = (unsigned long)arg8; \
  1904. _argvec[9] = (unsigned long)arg9; \
  1905. _argvec[10] = (unsigned long)arg10; \
  1906. _argvec[11] = (unsigned long)arg11; \
  1907. __asm__ volatile( \
  1908. "mr 11,%1\n\t" \
  1909. "addi 1,1,-32\n\t" \
  1910. /* arg11 */ \
  1911. "lwz 3,44(11)\n\t" \
  1912. "stw 3,16(1)\n\t" \
  1913. /* arg10 */ \
  1914. "lwz 3,40(11)\n\t" \
  1915. "stw 3,12(1)\n\t" \
  1916. /* arg9 */ \
  1917. "lwz 3,36(11)\n\t" \
  1918. "stw 3,8(1)\n\t" \
  1919. /* args1-8 */ \
  1920. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1921. "lwz 4,8(11)\n\t" \
  1922. "lwz 5,12(11)\n\t" \
  1923. "lwz 6,16(11)\n\t" /* arg4->r6 */ \
  1924. "lwz 7,20(11)\n\t" \
  1925. "lwz 8,24(11)\n\t" \
  1926. "lwz 9,28(11)\n\t" \
  1927. "lwz 10,32(11)\n\t" /* arg8->r10 */ \
  1928. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1929. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1930. "addi 1,1,32\n\t" \
  1931. "mr %0,3" \
  1932. : /*out*/ "=r" (_res) \
  1933. : /*in*/ "r" (&_argvec[0]) \
  1934. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1935. ); \
  1936. lval = (__typeof__(lval)) _res; \
  1937. } while (0)
  /* ppc32: invoke a 12-argument function.  Same scheme as
     CALL_FN_W_11W; args 9-12 are stored at offsets 8..20 of the
     temporary 32-byte frame. */
  1938. #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  1939. arg7,arg8,arg9,arg10,arg11,arg12) \
  1940. do { \
  1941. volatile OrigFn _orig = (orig); \
  1942. volatile unsigned long _argvec[13]; \
  1943. volatile unsigned long _res; \
  1944. _argvec[0] = (unsigned long)_orig.nraddr; \
  1945. _argvec[1] = (unsigned long)arg1; \
  1946. _argvec[2] = (unsigned long)arg2; \
  1947. _argvec[3] = (unsigned long)arg3; \
  1948. _argvec[4] = (unsigned long)arg4; \
  1949. _argvec[5] = (unsigned long)arg5; \
  1950. _argvec[6] = (unsigned long)arg6; \
  1951. _argvec[7] = (unsigned long)arg7; \
  1952. _argvec[8] = (unsigned long)arg8; \
  1953. _argvec[9] = (unsigned long)arg9; \
  1954. _argvec[10] = (unsigned long)arg10; \
  1955. _argvec[11] = (unsigned long)arg11; \
  1956. _argvec[12] = (unsigned long)arg12; \
  1957. __asm__ volatile( \
  1958. "mr 11,%1\n\t" \
  1959. "addi 1,1,-32\n\t" \
  1960. /* arg12 */ \
  1961. "lwz 3,48(11)\n\t" \
  1962. "stw 3,20(1)\n\t" \
  1963. /* arg11 */ \
  1964. "lwz 3,44(11)\n\t" \
  1965. "stw 3,16(1)\n\t" \
  1966. /* arg10 */ \
  1967. "lwz 3,40(11)\n\t" \
  1968. "stw 3,12(1)\n\t" \
  1969. /* arg9 */ \
  1970. "lwz 3,36(11)\n\t" \
  1971. "stw 3,8(1)\n\t" \
  1972. /* args1-8 */ \
  1973. "lwz 3,4(11)\n\t" /* arg1->r3 */ \
  1974. "lwz 4,8(11)\n\t" \
  1975. "lwz 5,12(11)\n\t" \
  1976. "lwz 6,16(11)\n\t" /* arg4->r6 */ \
  1977. "lwz 7,20(11)\n\t" \
  1978. "lwz 8,24(11)\n\t" \
  1979. "lwz 9,28(11)\n\t" \
  1980. "lwz 10,32(11)\n\t" /* arg8->r10 */ \
  1981. "lwz 11,0(11)\n\t" /* target->r11 */ \
  1982. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  1983. "addi 1,1,32\n\t" \
  1984. "mr %0,3" \
  1985. : /*out*/ "=r" (_res) \
  1986. : /*in*/ "r" (&_argvec[0]) \
  1987. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  1988. ); \
  1989. lval = (__typeof__(lval)) _res; \
  1990. } while (0)
  1991. #endif /* PLAT_ppc32_linux */
  1992. /* ------------------------ ppc64-linux ------------------------ */
  1993. #if defined(PLAT_ppc64_linux)
  1994. /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
  1995. /* These regs are trashed by the hidden call. */
  /* Clobber list handed to every asm block in the ppc64 section below:
     the hidden call may trash lr/ctr/xer, all condition-register
     fields, and the volatile GPRs.  NOTE(review): r13 also appears
     here even though it is conventionally reserved on ppc64 — kept
     as-is; confirm against upstream before changing. */
  1996. #define __CALLER_SAVED_REGS \
  1997. "lr", "ctr", "xer", \
  1998. "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
  1999. "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
  2000. "r11", "r12", "r13"
  2001. /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
  2002. long) == 8. */
  /* ppc64: invoke a 0-argument function.  The asm receives
     &_argvec[2] in %1 (so r11 points at the target address); the
     target's TOC pointer sits at -8(r11) (_argvec[1]) and the
     caller's r2 is saved in _argvec[0] via -16(r11), then restored
     after the call.  Result is taken from r3. */
  2003. #define CALL_FN_W_v(lval, orig) \
  2004. do { \
  2005. volatile OrigFn _orig = (orig); \
  2006. volatile unsigned long _argvec[3+0]; \
  2007. volatile unsigned long _res; \
  2008. /* _argvec[0] holds current r2 across the call */ \
  2009. _argvec[1] = (unsigned long)_orig.r2; \
  2010. _argvec[2] = (unsigned long)_orig.nraddr; \
  2011. __asm__ volatile( \
  2012. "mr 11,%1\n\t" \
  2013. "std 2,-16(11)\n\t" /* save tocptr */ \
  2014. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2015. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2016. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2017. "mr 11,%1\n\t" \
  2018. "mr %0,3\n\t" \
  2019. "ld 2,-16(11)" /* restore tocptr */ \
  2020. : /*out*/ "=r" (_res) \
  2021. : /*in*/ "r" (&_argvec[2]) \
  2022. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2023. ); \
  2024. lval = (__typeof__(lval)) _res; \
  2025. } while (0)
  /* ppc64: invoke a 1-argument function.  Same TOC save/restore
     dance as CALL_FN_W_v; arg1 is loaded from 8(r11) into r3. */
  2026. #define CALL_FN_W_W(lval, orig, arg1) \
  2027. do { \
  2028. volatile OrigFn _orig = (orig); \
  2029. volatile unsigned long _argvec[3+1]; \
  2030. volatile unsigned long _res; \
  2031. /* _argvec[0] holds current r2 across the call */ \
  2032. _argvec[1] = (unsigned long)_orig.r2; \
  2033. _argvec[2] = (unsigned long)_orig.nraddr; \
  2034. _argvec[2+1] = (unsigned long)arg1; \
  2035. __asm__ volatile( \
  2036. "mr 11,%1\n\t" \
  2037. "std 2,-16(11)\n\t" /* save tocptr */ \
  2038. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2039. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2040. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2041. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2042. "mr 11,%1\n\t" \
  2043. "mr %0,3\n\t" \
  2044. "ld 2,-16(11)" /* restore tocptr */ \
  2045. : /*out*/ "=r" (_res) \
  2046. : /*in*/ "r" (&_argvec[2]) \
  2047. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2048. ); \
  2049. lval = (__typeof__(lval)) _res; \
  2050. } while (0)
  /* ppc64: invoke a 2-argument function; args go in r3-r4. */
  2051. #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
  2052. do { \
  2053. volatile OrigFn _orig = (orig); \
  2054. volatile unsigned long _argvec[3+2]; \
  2055. volatile unsigned long _res; \
  2056. /* _argvec[0] holds current r2 across the call */ \
  2057. _argvec[1] = (unsigned long)_orig.r2; \
  2058. _argvec[2] = (unsigned long)_orig.nraddr; \
  2059. _argvec[2+1] = (unsigned long)arg1; \
  2060. _argvec[2+2] = (unsigned long)arg2; \
  2061. __asm__ volatile( \
  2062. "mr 11,%1\n\t" \
  2063. "std 2,-16(11)\n\t" /* save tocptr */ \
  2064. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2065. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2066. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2067. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2068. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2069. "mr 11,%1\n\t" \
  2070. "mr %0,3\n\t" \
  2071. "ld 2,-16(11)" /* restore tocptr */ \
  2072. : /*out*/ "=r" (_res) \
  2073. : /*in*/ "r" (&_argvec[2]) \
  2074. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2075. ); \
  2076. lval = (__typeof__(lval)) _res; \
  2077. } while (0)
  /* ppc64: invoke a 3-argument function; args go in r3-r5. */
  2078. #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
  2079. do { \
  2080. volatile OrigFn _orig = (orig); \
  2081. volatile unsigned long _argvec[3+3]; \
  2082. volatile unsigned long _res; \
  2083. /* _argvec[0] holds current r2 across the call */ \
  2084. _argvec[1] = (unsigned long)_orig.r2; \
  2085. _argvec[2] = (unsigned long)_orig.nraddr; \
  2086. _argvec[2+1] = (unsigned long)arg1; \
  2087. _argvec[2+2] = (unsigned long)arg2; \
  2088. _argvec[2+3] = (unsigned long)arg3; \
  2089. __asm__ volatile( \
  2090. "mr 11,%1\n\t" \
  2091. "std 2,-16(11)\n\t" /* save tocptr */ \
  2092. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2093. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2094. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2095. "ld 5, 24(11)\n\t" /* arg3->r5 */ \
  2096. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2097. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2098. "mr 11,%1\n\t" \
  2099. "mr %0,3\n\t" \
  2100. "ld 2,-16(11)" /* restore tocptr */ \
  2101. : /*out*/ "=r" (_res) \
  2102. : /*in*/ "r" (&_argvec[2]) \
  2103. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2104. ); \
  2105. lval = (__typeof__(lval)) _res; \
  2106. } while (0)
  /* ppc64: invoke a 4-argument function; args go in r3-r6. */
  2107. #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
  2108. do { \
  2109. volatile OrigFn _orig = (orig); \
  2110. volatile unsigned long _argvec[3+4]; \
  2111. volatile unsigned long _res; \
  2112. /* _argvec[0] holds current r2 across the call */ \
  2113. _argvec[1] = (unsigned long)_orig.r2; \
  2114. _argvec[2] = (unsigned long)_orig.nraddr; \
  2115. _argvec[2+1] = (unsigned long)arg1; \
  2116. _argvec[2+2] = (unsigned long)arg2; \
  2117. _argvec[2+3] = (unsigned long)arg3; \
  2118. _argvec[2+4] = (unsigned long)arg4; \
  2119. __asm__ volatile( \
  2120. "mr 11,%1\n\t" \
  2121. "std 2,-16(11)\n\t" /* save tocptr */ \
  2122. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2123. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2124. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2125. "ld 5, 24(11)\n\t" /* arg3->r5 */ \
  2126. "ld 6, 32(11)\n\t" /* arg4->r6 */ \
  2127. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2128. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2129. "mr 11,%1\n\t" \
  2130. "mr %0,3\n\t" \
  2131. "ld 2,-16(11)" /* restore tocptr */ \
  2132. : /*out*/ "=r" (_res) \
  2133. : /*in*/ "r" (&_argvec[2]) \
  2134. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2135. ); \
  2136. lval = (__typeof__(lval)) _res; \
  2137. } while (0)
  /* ppc64: invoke a 5-argument function; args go in r3-r7. */
  2138. #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
  2139. do { \
  2140. volatile OrigFn _orig = (orig); \
  2141. volatile unsigned long _argvec[3+5]; \
  2142. volatile unsigned long _res; \
  2143. /* _argvec[0] holds current r2 across the call */ \
  2144. _argvec[1] = (unsigned long)_orig.r2; \
  2145. _argvec[2] = (unsigned long)_orig.nraddr; \
  2146. _argvec[2+1] = (unsigned long)arg1; \
  2147. _argvec[2+2] = (unsigned long)arg2; \
  2148. _argvec[2+3] = (unsigned long)arg3; \
  2149. _argvec[2+4] = (unsigned long)arg4; \
  2150. _argvec[2+5] = (unsigned long)arg5; \
  2151. __asm__ volatile( \
  2152. "mr 11,%1\n\t" \
  2153. "std 2,-16(11)\n\t" /* save tocptr */ \
  2154. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2155. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2156. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2157. "ld 5, 24(11)\n\t" /* arg3->r5 */ \
  2158. "ld 6, 32(11)\n\t" /* arg4->r6 */ \
  2159. "ld 7, 40(11)\n\t" /* arg5->r7 */ \
  2160. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2161. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2162. "mr 11,%1\n\t" \
  2163. "mr %0,3\n\t" \
  2164. "ld 2,-16(11)" /* restore tocptr */ \
  2165. : /*out*/ "=r" (_res) \
  2166. : /*in*/ "r" (&_argvec[2]) \
  2167. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2168. ); \
  2169. lval = (__typeof__(lval)) _res; \
  2170. } while (0)
  /* ppc64: invoke a 6-argument function; args go in r3-r8. */
  2171. #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
  2172. do { \
  2173. volatile OrigFn _orig = (orig); \
  2174. volatile unsigned long _argvec[3+6]; \
  2175. volatile unsigned long _res; \
  2176. /* _argvec[0] holds current r2 across the call */ \
  2177. _argvec[1] = (unsigned long)_orig.r2; \
  2178. _argvec[2] = (unsigned long)_orig.nraddr; \
  2179. _argvec[2+1] = (unsigned long)arg1; \
  2180. _argvec[2+2] = (unsigned long)arg2; \
  2181. _argvec[2+3] = (unsigned long)arg3; \
  2182. _argvec[2+4] = (unsigned long)arg4; \
  2183. _argvec[2+5] = (unsigned long)arg5; \
  2184. _argvec[2+6] = (unsigned long)arg6; \
  2185. __asm__ volatile( \
  2186. "mr 11,%1\n\t" \
  2187. "std 2,-16(11)\n\t" /* save tocptr */ \
  2188. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2189. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2190. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2191. "ld 5, 24(11)\n\t" /* arg3->r5 */ \
  2192. "ld 6, 32(11)\n\t" /* arg4->r6 */ \
  2193. "ld 7, 40(11)\n\t" /* arg5->r7 */ \
  2194. "ld 8, 48(11)\n\t" /* arg6->r8 */ \
  2195. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2196. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2197. "mr 11,%1\n\t" \
  2198. "mr %0,3\n\t" \
  2199. "ld 2,-16(11)" /* restore tocptr */ \
  2200. : /*out*/ "=r" (_res) \
  2201. : /*in*/ "r" (&_argvec[2]) \
  2202. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2203. ); \
  2204. lval = (__typeof__(lval)) _res; \
  2205. } while (0)
  /* ppc64: invoke a 7-argument function; args go in r3-r9. */
  2206. #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  2207. arg7) \
  2208. do { \
  2209. volatile OrigFn _orig = (orig); \
  2210. volatile unsigned long _argvec[3+7]; \
  2211. volatile unsigned long _res; \
  2212. /* _argvec[0] holds current r2 across the call */ \
  2213. _argvec[1] = (unsigned long)_orig.r2; \
  2214. _argvec[2] = (unsigned long)_orig.nraddr; \
  2215. _argvec[2+1] = (unsigned long)arg1; \
  2216. _argvec[2+2] = (unsigned long)arg2; \
  2217. _argvec[2+3] = (unsigned long)arg3; \
  2218. _argvec[2+4] = (unsigned long)arg4; \
  2219. _argvec[2+5] = (unsigned long)arg5; \
  2220. _argvec[2+6] = (unsigned long)arg6; \
  2221. _argvec[2+7] = (unsigned long)arg7; \
  2222. __asm__ volatile( \
  2223. "mr 11,%1\n\t" \
  2224. "std 2,-16(11)\n\t" /* save tocptr */ \
  2225. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2226. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2227. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2228. "ld 5, 24(11)\n\t" /* arg3->r5 */ \
  2229. "ld 6, 32(11)\n\t" /* arg4->r6 */ \
  2230. "ld 7, 40(11)\n\t" /* arg5->r7 */ \
  2231. "ld 8, 48(11)\n\t" /* arg6->r8 */ \
  2232. "ld 9, 56(11)\n\t" /* arg7->r9 */ \
  2233. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2234. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2235. "mr 11,%1\n\t" \
  2236. "mr %0,3\n\t" \
  2237. "ld 2,-16(11)" /* restore tocptr */ \
  2238. : /*out*/ "=r" (_res) \
  2239. : /*in*/ "r" (&_argvec[2]) \
  2240. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2241. ); \
  2242. lval = (__typeof__(lval)) _res; \
  2243. } while (0)
  /* ppc64: invoke an 8-argument function; args go in r3-r10 (the
     last variant that needs no extra stack space). */
  2244. #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  2245. arg7,arg8) \
  2246. do { \
  2247. volatile OrigFn _orig = (orig); \
  2248. volatile unsigned long _argvec[3+8]; \
  2249. volatile unsigned long _res; \
  2250. /* _argvec[0] holds current r2 across the call */ \
  2251. _argvec[1] = (unsigned long)_orig.r2; \
  2252. _argvec[2] = (unsigned long)_orig.nraddr; \
  2253. _argvec[2+1] = (unsigned long)arg1; \
  2254. _argvec[2+2] = (unsigned long)arg2; \
  2255. _argvec[2+3] = (unsigned long)arg3; \
  2256. _argvec[2+4] = (unsigned long)arg4; \
  2257. _argvec[2+5] = (unsigned long)arg5; \
  2258. _argvec[2+6] = (unsigned long)arg6; \
  2259. _argvec[2+7] = (unsigned long)arg7; \
  2260. _argvec[2+8] = (unsigned long)arg8; \
  2261. __asm__ volatile( \
  2262. "mr 11,%1\n\t" \
  2263. "std 2,-16(11)\n\t" /* save tocptr */ \
  2264. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2265. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2266. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2267. "ld 5, 24(11)\n\t" /* arg3->r5 */ \
  2268. "ld 6, 32(11)\n\t" /* arg4->r6 */ \
  2269. "ld 7, 40(11)\n\t" /* arg5->r7 */ \
  2270. "ld 8, 48(11)\n\t" /* arg6->r8 */ \
  2271. "ld 9, 56(11)\n\t" /* arg7->r9 */ \
  2272. "ld 10, 64(11)\n\t" /* arg8->r10 */ \
  2273. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2274. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2275. "mr 11,%1\n\t" \
  2276. "mr %0,3\n\t" \
  2277. "ld 2,-16(11)" /* restore tocptr */ \
  2278. : /*out*/ "=r" (_res) \
  2279. : /*in*/ "r" (&_argvec[2]) \
  2280. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2281. ); \
  2282. lval = (__typeof__(lval)) _res; \
  2283. } while (0)
  /* ppc64: invoke a 9-argument function.  Args 1-8 go in r3-r10;
     arg9 is stored into a temporary 128-byte stack frame at 112(r1)
     (the parameter area of the new frame), which is popped after
     the call and the caller's TOC (r2) restored. */
  2284. #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  2285. arg7,arg8,arg9) \
  2286. do { \
  2287. volatile OrigFn _orig = (orig); \
  2288. volatile unsigned long _argvec[3+9]; \
  2289. volatile unsigned long _res; \
  2290. /* _argvec[0] holds current r2 across the call */ \
  2291. _argvec[1] = (unsigned long)_orig.r2; \
  2292. _argvec[2] = (unsigned long)_orig.nraddr; \
  2293. _argvec[2+1] = (unsigned long)arg1; \
  2294. _argvec[2+2] = (unsigned long)arg2; \
  2295. _argvec[2+3] = (unsigned long)arg3; \
  2296. _argvec[2+4] = (unsigned long)arg4; \
  2297. _argvec[2+5] = (unsigned long)arg5; \
  2298. _argvec[2+6] = (unsigned long)arg6; \
  2299. _argvec[2+7] = (unsigned long)arg7; \
  2300. _argvec[2+8] = (unsigned long)arg8; \
  2301. _argvec[2+9] = (unsigned long)arg9; \
  2302. __asm__ volatile( \
  2303. "mr 11,%1\n\t" \
  2304. "std 2,-16(11)\n\t" /* save tocptr */ \
  2305. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2306. "addi 1,1,-128\n\t" /* expand stack frame */ \
  2307. /* arg9 */ \
  2308. "ld 3,72(11)\n\t" \
  2309. "std 3,112(1)\n\t" \
  2310. /* args1-8 */ \
  2311. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2312. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2313. "ld 5, 24(11)\n\t" /* arg3->r5 */ \
  2314. "ld 6, 32(11)\n\t" /* arg4->r6 */ \
  2315. "ld 7, 40(11)\n\t" /* arg5->r7 */ \
  2316. "ld 8, 48(11)\n\t" /* arg6->r8 */ \
  2317. "ld 9, 56(11)\n\t" /* arg7->r9 */ \
  2318. "ld 10, 64(11)\n\t" /* arg8->r10 */ \
  2319. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2320. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2321. "mr 11,%1\n\t" \
  2322. "mr %0,3\n\t" \
  2323. "ld 2,-16(11)\n\t" /* restore tocptr */ \
  2324. "addi 1,1,128" /* restore frame */ \
  2325. : /*out*/ "=r" (_res) \
  2326. : /*in*/ "r" (&_argvec[2]) \
  2327. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2328. ); \
  2329. lval = (__typeof__(lval)) _res; \
  2330. } while (0)
  /* ppc64: invoke a 10-argument function.  Like CALL_FN_W_9W, with
     args 9-10 stored at 112(r1) and 120(r1) of the 128-byte frame. */
  2331. #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  2332. arg7,arg8,arg9,arg10) \
  2333. do { \
  2334. volatile OrigFn _orig = (orig); \
  2335. volatile unsigned long _argvec[3+10]; \
  2336. volatile unsigned long _res; \
  2337. /* _argvec[0] holds current r2 across the call */ \
  2338. _argvec[1] = (unsigned long)_orig.r2; \
  2339. _argvec[2] = (unsigned long)_orig.nraddr; \
  2340. _argvec[2+1] = (unsigned long)arg1; \
  2341. _argvec[2+2] = (unsigned long)arg2; \
  2342. _argvec[2+3] = (unsigned long)arg3; \
  2343. _argvec[2+4] = (unsigned long)arg4; \
  2344. _argvec[2+5] = (unsigned long)arg5; \
  2345. _argvec[2+6] = (unsigned long)arg6; \
  2346. _argvec[2+7] = (unsigned long)arg7; \
  2347. _argvec[2+8] = (unsigned long)arg8; \
  2348. _argvec[2+9] = (unsigned long)arg9; \
  2349. _argvec[2+10] = (unsigned long)arg10; \
  2350. __asm__ volatile( \
  2351. "mr 11,%1\n\t" \
  2352. "std 2,-16(11)\n\t" /* save tocptr */ \
  2353. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2354. "addi 1,1,-128\n\t" /* expand stack frame */ \
  2355. /* arg10 */ \
  2356. "ld 3,80(11)\n\t" \
  2357. "std 3,120(1)\n\t" \
  2358. /* arg9 */ \
  2359. "ld 3,72(11)\n\t" \
  2360. "std 3,112(1)\n\t" \
  2361. /* args1-8 */ \
  2362. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2363. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2364. "ld 5, 24(11)\n\t" /* arg3->r5 */ \
  2365. "ld 6, 32(11)\n\t" /* arg4->r6 */ \
  2366. "ld 7, 40(11)\n\t" /* arg5->r7 */ \
  2367. "ld 8, 48(11)\n\t" /* arg6->r8 */ \
  2368. "ld 9, 56(11)\n\t" /* arg7->r9 */ \
  2369. "ld 10, 64(11)\n\t" /* arg8->r10 */ \
  2370. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2371. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2372. "mr 11,%1\n\t" \
  2373. "mr %0,3\n\t" \
  2374. "ld 2,-16(11)\n\t" /* restore tocptr */ \
  2375. "addi 1,1,128" /* restore frame */ \
  2376. : /*out*/ "=r" (_res) \
  2377. : /*in*/ "r" (&_argvec[2]) \
  2378. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2379. ); \
  2380. lval = (__typeof__(lval)) _res; \
  2381. } while (0)
  /* ppc64: invoke an 11-argument function.  A 144-byte frame is
     opened so args 9-11 fit at 112(r1), 120(r1), 128(r1). */
  2382. #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  2383. arg7,arg8,arg9,arg10,arg11) \
  2384. do { \
  2385. volatile OrigFn _orig = (orig); \
  2386. volatile unsigned long _argvec[3+11]; \
  2387. volatile unsigned long _res; \
  2388. /* _argvec[0] holds current r2 across the call */ \
  2389. _argvec[1] = (unsigned long)_orig.r2; \
  2390. _argvec[2] = (unsigned long)_orig.nraddr; \
  2391. _argvec[2+1] = (unsigned long)arg1; \
  2392. _argvec[2+2] = (unsigned long)arg2; \
  2393. _argvec[2+3] = (unsigned long)arg3; \
  2394. _argvec[2+4] = (unsigned long)arg4; \
  2395. _argvec[2+5] = (unsigned long)arg5; \
  2396. _argvec[2+6] = (unsigned long)arg6; \
  2397. _argvec[2+7] = (unsigned long)arg7; \
  2398. _argvec[2+8] = (unsigned long)arg8; \
  2399. _argvec[2+9] = (unsigned long)arg9; \
  2400. _argvec[2+10] = (unsigned long)arg10; \
  2401. _argvec[2+11] = (unsigned long)arg11; \
  2402. __asm__ volatile( \
  2403. "mr 11,%1\n\t" \
  2404. "std 2,-16(11)\n\t" /* save tocptr */ \
  2405. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2406. "addi 1,1,-144\n\t" /* expand stack frame */ \
  2407. /* arg11 */ \
  2408. "ld 3,88(11)\n\t" \
  2409. "std 3,128(1)\n\t" \
  2410. /* arg10 */ \
  2411. "ld 3,80(11)\n\t" \
  2412. "std 3,120(1)\n\t" \
  2413. /* arg9 */ \
  2414. "ld 3,72(11)\n\t" \
  2415. "std 3,112(1)\n\t" \
  2416. /* args1-8 */ \
  2417. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2418. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2419. "ld 5, 24(11)\n\t" /* arg3->r5 */ \
  2420. "ld 6, 32(11)\n\t" /* arg4->r6 */ \
  2421. "ld 7, 40(11)\n\t" /* arg5->r7 */ \
  2422. "ld 8, 48(11)\n\t" /* arg6->r8 */ \
  2423. "ld 9, 56(11)\n\t" /* arg7->r9 */ \
  2424. "ld 10, 64(11)\n\t" /* arg8->r10 */ \
  2425. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2426. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2427. "mr 11,%1\n\t" \
  2428. "mr %0,3\n\t" \
  2429. "ld 2,-16(11)\n\t" /* restore tocptr */ \
  2430. "addi 1,1,144" /* restore frame */ \
  2431. : /*out*/ "=r" (_res) \
  2432. : /*in*/ "r" (&_argvec[2]) \
  2433. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2434. ); \
  2435. lval = (__typeof__(lval)) _res; \
  2436. } while (0)
  /* ppc64: invoke a 12-argument function.  Same 144-byte frame as
     CALL_FN_W_11W; args 9-12 land at 112..136(r1). */
  2437. #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  2438. arg7,arg8,arg9,arg10,arg11,arg12) \
  2439. do { \
  2440. volatile OrigFn _orig = (orig); \
  2441. volatile unsigned long _argvec[3+12]; \
  2442. volatile unsigned long _res; \
  2443. /* _argvec[0] holds current r2 across the call */ \
  2444. _argvec[1] = (unsigned long)_orig.r2; \
  2445. _argvec[2] = (unsigned long)_orig.nraddr; \
  2446. _argvec[2+1] = (unsigned long)arg1; \
  2447. _argvec[2+2] = (unsigned long)arg2; \
  2448. _argvec[2+3] = (unsigned long)arg3; \
  2449. _argvec[2+4] = (unsigned long)arg4; \
  2450. _argvec[2+5] = (unsigned long)arg5; \
  2451. _argvec[2+6] = (unsigned long)arg6; \
  2452. _argvec[2+7] = (unsigned long)arg7; \
  2453. _argvec[2+8] = (unsigned long)arg8; \
  2454. _argvec[2+9] = (unsigned long)arg9; \
  2455. _argvec[2+10] = (unsigned long)arg10; \
  2456. _argvec[2+11] = (unsigned long)arg11; \
  2457. _argvec[2+12] = (unsigned long)arg12; \
  2458. __asm__ volatile( \
  2459. "mr 11,%1\n\t" \
  2460. "std 2,-16(11)\n\t" /* save tocptr */ \
  2461. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  2462. "addi 1,1,-144\n\t" /* expand stack frame */ \
  2463. /* arg12 */ \
  2464. "ld 3,96(11)\n\t" \
  2465. "std 3,136(1)\n\t" \
  2466. /* arg11 */ \
  2467. "ld 3,88(11)\n\t" \
  2468. "std 3,128(1)\n\t" \
  2469. /* arg10 */ \
  2470. "ld 3,80(11)\n\t" \
  2471. "std 3,120(1)\n\t" \
  2472. /* arg9 */ \
  2473. "ld 3,72(11)\n\t" \
  2474. "std 3,112(1)\n\t" \
  2475. /* args1-8 */ \
  2476. "ld 3, 8(11)\n\t" /* arg1->r3 */ \
  2477. "ld 4, 16(11)\n\t" /* arg2->r4 */ \
  2478. "ld 5, 24(11)\n\t" /* arg3->r5 */ \
  2479. "ld 6, 32(11)\n\t" /* arg4->r6 */ \
  2480. "ld 7, 40(11)\n\t" /* arg5->r7 */ \
  2481. "ld 8, 48(11)\n\t" /* arg6->r8 */ \
  2482. "ld 9, 56(11)\n\t" /* arg7->r9 */ \
  2483. "ld 10, 64(11)\n\t" /* arg8->r10 */ \
  2484. "ld 11, 0(11)\n\t" /* target->r11 */ \
  2485. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2486. "mr 11,%1\n\t" \
  2487. "mr %0,3\n\t" \
  2488. "ld 2,-16(11)\n\t" /* restore tocptr */ \
  2489. "addi 1,1,144" /* restore frame */ \
  2490. : /*out*/ "=r" (_res) \
  2491. : /*in*/ "r" (&_argvec[2]) \
  2492. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2493. ); \
  2494. lval = (__typeof__(lval)) _res; \
  2495. } while (0)
  2496. #endif /* PLAT_ppc64_linux */
  2497. /* ------------------------- arm-linux ------------------------- */
  2498. #if defined(PLAT_arm_linux)
  2499. /* These regs are trashed by the hidden call. */
  /* Clobber list for the arm asm blocks below: r0-r3 carry args,
     r4 holds the call target (see "target->r4" loads below), and
     r14 is presumably clobbered by the branch-and-link — NOTE(review):
     confirm against VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4. */
  2500. #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
  2501. /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
  2502. long) == 4. */
  /* arm: invoke a 0-argument function.  The "0" input constraint
     ties &_argvec[0] to the same register as the output %0; the
     target address is loaded into r4 and the result taken from r0. */
  2503. #define CALL_FN_W_v(lval, orig) \
  2504. do { \
  2505. volatile OrigFn _orig = (orig); \
  2506. volatile unsigned long _argvec[1]; \
  2507. volatile unsigned long _res; \
  2508. _argvec[0] = (unsigned long)_orig.nraddr; \
  2509. __asm__ volatile( \
  2510. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2511. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2512. "mov %0, r0\n" \
  2513. : /*out*/ "=r" (_res) \
  2514. : /*in*/ "0" (&_argvec[0]) \
  2515. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2516. ); \
  2517. lval = (__typeof__(lval)) _res; \
  2518. } while (0)
  /* arm: invoke a 1-argument function; arg1 goes in r0. */
  2519. #define CALL_FN_W_W(lval, orig, arg1) \
  2520. do { \
  2521. volatile OrigFn _orig = (orig); \
  2522. volatile unsigned long _argvec[2]; \
  2523. volatile unsigned long _res; \
  2524. _argvec[0] = (unsigned long)_orig.nraddr; \
  2525. _argvec[1] = (unsigned long)(arg1); \
  2526. __asm__ volatile( \
  2527. "ldr r0, [%1, #4] \n\t" \
  2528. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2529. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2530. "mov %0, r0\n" \
  2531. : /*out*/ "=r" (_res) \
  2532. : /*in*/ "0" (&_argvec[0]) \
  2533. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2534. ); \
  2535. lval = (__typeof__(lval)) _res; \
  2536. } while (0)
  /* arm: invoke a 2-argument function; args go in r0-r1. */
  2537. #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
  2538. do { \
  2539. volatile OrigFn _orig = (orig); \
  2540. volatile unsigned long _argvec[3]; \
  2541. volatile unsigned long _res; \
  2542. _argvec[0] = (unsigned long)_orig.nraddr; \
  2543. _argvec[1] = (unsigned long)(arg1); \
  2544. _argvec[2] = (unsigned long)(arg2); \
  2545. __asm__ volatile( \
  2546. "ldr r0, [%1, #4] \n\t" \
  2547. "ldr r1, [%1, #8] \n\t" \
  2548. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2549. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2550. "mov %0, r0\n" \
  2551. : /*out*/ "=r" (_res) \
  2552. : /*in*/ "0" (&_argvec[0]) \
  2553. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2554. ); \
  2555. lval = (__typeof__(lval)) _res; \
  2556. } while (0)
  /* arm: invoke a 3-argument function; args go in r0-r2. */
  2557. #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
  2558. do { \
  2559. volatile OrigFn _orig = (orig); \
  2560. volatile unsigned long _argvec[4]; \
  2561. volatile unsigned long _res; \
  2562. _argvec[0] = (unsigned long)_orig.nraddr; \
  2563. _argvec[1] = (unsigned long)(arg1); \
  2564. _argvec[2] = (unsigned long)(arg2); \
  2565. _argvec[3] = (unsigned long)(arg3); \
  2566. __asm__ volatile( \
  2567. "ldr r0, [%1, #4] \n\t" \
  2568. "ldr r1, [%1, #8] \n\t" \
  2569. "ldr r2, [%1, #12] \n\t" \
  2570. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2571. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2572. "mov %0, r0\n" \
  2573. : /*out*/ "=r" (_res) \
  2574. : /*in*/ "0" (&_argvec[0]) \
  2575. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2576. ); \
  2577. lval = (__typeof__(lval)) _res; \
  2578. } while (0)
  /* arm: invoke a 4-argument function; args go in r0-r3 (the last
     variant that needs no stack space). */
  2579. #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
  2580. do { \
  2581. volatile OrigFn _orig = (orig); \
  2582. volatile unsigned long _argvec[5]; \
  2583. volatile unsigned long _res; \
  2584. _argvec[0] = (unsigned long)_orig.nraddr; \
  2585. _argvec[1] = (unsigned long)(arg1); \
  2586. _argvec[2] = (unsigned long)(arg2); \
  2587. _argvec[3] = (unsigned long)(arg3); \
  2588. _argvec[4] = (unsigned long)(arg4); \
  2589. __asm__ volatile( \
  2590. "ldr r0, [%1, #4] \n\t" \
  2591. "ldr r1, [%1, #8] \n\t" \
  2592. "ldr r2, [%1, #12] \n\t" \
  2593. "ldr r3, [%1, #16] \n\t" \
  2594. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2595. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2596. "mov %0, r0" \
  2597. : /*out*/ "=r" (_res) \
  2598. : /*in*/ "0" (&_argvec[0]) \
  2599. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2600. ); \
  2601. lval = (__typeof__(lval)) _res; \
  2602. } while (0)
  /* arm: invoke a 5-argument function.  arg5 is pushed onto the
     stack (popped via "add sp, sp, #4" afterwards); args 1-4 go
     in r0-r3. */
  2603. #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
  2604. do { \
  2605. volatile OrigFn _orig = (orig); \
  2606. volatile unsigned long _argvec[6]; \
  2607. volatile unsigned long _res; \
  2608. _argvec[0] = (unsigned long)_orig.nraddr; \
  2609. _argvec[1] = (unsigned long)(arg1); \
  2610. _argvec[2] = (unsigned long)(arg2); \
  2611. _argvec[3] = (unsigned long)(arg3); \
  2612. _argvec[4] = (unsigned long)(arg4); \
  2613. _argvec[5] = (unsigned long)(arg5); \
  2614. __asm__ volatile( \
  2615. "ldr r0, [%1, #20] \n\t" \
  2616. "push {r0} \n\t" \
  2617. "ldr r0, [%1, #4] \n\t" \
  2618. "ldr r1, [%1, #8] \n\t" \
  2619. "ldr r2, [%1, #12] \n\t" \
  2620. "ldr r3, [%1, #16] \n\t" \
  2621. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2622. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2623. "add sp, sp, #4 \n\t" \
  2624. "mov %0, r0" \
  2625. : /*out*/ "=r" (_res) \
  2626. : /*in*/ "0" (&_argvec[0]) \
  2627. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2628. ); \
  2629. lval = (__typeof__(lval)) _res; \
  2630. } while (0)
  /* arm: invoke a 6-argument function.  args 5-6 are pushed
     together (8 bytes reclaimed after the call); args 1-4 in r0-r3. */
  2631. #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
  2632. do { \
  2633. volatile OrigFn _orig = (orig); \
  2634. volatile unsigned long _argvec[7]; \
  2635. volatile unsigned long _res; \
  2636. _argvec[0] = (unsigned long)_orig.nraddr; \
  2637. _argvec[1] = (unsigned long)(arg1); \
  2638. _argvec[2] = (unsigned long)(arg2); \
  2639. _argvec[3] = (unsigned long)(arg3); \
  2640. _argvec[4] = (unsigned long)(arg4); \
  2641. _argvec[5] = (unsigned long)(arg5); \
  2642. _argvec[6] = (unsigned long)(arg6); \
  2643. __asm__ volatile( \
  2644. "ldr r0, [%1, #20] \n\t" \
  2645. "ldr r1, [%1, #24] \n\t" \
  2646. "push {r0, r1} \n\t" \
  2647. "ldr r0, [%1, #4] \n\t" \
  2648. "ldr r1, [%1, #8] \n\t" \
  2649. "ldr r2, [%1, #12] \n\t" \
  2650. "ldr r3, [%1, #16] \n\t" \
  2651. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2652. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2653. "add sp, sp, #8 \n\t" \
  2654. "mov %0, r0" \
  2655. : /*out*/ "=r" (_res) \
  2656. : /*in*/ "0" (&_argvec[0]) \
  2657. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2658. ); \
  2659. lval = (__typeof__(lval)) _res; \
  2660. } while (0)
  /* arm: invoke a 7-argument function.  args 5-7 are pushed
     (12 bytes reclaimed after the call); args 1-4 in r0-r3. */
  2661. #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  2662. arg7) \
  2663. do { \
  2664. volatile OrigFn _orig = (orig); \
  2665. volatile unsigned long _argvec[8]; \
  2666. volatile unsigned long _res; \
  2667. _argvec[0] = (unsigned long)_orig.nraddr; \
  2668. _argvec[1] = (unsigned long)(arg1); \
  2669. _argvec[2] = (unsigned long)(arg2); \
  2670. _argvec[3] = (unsigned long)(arg3); \
  2671. _argvec[4] = (unsigned long)(arg4); \
  2672. _argvec[5] = (unsigned long)(arg5); \
  2673. _argvec[6] = (unsigned long)(arg6); \
  2674. _argvec[7] = (unsigned long)(arg7); \
  2675. __asm__ volatile( \
  2676. "ldr r0, [%1, #20] \n\t" \
  2677. "ldr r1, [%1, #24] \n\t" \
  2678. "ldr r2, [%1, #28] \n\t" \
  2679. "push {r0, r1, r2} \n\t" \
  2680. "ldr r0, [%1, #4] \n\t" \
  2681. "ldr r1, [%1, #8] \n\t" \
  2682. "ldr r2, [%1, #12] \n\t" \
  2683. "ldr r3, [%1, #16] \n\t" \
  2684. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2685. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2686. "add sp, sp, #12 \n\t" \
  2687. "mov %0, r0" \
  2688. : /*out*/ "=r" (_res) \
  2689. : /*in*/ "0" (&_argvec[0]) \
  2690. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2691. ); \
  2692. lval = (__typeof__(lval)) _res; \
  2693. } while (0)
  /* arm: invoke an 8-argument function.  args 5-8 are pushed
     (16 bytes reclaimed after the call); args 1-4 in r0-r3. */
  2694. #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  2695. arg7,arg8) \
  2696. do { \
  2697. volatile OrigFn _orig = (orig); \
  2698. volatile unsigned long _argvec[9]; \
  2699. volatile unsigned long _res; \
  2700. _argvec[0] = (unsigned long)_orig.nraddr; \
  2701. _argvec[1] = (unsigned long)(arg1); \
  2702. _argvec[2] = (unsigned long)(arg2); \
  2703. _argvec[3] = (unsigned long)(arg3); \
  2704. _argvec[4] = (unsigned long)(arg4); \
  2705. _argvec[5] = (unsigned long)(arg5); \
  2706. _argvec[6] = (unsigned long)(arg6); \
  2707. _argvec[7] = (unsigned long)(arg7); \
  2708. _argvec[8] = (unsigned long)(arg8); \
  2709. __asm__ volatile( \
  2710. "ldr r0, [%1, #20] \n\t" \
  2711. "ldr r1, [%1, #24] \n\t" \
  2712. "ldr r2, [%1, #28] \n\t" \
  2713. "ldr r3, [%1, #32] \n\t" \
  2714. "push {r0, r1, r2, r3} \n\t" \
  2715. "ldr r0, [%1, #4] \n\t" \
  2716. "ldr r1, [%1, #8] \n\t" \
  2717. "ldr r2, [%1, #12] \n\t" \
  2718. "ldr r3, [%1, #16] \n\t" \
  2719. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2720. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2721. "add sp, sp, #16 \n\t" \
  2722. "mov %0, r0" \
  2723. : /*out*/ "=r" (_res) \
  2724. : /*in*/ "0" (&_argvec[0]) \
  2725. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2726. ); \
  2727. lval = (__typeof__(lval)) _res; \
  2728. } while (0)
  2729. #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  2730. arg7,arg8,arg9) \
  2731. do { \
  2732. volatile OrigFn _orig = (orig); \
  2733. volatile unsigned long _argvec[10]; \
  2734. volatile unsigned long _res; \
  2735. _argvec[0] = (unsigned long)_orig.nraddr; \
  2736. _argvec[1] = (unsigned long)(arg1); \
  2737. _argvec[2] = (unsigned long)(arg2); \
  2738. _argvec[3] = (unsigned long)(arg3); \
  2739. _argvec[4] = (unsigned long)(arg4); \
  2740. _argvec[5] = (unsigned long)(arg5); \
  2741. _argvec[6] = (unsigned long)(arg6); \
  2742. _argvec[7] = (unsigned long)(arg7); \
  2743. _argvec[8] = (unsigned long)(arg8); \
  2744. _argvec[9] = (unsigned long)(arg9); \
  2745. __asm__ volatile( \
  2746. "ldr r0, [%1, #20] \n\t" \
  2747. "ldr r1, [%1, #24] \n\t" \
  2748. "ldr r2, [%1, #28] \n\t" \
  2749. "ldr r3, [%1, #32] \n\t" \
  2750. "ldr r4, [%1, #36] \n\t" \
  2751. "push {r0, r1, r2, r3, r4} \n\t" \
  2752. "ldr r0, [%1, #4] \n\t" \
  2753. "ldr r1, [%1, #8] \n\t" \
  2754. "ldr r2, [%1, #12] \n\t" \
  2755. "ldr r3, [%1, #16] \n\t" \
  2756. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2757. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2758. "add sp, sp, #20 \n\t" \
  2759. "mov %0, r0" \
  2760. : /*out*/ "=r" (_res) \
  2761. : /*in*/ "0" (&_argvec[0]) \
  2762. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2763. ); \
  2764. lval = (__typeof__(lval)) _res; \
  2765. } while (0)
  2766. #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  2767. arg7,arg8,arg9,arg10) \
  2768. do { \
  2769. volatile OrigFn _orig = (orig); \
  2770. volatile unsigned long _argvec[11]; \
  2771. volatile unsigned long _res; \
  2772. _argvec[0] = (unsigned long)_orig.nraddr; \
  2773. _argvec[1] = (unsigned long)(arg1); \
  2774. _argvec[2] = (unsigned long)(arg2); \
  2775. _argvec[3] = (unsigned long)(arg3); \
  2776. _argvec[4] = (unsigned long)(arg4); \
  2777. _argvec[5] = (unsigned long)(arg5); \
  2778. _argvec[6] = (unsigned long)(arg6); \
  2779. _argvec[7] = (unsigned long)(arg7); \
  2780. _argvec[8] = (unsigned long)(arg8); \
  2781. _argvec[9] = (unsigned long)(arg9); \
  2782. _argvec[10] = (unsigned long)(arg10); \
  2783. __asm__ volatile( \
  2784. "ldr r0, [%1, #40] \n\t" \
  2785. "push {r0} \n\t" \
  2786. "ldr r0, [%1, #20] \n\t" \
  2787. "ldr r1, [%1, #24] \n\t" \
  2788. "ldr r2, [%1, #28] \n\t" \
  2789. "ldr r3, [%1, #32] \n\t" \
  2790. "ldr r4, [%1, #36] \n\t" \
  2791. "push {r0, r1, r2, r3, r4} \n\t" \
  2792. "ldr r0, [%1, #4] \n\t" \
  2793. "ldr r1, [%1, #8] \n\t" \
  2794. "ldr r2, [%1, #12] \n\t" \
  2795. "ldr r3, [%1, #16] \n\t" \
  2796. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2797. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2798. "add sp, sp, #24 \n\t" \
  2799. "mov %0, r0" \
  2800. : /*out*/ "=r" (_res) \
  2801. : /*in*/ "0" (&_argvec[0]) \
  2802. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2803. ); \
  2804. lval = (__typeof__(lval)) _res; \
  2805. } while (0)
  2806. #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
  2807. arg6,arg7,arg8,arg9,arg10, \
  2808. arg11) \
  2809. do { \
  2810. volatile OrigFn _orig = (orig); \
  2811. volatile unsigned long _argvec[12]; \
  2812. volatile unsigned long _res; \
  2813. _argvec[0] = (unsigned long)_orig.nraddr; \
  2814. _argvec[1] = (unsigned long)(arg1); \
  2815. _argvec[2] = (unsigned long)(arg2); \
  2816. _argvec[3] = (unsigned long)(arg3); \
  2817. _argvec[4] = (unsigned long)(arg4); \
  2818. _argvec[5] = (unsigned long)(arg5); \
  2819. _argvec[6] = (unsigned long)(arg6); \
  2820. _argvec[7] = (unsigned long)(arg7); \
  2821. _argvec[8] = (unsigned long)(arg8); \
  2822. _argvec[9] = (unsigned long)(arg9); \
  2823. _argvec[10] = (unsigned long)(arg10); \
  2824. _argvec[11] = (unsigned long)(arg11); \
  2825. __asm__ volatile( \
  2826. "ldr r0, [%1, #40] \n\t" \
  2827. "ldr r1, [%1, #44] \n\t" \
  2828. "push {r0, r1} \n\t" \
  2829. "ldr r0, [%1, #20] \n\t" \
  2830. "ldr r1, [%1, #24] \n\t" \
  2831. "ldr r2, [%1, #28] \n\t" \
  2832. "ldr r3, [%1, #32] \n\t" \
  2833. "ldr r4, [%1, #36] \n\t" \
  2834. "push {r0, r1, r2, r3, r4} \n\t" \
  2835. "ldr r0, [%1, #4] \n\t" \
  2836. "ldr r1, [%1, #8] \n\t" \
  2837. "ldr r2, [%1, #12] \n\t" \
  2838. "ldr r3, [%1, #16] \n\t" \
  2839. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2840. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2841. "add sp, sp, #28 \n\t" \
  2842. "mov %0, r0" \
  2843. : /*out*/ "=r" (_res) \
  2844. : /*in*/ "0" (&_argvec[0]) \
  2845. : /*trash*/ "cc", "memory",__CALLER_SAVED_REGS \
  2846. ); \
  2847. lval = (__typeof__(lval)) _res; \
  2848. } while (0)
  2849. #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
  2850. arg6,arg7,arg8,arg9,arg10, \
  2851. arg11,arg12) \
  2852. do { \
  2853. volatile OrigFn _orig = (orig); \
  2854. volatile unsigned long _argvec[13]; \
  2855. volatile unsigned long _res; \
  2856. _argvec[0] = (unsigned long)_orig.nraddr; \
  2857. _argvec[1] = (unsigned long)(arg1); \
  2858. _argvec[2] = (unsigned long)(arg2); \
  2859. _argvec[3] = (unsigned long)(arg3); \
  2860. _argvec[4] = (unsigned long)(arg4); \
  2861. _argvec[5] = (unsigned long)(arg5); \
  2862. _argvec[6] = (unsigned long)(arg6); \
  2863. _argvec[7] = (unsigned long)(arg7); \
  2864. _argvec[8] = (unsigned long)(arg8); \
  2865. _argvec[9] = (unsigned long)(arg9); \
  2866. _argvec[10] = (unsigned long)(arg10); \
  2867. _argvec[11] = (unsigned long)(arg11); \
  2868. _argvec[12] = (unsigned long)(arg12); \
  2869. __asm__ volatile( \
  2870. "ldr r0, [%1, #40] \n\t" \
  2871. "ldr r1, [%1, #44] \n\t" \
  2872. "ldr r2, [%1, #48] \n\t" \
  2873. "push {r0, r1, r2} \n\t" \
  2874. "ldr r0, [%1, #20] \n\t" \
  2875. "ldr r1, [%1, #24] \n\t" \
  2876. "ldr r2, [%1, #28] \n\t" \
  2877. "ldr r3, [%1, #32] \n\t" \
  2878. "ldr r4, [%1, #36] \n\t" \
  2879. "push {r0, r1, r2, r3, r4} \n\t" \
  2880. "ldr r0, [%1, #4] \n\t" \
  2881. "ldr r1, [%1, #8] \n\t" \
  2882. "ldr r2, [%1, #12] \n\t" \
  2883. "ldr r3, [%1, #16] \n\t" \
  2884. "ldr r4, [%1] \n\t" /* target->r4 */ \
  2885. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
  2886. "add sp, sp, #32 \n\t" \
  2887. "mov %0, r0" \
  2888. : /*out*/ "=r" (_res) \
  2889. : /*in*/ "0" (&_argvec[0]) \
  2890. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2891. ); \
  2892. lval = (__typeof__(lval)) _res; \
  2893. } while (0)
  2894. #endif /* PLAT_arm_linux */
  2895. /* ------------------------ ppc32-aix5 ------------------------- */
  2896. #if defined(PLAT_ppc32_aix5)
  2897. /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
  2898. /* These regs are trashed by the hidden call. */
  2899. #define __CALLER_SAVED_REGS \
  2900. "lr", "ctr", "xer", \
  2901. "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
  2902. "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
  2903. "r11", "r12", "r13"
  2904. /* Expand the stack frame, copying enough info that unwinding
  2905. still works. Trashes r3. */
  2906. #define VG_EXPAND_FRAME_BY_trashes_r3(_n_fr) \
  2907. "addi 1,1,-" #_n_fr "\n\t" \
  2908. "lwz 3," #_n_fr "(1)\n\t" \
  2909. "stw 3,0(1)\n\t"
  2910. #define VG_CONTRACT_FRAME_BY(_n_fr) \
  2911. "addi 1,1," #_n_fr "\n\t"
  2912. /* These CALL_FN_ macros assume that on ppc32-aix5, sizeof(unsigned
  2913. long) == 4. */
  2914. #define CALL_FN_W_v(lval, orig) \
  2915. do { \
  2916. volatile OrigFn _orig = (orig); \
  2917. volatile unsigned long _argvec[3+0]; \
  2918. volatile unsigned long _res; \
  2919. /* _argvec[0] holds current r2 across the call */ \
  2920. _argvec[1] = (unsigned long)_orig.r2; \
  2921. _argvec[2] = (unsigned long)_orig.nraddr; \
  2922. __asm__ volatile( \
  2923. "mr 11,%1\n\t" \
  2924. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  2925. "stw 2,-8(11)\n\t" /* save tocptr */ \
  2926. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  2927. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  2928. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2929. "mr 11,%1\n\t" \
  2930. "mr %0,3\n\t" \
  2931. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  2932. VG_CONTRACT_FRAME_BY(512) \
  2933. : /*out*/ "=r" (_res) \
  2934. : /*in*/ "r" (&_argvec[2]) \
  2935. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2936. ); \
  2937. lval = (__typeof__(lval)) _res; \
  2938. } while (0)
  2939. #define CALL_FN_W_W(lval, orig, arg1) \
  2940. do { \
  2941. volatile OrigFn _orig = (orig); \
  2942. volatile unsigned long _argvec[3+1]; \
  2943. volatile unsigned long _res; \
  2944. /* _argvec[0] holds current r2 across the call */ \
  2945. _argvec[1] = (unsigned long)_orig.r2; \
  2946. _argvec[2] = (unsigned long)_orig.nraddr; \
  2947. _argvec[2+1] = (unsigned long)arg1; \
  2948. __asm__ volatile( \
  2949. "mr 11,%1\n\t" \
  2950. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  2951. "stw 2,-8(11)\n\t" /* save tocptr */ \
  2952. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  2953. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  2954. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  2955. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2956. "mr 11,%1\n\t" \
  2957. "mr %0,3\n\t" \
  2958. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  2959. VG_CONTRACT_FRAME_BY(512) \
  2960. : /*out*/ "=r" (_res) \
  2961. : /*in*/ "r" (&_argvec[2]) \
  2962. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2963. ); \
  2964. lval = (__typeof__(lval)) _res; \
  2965. } while (0)
  2966. #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
  2967. do { \
  2968. volatile OrigFn _orig = (orig); \
  2969. volatile unsigned long _argvec[3+2]; \
  2970. volatile unsigned long _res; \
  2971. /* _argvec[0] holds current r2 across the call */ \
  2972. _argvec[1] = (unsigned long)_orig.r2; \
  2973. _argvec[2] = (unsigned long)_orig.nraddr; \
  2974. _argvec[2+1] = (unsigned long)arg1; \
  2975. _argvec[2+2] = (unsigned long)arg2; \
  2976. __asm__ volatile( \
  2977. "mr 11,%1\n\t" \
  2978. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  2979. "stw 2,-8(11)\n\t" /* save tocptr */ \
  2980. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  2981. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  2982. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  2983. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  2984. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  2985. "mr 11,%1\n\t" \
  2986. "mr %0,3\n\t" \
  2987. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  2988. VG_CONTRACT_FRAME_BY(512) \
  2989. : /*out*/ "=r" (_res) \
  2990. : /*in*/ "r" (&_argvec[2]) \
  2991. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  2992. ); \
  2993. lval = (__typeof__(lval)) _res; \
  2994. } while (0)
  2995. #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
  2996. do { \
  2997. volatile OrigFn _orig = (orig); \
  2998. volatile unsigned long _argvec[3+3]; \
  2999. volatile unsigned long _res; \
  3000. /* _argvec[0] holds current r2 across the call */ \
  3001. _argvec[1] = (unsigned long)_orig.r2; \
  3002. _argvec[2] = (unsigned long)_orig.nraddr; \
  3003. _argvec[2+1] = (unsigned long)arg1; \
  3004. _argvec[2+2] = (unsigned long)arg2; \
  3005. _argvec[2+3] = (unsigned long)arg3; \
  3006. __asm__ volatile( \
  3007. "mr 11,%1\n\t" \
  3008. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3009. "stw 2,-8(11)\n\t" /* save tocptr */ \
  3010. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  3011. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  3012. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  3013. "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
  3014. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  3015. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3016. "mr 11,%1\n\t" \
  3017. "mr %0,3\n\t" \
  3018. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  3019. VG_CONTRACT_FRAME_BY(512) \
  3020. : /*out*/ "=r" (_res) \
  3021. : /*in*/ "r" (&_argvec[2]) \
  3022. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3023. ); \
  3024. lval = (__typeof__(lval)) _res; \
  3025. } while (0)
  3026. #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
  3027. do { \
  3028. volatile OrigFn _orig = (orig); \
  3029. volatile unsigned long _argvec[3+4]; \
  3030. volatile unsigned long _res; \
  3031. /* _argvec[0] holds current r2 across the call */ \
  3032. _argvec[1] = (unsigned long)_orig.r2; \
  3033. _argvec[2] = (unsigned long)_orig.nraddr; \
  3034. _argvec[2+1] = (unsigned long)arg1; \
  3035. _argvec[2+2] = (unsigned long)arg2; \
  3036. _argvec[2+3] = (unsigned long)arg3; \
  3037. _argvec[2+4] = (unsigned long)arg4; \
  3038. __asm__ volatile( \
  3039. "mr 11,%1\n\t" \
  3040. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3041. "stw 2,-8(11)\n\t" /* save tocptr */ \
  3042. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  3043. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  3044. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  3045. "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
  3046. "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
  3047. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  3048. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3049. "mr 11,%1\n\t" \
  3050. "mr %0,3\n\t" \
  3051. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  3052. VG_CONTRACT_FRAME_BY(512) \
  3053. : /*out*/ "=r" (_res) \
  3054. : /*in*/ "r" (&_argvec[2]) \
  3055. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3056. ); \
  3057. lval = (__typeof__(lval)) _res; \
  3058. } while (0)
  3059. #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
  3060. do { \
  3061. volatile OrigFn _orig = (orig); \
  3062. volatile unsigned long _argvec[3+5]; \
  3063. volatile unsigned long _res; \
  3064. /* _argvec[0] holds current r2 across the call */ \
  3065. _argvec[1] = (unsigned long)_orig.r2; \
  3066. _argvec[2] = (unsigned long)_orig.nraddr; \
  3067. _argvec[2+1] = (unsigned long)arg1; \
  3068. _argvec[2+2] = (unsigned long)arg2; \
  3069. _argvec[2+3] = (unsigned long)arg3; \
  3070. _argvec[2+4] = (unsigned long)arg4; \
  3071. _argvec[2+5] = (unsigned long)arg5; \
  3072. __asm__ volatile( \
  3073. "mr 11,%1\n\t" \
  3074. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3075. "stw 2,-8(11)\n\t" /* save tocptr */ \
  3076. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  3077. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  3078. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  3079. "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
  3080. "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
  3081. "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
  3082. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  3083. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3084. "mr 11,%1\n\t" \
  3085. "mr %0,3\n\t" \
  3086. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  3087. VG_CONTRACT_FRAME_BY(512) \
  3088. : /*out*/ "=r" (_res) \
  3089. : /*in*/ "r" (&_argvec[2]) \
  3090. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3091. ); \
  3092. lval = (__typeof__(lval)) _res; \
  3093. } while (0)
  3094. #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
  3095. do { \
  3096. volatile OrigFn _orig = (orig); \
  3097. volatile unsigned long _argvec[3+6]; \
  3098. volatile unsigned long _res; \
  3099. /* _argvec[0] holds current r2 across the call */ \
  3100. _argvec[1] = (unsigned long)_orig.r2; \
  3101. _argvec[2] = (unsigned long)_orig.nraddr; \
  3102. _argvec[2+1] = (unsigned long)arg1; \
  3103. _argvec[2+2] = (unsigned long)arg2; \
  3104. _argvec[2+3] = (unsigned long)arg3; \
  3105. _argvec[2+4] = (unsigned long)arg4; \
  3106. _argvec[2+5] = (unsigned long)arg5; \
  3107. _argvec[2+6] = (unsigned long)arg6; \
  3108. __asm__ volatile( \
  3109. "mr 11,%1\n\t" \
  3110. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3111. "stw 2,-8(11)\n\t" /* save tocptr */ \
  3112. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  3113. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  3114. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  3115. "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
  3116. "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
  3117. "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
  3118. "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
  3119. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  3120. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3121. "mr 11,%1\n\t" \
  3122. "mr %0,3\n\t" \
  3123. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  3124. VG_CONTRACT_FRAME_BY(512) \
  3125. : /*out*/ "=r" (_res) \
  3126. : /*in*/ "r" (&_argvec[2]) \
  3127. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3128. ); \
  3129. lval = (__typeof__(lval)) _res; \
  3130. } while (0)
  3131. #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  3132. arg7) \
  3133. do { \
  3134. volatile OrigFn _orig = (orig); \
  3135. volatile unsigned long _argvec[3+7]; \
  3136. volatile unsigned long _res; \
  3137. /* _argvec[0] holds current r2 across the call */ \
  3138. _argvec[1] = (unsigned long)_orig.r2; \
  3139. _argvec[2] = (unsigned long)_orig.nraddr; \
  3140. _argvec[2+1] = (unsigned long)arg1; \
  3141. _argvec[2+2] = (unsigned long)arg2; \
  3142. _argvec[2+3] = (unsigned long)arg3; \
  3143. _argvec[2+4] = (unsigned long)arg4; \
  3144. _argvec[2+5] = (unsigned long)arg5; \
  3145. _argvec[2+6] = (unsigned long)arg6; \
  3146. _argvec[2+7] = (unsigned long)arg7; \
  3147. __asm__ volatile( \
  3148. "mr 11,%1\n\t" \
  3149. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3150. "stw 2,-8(11)\n\t" /* save tocptr */ \
  3151. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  3152. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  3153. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  3154. "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
  3155. "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
  3156. "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
  3157. "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
  3158. "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
  3159. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  3160. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3161. "mr 11,%1\n\t" \
  3162. "mr %0,3\n\t" \
  3163. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  3164. VG_CONTRACT_FRAME_BY(512) \
  3165. : /*out*/ "=r" (_res) \
  3166. : /*in*/ "r" (&_argvec[2]) \
  3167. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3168. ); \
  3169. lval = (__typeof__(lval)) _res; \
  3170. } while (0)
  3171. #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  3172. arg7,arg8) \
  3173. do { \
  3174. volatile OrigFn _orig = (orig); \
  3175. volatile unsigned long _argvec[3+8]; \
  3176. volatile unsigned long _res; \
  3177. /* _argvec[0] holds current r2 across the call */ \
  3178. _argvec[1] = (unsigned long)_orig.r2; \
  3179. _argvec[2] = (unsigned long)_orig.nraddr; \
  3180. _argvec[2+1] = (unsigned long)arg1; \
  3181. _argvec[2+2] = (unsigned long)arg2; \
  3182. _argvec[2+3] = (unsigned long)arg3; \
  3183. _argvec[2+4] = (unsigned long)arg4; \
  3184. _argvec[2+5] = (unsigned long)arg5; \
  3185. _argvec[2+6] = (unsigned long)arg6; \
  3186. _argvec[2+7] = (unsigned long)arg7; \
  3187. _argvec[2+8] = (unsigned long)arg8; \
  3188. __asm__ volatile( \
  3189. "mr 11,%1\n\t" \
  3190. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3191. "stw 2,-8(11)\n\t" /* save tocptr */ \
  3192. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  3193. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  3194. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  3195. "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
  3196. "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
  3197. "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
  3198. "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
  3199. "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
  3200. "lwz 10, 32(11)\n\t" /* arg8->r10 */ \
  3201. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  3202. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3203. "mr 11,%1\n\t" \
  3204. "mr %0,3\n\t" \
  3205. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  3206. VG_CONTRACT_FRAME_BY(512) \
  3207. : /*out*/ "=r" (_res) \
  3208. : /*in*/ "r" (&_argvec[2]) \
  3209. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3210. ); \
  3211. lval = (__typeof__(lval)) _res; \
  3212. } while (0)
  3213. #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  3214. arg7,arg8,arg9) \
  3215. do { \
  3216. volatile OrigFn _orig = (orig); \
  3217. volatile unsigned long _argvec[3+9]; \
  3218. volatile unsigned long _res; \
  3219. /* _argvec[0] holds current r2 across the call */ \
  3220. _argvec[1] = (unsigned long)_orig.r2; \
  3221. _argvec[2] = (unsigned long)_orig.nraddr; \
  3222. _argvec[2+1] = (unsigned long)arg1; \
  3223. _argvec[2+2] = (unsigned long)arg2; \
  3224. _argvec[2+3] = (unsigned long)arg3; \
  3225. _argvec[2+4] = (unsigned long)arg4; \
  3226. _argvec[2+5] = (unsigned long)arg5; \
  3227. _argvec[2+6] = (unsigned long)arg6; \
  3228. _argvec[2+7] = (unsigned long)arg7; \
  3229. _argvec[2+8] = (unsigned long)arg8; \
  3230. _argvec[2+9] = (unsigned long)arg9; \
  3231. __asm__ volatile( \
  3232. "mr 11,%1\n\t" \
  3233. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3234. "stw 2,-8(11)\n\t" /* save tocptr */ \
  3235. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  3236. VG_EXPAND_FRAME_BY_trashes_r3(64) \
  3237. /* arg9 */ \
  3238. "lwz 3,36(11)\n\t" \
  3239. "stw 3,56(1)\n\t" \
  3240. /* args1-8 */ \
  3241. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  3242. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  3243. "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
  3244. "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
  3245. "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
  3246. "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
  3247. "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
  3248. "lwz 10, 32(11)\n\t" /* arg8->r10 */ \
  3249. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  3250. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3251. "mr 11,%1\n\t" \
  3252. "mr %0,3\n\t" \
  3253. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  3254. VG_CONTRACT_FRAME_BY(64) \
  3255. VG_CONTRACT_FRAME_BY(512) \
  3256. : /*out*/ "=r" (_res) \
  3257. : /*in*/ "r" (&_argvec[2]) \
  3258. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3259. ); \
  3260. lval = (__typeof__(lval)) _res; \
  3261. } while (0)
  3262. #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  3263. arg7,arg8,arg9,arg10) \
  3264. do { \
  3265. volatile OrigFn _orig = (orig); \
  3266. volatile unsigned long _argvec[3+10]; \
  3267. volatile unsigned long _res; \
  3268. /* _argvec[0] holds current r2 across the call */ \
  3269. _argvec[1] = (unsigned long)_orig.r2; \
  3270. _argvec[2] = (unsigned long)_orig.nraddr; \
  3271. _argvec[2+1] = (unsigned long)arg1; \
  3272. _argvec[2+2] = (unsigned long)arg2; \
  3273. _argvec[2+3] = (unsigned long)arg3; \
  3274. _argvec[2+4] = (unsigned long)arg4; \
  3275. _argvec[2+5] = (unsigned long)arg5; \
  3276. _argvec[2+6] = (unsigned long)arg6; \
  3277. _argvec[2+7] = (unsigned long)arg7; \
  3278. _argvec[2+8] = (unsigned long)arg8; \
  3279. _argvec[2+9] = (unsigned long)arg9; \
  3280. _argvec[2+10] = (unsigned long)arg10; \
  3281. __asm__ volatile( \
  3282. "mr 11,%1\n\t" \
  3283. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3284. "stw 2,-8(11)\n\t" /* save tocptr */ \
  3285. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  3286. VG_EXPAND_FRAME_BY_trashes_r3(64) \
  3287. /* arg10 */ \
  3288. "lwz 3,40(11)\n\t" \
  3289. "stw 3,60(1)\n\t" \
  3290. /* arg9 */ \
  3291. "lwz 3,36(11)\n\t" \
  3292. "stw 3,56(1)\n\t" \
  3293. /* args1-8 */ \
  3294. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  3295. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  3296. "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
  3297. "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
  3298. "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
  3299. "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
  3300. "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
  3301. "lwz 10, 32(11)\n\t" /* arg8->r10 */ \
  3302. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  3303. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3304. "mr 11,%1\n\t" \
  3305. "mr %0,3\n\t" \
  3306. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  3307. VG_CONTRACT_FRAME_BY(64) \
  3308. VG_CONTRACT_FRAME_BY(512) \
  3309. : /*out*/ "=r" (_res) \
  3310. : /*in*/ "r" (&_argvec[2]) \
  3311. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3312. ); \
  3313. lval = (__typeof__(lval)) _res; \
  3314. } while (0)
  3315. #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  3316. arg7,arg8,arg9,arg10,arg11) \
  3317. do { \
  3318. volatile OrigFn _orig = (orig); \
  3319. volatile unsigned long _argvec[3+11]; \
  3320. volatile unsigned long _res; \
  3321. /* _argvec[0] holds current r2 across the call */ \
  3322. _argvec[1] = (unsigned long)_orig.r2; \
  3323. _argvec[2] = (unsigned long)_orig.nraddr; \
  3324. _argvec[2+1] = (unsigned long)arg1; \
  3325. _argvec[2+2] = (unsigned long)arg2; \
  3326. _argvec[2+3] = (unsigned long)arg3; \
  3327. _argvec[2+4] = (unsigned long)arg4; \
  3328. _argvec[2+5] = (unsigned long)arg5; \
  3329. _argvec[2+6] = (unsigned long)arg6; \
  3330. _argvec[2+7] = (unsigned long)arg7; \
  3331. _argvec[2+8] = (unsigned long)arg8; \
  3332. _argvec[2+9] = (unsigned long)arg9; \
  3333. _argvec[2+10] = (unsigned long)arg10; \
  3334. _argvec[2+11] = (unsigned long)arg11; \
  3335. __asm__ volatile( \
  3336. "mr 11,%1\n\t" \
  3337. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3338. "stw 2,-8(11)\n\t" /* save tocptr */ \
  3339. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  3340. VG_EXPAND_FRAME_BY_trashes_r3(72) \
  3341. /* arg11 */ \
  3342. "lwz 3,44(11)\n\t" \
  3343. "stw 3,64(1)\n\t" \
  3344. /* arg10 */ \
  3345. "lwz 3,40(11)\n\t" \
  3346. "stw 3,60(1)\n\t" \
  3347. /* arg9 */ \
  3348. "lwz 3,36(11)\n\t" \
  3349. "stw 3,56(1)\n\t" \
  3350. /* args1-8 */ \
  3351. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  3352. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  3353. "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
  3354. "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
  3355. "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
  3356. "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
  3357. "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
  3358. "lwz 10, 32(11)\n\t" /* arg8->r10 */ \
  3359. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  3360. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3361. "mr 11,%1\n\t" \
  3362. "mr %0,3\n\t" \
  3363. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  3364. VG_CONTRACT_FRAME_BY(72) \
  3365. VG_CONTRACT_FRAME_BY(512) \
  3366. : /*out*/ "=r" (_res) \
  3367. : /*in*/ "r" (&_argvec[2]) \
  3368. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3369. ); \
  3370. lval = (__typeof__(lval)) _res; \
  3371. } while (0)
  3372. #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
  3373. arg7,arg8,arg9,arg10,arg11,arg12) \
  3374. do { \
  3375. volatile OrigFn _orig = (orig); \
  3376. volatile unsigned long _argvec[3+12]; \
  3377. volatile unsigned long _res; \
  3378. /* _argvec[0] holds current r2 across the call */ \
  3379. _argvec[1] = (unsigned long)_orig.r2; \
  3380. _argvec[2] = (unsigned long)_orig.nraddr; \
  3381. _argvec[2+1] = (unsigned long)arg1; \
  3382. _argvec[2+2] = (unsigned long)arg2; \
  3383. _argvec[2+3] = (unsigned long)arg3; \
  3384. _argvec[2+4] = (unsigned long)arg4; \
  3385. _argvec[2+5] = (unsigned long)arg5; \
  3386. _argvec[2+6] = (unsigned long)arg6; \
  3387. _argvec[2+7] = (unsigned long)arg7; \
  3388. _argvec[2+8] = (unsigned long)arg8; \
  3389. _argvec[2+9] = (unsigned long)arg9; \
  3390. _argvec[2+10] = (unsigned long)arg10; \
  3391. _argvec[2+11] = (unsigned long)arg11; \
  3392. _argvec[2+12] = (unsigned long)arg12; \
  3393. __asm__ volatile( \
  3394. "mr 11,%1\n\t" \
  3395. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3396. "stw 2,-8(11)\n\t" /* save tocptr */ \
  3397. "lwz 2,-4(11)\n\t" /* use nraddr's tocptr */ \
  3398. VG_EXPAND_FRAME_BY_trashes_r3(72) \
  3399. /* arg12 */ \
  3400. "lwz 3,48(11)\n\t" \
  3401. "stw 3,68(1)\n\t" \
  3402. /* arg11 */ \
  3403. "lwz 3,44(11)\n\t" \
  3404. "stw 3,64(1)\n\t" \
  3405. /* arg10 */ \
  3406. "lwz 3,40(11)\n\t" \
  3407. "stw 3,60(1)\n\t" \
  3408. /* arg9 */ \
  3409. "lwz 3,36(11)\n\t" \
  3410. "stw 3,56(1)\n\t" \
  3411. /* args1-8 */ \
  3412. "lwz 3, 4(11)\n\t" /* arg1->r3 */ \
  3413. "lwz 4, 8(11)\n\t" /* arg2->r4 */ \
  3414. "lwz 5, 12(11)\n\t" /* arg3->r5 */ \
  3415. "lwz 6, 16(11)\n\t" /* arg4->r6 */ \
  3416. "lwz 7, 20(11)\n\t" /* arg5->r7 */ \
  3417. "lwz 8, 24(11)\n\t" /* arg6->r8 */ \
  3418. "lwz 9, 28(11)\n\t" /* arg7->r9 */ \
  3419. "lwz 10, 32(11)\n\t" /* arg8->r10 */ \
  3420. "lwz 11, 0(11)\n\t" /* target->r11 */ \
  3421. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3422. "mr 11,%1\n\t" \
  3423. "mr %0,3\n\t" \
  3424. "lwz 2,-8(11)\n\t" /* restore tocptr */ \
  3425. VG_CONTRACT_FRAME_BY(72) \
  3426. VG_CONTRACT_FRAME_BY(512) \
  3427. : /*out*/ "=r" (_res) \
  3428. : /*in*/ "r" (&_argvec[2]) \
  3429. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3430. ); \
  3431. lval = (__typeof__(lval)) _res; \
  3432. } while (0)
  3433. #endif /* PLAT_ppc32_aix5 */
  3434. /* ------------------------ ppc64-aix5 ------------------------- */
  3435. #if defined(PLAT_ppc64_aix5)
  3436. /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
  3437. /* These regs are trashed by the hidden call. */
  3438. #define __CALLER_SAVED_REGS \
  3439. "lr", "ctr", "xer", \
  3440. "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
  3441. "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
  3442. "r11", "r12", "r13"
  3443. /* Expand the stack frame, copying enough info that unwinding
  3444. still works. Trashes r3. */
  3445. #define VG_EXPAND_FRAME_BY_trashes_r3(_n_fr) \
  3446. "addi 1,1,-" #_n_fr "\n\t" \
  3447. "ld 3," #_n_fr "(1)\n\t" \
  3448. "std 3,0(1)\n\t"
  3449. #define VG_CONTRACT_FRAME_BY(_n_fr) \
  3450. "addi 1,1," #_n_fr "\n\t"
  3451. /* These CALL_FN_ macros assume that on ppc64-aix5, sizeof(unsigned
  3452. long) == 8. */
  3453. #define CALL_FN_W_v(lval, orig) \
  3454. do { \
  3455. volatile OrigFn _orig = (orig); \
  3456. volatile unsigned long _argvec[3+0]; \
  3457. volatile unsigned long _res; \
  3458. /* _argvec[0] holds current r2 across the call */ \
  3459. _argvec[1] = (unsigned long)_orig.r2; \
  3460. _argvec[2] = (unsigned long)_orig.nraddr; \
  3461. __asm__ volatile( \
  3462. "mr 11,%1\n\t" \
  3463. VG_EXPAND_FRAME_BY_trashes_r3(512) \
  3464. "std 2,-16(11)\n\t" /* save tocptr */ \
  3465. "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
  3466. "ld 11, 0(11)\n\t" /* target->r11 */ \
  3467. VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
  3468. "mr 11,%1\n\t" \
  3469. "mr %0,3\n\t" \
  3470. "ld 2,-16(11)\n\t" /* restore tocptr */ \
  3471. VG_CONTRACT_FRAME_BY(512) \
  3472. : /*out*/ "=r" (_res) \
  3473. : /*in*/ "r" (&_argvec[2]) \
  3474. : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
  3475. ); \
  3476. lval = (__typeof__(lval)) _res; \
  3477. } while (0)
/* Call an original (non-redirected) function taking 1 word argument;
   result (r3) goes to 'lval'.  Args live at 8(11), i.e. &_argvec[3]. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+1]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 2 word arguments;
   result (r3) goes to 'lval'.  Args loaded into r3/r4 from 8(11)/16(11). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+2]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 3 word arguments;
   result (r3) goes to 'lval'. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+3]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 4 word arguments;
   result (r3) goes to 'lval'. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+4]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 5 word arguments;
   result (r3) goes to 'lval'. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+5]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 6 word arguments;
   result (r3) goes to 'lval'. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+6]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 7 word arguments;
   result (r3) goes to 'lval'. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+7]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 8 word arguments
   (the maximum passable entirely in registers r3..r10); result (r3)
   goes to 'lval'. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+8]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 9 word arguments;
   result (r3) goes to 'lval'.  args1-8 go in r3..r10; arg9 is passed
   on the stack at 112(1) inside an extra 128-byte frame. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+9]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         VG_EXPAND_FRAME_BY_trashes_r3(128) \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(128) \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 10 word arguments;
   result (r3) goes to 'lval'.  args1-8 go in r3..r10; arg9/arg10 are
   passed on the stack at 112(1)/120(1) inside an extra 128-byte frame. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+10]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         VG_EXPAND_FRAME_BY_trashes_r3(128) \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(128) \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 11 word arguments;
   result (r3) goes to 'lval'.  args1-8 go in r3..r10; arg9..arg11 are
   passed on the stack at 112/120/128(1) inside an extra 144-byte frame. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+11]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         VG_EXPAND_FRAME_BY_trashes_r3(144) \
         /* arg11 */ \
         "ld 3,88(11)\n\t" \
         "std 3,128(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(144) \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an original (non-redirected) function taking 12 word arguments;
   result (r3) goes to 'lval'.  args1-8 go in r3..r10; arg9..arg12 are
   passed on the stack at 112/120/128/136(1) inside an extra 144-byte
   frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+12]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      _argvec[2+12] = (unsigned long)arg12; \
      __asm__ volatile( \
         "mr 11,%1\n\t" \
         VG_EXPAND_FRAME_BY_trashes_r3(512) \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         VG_EXPAND_FRAME_BY_trashes_r3(144) \
         /* arg12 */ \
         "ld 3,96(11)\n\t" \
         "std 3,136(1)\n\t" \
         /* arg11 */ \
         "ld 3,88(11)\n\t" \
         "std 3,128(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(11)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(11)\n\t" \
         "std 3,112(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */ \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */ \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */ \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */ \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */ \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */ \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */ \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VG_CONTRACT_FRAME_BY(144) \
         VG_CONTRACT_FRAME_BY(512) \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
  3972. #endif /* PLAT_ppc64_aix5 */
  3973. /* ------------------------------------------------------------------ */
  3974. /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
  3975. /* */
  3976. /* ------------------------------------------------------------------ */
  3977. /* Some request codes. There are many more of these, but most are not
  3978. exposed to end-user view. These are the public ones, all of the
  3979. form 0x1000 + small_number.
  3980. Core ones are in the range 0x00000000--0x0000ffff. The non-public
  3981. ones start at 0x2000.
  3982. */
  3983. /* These macros are used by tools -- they must be public, but don't
  3984. embed them into other programs. */
/* Build a tool-specific user-request base code from two identifying
   characters: 'a' (low 8 bits) in bits 31..24, 'b' in bits 23..16. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code 'v' belongs to the tool identified by a/b,
   i.e. the top 16 bits of 'v' equal that tool's base code. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
  3989. /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
  3990. This enum comprises an ABI exported by Valgrind to programs
  3991. which use client requests. DO NOT CHANGE THE ORDER OF THESE
  3992. ENTRIES, NOR DELETE ANY -- add new ones at the end. */
/* Request codes passed from client programs to the Valgrind core.
   Part of the exported ABI: values and order are frozen (see the
   ABIWARNING above); new requests may only be appended. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF           = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701
   } Vg_ClientRequest;
/* Non-GCC compilers don't have the __extension__ keyword; define it
   away so the code below still parses. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
  4046. /*
  4047. * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
  4048. * client request and whose value equals the client request result.
  4049. */
#if defined(NVALGRIND)

/* Valgrind support compiled out: the request is never issued and the
   expression's value is simply the supplied default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
   (_zzq_default)

#else /*defined(NVALGRIND)*/

#if defined(_MSC_VER)

/* MSVC lacks GCC statement expressions, so dispatch through a small
   inline helper function instead. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
   (vg_VALGRIND_DO_CLIENT_REQUEST_EXPR((uintptr_t)(_zzq_default), \
        (_zzq_request), (uintptr_t)(_zzq_arg1), (uintptr_t)(_zzq_arg2), \
        (uintptr_t)(_zzq_arg3), (uintptr_t)(_zzq_arg4),           \
        (uintptr_t)(_zzq_arg5)))

/* Helper for the MSVC expression form above: issues the client
   request and returns its result (or _zzq_default when not running
   on Valgrind). */
static __inline unsigned
vg_VALGRIND_DO_CLIENT_REQUEST_EXPR(uintptr_t _zzq_default,
                                   unsigned _zzq_request, uintptr_t _zzq_arg1,
                                   uintptr_t _zzq_arg2, uintptr_t _zzq_arg3,
                                   uintptr_t _zzq_arg4, uintptr_t _zzq_arg5)
{
   unsigned _zzq_rlval;
   VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, _zzq_request,
                      _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5);
   return _zzq_rlval;
}

#else /*defined(_MSC_VER)*/

/* GCC form: a statement expression whose value is the request's
   result.  NOTE(review): no ';' after the VALGRIND_DO_CLIENT_REQUEST
   invocation here, while the MSVC branch above uses one -- presumably
   the macro expansion supplies its own statement terminator; confirm
   against its definition before changing. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
   (__extension__({unsigned int _zzq_rlval;                       \
    VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, _zzq_request, \
                _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
    _zzq_rlval;                                                   \
   }))

#endif /*defined(_MSC_VER)*/
#endif /*defined(NVALGRIND)*/
  4086. /* Returns the number of Valgrinds this code is running under. That
  4087. is, 0 if running natively, 1 if running under Valgrind, 2 if
  4088. running under Valgrind which is running under another Valgrind,
  4089. etc. */
  4090. #define RUNNING_ON_VALGRIND \
  4091. VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
  4092. VG_USERREQ__RUNNING_ON_VALGRIND, \
  4093. 0, 0, 0, 0, 0) \
  4094. /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
  4095. _qzz_len - 1]. Useful if you are debugging a JITter or some such,
  4096. since it provides a way to make sure valgrind will retranslate the
  4097. invalidated area. Returns no value. */
  4098. #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
  4099. {unsigned int _qzz_res; \
  4100. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4101. VG_USERREQ__DISCARD_TRANSLATIONS, \
  4102. _qzz_addr, _qzz_len, 0, 0, 0); \
  4103. }
  4104. /* These requests are for getting Valgrind itself to print something.
  4105. Possibly with a backtrace. This is a really ugly hack. The return value
  4106. is the number of characters printed, excluding the "**<pid>** " part at the
  4107. start and the backtrace (if present). */
#if defined(NVALGRIND)

/* Valgrind support compiled out: the printf requests are no-ops. */
# define VALGRIND_PRINTF(...)
# define VALGRIND_PRINTF_BACKTRACE(...)

#else /* NVALGRIND */

#if !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif

/* Print a printf-style message to the Valgrind log via the
   PRINTF_VALIST_BY_REF request (the va_list is passed by reference;
   see the deprecation note on the by-value variants above).
   Returns the number of characters printed; the request default is
   0 so that is the result when not running under Valgrind. */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
   unsigned long _qzz_res;
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER)
   /* MSVC: cast pointers through uintptr_t for the request args. */
   VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
}

#if !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif

/* Same as VALGRIND_PRINTF, but the message is followed by a stack
   backtrace (PRINTF_BACKTRACE_VALIST_BY_REF request). */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
   unsigned long _qzz_res;
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER)
   /* MSVC: cast pointers through uintptr_t for the request args. */
   VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
}

#endif /* NVALGRIND */
  4173. /* These requests allow control to move from the simulated CPU to the
real CPU, calling an arbitrary function.
  4175. Note that the current ThreadId is inserted as the first argument.
  4176. So this call:
  4177. VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
  4178. requires f to have this signature:
  4179. Word f(Word tid, Word arg1, Word arg2)
  4180. where "Word" is a word-sized type.
  4181. Note that these client requests are not entirely reliable. For example,
  4182. if you call a function with them that subsequently calls printf(),
  4183. there's a high chance Valgrind will crash. Generally, your prospects of
  4184. these working are made higher if the called function does not refer to
  4185. any global variables, and does not refer to any libc or other functions
  4186. (printf et al). Any kind of entanglement with libc or dynamic linking is
  4187. likely to have a bad outcome, for tricky reasons which we've grappled
  4188. with a lot in the past.
  4189. */
/* Run _qyy_fn on the real CPU; Valgrind prepends the current ThreadId
   as the function's first argument (see the caveats above).  Each
   macro is a statement expression yielding the function's word result;
   the default 0 is returned when not running under Valgrind. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
   __extension__                                                  \
   ({unsigned long _qyy_res;                                      \
    VALGRIND_DO_CLIENT_REQUEST(_qyy_res, 0 /* default return */,  \
                               VG_USERREQ__CLIENT_CALL0,          \
                               _qyy_fn,                           \
                               0, 0, 0, 0);                       \
    _qyy_res;                                                     \
   })

/* As CALL0, plus one caller-supplied argument after the ThreadId. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
   __extension__                                                  \
   ({unsigned long _qyy_res;                                      \
    VALGRIND_DO_CLIENT_REQUEST(_qyy_res, 0 /* default return */,  \
                               VG_USERREQ__CLIENT_CALL1,          \
                               _qyy_fn,                           \
                               _qyy_arg1, 0, 0, 0);               \
    _qyy_res;                                                     \
   })

/* As CALL0, plus two caller-supplied arguments after the ThreadId. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
   __extension__                                                  \
   ({unsigned long _qyy_res;                                      \
    VALGRIND_DO_CLIENT_REQUEST(_qyy_res, 0 /* default return */,  \
                               VG_USERREQ__CLIENT_CALL2,          \
                               _qyy_fn,                           \
                               _qyy_arg1, _qyy_arg2, 0, 0);       \
    _qyy_res;                                                     \
   })

/* As CALL0, plus three caller-supplied arguments after the ThreadId. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
   __extension__                                                  \
   ({unsigned long _qyy_res;                                      \
    VALGRIND_DO_CLIENT_REQUEST(_qyy_res, 0 /* default return */,  \
                               VG_USERREQ__CLIENT_CALL3,          \
                               _qyy_fn,                           \
                               _qyy_arg1, _qyy_arg2,              \
                               _qyy_arg3, 0);                     \
    _qyy_res;                                                     \
   })
  4227. /* Counts the number of errors that have been recorded by a tool. Nb:
  4228. the tool must record the errors with VG_(maybe_record_error)() or
  4229. VG_(unique_error)() for them to be counted. */
/* Statement expression yielding the number of errors the tool has
   recorded so far (0 when not running under Valgrind; see the note
   above about which errors are counted). */
#define VALGRIND_COUNT_ERRORS                                     \
   __extension__                                                  \
   ({unsigned int _qyy_res;                                       \
    VALGRIND_DO_CLIENT_REQUEST(_qyy_res, 0 /* default return */,  \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0);                    \
    _qyy_res;                                                     \
   })
  4238. /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
  4239. when heap blocks are allocated in order to give accurate results. This
  4240. happens automatically for the standard allocator functions such as
  4241. malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
  4242. delete[], etc.
  4243. But if your program uses a custom allocator, this doesn't automatically
  4244. happen, and Valgrind will not do as well. For example, if you allocate
superblocks with mmap() and then allocate chunks of the superblocks, all
  4246. Valgrind's observations will be at the mmap() level and it won't know that
  4247. the chunks should be considered separate entities. In Memcheck's case,
  4248. that means you probably won't get heap block overrun detection (because
  4249. there won't be redzones marked as unaddressable) and you definitely won't
  4250. get any leak detection.
  4251. The following client requests allow a custom allocator to be annotated so
  4252. that it can be handled accurately by Valgrind.
  4253. VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
  4254. by a malloc()-like function. For Memcheck (an illustrative case), this
  4255. does two things:
  4256. - It records that the block has been allocated. This means any addresses
  4257. within the block mentioned in error messages will be
  4258. identified as belonging to the block. It also means that if the block
  4259. isn't freed it will be detected by the leak checker.
  4260. - It marks the block as being addressable and undefined (if 'is_zeroed' is
  4261. not set), or addressable and defined (if 'is_zeroed' is set). This
  4262. controls how accesses to the block by the program are handled.
  4263. 'addr' is the start of the usable block (ie. after any
  4264. redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
  4265. can apply redzones -- these are blocks of padding at the start and end of
  4266. each block. Adding redzones is recommended as it makes it much more likely
  4267. Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
  4268. zeroed (or filled with another predictable value), as is the case for
  4269. calloc().
  4270. VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
  4271. heap block -- that will be used by the client program -- is allocated.
  4272. It's best to put it at the outermost level of the allocator if possible;
  4273. for example, if you have a function my_alloc() which calls
  4274. internal_alloc(), and the client request is put inside internal_alloc(),
  4275. stack traces relating to the heap block will contain entries for both
  4276. my_alloc() and internal_alloc(), which is probably not what you want.
  4277. For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
  4278. custom blocks from within a heap block, B, that has been allocated with
  4279. malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
  4280. -- the custom blocks will take precedence.
  4281. VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
  4282. Memcheck, it does two things:
  4283. - It records that the block has been deallocated. This assumes that the
  4284. block was annotated as having been allocated via
  4285. VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
  4286. - It marks the block as being unaddressable.
  4287. VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
  4288. heap block is deallocated.
  4289. In many cases, these two client requests will not be enough to get your
  4290. allocator working well with Memcheck. More specifically, if your allocator
  4291. writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
  4292. will be necessary to mark the memory as addressable just before the zeroing
  4293. occurs, otherwise you'll get a lot of invalid write errors. For example,
  4294. you'll need to do this if your allocator recycles freed blocks, but it
  4295. zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
  4296. Alternatively, if your allocator reuses freed blocks for allocator-internal
  4297. data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
  4298. Really, what's happening is a blurring of the lines between the client
  4299. program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
  4300. memory should be considered unaddressable to the client program, but the
  4301. allocator knows more than the rest of the client program and so may be able
  4302. to safely access it. Extra client requests are necessary for Valgrind to
  4303. understand the distinction between the allocator and the rest of the
  4304. program.
  4305. Note: there is currently no VALGRIND_REALLOCLIKE_BLOCK client request; it
  4306. has to be emulated with MALLOCLIKE/FREELIKE and memory copying.
  4307. Ignored if addr == 0.
  4308. */
  4309. #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
  4310. {unsigned int _qzz_res; \
  4311. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4312. VG_USERREQ__MALLOCLIKE_BLOCK, \
  4313. addr, sizeB, rzB, is_zeroed, 0); \
  4314. }
  4315. /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
  4316. Ignored if addr == 0.
  4317. */
  4318. #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
  4319. {unsigned int _qzz_res; \
  4320. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4321. VG_USERREQ__FREELIKE_BLOCK, \
  4322. addr, rzB, 0, 0, 0); \
  4323. }
  4324. /* Create a memory pool. */
  4325. #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
  4326. {unsigned int _qzz_res; \
  4327. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4328. VG_USERREQ__CREATE_MEMPOOL, \
  4329. pool, rzB, is_zeroed, 0, 0); \
  4330. }
  4331. /* Destroy a memory pool. */
  4332. #define VALGRIND_DESTROY_MEMPOOL(pool) \
  4333. {unsigned int _qzz_res; \
  4334. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4335. VG_USERREQ__DESTROY_MEMPOOL, \
  4336. pool, 0, 0, 0, 0); \
  4337. }
  4338. /* Associate a piece of memory with a memory pool. */
  4339. #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
  4340. {unsigned int _qzz_res; \
  4341. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4342. VG_USERREQ__MEMPOOL_ALLOC, \
  4343. pool, addr, size, 0, 0); \
  4344. }
  4345. /* Disassociate a piece of memory from a memory pool. */
  4346. #define VALGRIND_MEMPOOL_FREE(pool, addr) \
  4347. {unsigned int _qzz_res; \
  4348. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4349. VG_USERREQ__MEMPOOL_FREE, \
  4350. pool, addr, 0, 0, 0); \
  4351. }
  4352. /* Disassociate any pieces outside a particular range. */
  4353. #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
  4354. {unsigned int _qzz_res; \
  4355. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4356. VG_USERREQ__MEMPOOL_TRIM, \
  4357. pool, addr, size, 0, 0); \
  4358. }
  4359. /* Resize and/or move a piece associated with a memory pool. */
  4360. #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
  4361. {unsigned int _qzz_res; \
  4362. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4363. VG_USERREQ__MOVE_MEMPOOL, \
  4364. poolA, poolB, 0, 0, 0); \
  4365. }
  4366. /* Resize and/or move a piece associated with a memory pool. */
  4367. #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
  4368. {unsigned int _qzz_res; \
  4369. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4370. VG_USERREQ__MEMPOOL_CHANGE, \
  4371. pool, addrA, addrB, size, 0); \
  4372. }
  4373. /* Return 1 if a mempool exists, else 0. */
  4374. #define VALGRIND_MEMPOOL_EXISTS(pool) \
  4375. __extension__ \
  4376. ({unsigned int _qzz_res; \
  4377. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4378. VG_USERREQ__MEMPOOL_EXISTS, \
  4379. pool, 0, 0, 0, 0); \
  4380. _qzz_res; \
  4381. })
  4382. /* Mark a piece of memory as being a stack. Returns a stack id. */
  4383. #define VALGRIND_STACK_REGISTER(start, end) \
  4384. __extension__ \
  4385. ({unsigned int _qzz_res; \
  4386. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4387. VG_USERREQ__STACK_REGISTER, \
  4388. start, end, 0, 0, 0); \
  4389. _qzz_res; \
  4390. })
  4391. /* Unmark the piece of memory associated with a stack id as being a
  4392. stack. */
  4393. #define VALGRIND_STACK_DEREGISTER(id) \
  4394. {unsigned int _qzz_res; \
  4395. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4396. VG_USERREQ__STACK_DEREGISTER, \
  4397. id, 0, 0, 0, 0); \
  4398. }
  4399. /* Change the start and end address of the stack id. */
  4400. #define VALGRIND_STACK_CHANGE(id, start, end) \
  4401. {unsigned int _qzz_res; \
  4402. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4403. VG_USERREQ__STACK_CHANGE, \
  4404. id, start, end, 0, 0); \
  4405. }
  4406. /* Load PDB debug info for Wine PE image_map. */
  4407. #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
  4408. {unsigned int _qzz_res; \
  4409. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4410. VG_USERREQ__LOAD_PDB_DEBUGINFO, \
  4411. fd, ptr, total_size, delta, 0); \
  4412. }
  4413. /* Map a code address to a source file name and line number. buf64
  4414. must point to a 64-byte buffer in the caller's address space. The
  4415. result will be dumped in there and is guaranteed to be zero
  4416. terminated. If no info is found, the first byte is set to zero. */
  4417. #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
  4418. {unsigned int _qzz_res; \
  4419. VALGRIND_DO_CLIENT_REQUEST(_qzz_res, 0, \
  4420. VG_USERREQ__MAP_IP_TO_SRCLOC, \
  4421. addr, buf64, 0, 0, 0); \
  4422. }
  4423. #undef PLAT_x86_linux
  4424. #undef PLAT_amd64_linux
  4425. #undef PLAT_ppc32_linux
  4426. #undef PLAT_ppc64_linux
  4427. #undef PLAT_arm_linux
  4428. #undef PLAT_ppc32_aix5
  4429. #undef PLAT_ppc64_aix5
  4430. #endif /* __VALGRIND_H */