memoryobject.c

  1. /*
  2. * Memoryview object implementation
  3. * --------------------------------
  4. *
  5. * This implementation is a complete rewrite contributed by Stefan Krah in
  6. * Python 3.3. Substantial credit goes to Antoine Pitrou (who had already
  7. * fortified and rewritten the previous implementation) and Nick Coghlan
  8. * (who came up with the idea of the ManagedBuffer) for analyzing the complex
  9. * ownership rules.
  10. *
  11. */
  12. #include "Python.h"
  13. #include "pycore_abstract.h" // _PyIndex_Check()
  14. #include "pycore_object.h" // _PyObject_GC_UNTRACK()
  15. #include "pycore_strhex.h" // _Py_strhex_with_sep()
  16. #include <stddef.h> // offsetof()
  17. /*[clinic input]
  18. class memoryview "PyMemoryViewObject *" "&PyMemoryView_Type"
  19. [clinic start generated code]*/
  20. /*[clinic end generated code: output=da39a3ee5e6b4b0d input=e2e49d2192835219]*/
  21. #include "clinic/memoryobject.c.h"
  22. /****************************************************************************/
  23. /* ManagedBuffer Object */
  24. /****************************************************************************/
  25. /*
  26. ManagedBuffer Object:
  27. ---------------------
  28. The purpose of this object is to facilitate the handling of chained
  29. memoryviews that have the same underlying exporting object. PEP-3118
  30. allows the underlying object to change while a view is exported. This
  31. could lead to unexpected results when constructing a new memoryview
  32. from an existing memoryview.
  33. Rather than repeatedly redirecting buffer requests to the original base
  34. object, all chained memoryviews use a single buffer snapshot. This
  35. snapshot is generated by the constructor _PyManagedBuffer_FromObject().
  36. Ownership rules:
  37. ----------------
  38. The master buffer inside a managed buffer is filled in by the original
  39. base object. shape, strides, suboffsets and format are read-only for
  40. all consumers.
  41. A memoryview's buffer is a private copy of the exporter's buffer. shape,
  42. strides and suboffsets belong to the memoryview and are thus writable.
  43. If a memoryview itself exports several buffers via memory_getbuf(), all
  44. buffer copies share shape, strides and suboffsets. In this case, the
  45. arrays are NOT writable.
  46. Reference count assumptions:
  47. ----------------------------
  48. The 'obj' member of a Py_buffer must either be NULL or refer to the
  49. exporting base object. In the Python codebase, all getbufferprocs
  50. return a new reference to view.obj (example: bytes_buffer_getbuffer()).
  51. PyBuffer_Release() decrements view.obj (if non-NULL), so the
  52. releasebufferprocs must NOT decrement view.obj.
  53. */
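/* Illustrative sketch (hypothetical helper, not part of this file): chaining
   views. Assuming 'base' is any buffer exporter, the inner and the outer
   memoryview end up registered with the same managed buffer, so
   PyObject_GetBuffer() is called on 'base' exactly once, when the first view
   takes its snapshot via _PyManagedBuffer_FromObject(). */
static PyObject *
example_chained_views(PyObject *base)
{
    PyObject *outer = PyMemoryView_FromObject(base);   /* takes the snapshot */
    if (outer == NULL)
        return NULL;
    PyObject *inner = PyMemoryView_FromObject(outer);  /* reuses the snapshot */
    Py_DECREF(outer);
    return inner;   /* keeps the managed buffer, and thus 'base', alive */
}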
  54. static inline _PyManagedBufferObject *
  55. mbuf_alloc(void)
  56. {
  57. _PyManagedBufferObject *mbuf;
  58. mbuf = (_PyManagedBufferObject *)
  59. PyObject_GC_New(_PyManagedBufferObject, &_PyManagedBuffer_Type);
  60. if (mbuf == NULL)
  61. return NULL;
  62. mbuf->flags = 0;
  63. mbuf->exports = 0;
  64. mbuf->master.obj = NULL;
  65. _PyObject_GC_TRACK(mbuf);
  66. return mbuf;
  67. }
  68. static PyObject *
  69. _PyManagedBuffer_FromObject(PyObject *base, int flags)
  70. {
  71. _PyManagedBufferObject *mbuf;
  72. mbuf = mbuf_alloc();
  73. if (mbuf == NULL)
  74. return NULL;
  75. if (PyObject_GetBuffer(base, &mbuf->master, flags) < 0) {
  76. mbuf->master.obj = NULL;
  77. Py_DECREF(mbuf);
  78. return NULL;
  79. }
  80. return (PyObject *)mbuf;
  81. }
  82. static void
  83. mbuf_release(_PyManagedBufferObject *self)
  84. {
  85. if (self->flags&_Py_MANAGED_BUFFER_RELEASED)
  86. return;
  87. /* NOTE: at this point self->exports can still be > 0 if this function
  88. is called from mbuf_clear() to break up a reference cycle. */
  89. self->flags |= _Py_MANAGED_BUFFER_RELEASED;
  90. /* PyBuffer_Release() decrements master->obj and sets it to NULL. */
  91. _PyObject_GC_UNTRACK(self);
  92. PyBuffer_Release(&self->master);
  93. }
  94. static void
  95. mbuf_dealloc(_PyManagedBufferObject *self)
  96. {
  97. assert(self->exports == 0);
  98. mbuf_release(self);
  99. if (self->flags&_Py_MANAGED_BUFFER_FREE_FORMAT)
  100. PyMem_Free(self->master.format);
  101. PyObject_GC_Del(self);
  102. }
  103. static int
  104. mbuf_traverse(_PyManagedBufferObject *self, visitproc visit, void *arg)
  105. {
  106. Py_VISIT(self->master.obj);
  107. return 0;
  108. }
  109. static int
  110. mbuf_clear(_PyManagedBufferObject *self)
  111. {
  112. assert(self->exports >= 0);
  113. mbuf_release(self);
  114. return 0;
  115. }
  116. PyTypeObject _PyManagedBuffer_Type = {
  117. PyVarObject_HEAD_INIT(&PyType_Type, 0)
  118. "managedbuffer",
  119. sizeof(_PyManagedBufferObject),
  120. 0,
  121. (destructor)mbuf_dealloc, /* tp_dealloc */
  122. 0, /* tp_vectorcall_offset */
  123. 0, /* tp_getattr */
  124. 0, /* tp_setattr */
  125. 0, /* tp_as_async */
  126. 0, /* tp_repr */
  127. 0, /* tp_as_number */
  128. 0, /* tp_as_sequence */
  129. 0, /* tp_as_mapping */
  130. 0, /* tp_hash */
  131. 0, /* tp_call */
  132. 0, /* tp_str */
  133. PyObject_GenericGetAttr, /* tp_getattro */
  134. 0, /* tp_setattro */
  135. 0, /* tp_as_buffer */
  136. Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
  137. 0, /* tp_doc */
  138. (traverseproc)mbuf_traverse, /* tp_traverse */
  139. (inquiry)mbuf_clear /* tp_clear */
  140. };
  141. /****************************************************************************/
  142. /* MemoryView Object */
  143. /****************************************************************************/
  144. /* In the process of breaking reference cycles mbuf_release() can be
  145. called before memory_release(). */
  146. #define BASE_INACCESSIBLE(mv) \
  147. (((PyMemoryViewObject *)mv)->flags&_Py_MEMORYVIEW_RELEASED || \
  148. ((PyMemoryViewObject *)mv)->mbuf->flags&_Py_MANAGED_BUFFER_RELEASED)
  149. #define CHECK_RELEASED(mv) \
  150. if (BASE_INACCESSIBLE(mv)) { \
  151. PyErr_SetString(PyExc_ValueError, \
  152. "operation forbidden on released memoryview object"); \
  153. return NULL; \
  154. }
  155. #define CHECK_RELEASED_INT(mv) \
  156. if (BASE_INACCESSIBLE(mv)) { \
  157. PyErr_SetString(PyExc_ValueError, \
  158. "operation forbidden on released memoryview object"); \
  159. return -1; \
  160. }
  161. #define CHECK_RESTRICTED(mv) \
  162. if (((PyMemoryViewObject *)(mv))->flags & _Py_MEMORYVIEW_RESTRICTED) { \
  163. PyErr_SetString(PyExc_ValueError, \
  164. "cannot create new view on restricted memoryview"); \
  165. return NULL; \
  166. }
  167. #define CHECK_RESTRICTED_INT(mv) \
  168. if (((PyMemoryViewObject *)(mv))->flags & _Py_MEMORYVIEW_RESTRICTED) { \
  169. PyErr_SetString(PyExc_ValueError, \
  170. "cannot create new view on restricted memoryview"); \
  171. return -1; \
  172. }
  173. /* See gh-92888. These macros signal that we need to check the memoryview
  174. again due to possible read after frees. */
  175. #define CHECK_RELEASED_AGAIN(mv) CHECK_RELEASED(mv)
  176. #define CHECK_RELEASED_INT_AGAIN(mv) CHECK_RELEASED_INT(mv)
  177. #define CHECK_LIST_OR_TUPLE(v) \
  178. if (!PyList_Check(v) && !PyTuple_Check(v)) { \
  179. PyErr_SetString(PyExc_TypeError, \
  180. #v " must be a list or a tuple"); \
  181. return NULL; \
  182. }
  183. #define VIEW_ADDR(mv) (&((PyMemoryViewObject *)mv)->view)
  184. /* Check for the presence of suboffsets in the first dimension. */
  185. #define HAVE_PTR(suboffsets, dim) (suboffsets && suboffsets[dim] >= 0)
  186. /* Adjust ptr if suboffsets are present. */
  187. #define ADJUST_PTR(ptr, suboffsets, dim) \
  188. (HAVE_PTR(suboffsets, dim) ? *((char**)ptr) + suboffsets[dim] : ptr)
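/*
   Illustrative sketch: for a PIL-style buffer, suboffsets[dim] >= 0 means the
   pointer stored at 'ptr' must be dereferenced and offset before use.
   ADJUST_PTR(ptr, suboffsets, dim) is then equivalent to (hypothetical name):

       char *real = *(char **)ptr + suboffsets[dim];

   and degenerates to 'ptr' itself for ordinary strided buffers.
*/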
  189. /* Memoryview buffer properties */
  190. #define MV_C_CONTIGUOUS(flags) (flags&(_Py_MEMORYVIEW_SCALAR|_Py_MEMORYVIEW_C))
  191. #define MV_F_CONTIGUOUS(flags) \
  192. (flags&(_Py_MEMORYVIEW_SCALAR|_Py_MEMORYVIEW_FORTRAN))
  193. #define MV_ANY_CONTIGUOUS(flags) \
  194. (flags&(_Py_MEMORYVIEW_SCALAR|_Py_MEMORYVIEW_C|_Py_MEMORYVIEW_FORTRAN))
  195. /* Fast contiguity test. Caller must ensure suboffsets==NULL and ndim==1. */
  196. #define MV_CONTIGUOUS_NDIM1(view) \
  197. ((view)->shape[0] == 1 || (view)->strides[0] == (view)->itemsize)
  198. /* getbuffer() requests */
  199. #define REQ_INDIRECT(flags) ((flags&PyBUF_INDIRECT) == PyBUF_INDIRECT)
  200. #define REQ_C_CONTIGUOUS(flags) ((flags&PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS)
  201. #define REQ_F_CONTIGUOUS(flags) ((flags&PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS)
  202. #define REQ_ANY_CONTIGUOUS(flags) ((flags&PyBUF_ANY_CONTIGUOUS) == PyBUF_ANY_CONTIGUOUS)
  203. #define REQ_STRIDES(flags) ((flags&PyBUF_STRIDES) == PyBUF_STRIDES)
  204. #define REQ_SHAPE(flags) ((flags&PyBUF_ND) == PyBUF_ND)
  205. #define REQ_WRITABLE(flags) (flags&PyBUF_WRITABLE)
  206. #define REQ_FORMAT(flags) (flags&PyBUF_FORMAT)
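/* Illustrative sketch: a consumer that needs writable, C-contiguous memory
   calls PyObject_GetBuffer(obj, &view, PyBUF_C_CONTIGUOUS | PyBUF_WRITABLE).
   For that request REQ_C_CONTIGUOUS(flags) and REQ_WRITABLE(flags) are true
   and REQ_INDIRECT(flags) is false, so memory_getbuf() below must verify
   MV_C_CONTIGUOUS(self->flags) and reject a read-only base. */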
  207. /**************************************************************************/
  208. /* Copy memoryview buffers */
  209. /**************************************************************************/
  210. /* The functions in this section take a source and a destination buffer
  211. with the same logical structure: format, itemsize, ndim and shape
  212. are identical, with ndim > 0.
  213. NOTE: All buffers are assumed to have PyBUF_FULL information, which
  214. is the case for memoryviews! */
  215. /* Assumptions: ndim >= 1. The macro tests for a corner case that should
  216. perhaps be explicitly forbidden in the PEP. */
  217. #define HAVE_SUBOFFSETS_IN_LAST_DIM(view) \
  218. (view->suboffsets && view->suboffsets[view->ndim-1] >= 0)
  219. static inline int
  220. last_dim_is_contiguous(const Py_buffer *dest, const Py_buffer *src)
  221. {
  222. assert(dest->ndim > 0 && src->ndim > 0);
  223. return (!HAVE_SUBOFFSETS_IN_LAST_DIM(dest) &&
  224. !HAVE_SUBOFFSETS_IN_LAST_DIM(src) &&
  225. dest->strides[dest->ndim-1] == dest->itemsize &&
  226. src->strides[src->ndim-1] == src->itemsize);
  227. }
  228. /* This is not a general function for determining format equivalence.
  229. It is used in copy_single() and copy_buffer() to weed out non-matching
  230. formats. Skipping the '@' character is specifically used in slice
  231. assignments, where the lvalue is already known to have a single character
  232. format. This is a performance hack that could be rewritten (if properly
  233. benchmarked). */
  234. static inline int
  235. equiv_format(const Py_buffer *dest, const Py_buffer *src)
  236. {
  237. const char *dfmt, *sfmt;
  238. assert(dest->format && src->format);
  239. dfmt = dest->format[0] == '@' ? dest->format+1 : dest->format;
  240. sfmt = src->format[0] == '@' ? src->format+1 : src->format;
  241. if (strcmp(dfmt, sfmt) != 0 ||
  242. dest->itemsize != src->itemsize) {
  243. return 0;
  244. }
  245. return 1;
  246. }
  247. /* Two shapes are equivalent if they are either equal or identical up
  248. to a zero element at the same position. For example, in NumPy arrays
  249. the shapes [1, 0, 5] and [1, 0, 7] are equivalent. */
  250. static inline int
  251. equiv_shape(const Py_buffer *dest, const Py_buffer *src)
  252. {
  253. int i;
  254. if (dest->ndim != src->ndim)
  255. return 0;
  256. for (i = 0; i < dest->ndim; i++) {
  257. if (dest->shape[i] != src->shape[i])
  258. return 0;
  259. if (dest->shape[i] == 0)
  260. break;
  261. }
  262. return 1;
  263. }
  264. /* Check that the logical structure of the destination and source buffers
  265. is identical. */
  266. static int
  267. equiv_structure(const Py_buffer *dest, const Py_buffer *src)
  268. {
  269. if (!equiv_format(dest, src) ||
  270. !equiv_shape(dest, src)) {
  271. PyErr_SetString(PyExc_ValueError,
  272. "memoryview assignment: lvalue and rvalue have different "
  273. "structures");
  274. return 0;
  275. }
  276. return 1;
  277. }
  278. /* Base case for recursive multi-dimensional copying. Contiguous arrays are
  279. copied with very little overhead. Assumptions: ndim == 1, mem == NULL or
  280. sizeof(mem) == shape[0] * itemsize. */
  281. static void
  282. copy_base(const Py_ssize_t *shape, Py_ssize_t itemsize,
  283. char *dptr, const Py_ssize_t *dstrides, const Py_ssize_t *dsuboffsets,
  284. char *sptr, const Py_ssize_t *sstrides, const Py_ssize_t *ssuboffsets,
  285. char *mem)
  286. {
  287. if (mem == NULL) { /* contiguous */
  288. Py_ssize_t size = shape[0] * itemsize;
  289. if (dptr + size < sptr || sptr + size < dptr)
  290. memcpy(dptr, sptr, size); /* no overlapping */
  291. else
  292. memmove(dptr, sptr, size);
  293. }
  294. else {
  295. char *p;
  296. Py_ssize_t i;
  297. for (i=0, p=mem; i < shape[0]; p+=itemsize, sptr+=sstrides[0], i++) {
  298. char *xsptr = ADJUST_PTR(sptr, ssuboffsets, 0);
  299. memcpy(p, xsptr, itemsize);
  300. }
  301. for (i=0, p=mem; i < shape[0]; p+=itemsize, dptr+=dstrides[0], i++) {
  302. char *xdptr = ADJUST_PTR(dptr, dsuboffsets, 0);
  303. memcpy(xdptr, p, itemsize);
  304. }
  305. }
  306. }
  307. /* Recursively copy a source buffer to a destination buffer. The two buffers
  308. have the same ndim, shape and itemsize. */
  309. static void
  310. copy_rec(const Py_ssize_t *shape, Py_ssize_t ndim, Py_ssize_t itemsize,
  311. char *dptr, const Py_ssize_t *dstrides, const Py_ssize_t *dsuboffsets,
  312. char *sptr, const Py_ssize_t *sstrides, const Py_ssize_t *ssuboffsets,
  313. char *mem)
  314. {
  315. Py_ssize_t i;
  316. assert(ndim >= 1);
  317. if (ndim == 1) {
  318. copy_base(shape, itemsize,
  319. dptr, dstrides, dsuboffsets,
  320. sptr, sstrides, ssuboffsets,
  321. mem);
  322. return;
  323. }
  324. for (i = 0; i < shape[0]; dptr+=dstrides[0], sptr+=sstrides[0], i++) {
  325. char *xdptr = ADJUST_PTR(dptr, dsuboffsets, 0);
  326. char *xsptr = ADJUST_PTR(sptr, ssuboffsets, 0);
  327. copy_rec(shape+1, ndim-1, itemsize,
  328. xdptr, dstrides+1, dsuboffsets ? dsuboffsets+1 : NULL,
  329. xsptr, sstrides+1, ssuboffsets ? ssuboffsets+1 : NULL,
  330. mem);
  331. }
  332. }
  333. /* Faster copying of one-dimensional arrays. */
  334. static int
  335. copy_single(PyMemoryViewObject *self, const Py_buffer *dest, const Py_buffer *src)
  336. {
  337. CHECK_RELEASED_INT_AGAIN(self);
  338. char *mem = NULL;
  339. assert(dest->ndim == 1);
  340. if (!equiv_structure(dest, src))
  341. return -1;
  342. if (!last_dim_is_contiguous(dest, src)) {
  343. mem = PyMem_Malloc(dest->shape[0] * dest->itemsize);
  344. if (mem == NULL) {
  345. PyErr_NoMemory();
  346. return -1;
  347. }
  348. }
  349. copy_base(dest->shape, dest->itemsize,
  350. dest->buf, dest->strides, dest->suboffsets,
  351. src->buf, src->strides, src->suboffsets,
  352. mem);
  353. if (mem)
  354. PyMem_Free(mem);
  355. return 0;
  356. }
  357. /* Recursively copy src to dest. Both buffers must have the same basic
  358. structure. Copying is atomic, the function never fails with a partial
  359. copy. */
  360. static int
  361. copy_buffer(const Py_buffer *dest, const Py_buffer *src)
  362. {
  363. char *mem = NULL;
  364. assert(dest->ndim > 0);
  365. if (!equiv_structure(dest, src))
  366. return -1;
  367. if (!last_dim_is_contiguous(dest, src)) {
  368. mem = PyMem_Malloc(dest->shape[dest->ndim-1] * dest->itemsize);
  369. if (mem == NULL) {
  370. PyErr_NoMemory();
  371. return -1;
  372. }
  373. }
  374. copy_rec(dest->shape, dest->ndim, dest->itemsize,
  375. dest->buf, dest->strides, dest->suboffsets,
  376. src->buf, src->strides, src->suboffsets,
  377. mem);
  378. if (mem)
  379. PyMem_Free(mem);
  380. return 0;
  381. }
  382. /* Initialize strides for a C-contiguous array. */
  383. static inline void
  384. init_strides_from_shape(Py_buffer *view)
  385. {
  386. Py_ssize_t i;
  387. assert(view->ndim > 0);
  388. view->strides[view->ndim-1] = view->itemsize;
  389. for (i = view->ndim-2; i >= 0; i--)
  390. view->strides[i] = view->strides[i+1] * view->shape[i+1];
  391. }
  392. /* Initialize strides for a Fortran-contiguous array. */
  393. static inline void
  394. init_fortran_strides_from_shape(Py_buffer *view)
  395. {
  396. Py_ssize_t i;
  397. assert(view->ndim > 0);
  398. view->strides[0] = view->itemsize;
  399. for (i = 1; i < view->ndim; i++)
  400. view->strides[i] = view->strides[i-1] * view->shape[i-1];
  401. }
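/* Worked example (values are illustrative): for shape = {3, 4, 5} and
   itemsize = 8, init_strides_from_shape() yields the C-contiguous strides
   {160, 40, 8} (strides[2] = 8, strides[1] = 8*5, strides[0] = 40*4), while
   init_fortran_strides_from_shape() yields {8, 24, 96}
   (strides[0] = 8, strides[1] = 8*3, strides[2] = 24*4). */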
  402. /* Copy src to a contiguous representation. order is one of 'C', 'F' (Fortran)
  403. or 'A' (Any). Assumptions: src has PyBUF_FULL information, src->ndim >= 1,
  404. len(mem) == src->len. */
  405. static int
  406. buffer_to_contiguous(char *mem, const Py_buffer *src, char order)
  407. {
  408. Py_buffer dest;
  409. Py_ssize_t *strides;
  410. int ret;
  411. assert(src->ndim >= 1);
  412. assert(src->shape != NULL);
  413. assert(src->strides != NULL);
  414. strides = PyMem_Malloc(src->ndim * (sizeof *src->strides));
  415. if (strides == NULL) {
  416. PyErr_NoMemory();
  417. return -1;
  418. }
  419. /* initialize dest */
  420. dest = *src;
  421. dest.buf = mem;
  422. /* shape is constant and shared: the logical representation of the
  423. array is unaltered. */
  424. /* The physical representation determined by strides (and possibly
  425. suboffsets) may change. */
  426. dest.strides = strides;
  427. if (order == 'C' || order == 'A') {
  428. init_strides_from_shape(&dest);
  429. }
  430. else {
  431. init_fortran_strides_from_shape(&dest);
  432. }
  433. dest.suboffsets = NULL;
  434. ret = copy_buffer(&dest, src);
  435. PyMem_Free(strides);
  436. return ret;
  437. }
  438. /****************************************************************************/
  439. /* Constructors */
  440. /****************************************************************************/
  441. /* Initialize values that are shared with the managed buffer. */
  442. static inline void
  443. init_shared_values(Py_buffer *dest, const Py_buffer *src)
  444. {
  445. dest->obj = src->obj;
  446. dest->buf = src->buf;
  447. dest->len = src->len;
  448. dest->itemsize = src->itemsize;
  449. dest->readonly = src->readonly;
  450. dest->format = src->format ? src->format : "B";
  451. dest->internal = src->internal;
  452. }
  453. /* Copy shape and strides. Reconstruct missing values. */
  454. static void
  455. init_shape_strides(Py_buffer *dest, const Py_buffer *src)
  456. {
  457. Py_ssize_t i;
  458. if (src->ndim == 0) {
  459. dest->shape = NULL;
  460. dest->strides = NULL;
  461. return;
  462. }
  463. if (src->ndim == 1) {
  464. dest->shape[0] = src->shape ? src->shape[0] : src->len / src->itemsize;
  465. dest->strides[0] = src->strides ? src->strides[0] : src->itemsize;
  466. return;
  467. }
  468. for (i = 0; i < src->ndim; i++)
  469. dest->shape[i] = src->shape[i];
  470. if (src->strides) {
  471. for (i = 0; i < src->ndim; i++)
  472. dest->strides[i] = src->strides[i];
  473. }
  474. else {
  475. init_strides_from_shape(dest);
  476. }
  477. }
  478. static inline void
  479. init_suboffsets(Py_buffer *dest, const Py_buffer *src)
  480. {
  481. Py_ssize_t i;
  482. if (src->suboffsets == NULL) {
  483. dest->suboffsets = NULL;
  484. return;
  485. }
  486. for (i = 0; i < src->ndim; i++)
  487. dest->suboffsets[i] = src->suboffsets[i];
  488. }
  489. /* len = product(shape) * itemsize */
  490. static inline void
  491. init_len(Py_buffer *view)
  492. {
  493. Py_ssize_t i, len;
  494. len = 1;
  495. for (i = 0; i < view->ndim; i++)
  496. len *= view->shape[i];
  497. len *= view->itemsize;
  498. view->len = len;
  499. }
  500. /* Initialize memoryview buffer properties. */
  501. static void
  502. init_flags(PyMemoryViewObject *mv)
  503. {
  504. const Py_buffer *view = &mv->view;
  505. int flags = 0;
  506. switch (view->ndim) {
  507. case 0:
  508. flags |= (_Py_MEMORYVIEW_SCALAR|_Py_MEMORYVIEW_C|
  509. _Py_MEMORYVIEW_FORTRAN);
  510. break;
  511. case 1:
  512. if (MV_CONTIGUOUS_NDIM1(view))
  513. flags |= (_Py_MEMORYVIEW_C|_Py_MEMORYVIEW_FORTRAN);
  514. break;
  515. default:
  516. if (PyBuffer_IsContiguous(view, 'C'))
  517. flags |= _Py_MEMORYVIEW_C;
  518. if (PyBuffer_IsContiguous(view, 'F'))
  519. flags |= _Py_MEMORYVIEW_FORTRAN;
  520. break;
  521. }
  522. if (view->suboffsets) {
  523. flags |= _Py_MEMORYVIEW_PIL;
  524. flags &= ~(_Py_MEMORYVIEW_C|_Py_MEMORYVIEW_FORTRAN);
  525. }
  526. mv->flags = flags;
  527. }
  528. /* Allocate a new memoryview and perform basic initialization. New memoryviews
  529. are exclusively created through the mbuf_add functions. */
  530. static inline PyMemoryViewObject *
  531. memory_alloc(int ndim)
  532. {
  533. PyMemoryViewObject *mv;
  534. mv = (PyMemoryViewObject *)
  535. PyObject_GC_NewVar(PyMemoryViewObject, &PyMemoryView_Type, 3*ndim);
  536. if (mv == NULL)
  537. return NULL;
  538. mv->mbuf = NULL;
  539. mv->hash = -1;
  540. mv->flags = 0;
  541. mv->exports = 0;
  542. mv->view.ndim = ndim;
  543. mv->view.shape = mv->ob_array;
  544. mv->view.strides = mv->ob_array + ndim;
  545. mv->view.suboffsets = mv->ob_array + 2 * ndim;
  546. mv->weakreflist = NULL;
  547. _PyObject_GC_TRACK(mv);
  548. return mv;
  549. }
  550. /*
  551. Return a new memoryview that is registered with mbuf. If src is NULL,
  552. use mbuf->master as the underlying buffer. Otherwise, use src.
  553. The new memoryview has full buffer information: shape and strides
  554. are always present, suboffsets as needed. Arrays are copied to
  555. the memoryview's ob_array field.
  556. */
  557. static PyObject *
  558. mbuf_add_view(_PyManagedBufferObject *mbuf, const Py_buffer *src)
  559. {
  560. PyMemoryViewObject *mv;
  561. Py_buffer *dest;
  562. if (src == NULL)
  563. src = &mbuf->master;
  564. if (src->ndim > PyBUF_MAX_NDIM) {
  565. PyErr_SetString(PyExc_ValueError,
  566. "memoryview: number of dimensions must not exceed "
  567. Py_STRINGIFY(PyBUF_MAX_NDIM));
  568. return NULL;
  569. }
  570. mv = memory_alloc(src->ndim);
  571. if (mv == NULL)
  572. return NULL;
  573. dest = &mv->view;
  574. init_shared_values(dest, src);
  575. init_shape_strides(dest, src);
  576. init_suboffsets(dest, src);
  577. init_flags(mv);
  578. mv->mbuf = (_PyManagedBufferObject*)Py_NewRef(mbuf);
  579. mbuf->exports++;
  580. return (PyObject *)mv;
  581. }
  582. /* Register an incomplete view: shape, strides, suboffsets and flags still
  583. need to be initialized. Use 'ndim' instead of src->ndim to determine the
  584. size of the memoryview's ob_array.
  585. Assumption: ndim <= PyBUF_MAX_NDIM. */
  586. static PyObject *
  587. mbuf_add_incomplete_view(_PyManagedBufferObject *mbuf, const Py_buffer *src,
  588. int ndim)
  589. {
  590. PyMemoryViewObject *mv;
  591. Py_buffer *dest;
  592. if (src == NULL)
  593. src = &mbuf->master;
  594. assert(ndim <= PyBUF_MAX_NDIM);
  595. mv = memory_alloc(ndim);
  596. if (mv == NULL)
  597. return NULL;
  598. dest = &mv->view;
  599. init_shared_values(dest, src);
  600. mv->mbuf = (_PyManagedBufferObject*)Py_NewRef(mbuf);
  601. mbuf->exports++;
  602. return (PyObject *)mv;
  603. }
  604. /* Expose a raw memory area as a view of contiguous bytes. flags can be
  605. PyBUF_READ or PyBUF_WRITE. view->format is set to "B" (unsigned bytes).
  606. The memoryview has complete buffer information. */
  607. PyObject *
  608. PyMemoryView_FromMemory(char *mem, Py_ssize_t size, int flags)
  609. {
  610. _PyManagedBufferObject *mbuf;
  611. PyObject *mv;
  612. int readonly;
  613. assert(mem != NULL);
  614. assert(flags == PyBUF_READ || flags == PyBUF_WRITE);
  615. mbuf = mbuf_alloc();
  616. if (mbuf == NULL)
  617. return NULL;
  618. readonly = (flags == PyBUF_WRITE) ? 0 : 1;
  619. (void)PyBuffer_FillInfo(&mbuf->master, NULL, mem, size, readonly,
  620. PyBUF_FULL_RO);
  621. mv = mbuf_add_view(mbuf, NULL);
  622. Py_DECREF(mbuf);
  623. return mv;
  624. }
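/* Illustrative sketch (hypothetical helper, not part of this file): expose a
   static C array as a read-only view of unsigned bytes. The caller must keep
   the memory alive for as long as the view exists, since no exporter object
   owns it. */
static PyObject *
example_view_of_static_table(void)
{
    static char table[256];
    return PyMemoryView_FromMemory(table, sizeof(table), PyBUF_READ);
}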
  625. /* Create a memoryview from a given Py_buffer. For simple byte views,
  626. PyMemoryView_FromMemory() should be used instead.
  627. This function is the only entry point that can create a master buffer
  628. without full information. Because of this fact init_shape_strides()
  629. must be able to reconstruct missing values. */
  630. PyObject *
  631. PyMemoryView_FromBuffer(const Py_buffer *info)
  632. {
  633. _PyManagedBufferObject *mbuf;
  634. PyObject *mv;
  635. if (info->buf == NULL) {
  636. PyErr_SetString(PyExc_ValueError,
  637. "PyMemoryView_FromBuffer(): info->buf must not be NULL");
  638. return NULL;
  639. }
  640. mbuf = mbuf_alloc();
  641. if (mbuf == NULL)
  642. return NULL;
  643. /* info->obj is either NULL or a borrowed reference. This reference
  644. should not be decremented in PyBuffer_Release(). */
  645. mbuf->master = *info;
  646. mbuf->master.obj = NULL;
  647. mv = mbuf_add_view(mbuf, NULL);
  648. Py_DECREF(mbuf);
  649. return mv;
  650. }
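/* Illustrative sketch (hypothetical helper): build a Py_buffer by hand with
   PyBuffer_FillInfo() and wrap it. PyMemoryView_FromBuffer() copies 'info'
   into the managed buffer and clears master.obj, so no exporter reference is
   ever released. */
static PyObject *
example_view_from_raw_buffer(char *data, Py_ssize_t size)
{
    Py_buffer info;
    if (PyBuffer_FillInfo(&info, NULL, data, size, /*readonly=*/1,
                          PyBUF_FULL_RO) < 0) {
        return NULL;
    }
    return PyMemoryView_FromBuffer(&info);
}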
  651. /* Create a memoryview from an object that implements the buffer protocol,
  652. using the given flags.
  653. If the object is a memoryview, the new memoryview must be registered
  654. with the same managed buffer. Otherwise, a new managed buffer is created. */
  655. static PyObject *
  656. PyMemoryView_FromObjectAndFlags(PyObject *v, int flags)
  657. {
  658. _PyManagedBufferObject *mbuf;
  659. if (PyMemoryView_Check(v)) {
  660. PyMemoryViewObject *mv = (PyMemoryViewObject *)v;
  661. CHECK_RELEASED(mv);
  662. CHECK_RESTRICTED(mv);
  663. return mbuf_add_view(mv->mbuf, &mv->view);
  664. }
  665. else if (PyObject_CheckBuffer(v)) {
  666. PyObject *ret;
  667. mbuf = (_PyManagedBufferObject *)_PyManagedBuffer_FromObject(v, flags);
  668. if (mbuf == NULL)
  669. return NULL;
  670. ret = mbuf_add_view(mbuf, NULL);
  671. Py_DECREF(mbuf);
  672. return ret;
  673. }
  674. PyErr_Format(PyExc_TypeError,
  675. "memoryview: a bytes-like object is required, not '%.200s'",
  676. Py_TYPE(v)->tp_name);
  677. return NULL;
  678. }
  679. /* Create a memoryview from an object that implements the buffer protocol,
  680. using the given flags.
  681. If the object is a memoryview, the new memoryview must be registered
  682. with the same managed buffer. Otherwise, a new managed buffer is created. */
  683. PyObject *
  684. _PyMemoryView_FromBufferProc(PyObject *v, int flags, getbufferproc bufferproc)
  685. {
  686. _PyManagedBufferObject *mbuf = mbuf_alloc();
  687. if (mbuf == NULL)
  688. return NULL;
  689. int res = bufferproc(v, &mbuf->master, flags);
  690. if (res < 0) {
  691. mbuf->master.obj = NULL;
  692. Py_DECREF(mbuf);
  693. return NULL;
  694. }
  695. PyObject *ret = mbuf_add_view(mbuf, NULL);
  696. Py_DECREF(mbuf);
  697. return ret;
  698. }
  699. /* Create a memoryview from an object that implements the buffer protocol.
  700. If the object is a memoryview, the new memoryview must be registered
  701. with the same managed buffer. Otherwise, a new managed buffer is created. */
  702. PyObject *
  703. PyMemoryView_FromObject(PyObject *v)
  704. {
  705. return PyMemoryView_FromObjectAndFlags(v, PyBUF_FULL_RO);
  706. }
  707. /* Copy the format string from a base object that might vanish. */
  708. static int
  709. mbuf_copy_format(_PyManagedBufferObject *mbuf, const char *fmt)
  710. {
  711. if (fmt != NULL) {
  712. char *cp = PyMem_Malloc(strlen(fmt)+1);
  713. if (cp == NULL) {
  714. PyErr_NoMemory();
  715. return -1;
  716. }
  717. mbuf->master.format = strcpy(cp, fmt);
  718. mbuf->flags |= _Py_MANAGED_BUFFER_FREE_FORMAT;
  719. }
  720. return 0;
  721. }
  722. /*
  723. Return a memoryview that is based on a contiguous copy of src.
  724. Assumptions: src has PyBUF_FULL_RO information, src->ndim > 0.
  725. Ownership rules:
  726. 1) As usual, the returned memoryview has a private copy
  727. of src->shape, src->strides and src->suboffsets.
  728. 2) src->format is copied to the master buffer and released
  729. in mbuf_dealloc(). The releasebufferproc of the bytes
  730. object is NULL, so it does not matter that mbuf_release()
  731. passes the altered format pointer to PyBuffer_Release().
  732. */
  733. static PyObject *
  734. memory_from_contiguous_copy(const Py_buffer *src, char order)
  735. {
  736. _PyManagedBufferObject *mbuf;
  737. PyMemoryViewObject *mv;
  738. PyObject *bytes;
  739. Py_buffer *dest;
  740. int i;
  741. assert(src->ndim > 0);
  742. assert(src->shape != NULL);
  743. bytes = PyBytes_FromStringAndSize(NULL, src->len);
  744. if (bytes == NULL)
  745. return NULL;
  746. mbuf = (_PyManagedBufferObject *)_PyManagedBuffer_FromObject(bytes, PyBUF_FULL_RO);
  747. Py_DECREF(bytes);
  748. if (mbuf == NULL)
  749. return NULL;
  750. if (mbuf_copy_format(mbuf, src->format) < 0) {
  751. Py_DECREF(mbuf);
  752. return NULL;
  753. }
  754. mv = (PyMemoryViewObject *)mbuf_add_incomplete_view(mbuf, NULL, src->ndim);
  755. Py_DECREF(mbuf);
  756. if (mv == NULL)
  757. return NULL;
  758. dest = &mv->view;
  759. /* shared values are initialized correctly except for itemsize */
  760. dest->itemsize = src->itemsize;
  761. /* shape and strides */
  762. for (i = 0; i < src->ndim; i++) {
  763. dest->shape[i] = src->shape[i];
  764. }
  765. if (order == 'C' || order == 'A') {
  766. init_strides_from_shape(dest);
  767. }
  768. else {
  769. init_fortran_strides_from_shape(dest);
  770. }
  771. /* suboffsets */
  772. dest->suboffsets = NULL;
  773. /* flags */
  774. init_flags(mv);
  775. if (copy_buffer(dest, src) < 0) {
  776. Py_DECREF(mv);
  777. return NULL;
  778. }
  779. return (PyObject *)mv;
  780. }
  781. /*
  782. Return a new memoryview object based on a contiguous exporter with
  783. buffertype={PyBUF_READ, PyBUF_WRITE} and order={'C', 'F'ortran, or 'A'ny}.
  784. The logical structure of the input and output buffers is the same
  785. (i.e. tolist(input) == tolist(output)), but the physical layout in
  786. memory can be explicitly chosen.
  787. As usual, if buffertype=PyBUF_WRITE, the exporter's buffer must be writable,
  788. otherwise it may be writable or read-only.
  789. If the exporter is already contiguous with the desired target order,
  790. the memoryview will be directly based on the exporter.
  791. Otherwise, if the buffertype is PyBUF_READ, the memoryview will be
  792. based on a new bytes object. If order={'C', 'A'ny}, use 'C' order,
  793. 'F'ortran order otherwise.
  794. */
  795. PyObject *
  796. PyMemoryView_GetContiguous(PyObject *obj, int buffertype, char order)
  797. {
  798. PyMemoryViewObject *mv;
  799. PyObject *ret;
  800. Py_buffer *view;
  801. assert(buffertype == PyBUF_READ || buffertype == PyBUF_WRITE);
  802. assert(order == 'C' || order == 'F' || order == 'A');
  803. mv = (PyMemoryViewObject *)PyMemoryView_FromObject(obj);
  804. if (mv == NULL)
  805. return NULL;
  806. view = &mv->view;
  807. if (buffertype == PyBUF_WRITE && view->readonly) {
  808. PyErr_SetString(PyExc_BufferError,
  809. "underlying buffer is not writable");
  810. Py_DECREF(mv);
  811. return NULL;
  812. }
  813. if (PyBuffer_IsContiguous(view, order))
  814. return (PyObject *)mv;
  815. if (buffertype == PyBUF_WRITE) {
  816. PyErr_SetString(PyExc_BufferError,
  817. "writable contiguous buffer requested "
  818. "for a non-contiguous object.");
  819. Py_DECREF(mv);
  820. return NULL;
  821. }
  822. ret = memory_from_contiguous_copy(view, order);
  823. Py_DECREF(mv);
  824. return ret;
  825. }
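/* Illustrative sketch (hypothetical helper): request a read-only,
   C-contiguous view of 'obj'. If 'obj' already exports C-contiguous memory,
   the view refers to it directly; otherwise memory_from_contiguous_copy()
   above backs the view with a new bytes object holding a packed copy. */
static PyObject *
example_c_contiguous_view(PyObject *obj)
{
    return PyMemoryView_GetContiguous(obj, PyBUF_READ, 'C');
}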
  826. /*[clinic input]
  827. @classmethod
  828. memoryview.__new__
  829. object: object
  830. Create a new memoryview object which references the given object.
  831. [clinic start generated code]*/
  832. static PyObject *
  833. memoryview_impl(PyTypeObject *type, PyObject *object)
  834. /*[clinic end generated code: output=7de78e184ed66db8 input=f04429eb0bdf8c6e]*/
  835. {
  836. return PyMemoryView_FromObject(object);
  837. }
  838. /*[clinic input]
  839. @classmethod
  840. memoryview._from_flags
  841. object: object
  842. flags: int
  843. Create a new memoryview object which references the given object.
  844. [clinic start generated code]*/
  845. static PyObject *
  846. memoryview__from_flags_impl(PyTypeObject *type, PyObject *object, int flags)
  847. /*[clinic end generated code: output=bf71f9906c266ee2 input=f5f82fd0e744356b]*/
  848. {
  849. return PyMemoryView_FromObjectAndFlags(object, flags);
  850. }
  851. /****************************************************************************/
  852. /* Previously in abstract.c */
  853. /****************************************************************************/
  854. typedef struct {
  855. Py_buffer view;
  856. Py_ssize_t array[1];
  857. } Py_buffer_full;
  858. int
  859. PyBuffer_ToContiguous(void *buf, const Py_buffer *src, Py_ssize_t len, char order)
  860. {
  861. Py_buffer_full *fb = NULL;
  862. int ret;
  863. assert(order == 'C' || order == 'F' || order == 'A');
  864. if (len != src->len) {
  865. PyErr_SetString(PyExc_ValueError,
  866. "PyBuffer_ToContiguous: len != view->len");
  867. return -1;
  868. }
  869. if (PyBuffer_IsContiguous(src, order)) {
  870. memcpy((char *)buf, src->buf, len);
  871. return 0;
  872. }
  873. /* buffer_to_contiguous() assumes PyBUF_FULL */
  874. fb = PyMem_Malloc(sizeof *fb + 3 * src->ndim * (sizeof *fb->array));
  875. if (fb == NULL) {
  876. PyErr_NoMemory();
  877. return -1;
  878. }
  879. fb->view.ndim = src->ndim;
  880. fb->view.shape = fb->array;
  881. fb->view.strides = fb->array + src->ndim;
  882. fb->view.suboffsets = fb->array + 2 * src->ndim;
  883. init_shared_values(&fb->view, src);
  884. init_shape_strides(&fb->view, src);
  885. init_suboffsets(&fb->view, src);
  886. src = &fb->view;
  887. ret = buffer_to_contiguous(buf, src, order);
  888. PyMem_Free(fb);
  889. return ret;
  890. }
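/* Illustrative sketch (hypothetical helper): flatten an exported buffer into
   caller-provided contiguous storage in C order. 'outlen' must equal the
   exported view's len; otherwise PyBuffer_ToContiguous() raises ValueError. */
static int
example_copy_out_c_order(PyObject *obj, char *out, Py_ssize_t outlen)
{
    Py_buffer view;
    int ret;
    if (PyObject_GetBuffer(obj, &view, PyBUF_FULL_RO) < 0)
        return -1;
    ret = PyBuffer_ToContiguous(out, &view, outlen, 'C');
    PyBuffer_Release(&view);
    return ret;
}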
  891. /****************************************************************************/
  892. /* Release/GC management */
  893. /****************************************************************************/
  894. /* Inform the managed buffer that this particular memoryview will not access
  895. the underlying buffer again. If no other memoryviews are registered with
  896. the managed buffer, the underlying buffer is released instantly and
  897. marked as inaccessible for both the memoryview and the managed buffer.
  898. This function fails if the memoryview itself has exported buffers. */
  899. static int
  900. _memory_release(PyMemoryViewObject *self)
  901. {
  902. if (self->flags & _Py_MEMORYVIEW_RELEASED)
  903. return 0;
  904. if (self->exports == 0) {
  905. self->flags |= _Py_MEMORYVIEW_RELEASED;
  906. assert(self->mbuf->exports > 0);
  907. if (--self->mbuf->exports == 0)
  908. mbuf_release(self->mbuf);
  909. return 0;
  910. }
  911. if (self->exports > 0) {
  912. PyErr_Format(PyExc_BufferError,
  913. "memoryview has %zd exported buffer%s", self->exports,
  914. self->exports==1 ? "" : "s");
  915. return -1;
  916. }
  917. PyErr_SetString(PyExc_SystemError,
  918. "_memory_release(): negative export count");
  919. return -1;
  920. }
  921. /*[clinic input]
  922. memoryview.release
  923. Release the underlying buffer exposed by the memoryview object.
  924. [clinic start generated code]*/
  925. static PyObject *
  926. memoryview_release_impl(PyMemoryViewObject *self)
  927. /*[clinic end generated code: output=d0b7e3ba95b7fcb9 input=bc71d1d51f4a52f0]*/
  928. {
  929. if (_memory_release(self) < 0)
  930. return NULL;
  931. Py_RETURN_NONE;
  932. }
  933. static void
  934. memory_dealloc(PyMemoryViewObject *self)
  935. {
  936. assert(self->exports == 0);
  937. _PyObject_GC_UNTRACK(self);
  938. (void)_memory_release(self);
  939. Py_CLEAR(self->mbuf);
  940. if (self->weakreflist != NULL)
  941. PyObject_ClearWeakRefs((PyObject *) self);
  942. PyObject_GC_Del(self);
  943. }
  944. static int
  945. memory_traverse(PyMemoryViewObject *self, visitproc visit, void *arg)
  946. {
  947. Py_VISIT(self->mbuf);
  948. return 0;
  949. }
  950. static int
  951. memory_clear(PyMemoryViewObject *self)
  952. {
  953. (void)_memory_release(self);
  954. Py_CLEAR(self->mbuf);
  955. return 0;
  956. }
  957. static PyObject *
  958. memory_enter(PyObject *self, PyObject *args)
  959. {
  960. CHECK_RELEASED(self);
  961. return Py_NewRef(self);
  962. }
  963. static PyObject *
  964. memory_exit(PyObject *self, PyObject *args)
  965. {
  966. return memoryview_release_impl((PyMemoryViewObject *)self);
  967. }
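/* Illustrative sketch (hypothetical helper): from C, the same effect as
   leaving a 'with memoryview(...)' block is an explicit call to the release
   method once the buffer is no longer needed. */
static int
example_release_view(PyObject *mv)
{
    PyObject *res = PyObject_CallMethod(mv, "release", NULL);
    if (res == NULL)
        return -1;
    Py_DECREF(res);
    return 0;
}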
  968. /****************************************************************************/
  969. /* Casting format and shape */
  970. /****************************************************************************/
  971. #define IS_BYTE_FORMAT(f) (f == 'b' || f == 'B' || f == 'c')
  972. static inline Py_ssize_t
  973. get_native_fmtchar(char *result, const char *fmt)
  974. {
  975. Py_ssize_t size = -1;
  976. if (fmt[0] == '@') fmt++;
  977. switch (fmt[0]) {
  978. case 'c': case 'b': case 'B': size = sizeof(char); break;
  979. case 'h': case 'H': size = sizeof(short); break;
  980. case 'i': case 'I': size = sizeof(int); break;
  981. case 'l': case 'L': size = sizeof(long); break;
  982. case 'q': case 'Q': size = sizeof(long long); break;
  983. case 'n': case 'N': size = sizeof(Py_ssize_t); break;
  984. case 'f': size = sizeof(float); break;
  985. case 'd': size = sizeof(double); break;
  986. case 'e': size = sizeof(float) / 2; break;
  987. case '?': size = sizeof(_Bool); break;
  988. case 'P': size = sizeof(void *); break;
  989. }
  990. if (size > 0 && fmt[1] == '\0') {
  991. *result = fmt[0];
  992. return size;
  993. }
  994. return -1;
  995. }
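/* Illustrative examples: get_native_fmtchar(&c, "@l") stores 'l' and returns
   sizeof(long). Multi-character formats such as "2i" and non-native formats
   such as "<i" either fall through the switch or fail the fmt[1] == '\0'
   test and return -1, which cast_to_1D() below reports as an error. */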
  996. static inline const char *
  997. get_native_fmtstr(const char *fmt)
  998. {
  999. int at = 0;
  1000. if (fmt[0] == '@') {
  1001. at = 1;
  1002. fmt++;
  1003. }
  1004. if (fmt[0] == '\0' || fmt[1] != '\0') {
  1005. return NULL;
  1006. }
  1007. #define RETURN(s) do { return at ? "@" s : s; } while (0)
  1008. switch (fmt[0]) {
  1009. case 'c': RETURN("c");
  1010. case 'b': RETURN("b");
  1011. case 'B': RETURN("B");
  1012. case 'h': RETURN("h");
  1013. case 'H': RETURN("H");
  1014. case 'i': RETURN("i");
  1015. case 'I': RETURN("I");
  1016. case 'l': RETURN("l");
  1017. case 'L': RETURN("L");
  1018. case 'q': RETURN("q");
  1019. case 'Q': RETURN("Q");
  1020. case 'n': RETURN("n");
  1021. case 'N': RETURN("N");
  1022. case 'f': RETURN("f");
  1023. case 'd': RETURN("d");
  1024. case 'e': RETURN("e");
  1025. case '?': RETURN("?");
  1026. case 'P': RETURN("P");
  1027. }
  1028. return NULL;
  1029. }
  1030. /* Cast a memoryview's data type to 'format'. The input array must be
  1031. C-contiguous. At least one of input-format, output-format must have
  1032. byte size. The output array is 1-D, with the same byte length as the
  1033. input array. Thus, view->len must be a multiple of the new itemsize. */
  1034. static int
  1035. cast_to_1D(PyMemoryViewObject *mv, PyObject *format)
  1036. {
  1037. Py_buffer *view = &mv->view;
  1038. PyObject *asciifmt;
  1039. char srcchar, destchar;
  1040. Py_ssize_t itemsize;
  1041. int ret = -1;
  1042. assert(view->ndim >= 1);
  1043. assert(Py_SIZE(mv) == 3*view->ndim);
  1044. assert(view->shape == mv->ob_array);
  1045. assert(view->strides == mv->ob_array + view->ndim);
  1046. assert(view->suboffsets == mv->ob_array + 2*view->ndim);
  1047. asciifmt = PyUnicode_AsASCIIString(format);
  1048. if (asciifmt == NULL)
  1049. return ret;
  1050. itemsize = get_native_fmtchar(&destchar, PyBytes_AS_STRING(asciifmt));
  1051. if (itemsize < 0) {
  1052. PyErr_SetString(PyExc_ValueError,
  1053. "memoryview: destination format must be a native single "
  1054. "character format prefixed with an optional '@'");
  1055. goto out;
  1056. }
  1057. if ((get_native_fmtchar(&srcchar, view->format) < 0 ||
  1058. !IS_BYTE_FORMAT(srcchar)) && !IS_BYTE_FORMAT(destchar)) {
  1059. PyErr_SetString(PyExc_TypeError,
  1060. "memoryview: cannot cast between two non-byte formats");
  1061. goto out;
  1062. }
  1063. if (view->len % itemsize) {
  1064. PyErr_SetString(PyExc_TypeError,
  1065. "memoryview: length is not a multiple of itemsize");
  1066. goto out;
  1067. }
  1068. view->format = (char *)get_native_fmtstr(PyBytes_AS_STRING(asciifmt));
  1069. if (view->format == NULL) {
  1070. /* NOT_REACHED: get_native_fmtchar() already validates the format. */
  1071. PyErr_SetString(PyExc_RuntimeError,
  1072. "memoryview: internal error");
  1073. goto out;
  1074. }
  1075. view->itemsize = itemsize;
  1076. view->ndim = 1;
  1077. view->shape[0] = view->len / view->itemsize;
  1078. view->strides[0] = view->itemsize;
  1079. view->suboffsets = NULL;
  1080. init_flags(mv);
  1081. ret = 0;
  1082. out:
  1083. Py_DECREF(asciifmt);
  1084. return ret;
  1085. }
  1086. /* The memoryview must have space for 3*len(seq) elements. */
  1087. static Py_ssize_t
  1088. copy_shape(Py_ssize_t *shape, const PyObject *seq, Py_ssize_t ndim,
  1089. Py_ssize_t itemsize)
  1090. {
  1091. Py_ssize_t x, i;
  1092. Py_ssize_t len = itemsize;
  1093. for (i = 0; i < ndim; i++) {
  1094. PyObject *tmp = PySequence_Fast_GET_ITEM(seq, i);
  1095. if (!PyLong_Check(tmp)) {
  1096. PyErr_SetString(PyExc_TypeError,
  1097. "memoryview.cast(): elements of shape must be integers");
  1098. return -1;
  1099. }
  1100. x = PyLong_AsSsize_t(tmp);
  1101. if (x == -1 && PyErr_Occurred()) {
  1102. return -1;
  1103. }
  1104. if (x <= 0) {
  1105. /* In general elements of shape may be 0, but not for casting. */
  1106. PyErr_Format(PyExc_ValueError,
  1107. "memoryview.cast(): elements of shape must be integers > 0");
  1108. return -1;
  1109. }
  1110. if (x > PY_SSIZE_T_MAX / len) {
  1111. PyErr_Format(PyExc_ValueError,
  1112. "memoryview.cast(): product(shape) > SSIZE_MAX");
  1113. return -1;
  1114. }
  1115. len *= x;
  1116. shape[i] = x;
  1117. }
  1118. return len;
  1119. }
  1120. /* Cast a 1-D array to a new shape. The result array will be C-contiguous.
  1121. If the result array does not have exactly the same byte length as the
  1122. input array, raise ValueError. */
  1123. static int
  1124. cast_to_ND(PyMemoryViewObject *mv, const PyObject *shape, int ndim)
  1125. {
  1126. Py_buffer *view = &mv->view;
  1127. Py_ssize_t len;
  1128. assert(view->ndim == 1); /* ndim from cast_to_1D() */
  1129. assert(Py_SIZE(mv) == 3*(ndim==0?1:ndim)); /* ndim of result array */
  1130. assert(view->shape == mv->ob_array);
  1131. assert(view->strides == mv->ob_array + (ndim==0?1:ndim));
  1132. assert(view->suboffsets == NULL);
  1133. view->ndim = ndim;
  1134. if (view->ndim == 0) {
  1135. view->shape = NULL;
  1136. view->strides = NULL;
  1137. len = view->itemsize;
  1138. }
  1139. else {
  1140. len = copy_shape(view->shape, shape, ndim, view->itemsize);
  1141. if (len < 0)
  1142. return -1;
  1143. init_strides_from_shape(view);
  1144. }
  1145. if (view->len != len) {
  1146. PyErr_SetString(PyExc_TypeError,
  1147. "memoryview: product(shape) * itemsize != buffer size");
  1148. return -1;
  1149. }
  1150. init_flags(mv);
  1151. return 0;
  1152. }
  1153. static int
  1154. zero_in_shape(PyMemoryViewObject *mv)
  1155. {
  1156. Py_buffer *view = &mv->view;
  1157. Py_ssize_t i;
  1158. for (i = 0; i < view->ndim; i++)
  1159. if (view->shape[i] == 0)
  1160. return 1;
  1161. return 0;
  1162. }
  1163. /*
  1164. Cast a copy of 'self' to a different view. The input view must
  1165. be C-contiguous. The function always casts the input view to a
  1166. 1-D output according to 'format'. At least one of input-format,
  1167. output-format must have byte size.
  1168. If 'shape' is given, the 1-D view from the previous step will
  1169. be cast to a C-contiguous view with new shape and strides.
  1170. All casts must result in views that will have the exact byte
  1171. size of the original input. Otherwise, an error is raised.
  1172. */
  1173. /*[clinic input]
  1174. memoryview.cast
  1175. format: unicode
  1176. shape: object = NULL
  1177. Cast a memoryview to a new format or shape.
  1178. [clinic start generated code]*/
  1179. static PyObject *
  1180. memoryview_cast_impl(PyMemoryViewObject *self, PyObject *format,
  1181. PyObject *shape)
  1182. /*[clinic end generated code: output=bae520b3a389cbab input=138936cc9041b1a3]*/
  1183. {
  1184. PyMemoryViewObject *mv = NULL;
  1185. Py_ssize_t ndim = 1;
  1186. CHECK_RELEASED(self);
  1187. CHECK_RESTRICTED(self);
  1188. if (!MV_C_CONTIGUOUS(self->flags)) {
  1189. PyErr_SetString(PyExc_TypeError,
  1190. "memoryview: casts are restricted to C-contiguous views");
  1191. return NULL;
  1192. }
  1193. if ((shape || self->view.ndim != 1) && zero_in_shape(self)) {
  1194. PyErr_SetString(PyExc_TypeError,
  1195. "memoryview: cannot cast view with zeros in shape or strides");
  1196. return NULL;
  1197. }
  1198. if (shape) {
  1199. CHECK_LIST_OR_TUPLE(shape)
  1200. ndim = PySequence_Fast_GET_SIZE(shape);
  1201. if (ndim > PyBUF_MAX_NDIM) {
  1202. PyErr_SetString(PyExc_ValueError,
  1203. "memoryview: number of dimensions must not exceed "
  1204. Py_STRINGIFY(PyBUF_MAX_NDIM));
  1205. return NULL;
  1206. }
  1207. if (self->view.ndim != 1 && ndim != 1) {
  1208. PyErr_SetString(PyExc_TypeError,
  1209. "memoryview: cast must be 1D -> ND or ND -> 1D");
  1210. return NULL;
  1211. }
  1212. }
  1213. mv = (PyMemoryViewObject *)
  1214. mbuf_add_incomplete_view(self->mbuf, &self->view, ndim==0 ? 1 : (int)ndim);
  1215. if (mv == NULL)
  1216. return NULL;
  1217. if (cast_to_1D(mv, format) < 0)
  1218. goto error;
  1219. if (shape && cast_to_ND(mv, shape, (int)ndim) < 0)
  1220. goto error;
  1221. return (PyObject *)mv;
  1222. error:
  1223. Py_DECREF(mv);
  1224. return NULL;
  1225. }
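/*
   Illustrative sketch (not part of the build): driving memoryview.cast()
   from C through the abstract object API.  The 12-byte buffer, the 'i'
   format and the assumption that sizeof(int) == 4 are illustrative only;
   the call fails with ValueError if product(shape) * itemsize does not
   equal the byte length of the original view.
*/
#if 0
static PyObject *
example_cast_to_int_vector(void)
{
    static char data[12];
    PyObject *mv = PyMemoryView_FromMemory(data, sizeof data, PyBUF_WRITE);
    if (mv == NULL) {
        return NULL;
    }
    /* Equivalent to: mv.cast('i', (3,)) -- a 1-D 'B' view becomes a 1-D 'i' view. */
    PyObject *casted = PyObject_CallMethod(mv, "cast", "s(n)", "i", (Py_ssize_t)3);
    Py_DECREF(mv);
    return casted;  /* NULL with an exception set on error */
}
#endif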
  1226. /*[clinic input]
  1227. memoryview.toreadonly
  1228. Return a readonly version of the memoryview.
  1229. [clinic start generated code]*/
  1230. static PyObject *
  1231. memoryview_toreadonly_impl(PyMemoryViewObject *self)
  1232. /*[clinic end generated code: output=2c7e056f04c99e62 input=dc06d20f19ba236f]*/
  1233. {
  1234. CHECK_RELEASED(self);
  1235. CHECK_RESTRICTED(self);
  1236. /* Even if self is already readonly, we still need to create a new
  1237. * object for .release() to work correctly.
  1238. */
  1239. self = (PyMemoryViewObject *) mbuf_add_view(self->mbuf, &self->view);
  1240. if (self != NULL) {
  1241. self->view.readonly = 1;
1242. }
  1243. return (PyObject *) self;
  1244. }
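/*
   Illustrative sketch (not part of the build): why toreadonly() returns a
   fresh object even for an already-readonly view.  Releasing the copy does
   not invalidate the original, so the two can be released independently.
   The helper name is an assumption.
*/
#if 0
static int
example_toreadonly_release(PyObject *mv)
{
    PyObject *ro = PyObject_CallMethod(mv, "toreadonly", NULL);
    if (ro == NULL) {
        return -1;
    }
    assert(PyMemoryView_GET_BUFFER(ro)->readonly == 1);
    PyObject *none = PyObject_CallMethod(ro, "release", NULL);  /* mv stays usable */
    Py_DECREF(ro);
    if (none == NULL) {
        return -1;
    }
    Py_DECREF(none);
    return 0;
}
#endif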
  1245. /**************************************************************************/
  1246. /* getbuffer */
  1247. /**************************************************************************/
  1248. static int
  1249. memory_getbuf(PyMemoryViewObject *self, Py_buffer *view, int flags)
  1250. {
  1251. Py_buffer *base = &self->view;
  1252. int baseflags = self->flags;
  1253. CHECK_RELEASED_INT(self);
  1254. CHECK_RESTRICTED_INT(self);
  1255. /* start with complete information */
  1256. *view = *base;
  1257. view->obj = NULL;
  1258. if (REQ_WRITABLE(flags) && base->readonly) {
  1259. PyErr_SetString(PyExc_BufferError,
  1260. "memoryview: underlying buffer is not writable");
  1261. return -1;
  1262. }
  1263. if (!REQ_FORMAT(flags)) {
  1264. /* NULL indicates that the buffer's data type has been cast to 'B'.
  1265. view->itemsize is the _previous_ itemsize. If shape is present,
  1266. the equality product(shape) * itemsize = len still holds at this
  1267. point. The equality calcsize(format) = itemsize does _not_ hold
  1268. from here on! */
  1269. view->format = NULL;
  1270. }
  1271. if (REQ_C_CONTIGUOUS(flags) && !MV_C_CONTIGUOUS(baseflags)) {
  1272. PyErr_SetString(PyExc_BufferError,
  1273. "memoryview: underlying buffer is not C-contiguous");
  1274. return -1;
  1275. }
  1276. if (REQ_F_CONTIGUOUS(flags) && !MV_F_CONTIGUOUS(baseflags)) {
  1277. PyErr_SetString(PyExc_BufferError,
  1278. "memoryview: underlying buffer is not Fortran contiguous");
  1279. return -1;
  1280. }
  1281. if (REQ_ANY_CONTIGUOUS(flags) && !MV_ANY_CONTIGUOUS(baseflags)) {
  1282. PyErr_SetString(PyExc_BufferError,
  1283. "memoryview: underlying buffer is not contiguous");
  1284. return -1;
  1285. }
  1286. if (!REQ_INDIRECT(flags) && (baseflags & _Py_MEMORYVIEW_PIL)) {
  1287. PyErr_SetString(PyExc_BufferError,
  1288. "memoryview: underlying buffer requires suboffsets");
  1289. return -1;
  1290. }
  1291. if (!REQ_STRIDES(flags)) {
  1292. if (!MV_C_CONTIGUOUS(baseflags)) {
  1293. PyErr_SetString(PyExc_BufferError,
  1294. "memoryview: underlying buffer is not C-contiguous");
  1295. return -1;
  1296. }
  1297. view->strides = NULL;
  1298. }
  1299. if (!REQ_SHAPE(flags)) {
  1300. /* PyBUF_SIMPLE or PyBUF_WRITABLE: at this point buf is C-contiguous,
  1301. so base->buf = ndbuf->data. */
  1302. if (view->format != NULL) {
  1303. /* PyBUF_SIMPLE|PyBUF_FORMAT and PyBUF_WRITABLE|PyBUF_FORMAT do
  1304. not make sense. */
  1305. PyErr_Format(PyExc_BufferError,
  1306. "memoryview: cannot cast to unsigned bytes if the format flag "
  1307. "is present");
  1308. return -1;
  1309. }
  1310. /* product(shape) * itemsize = len and calcsize(format) = itemsize
  1311. do _not_ hold from here on! */
  1312. view->ndim = 1;
  1313. view->shape = NULL;
  1314. }
  1315. view->obj = Py_NewRef(self);
  1316. self->exports++;
  1317. return 0;
  1318. }
  1319. static void
  1320. memory_releasebuf(PyMemoryViewObject *self, Py_buffer *view)
  1321. {
  1322. self->exports--;
  1323. return;
  1324. /* PyBuffer_Release() decrements view->obj after this function returns. */
  1325. }
  1326. /* Buffer methods */
  1327. static PyBufferProcs memory_as_buffer = {
  1328. (getbufferproc)memory_getbuf, /* bf_getbuffer */
  1329. (releasebufferproc)memory_releasebuf, /* bf_releasebuffer */
  1330. };
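/*
   Illustrative sketch (not part of the build): what the flag checks in
   memory_getbuf() mean to a consumer.  A PyBUF_SIMPLE request re-exports
   the view as a flat unsigned-byte buffer (format, shape and strides come
   back NULL), which memory_getbuf() only allows for C-contiguous views;
   PyBUF_WRITABLE fails with BufferError if the underlying buffer is
   readonly.  The helper name is an assumption.
*/
#if 0
static int
example_consume_view(PyObject *mv)
{
    Py_buffer view;

    if (PyObject_GetBuffer(mv, &view, PyBUF_SIMPLE) < 0) {
        return -1;  /* e.g. BufferError for a non-contiguous view */
    }
    /* view.buf points at view.len raw bytes; view.ndim is 1 and
       view.format/shape/strides are NULL for this request. */
    PyBuffer_Release(&view);
    return 0;
}
#endif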
  1331. /****************************************************************************/
  1332. /* Optimized pack/unpack for all native format specifiers */
  1333. /****************************************************************************/
  1334. /*
  1335. Fix exceptions:
  1336. 1) Include format string in the error message.
  1337. 2) OverflowError -> ValueError.
  1338. 3) The error message from PyNumber_Index() is not ideal.
  1339. */
  1340. static int
  1341. type_error_int(const char *fmt)
  1342. {
  1343. PyErr_Format(PyExc_TypeError,
  1344. "memoryview: invalid type for format '%s'", fmt);
  1345. return -1;
  1346. }
  1347. static int
  1348. value_error_int(const char *fmt)
  1349. {
  1350. PyErr_Format(PyExc_ValueError,
  1351. "memoryview: invalid value for format '%s'", fmt);
  1352. return -1;
  1353. }
  1354. static int
  1355. fix_error_int(const char *fmt)
  1356. {
  1357. assert(PyErr_Occurred());
  1358. if (PyErr_ExceptionMatches(PyExc_TypeError)) {
  1359. PyErr_Clear();
  1360. return type_error_int(fmt);
  1361. }
  1362. else if (PyErr_ExceptionMatches(PyExc_OverflowError) ||
  1363. PyErr_ExceptionMatches(PyExc_ValueError)) {
  1364. PyErr_Clear();
  1365. return value_error_int(fmt);
  1366. }
  1367. return -1;
  1368. }
  1369. /* Accept integer objects or objects with an __index__() method. */
  1370. static long
  1371. pylong_as_ld(PyObject *item)
  1372. {
  1373. PyObject *tmp;
  1374. long ld;
  1375. tmp = _PyNumber_Index(item);
  1376. if (tmp == NULL)
  1377. return -1;
  1378. ld = PyLong_AsLong(tmp);
  1379. Py_DECREF(tmp);
  1380. return ld;
  1381. }
  1382. static unsigned long
  1383. pylong_as_lu(PyObject *item)
  1384. {
  1385. PyObject *tmp;
  1386. unsigned long lu;
  1387. tmp = _PyNumber_Index(item);
  1388. if (tmp == NULL)
  1389. return (unsigned long)-1;
  1390. lu = PyLong_AsUnsignedLong(tmp);
  1391. Py_DECREF(tmp);
  1392. return lu;
  1393. }
  1394. static long long
  1395. pylong_as_lld(PyObject *item)
  1396. {
  1397. PyObject *tmp;
  1398. long long lld;
  1399. tmp = _PyNumber_Index(item);
  1400. if (tmp == NULL)
  1401. return -1;
  1402. lld = PyLong_AsLongLong(tmp);
  1403. Py_DECREF(tmp);
  1404. return lld;
  1405. }
  1406. static unsigned long long
  1407. pylong_as_llu(PyObject *item)
  1408. {
  1409. PyObject *tmp;
  1410. unsigned long long llu;
  1411. tmp = _PyNumber_Index(item);
  1412. if (tmp == NULL)
  1413. return (unsigned long long)-1;
  1414. llu = PyLong_AsUnsignedLongLong(tmp);
  1415. Py_DECREF(tmp);
  1416. return llu;
  1417. }
  1418. static Py_ssize_t
  1419. pylong_as_zd(PyObject *item)
  1420. {
  1421. PyObject *tmp;
  1422. Py_ssize_t zd;
  1423. tmp = _PyNumber_Index(item);
  1424. if (tmp == NULL)
  1425. return -1;
  1426. zd = PyLong_AsSsize_t(tmp);
  1427. Py_DECREF(tmp);
  1428. return zd;
  1429. }
  1430. static size_t
  1431. pylong_as_zu(PyObject *item)
  1432. {
  1433. PyObject *tmp;
  1434. size_t zu;
  1435. tmp = _PyNumber_Index(item);
  1436. if (tmp == NULL)
  1437. return (size_t)-1;
  1438. zu = PyLong_AsSize_t(tmp);
  1439. Py_DECREF(tmp);
  1440. return zu;
  1441. }
  1442. /* Timings with the ndarray from _testbuffer.c indicate that using the
  1443. struct module is around 15x slower than the two functions below. */
  1444. #define UNPACK_SINGLE(dest, ptr, type) \
  1445. do { \
  1446. type x; \
  1447. memcpy((char *)&x, ptr, sizeof x); \
  1448. dest = x; \
  1449. } while (0)
  1450. /* Unpack a single item. 'fmt' can be any native format character in struct
  1451. module syntax. This function is very sensitive to small changes. With this
  1452. layout gcc automatically generates a fast jump table. */
  1453. static inline PyObject *
  1454. unpack_single(PyMemoryViewObject *self, const char *ptr, const char *fmt)
  1455. {
  1456. unsigned long long llu;
  1457. unsigned long lu;
  1458. size_t zu;
  1459. long long lld;
  1460. long ld;
  1461. Py_ssize_t zd;
  1462. double d;
  1463. unsigned char uc;
  1464. void *p;
  1465. CHECK_RELEASED_AGAIN(self);
  1466. #if PY_LITTLE_ENDIAN
  1467. int endian = 1;
  1468. #else
  1469. int endian = 0;
  1470. #endif
  1471. switch (fmt[0]) {
  1472. /* signed integers and fast path for 'B' */
  1473. case 'B': uc = *((const unsigned char *)ptr); goto convert_uc;
  1474. case 'b': ld = *((const signed char *)ptr); goto convert_ld;
  1475. case 'h': UNPACK_SINGLE(ld, ptr, short); goto convert_ld;
  1476. case 'i': UNPACK_SINGLE(ld, ptr, int); goto convert_ld;
  1477. case 'l': UNPACK_SINGLE(ld, ptr, long); goto convert_ld;
  1478. /* boolean */
  1479. case '?': UNPACK_SINGLE(ld, ptr, _Bool); goto convert_bool;
  1480. /* unsigned integers */
  1481. case 'H': UNPACK_SINGLE(lu, ptr, unsigned short); goto convert_lu;
  1482. case 'I': UNPACK_SINGLE(lu, ptr, unsigned int); goto convert_lu;
  1483. case 'L': UNPACK_SINGLE(lu, ptr, unsigned long); goto convert_lu;
  1484. /* native 64-bit */
  1485. case 'q': UNPACK_SINGLE(lld, ptr, long long); goto convert_lld;
  1486. case 'Q': UNPACK_SINGLE(llu, ptr, unsigned long long); goto convert_llu;
  1487. /* ssize_t and size_t */
  1488. case 'n': UNPACK_SINGLE(zd, ptr, Py_ssize_t); goto convert_zd;
  1489. case 'N': UNPACK_SINGLE(zu, ptr, size_t); goto convert_zu;
  1490. /* floats */
  1491. case 'f': UNPACK_SINGLE(d, ptr, float); goto convert_double;
  1492. case 'd': UNPACK_SINGLE(d, ptr, double); goto convert_double;
  1493. case 'e': d = PyFloat_Unpack2(ptr, endian); goto convert_double;
  1494. /* bytes object */
  1495. case 'c': goto convert_bytes;
  1496. /* pointer */
  1497. case 'P': UNPACK_SINGLE(p, ptr, void *); goto convert_pointer;
  1498. /* default */
  1499. default: goto err_format;
  1500. }
  1501. convert_uc:
  1502. /* PyLong_FromUnsignedLong() is slower */
  1503. return PyLong_FromLong(uc);
  1504. convert_ld:
  1505. return PyLong_FromLong(ld);
  1506. convert_lu:
  1507. return PyLong_FromUnsignedLong(lu);
  1508. convert_lld:
  1509. return PyLong_FromLongLong(lld);
  1510. convert_llu:
  1511. return PyLong_FromUnsignedLongLong(llu);
  1512. convert_zd:
  1513. return PyLong_FromSsize_t(zd);
  1514. convert_zu:
  1515. return PyLong_FromSize_t(zu);
  1516. convert_double:
  1517. return PyFloat_FromDouble(d);
  1518. convert_bool:
  1519. return PyBool_FromLong(ld);
  1520. convert_bytes:
  1521. return PyBytes_FromStringAndSize(ptr, 1);
  1522. convert_pointer:
  1523. return PyLong_FromVoidPtr(p);
  1524. err_format:
  1525. PyErr_Format(PyExc_NotImplementedError,
  1526. "memoryview: format %s not supported", fmt);
  1527. return NULL;
  1528. }
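/*
   Illustrative sketch (not part of the build): why UNPACK_SINGLE() copies
   through memcpy() instead of dereferencing a cast pointer.  An item
   pointer obtained from an arbitrary exporter may be misaligned for its C
   type, and a direct *(double *)ptr load would be undefined behaviour;
   memcpy() into a local is always valid and compiles to a plain load on
   common targets.  The helper name is an assumption.
*/
#if 0
static double
example_load_unaligned_double(const char *ptr)
{
    double x;
    memcpy(&x, ptr, sizeof x);  /* safe for any alignment of ptr */
    return x;
}
#endif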
  1529. #define PACK_SINGLE(ptr, src, type) \
  1530. do { \
  1531. type x; \
  1532. x = (type)src; \
  1533. memcpy(ptr, (char *)&x, sizeof x); \
  1534. } while (0)
  1535. /* Pack a single item. 'fmt' can be any native format character in
  1536. struct module syntax. */
  1537. static int
  1538. pack_single(PyMemoryViewObject *self, char *ptr, PyObject *item, const char *fmt)
  1539. {
  1540. unsigned long long llu;
  1541. unsigned long lu;
  1542. size_t zu;
  1543. long long lld;
  1544. long ld;
  1545. Py_ssize_t zd;
  1546. double d;
  1547. void *p;
  1548. #if PY_LITTLE_ENDIAN
  1549. int endian = 1;
  1550. #else
  1551. int endian = 0;
  1552. #endif
  1553. switch (fmt[0]) {
  1554. /* signed integers */
  1555. case 'b': case 'h': case 'i': case 'l':
  1556. ld = pylong_as_ld(item);
  1557. if (ld == -1 && PyErr_Occurred())
  1558. goto err_occurred;
  1559. CHECK_RELEASED_INT_AGAIN(self);
  1560. switch (fmt[0]) {
  1561. case 'b':
  1562. if (ld < SCHAR_MIN || ld > SCHAR_MAX) goto err_range;
  1563. *((signed char *)ptr) = (signed char)ld; break;
  1564. case 'h':
  1565. if (ld < SHRT_MIN || ld > SHRT_MAX) goto err_range;
  1566. PACK_SINGLE(ptr, ld, short); break;
  1567. case 'i':
  1568. if (ld < INT_MIN || ld > INT_MAX) goto err_range;
  1569. PACK_SINGLE(ptr, ld, int); break;
  1570. default: /* 'l' */
  1571. PACK_SINGLE(ptr, ld, long); break;
  1572. }
  1573. break;
  1574. /* unsigned integers */
  1575. case 'B': case 'H': case 'I': case 'L':
  1576. lu = pylong_as_lu(item);
  1577. if (lu == (unsigned long)-1 && PyErr_Occurred())
  1578. goto err_occurred;
  1579. CHECK_RELEASED_INT_AGAIN(self);
  1580. switch (fmt[0]) {
  1581. case 'B':
  1582. if (lu > UCHAR_MAX) goto err_range;
  1583. *((unsigned char *)ptr) = (unsigned char)lu; break;
  1584. case 'H':
  1585. if (lu > USHRT_MAX) goto err_range;
  1586. PACK_SINGLE(ptr, lu, unsigned short); break;
  1587. case 'I':
  1588. if (lu > UINT_MAX) goto err_range;
  1589. PACK_SINGLE(ptr, lu, unsigned int); break;
  1590. default: /* 'L' */
  1591. PACK_SINGLE(ptr, lu, unsigned long); break;
  1592. }
  1593. break;
  1594. /* native 64-bit */
  1595. case 'q':
  1596. lld = pylong_as_lld(item);
  1597. if (lld == -1 && PyErr_Occurred())
  1598. goto err_occurred;
  1599. CHECK_RELEASED_INT_AGAIN(self);
  1600. PACK_SINGLE(ptr, lld, long long);
  1601. break;
  1602. case 'Q':
  1603. llu = pylong_as_llu(item);
  1604. if (llu == (unsigned long long)-1 && PyErr_Occurred())
  1605. goto err_occurred;
  1606. CHECK_RELEASED_INT_AGAIN(self);
  1607. PACK_SINGLE(ptr, llu, unsigned long long);
  1608. break;
  1609. /* ssize_t and size_t */
  1610. case 'n':
  1611. zd = pylong_as_zd(item);
  1612. if (zd == -1 && PyErr_Occurred())
  1613. goto err_occurred;
  1614. CHECK_RELEASED_INT_AGAIN(self);
  1615. PACK_SINGLE(ptr, zd, Py_ssize_t);
  1616. break;
  1617. case 'N':
  1618. zu = pylong_as_zu(item);
  1619. if (zu == (size_t)-1 && PyErr_Occurred())
  1620. goto err_occurred;
  1621. CHECK_RELEASED_INT_AGAIN(self);
  1622. PACK_SINGLE(ptr, zu, size_t);
  1623. break;
  1624. /* floats */
  1625. case 'f': case 'd': case 'e':
  1626. d = PyFloat_AsDouble(item);
  1627. if (d == -1.0 && PyErr_Occurred())
  1628. goto err_occurred;
  1629. CHECK_RELEASED_INT_AGAIN(self);
  1630. if (fmt[0] == 'f') {
  1631. PACK_SINGLE(ptr, d, float);
  1632. }
  1633. else if (fmt[0] == 'd') {
  1634. PACK_SINGLE(ptr, d, double);
  1635. }
  1636. else {
  1637. if (PyFloat_Pack2(d, ptr, endian) < 0) {
  1638. goto err_occurred;
  1639. }
  1640. }
  1641. break;
  1642. /* bool */
  1643. case '?':
  1644. ld = PyObject_IsTrue(item);
  1645. if (ld < 0)
  1646. return -1; /* preserve original error */
  1647. CHECK_RELEASED_INT_AGAIN(self);
  1648. PACK_SINGLE(ptr, ld, _Bool);
  1649. break;
  1650. /* bytes object */
  1651. case 'c':
  1652. if (!PyBytes_Check(item))
  1653. return type_error_int(fmt);
  1654. if (PyBytes_GET_SIZE(item) != 1)
  1655. return value_error_int(fmt);
  1656. *ptr = PyBytes_AS_STRING(item)[0];
  1657. break;
  1658. /* pointer */
  1659. case 'P':
  1660. p = PyLong_AsVoidPtr(item);
  1661. if (p == NULL && PyErr_Occurred())
  1662. goto err_occurred;
  1663. CHECK_RELEASED_INT_AGAIN(self);
  1664. PACK_SINGLE(ptr, p, void *);
  1665. break;
  1666. /* default */
  1667. default: goto err_format;
  1668. }
  1669. return 0;
  1670. err_occurred:
  1671. return fix_error_int(fmt);
  1672. err_range:
  1673. return value_error_int(fmt);
  1674. err_format:
  1675. PyErr_Format(PyExc_NotImplementedError,
  1676. "memoryview: format %s not supported", fmt);
  1677. return -1;
  1678. }
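/*
   Illustrative sketch (not part of the build): the error mapping performed
   by pack_single() and fix_error_int().  A Python int that does not fit
   the target C type surfaces as ValueError (not OverflowError), and a
   non-integer as TypeError, both naming the format character.  The helper
   assumes a writable 1-D memoryview with format 'h'; its name and the
   70000 constant are illustrative.
*/
#if 0
static int
example_pack_range_error(PyObject *short_view)
{
    PyObject *index = PyLong_FromLong(0L);
    PyObject *too_big = PyLong_FromLong(70000L);  /* > SHRT_MAX on common ABIs */
    int ret = -1;

    if (index != NULL && too_big != NULL) {
        /* Equivalent to: short_view[0] = 70000 -> ValueError for format 'h'. */
        ret = PyObject_SetItem(short_view, index, too_big);
        assert(ret < 0 && PyErr_ExceptionMatches(PyExc_ValueError));
        PyErr_Clear();
    }
    Py_XDECREF(index);
    Py_XDECREF(too_big);
    return ret;
}
#endif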
  1679. /****************************************************************************/
  1680. /* unpack using the struct module */
  1681. /****************************************************************************/
  1682. /* For reasonable performance it is necessary to cache all objects required
  1683. for unpacking. An unpacker can handle the format passed to unpack_from().
  1684. Invariant: All pointer fields of the struct should either be NULL or valid
  1685. pointers. */
  1686. struct unpacker {
  1687. PyObject *unpack_from; /* Struct.unpack_from(format) */
  1688. PyObject *mview; /* cached memoryview */
  1689. char *item; /* buffer for mview */
  1690. Py_ssize_t itemsize; /* len(item) */
  1691. };
  1692. static struct unpacker *
  1693. unpacker_new(void)
  1694. {
  1695. struct unpacker *x = PyMem_Malloc(sizeof *x);
  1696. if (x == NULL) {
  1697. PyErr_NoMemory();
  1698. return NULL;
  1699. }
  1700. x->unpack_from = NULL;
  1701. x->mview = NULL;
  1702. x->item = NULL;
  1703. x->itemsize = 0;
  1704. return x;
  1705. }
  1706. static void
  1707. unpacker_free(struct unpacker *x)
  1708. {
  1709. if (x) {
  1710. Py_XDECREF(x->unpack_from);
  1711. Py_XDECREF(x->mview);
  1712. PyMem_Free(x->item);
  1713. PyMem_Free(x);
  1714. }
  1715. }
  1716. /* Return a new unpacker for the given format. */
  1717. static struct unpacker *
  1718. struct_get_unpacker(const char *fmt, Py_ssize_t itemsize)
  1719. {
  1720. PyObject *Struct = NULL; /* XXX cache it in globals? */
  1721. PyObject *structobj = NULL;
  1722. PyObject *format = NULL;
  1723. struct unpacker *x = NULL;
  1724. Struct = _PyImport_GetModuleAttrString("struct", "Struct");
  1725. if (Struct == NULL)
  1726. return NULL;
  1727. x = unpacker_new();
  1728. if (x == NULL)
  1729. goto error;
  1730. format = PyBytes_FromString(fmt);
  1731. if (format == NULL)
  1732. goto error;
  1733. structobj = PyObject_CallOneArg(Struct, format);
  1734. if (structobj == NULL)
  1735. goto error;
  1736. x->unpack_from = PyObject_GetAttrString(structobj, "unpack_from");
  1737. if (x->unpack_from == NULL)
  1738. goto error;
  1739. x->item = PyMem_Malloc(itemsize);
  1740. if (x->item == NULL) {
  1741. PyErr_NoMemory();
  1742. goto error;
  1743. }
  1744. x->itemsize = itemsize;
  1745. x->mview = PyMemoryView_FromMemory(x->item, itemsize, PyBUF_WRITE);
  1746. if (x->mview == NULL)
  1747. goto error;
  1748. out:
  1749. Py_XDECREF(Struct);
  1750. Py_XDECREF(format);
  1751. Py_XDECREF(structobj);
  1752. return x;
  1753. error:
  1754. unpacker_free(x);
  1755. x = NULL;
  1756. goto out;
  1757. }
  1758. /* unpack a single item */
  1759. static PyObject *
  1760. struct_unpack_single(const char *ptr, struct unpacker *x)
  1761. {
  1762. PyObject *v;
  1763. memcpy(x->item, ptr, x->itemsize);
  1764. v = PyObject_CallOneArg(x->unpack_from, x->mview);
  1765. if (v == NULL)
  1766. return NULL;
  1767. if (PyTuple_GET_SIZE(v) == 1) {
  1768. PyObject *res = Py_NewRef(PyTuple_GET_ITEM(v, 0));
  1769. Py_DECREF(v);
  1770. return res;
  1771. }
  1772. return v;
  1773. }
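/*
   Illustrative sketch (not part of the build): using the cached unpacker
   pair above directly.  '<l' (standard-size little-endian long, 4 bytes)
   has no native single-character fast path, so richcompare falls back to
   exactly this machinery.  'ptr' must point to at least 4 readable bytes;
   the helper name is an assumption.
*/
#if 0
static PyObject *
example_struct_unpack_le32(const char *ptr)
{
    struct unpacker *x = struct_get_unpacker("<l", 4);
    if (x == NULL) {
        return NULL;
    }
    PyObject *v = struct_unpack_single(ptr, x);  /* roughly Struct('<l').unpack_from(...) */
    unpacker_free(x);
    return v;
}
#endif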
  1774. /****************************************************************************/
  1775. /* Representations */
  1776. /****************************************************************************/
  1777. /* allow explicit form of native format */
  1778. static inline const char *
  1779. adjust_fmt(const Py_buffer *view)
  1780. {
  1781. const char *fmt;
  1782. fmt = (view->format[0] == '@') ? view->format+1 : view->format;
  1783. if (fmt[0] && fmt[1] == '\0')
  1784. return fmt;
  1785. PyErr_Format(PyExc_NotImplementedError,
  1786. "memoryview: unsupported format %s", view->format);
  1787. return NULL;
  1788. }
  1789. /* Base case for multi-dimensional unpacking. Assumption: ndim == 1. */
  1790. static PyObject *
  1791. tolist_base(PyMemoryViewObject *self, const char *ptr, const Py_ssize_t *shape,
  1792. const Py_ssize_t *strides, const Py_ssize_t *suboffsets,
  1793. const char *fmt)
  1794. {
  1795. PyObject *lst, *item;
  1796. Py_ssize_t i;
  1797. lst = PyList_New(shape[0]);
  1798. if (lst == NULL)
  1799. return NULL;
  1800. for (i = 0; i < shape[0]; ptr+=strides[0], i++) {
  1801. const char *xptr = ADJUST_PTR(ptr, suboffsets, 0);
  1802. item = unpack_single(self, xptr, fmt);
  1803. if (item == NULL) {
  1804. Py_DECREF(lst);
  1805. return NULL;
  1806. }
  1807. PyList_SET_ITEM(lst, i, item);
  1808. }
  1809. return lst;
  1810. }
  1811. /* Unpack a multi-dimensional array into a nested list.
  1812. Assumption: ndim >= 1. */
  1813. static PyObject *
  1814. tolist_rec(PyMemoryViewObject *self, const char *ptr, Py_ssize_t ndim, const Py_ssize_t *shape,
  1815. const Py_ssize_t *strides, const Py_ssize_t *suboffsets,
  1816. const char *fmt)
  1817. {
  1818. PyObject *lst, *item;
  1819. Py_ssize_t i;
  1820. assert(ndim >= 1);
  1821. assert(shape != NULL);
  1822. assert(strides != NULL);
  1823. if (ndim == 1)
  1824. return tolist_base(self, ptr, shape, strides, suboffsets, fmt);
  1825. lst = PyList_New(shape[0]);
  1826. if (lst == NULL)
  1827. return NULL;
  1828. for (i = 0; i < shape[0]; ptr+=strides[0], i++) {
  1829. const char *xptr = ADJUST_PTR(ptr, suboffsets, 0);
  1830. item = tolist_rec(self, xptr, ndim-1, shape+1,
  1831. strides+1, suboffsets ? suboffsets+1 : NULL,
  1832. fmt);
  1833. if (item == NULL) {
  1834. Py_DECREF(lst);
  1835. return NULL;
  1836. }
  1837. PyList_SET_ITEM(lst, i, item);
  1838. }
  1839. return lst;
  1840. }
  1841. /* Return a list representation of the memoryview. Currently only buffers
  1842. with native format strings are supported. */
  1843. /*[clinic input]
  1844. memoryview.tolist
  1845. Return the data in the buffer as a list of elements.
  1846. [clinic start generated code]*/
  1847. static PyObject *
  1848. memoryview_tolist_impl(PyMemoryViewObject *self)
  1849. /*[clinic end generated code: output=a6cda89214fd5a1b input=21e7d0c1860b211a]*/
  1850. {
  1851. const Py_buffer *view = &self->view;
  1852. const char *fmt;
  1853. CHECK_RELEASED(self);
  1854. fmt = adjust_fmt(view);
  1855. if (fmt == NULL)
  1856. return NULL;
  1857. if (view->ndim == 0) {
  1858. return unpack_single(self, view->buf, fmt);
  1859. }
  1860. else if (view->ndim == 1) {
  1861. return tolist_base(self, view->buf, view->shape,
  1862. view->strides, view->suboffsets,
  1863. fmt);
  1864. }
  1865. else {
  1866. return tolist_rec(self, view->buf, view->ndim, view->shape,
  1867. view->strides, view->suboffsets,
  1868. fmt);
  1869. }
  1870. }
  1871. /*[clinic input]
  1872. memoryview.tobytes
  1873. order: str(accept={str, NoneType}, c_default="NULL") = 'C'
  1874. Return the data in the buffer as a byte string.
  1875. Order can be {'C', 'F', 'A'}. When order is 'C' or 'F', the data of the
  1876. original array is converted to C or Fortran order. For contiguous views,
  1877. 'A' returns an exact copy of the physical memory. In particular, in-memory
  1878. Fortran order is preserved. For non-contiguous views, the data is converted
  1879. to C first. order=None is the same as order='C'.
  1880. [clinic start generated code]*/
  1881. static PyObject *
  1882. memoryview_tobytes_impl(PyMemoryViewObject *self, const char *order)
  1883. /*[clinic end generated code: output=1288b62560a32a23 input=0efa3ddaeda573a8]*/
  1884. {
  1885. Py_buffer *src = VIEW_ADDR(self);
  1886. char ord = 'C';
  1887. PyObject *bytes;
  1888. CHECK_RELEASED(self);
  1889. if (order) {
  1890. if (strcmp(order, "F") == 0) {
  1891. ord = 'F';
  1892. }
  1893. else if (strcmp(order, "A") == 0) {
  1894. ord = 'A';
  1895. }
  1896. else if (strcmp(order, "C") != 0) {
  1897. PyErr_SetString(PyExc_ValueError,
  1898. "order must be 'C', 'F' or 'A'");
  1899. return NULL;
  1900. }
  1901. }
  1902. bytes = PyBytes_FromStringAndSize(NULL, src->len);
  1903. if (bytes == NULL)
  1904. return NULL;
  1905. if (PyBuffer_ToContiguous(PyBytes_AS_STRING(bytes), src, src->len, ord) < 0) {
  1906. Py_DECREF(bytes);
  1907. return NULL;
  1908. }
  1909. return bytes;
  1910. }
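/*
   Illustrative sketch (not part of the build): the same conversion that
   tobytes() performs, done directly against any buffer exporter with
   PyBuffer_ToContiguous().  'C' and 'F' request C/Fortran element order;
   'A' keeps the physical byte order of contiguous buffers.  The helper
   name is an assumption.
*/
#if 0
static PyObject *
example_copy_as_c_contiguous(PyObject *exporter)
{
    Py_buffer src;

    if (PyObject_GetBuffer(exporter, &src, PyBUF_FULL_RO) < 0) {
        return NULL;
    }
    PyObject *bytes = PyBytes_FromStringAndSize(NULL, src.len);
    if (bytes != NULL &&
        PyBuffer_ToContiguous(PyBytes_AS_STRING(bytes), &src, src.len, 'C') < 0) {
        Py_CLEAR(bytes);
    }
    PyBuffer_Release(&src);
    return bytes;
}
#endif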
  1911. /*[clinic input]
  1912. memoryview.hex
  1913. sep: object = NULL
  1914. An optional single character or byte to separate hex bytes.
  1915. bytes_per_sep: int = 1
  1916. How many bytes between separators. Positive values count from the
  1917. right, negative values count from the left.
  1918. Return the data in the buffer as a str of hexadecimal numbers.
  1919. Example:
  1920. >>> value = memoryview(b'\xb9\x01\xef')
  1921. >>> value.hex()
  1922. 'b901ef'
  1923. >>> value.hex(':')
  1924. 'b9:01:ef'
  1925. >>> value.hex(':', 2)
  1926. 'b9:01ef'
  1927. >>> value.hex(':', -2)
  1928. 'b901:ef'
  1929. [clinic start generated code]*/
  1930. static PyObject *
  1931. memoryview_hex_impl(PyMemoryViewObject *self, PyObject *sep,
  1932. int bytes_per_sep)
  1933. /*[clinic end generated code: output=430ca760f94f3ca7 input=539f6a3a5fb56946]*/
  1934. {
  1935. Py_buffer *src = VIEW_ADDR(self);
  1936. PyObject *bytes;
  1937. PyObject *ret;
  1938. CHECK_RELEASED(self);
  1939. if (MV_C_CONTIGUOUS(self->flags)) {
  1940. return _Py_strhex_with_sep(src->buf, src->len, sep, bytes_per_sep);
  1941. }
  1942. bytes = PyBytes_FromStringAndSize(NULL, src->len);
  1943. if (bytes == NULL)
  1944. return NULL;
  1945. if (PyBuffer_ToContiguous(PyBytes_AS_STRING(bytes), src, src->len, 'C') < 0) {
  1946. Py_DECREF(bytes);
  1947. return NULL;
  1948. }
  1949. ret = _Py_strhex_with_sep(
  1950. PyBytes_AS_STRING(bytes), PyBytes_GET_SIZE(bytes),
  1951. sep, bytes_per_sep);
  1952. Py_DECREF(bytes);
  1953. return ret;
  1954. }
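/*
   Illustrative sketch (not part of the build): hex() with a separator,
   driven from C.  Equivalent to mv.hex(':', 2), i.e. pairs of bytes joined
   by ':' counting from the right.  The helper name is an assumption.
*/
#if 0
static PyObject *
example_hex_pairs(PyObject *mv)
{
    return PyObject_CallMethod(mv, "hex", "si", ":", 2);
}
#endif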
  1955. static PyObject *
  1956. memory_repr(PyMemoryViewObject *self)
  1957. {
  1958. if (self->flags & _Py_MEMORYVIEW_RELEASED)
  1959. return PyUnicode_FromFormat("<released memory at %p>", self);
  1960. else
  1961. return PyUnicode_FromFormat("<memory at %p>", self);
  1962. }
  1963. /**************************************************************************/
  1964. /* Indexing and slicing */
  1965. /**************************************************************************/
  1966. static char *
  1967. lookup_dimension(const Py_buffer *view, char *ptr, int dim, Py_ssize_t index)
  1968. {
  1969. Py_ssize_t nitems; /* items in the given dimension */
  1970. assert(view->shape);
  1971. assert(view->strides);
  1972. nitems = view->shape[dim];
  1973. if (index < 0) {
  1974. index += nitems;
  1975. }
  1976. if (index < 0 || index >= nitems) {
  1977. PyErr_Format(PyExc_IndexError,
  1978. "index out of bounds on dimension %d", dim + 1);
  1979. return NULL;
  1980. }
  1981. ptr += view->strides[dim] * index;
  1982. ptr = ADJUST_PTR(ptr, view->suboffsets, dim);
  1983. return ptr;
  1984. }
  1985. /* Get the pointer to the item at index. */
  1986. static char *
  1987. ptr_from_index(const Py_buffer *view, Py_ssize_t index)
  1988. {
  1989. char *ptr = (char *)view->buf;
  1990. return lookup_dimension(view, ptr, 0, index);
  1991. }
  1992. /* Get the pointer to the item at tuple. */
  1993. static char *
  1994. ptr_from_tuple(const Py_buffer *view, PyObject *tup)
  1995. {
  1996. char *ptr = (char *)view->buf;
  1997. Py_ssize_t dim, nindices = PyTuple_GET_SIZE(tup);
  1998. if (nindices > view->ndim) {
  1999. PyErr_Format(PyExc_TypeError,
  2000. "cannot index %zd-dimension view with %zd-element tuple",
  2001. view->ndim, nindices);
  2002. return NULL;
  2003. }
  2004. for (dim = 0; dim < nindices; dim++) {
  2005. Py_ssize_t index;
  2006. index = PyNumber_AsSsize_t(PyTuple_GET_ITEM(tup, dim),
  2007. PyExc_IndexError);
  2008. if (index == -1 && PyErr_Occurred())
  2009. return NULL;
  2010. ptr = lookup_dimension(view, ptr, (int)dim, index);
  2011. if (ptr == NULL)
  2012. return NULL;
  2013. }
  2014. return ptr;
  2015. }
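/*
   Illustrative sketch (not part of the build): the stride arithmetic that
   lookup_dimension() applies once per dimension, written out for a 2-D
   view without suboffsets.  For a C-contiguous shape (3, 4) with itemsize
   8 the strides are (32, 8), so element [i][j] lives at buf + 32*i + 8*j.
   The helper name is an assumption and bounds checking is omitted.
*/
#if 0
static char *
example_item_ptr_2d(const Py_buffer *view, Py_ssize_t i, Py_ssize_t j)
{
    assert(view->ndim == 2 && view->suboffsets == NULL);
    return (char *)view->buf + view->strides[0] * i + view->strides[1] * j;
}
#endif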
  2016. /* Return the item at index. In a one-dimensional view, this is an object
  2017. with the type specified by view->format. Otherwise, the item is a sub-view.
  2018. The function is used in memory_subscript() and memory_as_sequence. */
  2019. static PyObject *
  2020. memory_item(PyMemoryViewObject *self, Py_ssize_t index)
  2021. {
  2022. Py_buffer *view = &(self->view);
  2023. const char *fmt;
  2024. CHECK_RELEASED(self);
  2025. fmt = adjust_fmt(view);
  2026. if (fmt == NULL)
  2027. return NULL;
  2028. if (view->ndim == 0) {
  2029. PyErr_SetString(PyExc_TypeError, "invalid indexing of 0-dim memory");
  2030. return NULL;
  2031. }
  2032. if (view->ndim == 1) {
  2033. char *ptr = ptr_from_index(view, index);
  2034. if (ptr == NULL)
  2035. return NULL;
  2036. return unpack_single(self, ptr, fmt);
  2037. }
  2038. PyErr_SetString(PyExc_NotImplementedError,
  2039. "multi-dimensional sub-views are not implemented");
  2040. return NULL;
  2041. }
  2042. /* Return the item at position *key* (a tuple of indices). */
  2043. static PyObject *
  2044. memory_item_multi(PyMemoryViewObject *self, PyObject *tup)
  2045. {
  2046. Py_buffer *view = &(self->view);
  2047. const char *fmt;
  2048. Py_ssize_t nindices = PyTuple_GET_SIZE(tup);
  2049. char *ptr;
  2050. CHECK_RELEASED(self);
  2051. fmt = adjust_fmt(view);
  2052. if (fmt == NULL)
  2053. return NULL;
  2054. if (nindices < view->ndim) {
  2055. PyErr_SetString(PyExc_NotImplementedError,
  2056. "sub-views are not implemented");
  2057. return NULL;
  2058. }
  2059. ptr = ptr_from_tuple(view, tup);
  2060. if (ptr == NULL)
  2061. return NULL;
  2062. return unpack_single(self, ptr, fmt);
  2063. }
  2064. static inline int
  2065. init_slice(Py_buffer *base, PyObject *key, int dim)
  2066. {
  2067. Py_ssize_t start, stop, step, slicelength;
  2068. if (PySlice_Unpack(key, &start, &stop, &step) < 0) {
  2069. return -1;
  2070. }
  2071. slicelength = PySlice_AdjustIndices(base->shape[dim], &start, &stop, step);
  2072. if (base->suboffsets == NULL || dim == 0) {
  2073. adjust_buf:
  2074. base->buf = (char *)base->buf + base->strides[dim] * start;
  2075. }
  2076. else {
  2077. Py_ssize_t n = dim-1;
  2078. while (n >= 0 && base->suboffsets[n] < 0)
  2079. n--;
  2080. if (n < 0)
  2081. goto adjust_buf; /* all suboffsets are negative */
  2082. base->suboffsets[n] = base->suboffsets[n] + base->strides[dim] * start;
  2083. }
  2084. base->shape[dim] = slicelength;
  2085. base->strides[dim] = base->strides[dim] * step;
  2086. return 0;
  2087. }
  2088. static int
  2089. is_multislice(PyObject *key)
  2090. {
  2091. Py_ssize_t size, i;
  2092. if (!PyTuple_Check(key))
  2093. return 0;
  2094. size = PyTuple_GET_SIZE(key);
  2095. if (size == 0)
  2096. return 0;
  2097. for (i = 0; i < size; i++) {
  2098. PyObject *x = PyTuple_GET_ITEM(key, i);
  2099. if (!PySlice_Check(x))
  2100. return 0;
  2101. }
  2102. return 1;
  2103. }
  2104. static Py_ssize_t
  2105. is_multiindex(PyObject *key)
  2106. {
  2107. Py_ssize_t size, i;
  2108. if (!PyTuple_Check(key))
  2109. return 0;
  2110. size = PyTuple_GET_SIZE(key);
  2111. for (i = 0; i < size; i++) {
  2112. PyObject *x = PyTuple_GET_ITEM(key, i);
  2113. if (!_PyIndex_Check(x)) {
  2114. return 0;
  2115. }
  2116. }
  2117. return 1;
  2118. }
  2119. /* mv[obj] returns an object holding the data for one element if obj
  2120. fully indexes the memoryview or another memoryview object if it
  2121. does not.
  2122. 0-d memoryview objects can be referenced using mv[...] or mv[()]
  2123. but not with anything else. */
  2124. static PyObject *
  2125. memory_subscript(PyMemoryViewObject *self, PyObject *key)
  2126. {
  2127. Py_buffer *view;
  2128. view = &(self->view);
  2129. CHECK_RELEASED(self);
  2130. if (view->ndim == 0) {
  2131. if (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0) {
  2132. const char *fmt = adjust_fmt(view);
  2133. if (fmt == NULL)
  2134. return NULL;
  2135. return unpack_single(self, view->buf, fmt);
  2136. }
  2137. else if (key == Py_Ellipsis) {
  2138. return Py_NewRef(self);
  2139. }
  2140. else {
  2141. PyErr_SetString(PyExc_TypeError,
  2142. "invalid indexing of 0-dim memory");
  2143. return NULL;
  2144. }
  2145. }
  2146. if (_PyIndex_Check(key)) {
  2147. Py_ssize_t index;
  2148. index = PyNumber_AsSsize_t(key, PyExc_IndexError);
  2149. if (index == -1 && PyErr_Occurred())
  2150. return NULL;
  2151. return memory_item(self, index);
  2152. }
  2153. else if (PySlice_Check(key)) {
  2154. CHECK_RESTRICTED(self);
  2155. PyMemoryViewObject *sliced;
  2156. sliced = (PyMemoryViewObject *)mbuf_add_view(self->mbuf, view);
  2157. if (sliced == NULL)
  2158. return NULL;
  2159. if (init_slice(&sliced->view, key, 0) < 0) {
  2160. Py_DECREF(sliced);
  2161. return NULL;
  2162. }
  2163. init_len(&sliced->view);
  2164. init_flags(sliced);
  2165. return (PyObject *)sliced;
  2166. }
  2167. else if (is_multiindex(key)) {
  2168. return memory_item_multi(self, key);
  2169. }
  2170. else if (is_multislice(key)) {
  2171. PyErr_SetString(PyExc_NotImplementedError,
  2172. "multi-dimensional slicing is not implemented");
  2173. return NULL;
  2174. }
  2175. PyErr_SetString(PyExc_TypeError, "memoryview: invalid slice key");
  2176. return NULL;
  2177. }
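/*
   Illustrative sketch (not part of the build): the two shapes of mv[key]
   reachable from C.  An integer index into a 1-D view yields an unpacked
   object, while a slice yields another memoryview that shares the same
   exporter.  The helper builds mv[:stop]; its name is an assumption.
*/
#if 0
static PyObject *
example_prefix_view(PyObject *mv, Py_ssize_t stop)
{
    PyObject *stop_obj = PyLong_FromSsize_t(stop);
    if (stop_obj == NULL) {
        return NULL;
    }
    PyObject *slice = PySlice_New(NULL, stop_obj, NULL);  /* slice(None, stop, None) */
    Py_DECREF(stop_obj);
    if (slice == NULL) {
        return NULL;
    }
    PyObject *sub = PyObject_GetItem(mv, slice);          /* mv[:stop] */
    Py_DECREF(slice);
    return sub;
}
#endif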
  2178. static int
  2179. memory_ass_sub(PyMemoryViewObject *self, PyObject *key, PyObject *value)
  2180. {
  2181. Py_buffer *view = &(self->view);
  2182. Py_buffer src;
  2183. const char *fmt;
  2184. char *ptr;
  2185. CHECK_RELEASED_INT(self);
  2186. fmt = adjust_fmt(view);
  2187. if (fmt == NULL)
  2188. return -1;
  2189. if (view->readonly) {
  2190. PyErr_SetString(PyExc_TypeError, "cannot modify read-only memory");
  2191. return -1;
  2192. }
  2193. if (value == NULL) {
  2194. PyErr_SetString(PyExc_TypeError, "cannot delete memory");
  2195. return -1;
  2196. }
  2197. if (view->ndim == 0) {
  2198. if (key == Py_Ellipsis ||
  2199. (PyTuple_Check(key) && PyTuple_GET_SIZE(key)==0)) {
  2200. ptr = (char *)view->buf;
  2201. return pack_single(self, ptr, value, fmt);
  2202. }
  2203. else {
  2204. PyErr_SetString(PyExc_TypeError,
  2205. "invalid indexing of 0-dim memory");
  2206. return -1;
  2207. }
  2208. }
  2209. if (_PyIndex_Check(key)) {
  2210. Py_ssize_t index;
  2211. if (1 < view->ndim) {
  2212. PyErr_SetString(PyExc_NotImplementedError,
  2213. "sub-views are not implemented");
  2214. return -1;
  2215. }
  2216. index = PyNumber_AsSsize_t(key, PyExc_IndexError);
  2217. if (index == -1 && PyErr_Occurred())
  2218. return -1;
  2219. ptr = ptr_from_index(view, index);
  2220. if (ptr == NULL)
  2221. return -1;
  2222. return pack_single(self, ptr, value, fmt);
  2223. }
  2224. /* one-dimensional: fast path */
  2225. if (PySlice_Check(key) && view->ndim == 1) {
  2226. Py_buffer dest; /* sliced view */
  2227. Py_ssize_t arrays[3];
  2228. int ret = -1;
  2229. /* rvalue must be an exporter */
  2230. if (PyObject_GetBuffer(value, &src, PyBUF_FULL_RO) < 0)
  2231. return ret;
  2232. dest = *view;
  2233. dest.shape = &arrays[0]; dest.shape[0] = view->shape[0];
  2234. dest.strides = &arrays[1]; dest.strides[0] = view->strides[0];
  2235. if (view->suboffsets) {
  2236. dest.suboffsets = &arrays[2]; dest.suboffsets[0] = view->suboffsets[0];
  2237. }
  2238. if (init_slice(&dest, key, 0) < 0)
  2239. goto end_block;
  2240. dest.len = dest.shape[0] * dest.itemsize;
  2241. ret = copy_single(self, &dest, &src);
  2242. end_block:
  2243. PyBuffer_Release(&src);
  2244. return ret;
  2245. }
  2246. if (is_multiindex(key)) {
  2247. char *ptr;
  2248. if (PyTuple_GET_SIZE(key) < view->ndim) {
  2249. PyErr_SetString(PyExc_NotImplementedError,
  2250. "sub-views are not implemented");
  2251. return -1;
  2252. }
  2253. ptr = ptr_from_tuple(view, key);
  2254. if (ptr == NULL)
  2255. return -1;
  2256. return pack_single(self, ptr, value, fmt);
  2257. }
  2258. if (PySlice_Check(key) || is_multislice(key)) {
  2259. /* Call memory_subscript() to produce a sliced lvalue, then copy
  2260. rvalue into lvalue. This is already implemented in _testbuffer.c. */
  2261. PyErr_SetString(PyExc_NotImplementedError,
  2262. "memoryview slice assignments are currently restricted "
  2263. "to ndim = 1");
  2264. return -1;
  2265. }
  2266. PyErr_SetString(PyExc_TypeError, "memoryview: invalid slice key");
  2267. return -1;
  2268. }
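/*
   Illustrative sketch (not part of the build): 1-D slice assignment, the
   only slice assignment currently supported.  The right-hand side must
   itself export a buffer whose format and total length match the sliced
   lvalue.  The helper overwrites the first n items of a writable 1-D 'B'
   view; its name is an assumption.
*/
#if 0
static int
example_overwrite_prefix(PyObject *mv, const char *data, Py_ssize_t n)
{
    int ret = -1;
    PyObject *src = PyBytes_FromStringAndSize(data, n);
    PyObject *stop = src ? PyLong_FromSsize_t(n) : NULL;
    PyObject *slice = stop ? PySlice_New(NULL, stop, NULL) : NULL;

    if (slice != NULL) {
        ret = PyObject_SetItem(mv, slice, src);  /* mv[:n] = data */
    }
    Py_XDECREF(slice);
    Py_XDECREF(stop);
    Py_XDECREF(src);
    return ret;
}
#endif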
  2269. static Py_ssize_t
  2270. memory_length(PyMemoryViewObject *self)
  2271. {
  2272. CHECK_RELEASED_INT(self);
  2273. if (self->view.ndim == 0) {
  2274. PyErr_SetString(PyExc_TypeError, "0-dim memory has no length");
  2275. return -1;
  2276. }
  2277. return self->view.shape[0];
  2278. }
  2279. /* As mapping */
  2280. static PyMappingMethods memory_as_mapping = {
  2281. (lenfunc)memory_length, /* mp_length */
  2282. (binaryfunc)memory_subscript, /* mp_subscript */
  2283. (objobjargproc)memory_ass_sub, /* mp_ass_subscript */
  2284. };
  2285. /* As sequence */
  2286. static PySequenceMethods memory_as_sequence = {
  2287. (lenfunc)memory_length, /* sq_length */
  2288. 0, /* sq_concat */
  2289. 0, /* sq_repeat */
  2290. (ssizeargfunc)memory_item, /* sq_item */
  2291. };
  2292. /**************************************************************************/
  2293. /* Comparisons */
  2294. /**************************************************************************/
  2295. #define MV_COMPARE_EX -1 /* exception */
  2296. #define MV_COMPARE_NOT_IMPL -2 /* not implemented */
  2297. /* Translate a StructError to "not equal". Preserve other exceptions. */
  2298. static int
  2299. fix_struct_error_int(void)
  2300. {
  2301. assert(PyErr_Occurred());
  2302. /* XXX Cannot get at StructError directly? */
  2303. if (PyErr_ExceptionMatches(PyExc_ImportError) ||
  2304. PyErr_ExceptionMatches(PyExc_MemoryError)) {
  2305. return MV_COMPARE_EX;
  2306. }
  2307. /* StructError: invalid or unknown format -> not equal */
  2308. PyErr_Clear();
  2309. return 0;
  2310. }
  2311. /* Unpack and compare single items of p and q using the struct module. */
  2312. static int
  2313. struct_unpack_cmp(const char *p, const char *q,
  2314. struct unpacker *unpack_p, struct unpacker *unpack_q)
  2315. {
  2316. PyObject *v, *w;
  2317. int ret;
  2318. /* At this point any exception from the struct module should not be
  2319. StructError, since both formats have been accepted already. */
  2320. v = struct_unpack_single(p, unpack_p);
  2321. if (v == NULL)
  2322. return MV_COMPARE_EX;
  2323. w = struct_unpack_single(q, unpack_q);
  2324. if (w == NULL) {
  2325. Py_DECREF(v);
  2326. return MV_COMPARE_EX;
  2327. }
  2328. /* MV_COMPARE_EX == -1: exceptions are preserved */
  2329. ret = PyObject_RichCompareBool(v, w, Py_EQ);
  2330. Py_DECREF(v);
  2331. Py_DECREF(w);
  2332. return ret;
  2333. }
  2334. /* Unpack and compare single items of p and q. If both p and q have the same
  2335. single element native format, the comparison uses a fast path (gcc creates
  2336. a jump table and converts memcpy into simple assignments on x86/x64).
  2337. Otherwise, the comparison is delegated to the struct module, which is
  2338. 30-60x slower. */
  2339. #define CMP_SINGLE(p, q, type) \
  2340. do { \
  2341. type x; \
  2342. type y; \
  2343. memcpy((char *)&x, p, sizeof x); \
  2344. memcpy((char *)&y, q, sizeof y); \
  2345. equal = (x == y); \
  2346. } while (0)
  2347. static inline int
  2348. unpack_cmp(const char *p, const char *q, char fmt,
  2349. struct unpacker *unpack_p, struct unpacker *unpack_q)
  2350. {
  2351. int equal;
  2352. switch (fmt) {
  2353. /* signed integers and fast path for 'B' */
  2354. case 'B': return *((const unsigned char *)p) == *((const unsigned char *)q);
  2355. case 'b': return *((const signed char *)p) == *((const signed char *)q);
  2356. case 'h': CMP_SINGLE(p, q, short); return equal;
  2357. case 'i': CMP_SINGLE(p, q, int); return equal;
  2358. case 'l': CMP_SINGLE(p, q, long); return equal;
  2359. /* boolean */
  2360. case '?': CMP_SINGLE(p, q, _Bool); return equal;
  2361. /* unsigned integers */
  2362. case 'H': CMP_SINGLE(p, q, unsigned short); return equal;
  2363. case 'I': CMP_SINGLE(p, q, unsigned int); return equal;
  2364. case 'L': CMP_SINGLE(p, q, unsigned long); return equal;
  2365. /* native 64-bit */
  2366. case 'q': CMP_SINGLE(p, q, long long); return equal;
  2367. case 'Q': CMP_SINGLE(p, q, unsigned long long); return equal;
  2368. /* ssize_t and size_t */
  2369. case 'n': CMP_SINGLE(p, q, Py_ssize_t); return equal;
  2370. case 'N': CMP_SINGLE(p, q, size_t); return equal;
  2371. /* floats */
  2372. /* XXX DBL_EPSILON? */
  2373. case 'f': CMP_SINGLE(p, q, float); return equal;
  2374. case 'd': CMP_SINGLE(p, q, double); return equal;
  2375. case 'e': {
  2376. #if PY_LITTLE_ENDIAN
  2377. int endian = 1;
  2378. #else
  2379. int endian = 0;
  2380. #endif
  2381. /* Note: PyFloat_Unpack2 should never fail */
  2382. double u = PyFloat_Unpack2(p, endian);
  2383. double v = PyFloat_Unpack2(q, endian);
  2384. return (u == v);
  2385. }
  2386. /* bytes object */
  2387. case 'c': return *p == *q;
  2388. /* pointer */
  2389. case 'P': CMP_SINGLE(p, q, void *); return equal;
  2390. /* use the struct module */
  2391. case '_':
  2392. assert(unpack_p);
  2393. assert(unpack_q);
  2394. return struct_unpack_cmp(p, q, unpack_p, unpack_q);
  2395. }
  2396. /* NOT REACHED */
  2397. PyErr_SetString(PyExc_RuntimeError,
  2398. "memoryview: internal error in richcompare");
  2399. return MV_COMPARE_EX;
  2400. }
  2401. /* Base case for recursive array comparisons. Assumption: ndim == 1. */
  2402. static int
  2403. cmp_base(const char *p, const char *q, const Py_ssize_t *shape,
  2404. const Py_ssize_t *pstrides, const Py_ssize_t *psuboffsets,
  2405. const Py_ssize_t *qstrides, const Py_ssize_t *qsuboffsets,
  2406. char fmt, struct unpacker *unpack_p, struct unpacker *unpack_q)
  2407. {
  2408. Py_ssize_t i;
  2409. int equal;
  2410. for (i = 0; i < shape[0]; p+=pstrides[0], q+=qstrides[0], i++) {
  2411. const char *xp = ADJUST_PTR(p, psuboffsets, 0);
  2412. const char *xq = ADJUST_PTR(q, qsuboffsets, 0);
  2413. equal = unpack_cmp(xp, xq, fmt, unpack_p, unpack_q);
  2414. if (equal <= 0)
  2415. return equal;
  2416. }
  2417. return 1;
  2418. }
  2419. /* Recursively compare two multi-dimensional arrays that have the same
  2420. logical structure. Assumption: ndim >= 1. */
  2421. static int
  2422. cmp_rec(const char *p, const char *q,
  2423. Py_ssize_t ndim, const Py_ssize_t *shape,
  2424. const Py_ssize_t *pstrides, const Py_ssize_t *psuboffsets,
  2425. const Py_ssize_t *qstrides, const Py_ssize_t *qsuboffsets,
  2426. char fmt, struct unpacker *unpack_p, struct unpacker *unpack_q)
  2427. {
  2428. Py_ssize_t i;
  2429. int equal;
  2430. assert(ndim >= 1);
  2431. assert(shape != NULL);
  2432. assert(pstrides != NULL);
  2433. assert(qstrides != NULL);
  2434. if (ndim == 1) {
  2435. return cmp_base(p, q, shape,
  2436. pstrides, psuboffsets,
  2437. qstrides, qsuboffsets,
  2438. fmt, unpack_p, unpack_q);
  2439. }
  2440. for (i = 0; i < shape[0]; p+=pstrides[0], q+=qstrides[0], i++) {
  2441. const char *xp = ADJUST_PTR(p, psuboffsets, 0);
  2442. const char *xq = ADJUST_PTR(q, qsuboffsets, 0);
  2443. equal = cmp_rec(xp, xq, ndim-1, shape+1,
  2444. pstrides+1, psuboffsets ? psuboffsets+1 : NULL,
  2445. qstrides+1, qsuboffsets ? qsuboffsets+1 : NULL,
  2446. fmt, unpack_p, unpack_q);
  2447. if (equal <= 0)
  2448. return equal;
  2449. }
  2450. return 1;
  2451. }
  2452. static PyObject *
  2453. memory_richcompare(PyObject *v, PyObject *w, int op)
  2454. {
  2455. PyObject *res;
  2456. Py_buffer wbuf, *vv;
  2457. Py_buffer *ww = NULL;
  2458. struct unpacker *unpack_v = NULL;
  2459. struct unpacker *unpack_w = NULL;
  2460. char vfmt, wfmt;
  2461. int equal = MV_COMPARE_NOT_IMPL;
  2462. if (op != Py_EQ && op != Py_NE)
  2463. goto result; /* Py_NotImplemented */
  2464. assert(PyMemoryView_Check(v));
  2465. if (BASE_INACCESSIBLE(v)) {
  2466. equal = (v == w);
  2467. goto result;
  2468. }
  2469. vv = VIEW_ADDR(v);
  2470. if (PyMemoryView_Check(w)) {
  2471. if (BASE_INACCESSIBLE(w)) {
  2472. equal = (v == w);
  2473. goto result;
  2474. }
  2475. ww = VIEW_ADDR(w);
  2476. }
  2477. else {
  2478. if (PyObject_GetBuffer(w, &wbuf, PyBUF_FULL_RO) < 0) {
  2479. PyErr_Clear();
  2480. goto result; /* Py_NotImplemented */
  2481. }
  2482. ww = &wbuf;
  2483. }
  2484. if (!equiv_shape(vv, ww)) {
  2485. PyErr_Clear();
  2486. equal = 0;
  2487. goto result;
  2488. }
  2489. /* Use fast unpacking for identical primitive C type formats. */
  2490. if (get_native_fmtchar(&vfmt, vv->format) < 0)
  2491. vfmt = '_';
  2492. if (get_native_fmtchar(&wfmt, ww->format) < 0)
  2493. wfmt = '_';
  2494. if (vfmt == '_' || wfmt == '_' || vfmt != wfmt) {
  2495. /* Use struct module unpacking. NOTE: Even for equal format strings,
  2496. memcmp() cannot be used for item comparison since it would give
  2497. incorrect results in the case of NaNs or uninitialized padding
  2498. bytes. */
  2499. vfmt = '_';
  2500. unpack_v = struct_get_unpacker(vv->format, vv->itemsize);
  2501. if (unpack_v == NULL) {
  2502. equal = fix_struct_error_int();
  2503. goto result;
  2504. }
  2505. unpack_w = struct_get_unpacker(ww->format, ww->itemsize);
  2506. if (unpack_w == NULL) {
  2507. equal = fix_struct_error_int();
  2508. goto result;
  2509. }
  2510. }
  2511. if (vv->ndim == 0) {
  2512. equal = unpack_cmp(vv->buf, ww->buf,
  2513. vfmt, unpack_v, unpack_w);
  2514. }
  2515. else if (vv->ndim == 1) {
  2516. equal = cmp_base(vv->buf, ww->buf, vv->shape,
  2517. vv->strides, vv->suboffsets,
  2518. ww->strides, ww->suboffsets,
  2519. vfmt, unpack_v, unpack_w);
  2520. }
  2521. else {
  2522. equal = cmp_rec(vv->buf, ww->buf, vv->ndim, vv->shape,
  2523. vv->strides, vv->suboffsets,
  2524. ww->strides, ww->suboffsets,
  2525. vfmt, unpack_v, unpack_w);
  2526. }
  2527. result:
  2528. if (equal < 0) {
  2529. if (equal == MV_COMPARE_NOT_IMPL)
  2530. res = Py_NotImplemented;
  2531. else /* exception */
  2532. res = NULL;
  2533. }
  2534. else if ((equal && op == Py_EQ) || (!equal && op == Py_NE))
  2535. res = Py_True;
  2536. else
  2537. res = Py_False;
  2538. if (ww == &wbuf)
  2539. PyBuffer_Release(ww);
  2540. unpacker_free(unpack_v);
  2541. unpacker_free(unpack_w);
  2542. return Py_XNewRef(res);
  2543. }
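/*
   Illustrative sketch (not part of the build): equality from C.  Only
   Py_EQ and Py_NE are implemented above; ordering comparisons return
   NotImplemented and surface as TypeError at the Python level.  Equality
   compares logical structure and unpacked items, not raw bytes.  The
   helper name is an assumption.
*/
#if 0
static int
example_views_equal(PyObject *mv, PyObject *other)
{
    /* 1 if equal, 0 if not, -1 with an exception set on error. */
    return PyObject_RichCompareBool(mv, other, Py_EQ);
}
#endif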
  2544. /**************************************************************************/
  2545. /* Hash */
  2546. /**************************************************************************/
  2547. static Py_hash_t
  2548. memory_hash(PyMemoryViewObject *self)
  2549. {
  2550. if (self->hash == -1) {
  2551. Py_buffer *view = &self->view;
  2552. char *mem = view->buf;
  2553. Py_ssize_t ret;
  2554. char fmt;
  2555. CHECK_RELEASED_INT(self);
  2556. if (!view->readonly) {
  2557. PyErr_SetString(PyExc_ValueError,
  2558. "cannot hash writable memoryview object");
  2559. return -1;
  2560. }
  2561. ret = get_native_fmtchar(&fmt, view->format);
  2562. if (ret < 0 || !IS_BYTE_FORMAT(fmt)) {
  2563. PyErr_SetString(PyExc_ValueError,
  2564. "memoryview: hashing is restricted to formats 'B', 'b' or 'c'");
  2565. return -1;
  2566. }
  2567. if (view->obj != NULL && PyObject_Hash(view->obj) == -1) {
  2568. /* Keep the original error message */
  2569. return -1;
  2570. }
  2571. if (!MV_C_CONTIGUOUS(self->flags)) {
  2572. mem = PyMem_Malloc(view->len);
  2573. if (mem == NULL) {
  2574. PyErr_NoMemory();
  2575. return -1;
  2576. }
  2577. if (buffer_to_contiguous(mem, view, 'C') < 0) {
  2578. PyMem_Free(mem);
  2579. return -1;
  2580. }
  2581. }
  2582. /* Can't fail */
  2583. self->hash = _Py_HashBytes(mem, view->len);
  2584. if (mem != view->buf)
  2585. PyMem_Free(mem);
  2586. }
  2587. return self->hash;
  2588. }
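/*
   Illustrative sketch (not part of the build): the hashing restrictions
   above.  Hashing requires a readonly view with format 'B', 'b' or 'c',
   and a hashable underlying exporter; the resulting hash equals that of
   the corresponding bytes object.  The helper hashes a readonly copy and
   returns -1 with an exception set if any of those conditions fail (e.g.
   for a bytearray-backed view).  The helper name is an assumption.
*/
#if 0
static Py_hash_t
example_hash_readonly_copy(PyObject *mv)
{
    PyObject *ro = PyObject_CallMethod(mv, "toreadonly", NULL);
    if (ro == NULL) {
        return -1;
    }
    Py_hash_t h = PyObject_Hash(ro);
    Py_DECREF(ro);
    return h;
}
#endif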
  2589. /**************************************************************************/
  2590. /* getters */
  2591. /**************************************************************************/
  2592. static PyObject *
  2593. _IntTupleFromSsizet(int len, Py_ssize_t *vals)
  2594. {
  2595. int i;
  2596. PyObject *o;
  2597. PyObject *intTuple;
  2598. if (vals == NULL)
  2599. return PyTuple_New(0);
  2600. intTuple = PyTuple_New(len);
  2601. if (!intTuple)
  2602. return NULL;
  2603. for (i=0; i<len; i++) {
  2604. o = PyLong_FromSsize_t(vals[i]);
  2605. if (!o) {
  2606. Py_DECREF(intTuple);
  2607. return NULL;
  2608. }
  2609. PyTuple_SET_ITEM(intTuple, i, o);
  2610. }
  2611. return intTuple;
  2612. }
  2613. static PyObject *
  2614. memory_obj_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored))
  2615. {
  2616. Py_buffer *view = &self->view;
  2617. CHECK_RELEASED(self);
  2618. if (view->obj == NULL) {
  2619. Py_RETURN_NONE;
  2620. }
  2621. return Py_NewRef(view->obj);
  2622. }
  2623. static PyObject *
  2624. memory_nbytes_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored))
  2625. {
  2626. CHECK_RELEASED(self);
  2627. return PyLong_FromSsize_t(self->view.len);
  2628. }
  2629. static PyObject *
  2630. memory_format_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored))
  2631. {
  2632. CHECK_RELEASED(self);
  2633. return PyUnicode_FromString(self->view.format);
  2634. }
  2635. static PyObject *
  2636. memory_itemsize_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored))
  2637. {
  2638. CHECK_RELEASED(self);
  2639. return PyLong_FromSsize_t(self->view.itemsize);
  2640. }
  2641. static PyObject *
  2642. memory_shape_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored))
  2643. {
  2644. CHECK_RELEASED(self);
  2645. return _IntTupleFromSsizet(self->view.ndim, self->view.shape);
  2646. }
  2647. static PyObject *
  2648. memory_strides_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored))
  2649. {
  2650. CHECK_RELEASED(self);
  2651. return _IntTupleFromSsizet(self->view.ndim, self->view.strides);
  2652. }
  2653. static PyObject *
  2654. memory_suboffsets_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored))
  2655. {
  2656. CHECK_RELEASED(self);
  2657. return _IntTupleFromSsizet(self->view.ndim, self->view.suboffsets);
  2658. }
  2659. static PyObject *
  2660. memory_readonly_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored))
  2661. {
  2662. CHECK_RELEASED(self);
  2663. return PyBool_FromLong(self->view.readonly);
  2664. }
  2665. static PyObject *
  2666. memory_ndim_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored))
  2667. {
  2668. CHECK_RELEASED(self);
  2669. return PyLong_FromLong(self->view.ndim);
  2670. }
  2671. static PyObject *
  2672. memory_c_contiguous(PyMemoryViewObject *self, PyObject *dummy)
  2673. {
  2674. CHECK_RELEASED(self);
  2675. return PyBool_FromLong(MV_C_CONTIGUOUS(self->flags));
  2676. }
  2677. static PyObject *
  2678. memory_f_contiguous(PyMemoryViewObject *self, PyObject *dummy)
  2679. {
  2680. CHECK_RELEASED(self);
  2681. return PyBool_FromLong(MV_F_CONTIGUOUS(self->flags));
  2682. }
  2683. static PyObject *
  2684. memory_contiguous(PyMemoryViewObject *self, PyObject *dummy)
  2685. {
  2686. CHECK_RELEASED(self);
  2687. return PyBool_FromLong(MV_ANY_CONTIGUOUS(self->flags));
  2688. }
  2689. PyDoc_STRVAR(memory_obj_doc,
  2690. "The underlying object of the memoryview.");
  2691. PyDoc_STRVAR(memory_nbytes_doc,
  2692. "The amount of space in bytes that the array would use in\n"
  2693. " a contiguous representation.");
  2694. PyDoc_STRVAR(memory_readonly_doc,
  2695. "A bool indicating whether the memory is read only.");
  2696. PyDoc_STRVAR(memory_itemsize_doc,
  2697. "The size in bytes of each element of the memoryview.");
  2698. PyDoc_STRVAR(memory_format_doc,
  2699. "A string containing the format (in struct module style)\n"
  2700. " for each element in the view.");
  2701. PyDoc_STRVAR(memory_ndim_doc,
  2702. "An integer indicating how many dimensions of a multi-dimensional\n"
  2703. " array the memory represents.");
  2704. PyDoc_STRVAR(memory_shape_doc,
  2705. "A tuple of ndim integers giving the shape of the memory\n"
  2706. " as an N-dimensional array.");
  2707. PyDoc_STRVAR(memory_strides_doc,
  2708. "A tuple of ndim integers giving the size in bytes to access\n"
  2709. " each element for each dimension of the array.");
  2710. PyDoc_STRVAR(memory_suboffsets_doc,
  2711. "A tuple of integers used internally for PIL-style arrays.");
  2712. PyDoc_STRVAR(memory_c_contiguous_doc,
  2713. "A bool indicating whether the memory is C contiguous.");
  2714. PyDoc_STRVAR(memory_f_contiguous_doc,
  2715. "A bool indicating whether the memory is Fortran contiguous.");
  2716. PyDoc_STRVAR(memory_contiguous_doc,
  2717. "A bool indicating whether the memory is contiguous.");
  2718. static PyGetSetDef memory_getsetlist[] = {
  2719. {"obj", (getter)memory_obj_get, NULL, memory_obj_doc},
  2720. {"nbytes", (getter)memory_nbytes_get, NULL, memory_nbytes_doc},
  2721. {"readonly", (getter)memory_readonly_get, NULL, memory_readonly_doc},
  2722. {"itemsize", (getter)memory_itemsize_get, NULL, memory_itemsize_doc},
  2723. {"format", (getter)memory_format_get, NULL, memory_format_doc},
  2724. {"ndim", (getter)memory_ndim_get, NULL, memory_ndim_doc},
  2725. {"shape", (getter)memory_shape_get, NULL, memory_shape_doc},
  2726. {"strides", (getter)memory_strides_get, NULL, memory_strides_doc},
  2727. {"suboffsets", (getter)memory_suboffsets_get, NULL, memory_suboffsets_doc},
  2728. {"c_contiguous", (getter)memory_c_contiguous, NULL, memory_c_contiguous_doc},
  2729. {"f_contiguous", (getter)memory_f_contiguous, NULL, memory_f_contiguous_doc},
  2730. {"contiguous", (getter)memory_contiguous, NULL, memory_contiguous_doc},
  2731. {NULL, NULL, NULL, NULL},
  2732. };
  2733. static PyMethodDef memory_methods[] = {
  2734. MEMORYVIEW_RELEASE_METHODDEF
  2735. MEMORYVIEW_TOBYTES_METHODDEF
  2736. MEMORYVIEW_HEX_METHODDEF
  2737. MEMORYVIEW_TOLIST_METHODDEF
  2738. MEMORYVIEW_CAST_METHODDEF
  2739. MEMORYVIEW_TOREADONLY_METHODDEF
  2740. MEMORYVIEW__FROM_FLAGS_METHODDEF
  2741. {"__enter__", memory_enter, METH_NOARGS, NULL},
  2742. {"__exit__", memory_exit, METH_VARARGS, NULL},
  2743. {NULL, NULL}
  2744. };
  2745. /**************************************************************************/
  2746. /* Memoryview Iterator */
  2747. /**************************************************************************/
  2748. PyTypeObject _PyMemoryIter_Type;
  2749. typedef struct {
  2750. PyObject_HEAD
  2751. Py_ssize_t it_index;
  2752. PyMemoryViewObject *it_seq; // Set to NULL when iterator is exhausted
  2753. Py_ssize_t it_length;
  2754. const char *it_fmt;
  2755. } memoryiterobject;
  2756. static void
  2757. memoryiter_dealloc(memoryiterobject *it)
  2758. {
  2759. _PyObject_GC_UNTRACK(it);
  2760. Py_XDECREF(it->it_seq);
  2761. PyObject_GC_Del(it);
  2762. }
  2763. static int
  2764. memoryiter_traverse(memoryiterobject *it, visitproc visit, void *arg)
  2765. {
  2766. Py_VISIT(it->it_seq);
  2767. return 0;
  2768. }
  2769. static PyObject *
  2770. memoryiter_next(memoryiterobject *it)
  2771. {
  2772. PyMemoryViewObject *seq;
  2773. seq = it->it_seq;
  2774. if (seq == NULL) {
  2775. return NULL;
  2776. }
  2777. if (it->it_index < it->it_length) {
  2778. CHECK_RELEASED(seq);
  2779. Py_buffer *view = &(seq->view);
  2780. char *ptr = (char *)seq->view.buf;
  2781. ptr += view->strides[0] * it->it_index++;
  2782. ptr = ADJUST_PTR(ptr, view->suboffsets, 0);
  2783. if (ptr == NULL) {
  2784. return NULL;
  2785. }
  2786. return unpack_single(seq, ptr, it->it_fmt);
  2787. }
  2788. it->it_seq = NULL;
  2789. Py_DECREF(seq);
  2790. return NULL;
  2791. }
  2792. static PyObject *
  2793. memory_iter(PyObject *seq)
  2794. {
  2795. if (!PyMemoryView_Check(seq)) {
  2796. PyErr_BadInternalCall();
  2797. return NULL;
  2798. }
  2799. PyMemoryViewObject *obj = (PyMemoryViewObject *)seq;
  2800. int ndims = obj->view.ndim;
  2801. if (ndims == 0) {
  2802. PyErr_SetString(PyExc_TypeError, "invalid indexing of 0-dim memory");
  2803. return NULL;
  2804. }
  2805. if (ndims != 1) {
  2806. PyErr_SetString(PyExc_NotImplementedError,
  2807. "multi-dimensional sub-views are not implemented");
  2808. return NULL;
  2809. }
  2810. const char *fmt = adjust_fmt(&obj->view);
  2811. if (fmt == NULL) {
  2812. return NULL;
  2813. }
  2814. memoryiterobject *it;
  2815. it = PyObject_GC_New(memoryiterobject, &_PyMemoryIter_Type);
  2816. if (it == NULL) {
  2817. return NULL;
  2818. }
  2819. it->it_fmt = fmt;
  2820. it->it_length = memory_length(obj);
  2821. it->it_index = 0;
  2822. it->it_seq = (PyMemoryViewObject*)Py_NewRef(obj);
  2823. _PyObject_GC_TRACK(it);
  2824. return (PyObject *)it;
  2825. }
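/*
   Illustrative sketch (not part of the build): consuming the iterator from
   C.  Only 1-D views are iterable; each item is produced by the same
   unpack_single() fast path used for indexing.  The helper name is an
   assumption.
*/
#if 0
static Py_ssize_t
example_count_items(PyObject *mv)
{
    PyObject *it = PyObject_GetIter(mv);
    if (it == NULL) {
        return -1;
    }
    Py_ssize_t count = 0;
    PyObject *item;
    while ((item = PyIter_Next(it)) != NULL) {
        count++;
        Py_DECREF(item);
    }
    Py_DECREF(it);
    return PyErr_Occurred() ? -1 : count;
}
#endif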
  2826. PyTypeObject _PyMemoryIter_Type = {
  2827. PyVarObject_HEAD_INIT(&PyType_Type, 0)
  2828. .tp_name = "memory_iterator",
  2829. .tp_basicsize = sizeof(memoryiterobject),
  2830. // methods
  2831. .tp_dealloc = (destructor)memoryiter_dealloc,
  2832. .tp_getattro = PyObject_GenericGetAttr,
  2833. .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
  2834. .tp_traverse = (traverseproc)memoryiter_traverse,
  2835. .tp_iter = PyObject_SelfIter,
  2836. .tp_iternext = (iternextfunc)memoryiter_next,
  2837. };
  2838. PyTypeObject PyMemoryView_Type = {
  2839. PyVarObject_HEAD_INIT(&PyType_Type, 0)
  2840. "memoryview", /* tp_name */
  2841. offsetof(PyMemoryViewObject, ob_array), /* tp_basicsize */
  2842. sizeof(Py_ssize_t), /* tp_itemsize */
  2843. (destructor)memory_dealloc, /* tp_dealloc */
  2844. 0, /* tp_vectorcall_offset */
  2845. 0, /* tp_getattr */
  2846. 0, /* tp_setattr */
  2847. 0, /* tp_as_async */
  2848. (reprfunc)memory_repr, /* tp_repr */
  2849. 0, /* tp_as_number */
  2850. &memory_as_sequence, /* tp_as_sequence */
  2851. &memory_as_mapping, /* tp_as_mapping */
  2852. (hashfunc)memory_hash, /* tp_hash */
  2853. 0, /* tp_call */
  2854. 0, /* tp_str */
  2855. PyObject_GenericGetAttr, /* tp_getattro */
  2856. 0, /* tp_setattro */
  2857. &memory_as_buffer, /* tp_as_buffer */
  2858. Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
  2859. Py_TPFLAGS_SEQUENCE, /* tp_flags */
  2860. memoryview__doc__, /* tp_doc */
  2861. (traverseproc)memory_traverse, /* tp_traverse */
  2862. (inquiry)memory_clear, /* tp_clear */
  2863. memory_richcompare, /* tp_richcompare */
  2864. offsetof(PyMemoryViewObject, weakreflist),/* tp_weaklistoffset */
  2865. memory_iter, /* tp_iter */
  2866. 0, /* tp_iternext */
  2867. memory_methods, /* tp_methods */
  2868. 0, /* tp_members */
  2869. memory_getsetlist, /* tp_getset */
  2870. 0, /* tp_base */
  2871. 0, /* tp_dict */
  2872. 0, /* tp_descr_get */
  2873. 0, /* tp_descr_set */
  2874. 0, /* tp_dictoffset */
  2875. 0, /* tp_init */
  2876. 0, /* tp_alloc */
  2877. memoryview, /* tp_new */
  2878. };