MemoryView_C.c 28 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941
////////// MemviewSliceStruct.proto //////////
//@proto_block: utility_code_proto_before_types

/* memoryview slice struct */
/* A view into a buffer: the owning memoryview object plus the raw data
   pointer and per-dimension shape/strides/suboffsets arrays. A suboffset
   of -1 marks a direct (non-indirect) dimension. */
struct {{memview_struct_name}};

typedef struct {
    struct {{memview_struct_name}} *memview;  /* owning memoryview, or Py_None as a "no object" sentinel */
    char *data;                               /* pointer to the first item */
    Py_ssize_t shape[{{max_dims}}];
    Py_ssize_t strides[{{max_dims}}];
    Py_ssize_t suboffsets[{{max_dims}}];
} {{memviewslice_name}};

// used for "len(memviewslice)"
#define __Pyx_MemoryView_Len(m) (m.shape[0])
/////////// Atomics.proto /////////////
//@proto_block: utility_code_proto_before_types

#include <pythread.h>

/* Atomic helpers for the memoryview slice acquisition count.
   Picks a backend at compile time: GNU __sync builtins, MSVC interlocked
   intrinsics, or (fallback) a PyThread-lock-protected plain increment. */

#ifndef CYTHON_ATOMICS
#define CYTHON_ATOMICS 1
#endif

// using CYTHON_ATOMICS as a cdef extern bint in the Cython memoryview code
// interacts badly with "import *". Therefore, define a helper function-like macro
#define __PYX_CYTHON_ATOMICS_ENABLED() CYTHON_ATOMICS

#define __pyx_atomic_int_type int

#if CYTHON_ATOMICS && (__GNUC__ >= 5 || (__GNUC__ == 4 && \
                    (__GNUC_MINOR__ > 1 || \
                    (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL__ >= 2))))
    /* gcc >= 4.1.2 */
    #define __pyx_atomic_incr_aligned(value) __sync_fetch_and_add(value, 1)
    #define __pyx_atomic_decr_aligned(value) __sync_fetch_and_sub(value, 1)

    #ifdef __PYX_DEBUG_ATOMICS
        #warning "Using GNU atomics"
    #endif
#elif CYTHON_ATOMICS && defined(_MSC_VER) && CYTHON_COMPILING_IN_NOGIL
    /* msvc */
    #include <intrin.h>
    /* MSVC's interlocked intrinsics operate on long, not int. */
    #undef __pyx_atomic_int_type
    #define __pyx_atomic_int_type long
    #pragma intrinsic (_InterlockedExchangeAdd)
    #define __pyx_atomic_incr_aligned(value) _InterlockedExchangeAdd(value, 1)
    #define __pyx_atomic_decr_aligned(value) _InterlockedExchangeAdd(value, -1)

    #ifdef __PYX_DEBUG_ATOMICS
        #pragma message ("Using MSVC atomics")
    #endif
#else
    /* No native atomics detected: the lock-based helpers below are used. */
    #undef CYTHON_ATOMICS
    #define CYTHON_ATOMICS 0

    #ifdef __PYX_DEBUG_ATOMICS
        #warning "Not using atomics"
    #endif
#endif

typedef volatile __pyx_atomic_int_type __pyx_atomic_int;

/* Both macros evaluate to the PREVIOUS value of the acquisition count
   (callers compare against 0/1 to detect the first/last acquisition). */
#if CYTHON_ATOMICS
    #define __pyx_add_acquisition_count(memview) \
        __pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview))
    #define __pyx_sub_acquisition_count(memview) \
        __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview))
#else
    #define __pyx_add_acquisition_count(memview) \
        __pyx_add_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
    #define __pyx_sub_acquisition_count(memview) \
        __pyx_sub_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
#endif
/////////////// ObjectToMemviewSlice.proto ///////////////

/* Convert a Python object into a typed memoryview slice (prototype). */
static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *, int writable_flag);

////////// MemviewSliceInit.proto //////////

#define __Pyx_BUF_MAX_NDIMS %(BUF_MAX_NDIMS)d

/* Per-axis access-spec bit flags (used in axes_specs arrays). */
#define __Pyx_MEMVIEW_DIRECT  1
#define __Pyx_MEMVIEW_PTR     2
#define __Pyx_MEMVIEW_FULL    4
#define __Pyx_MEMVIEW_CONTIG  8
#define __Pyx_MEMVIEW_STRIDED 16
#define __Pyx_MEMVIEW_FOLLOW  32

/* Whole-slice contiguity flags. */
#define __Pyx_IS_C_CONTIG 1
#define __Pyx_IS_F_CONTIG 2

static int __Pyx_init_memviewslice(
                struct __pyx_memoryview_obj *memview,
                int ndim,
                __Pyx_memviewslice *memviewslice,
                int memview_is_new_reference);

/* Lock-based fallbacks for the acquisition count (see Atomics.proto). */
static CYTHON_INLINE int __pyx_add_acquisition_count_locked(
    __pyx_atomic_int *acquisition_count, PyThread_type_lock lock);
static CYTHON_INLINE int __pyx_sub_acquisition_count_locked(
    __pyx_atomic_int *acquisition_count, PyThread_type_lock lock);

#define __pyx_get_slice_count_pointer(memview) (memview->acquisition_count_aligned_p)
#define __pyx_get_slice_count(memview) (*__pyx_get_slice_count_pointer(memview))
#define __PYX_INC_MEMVIEW(slice, have_gil) __Pyx_INC_MEMVIEW(slice, have_gil, __LINE__)
#define __PYX_XDEC_MEMVIEW(slice, have_gil) __Pyx_XDEC_MEMVIEW(slice, have_gil, __LINE__)
static CYTHON_INLINE void __Pyx_INC_MEMVIEW({{memviewslice_name}} *, int, int);
static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW({{memviewslice_name}} *, int, int);

/////////////// MemviewSliceIndex.proto ///////////////

static CYTHON_INLINE char *__pyx_memviewslice_index_full(
    const char *bufp, Py_ssize_t idx, Py_ssize_t stride, Py_ssize_t suboffset);
/////////////// ObjectToMemviewSlice ///////////////
//@requires: MemviewSliceValidateAndInit

/* Convert `obj` into a validated, initialized memoryview slice.
   None is passed through without refcounting; on validation failure the
   returned slice has memview/data set to NULL (the validation helper has
   already set a Python exception in that case). */
static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *obj, int writable_flag) {
    {{memviewslice_name}} result = {{memslice_init}};
    __Pyx_BufFmt_StackElem stack[{{struct_nesting_depth}}];
    int axes_specs[] = { {{axes_specs}} };
    int retcode;

    if (obj == Py_None) {
        /* We don't bother to refcount None */
        result.memview = (struct __pyx_memoryview_obj *) Py_None;
        return result;
    }

    retcode = __Pyx_ValidateAndInit_memviewslice(axes_specs, {{c_or_f_flag}},
                                                 {{buf_flag}} | writable_flag, {{ndim}},
                                                 &{{dtype_typeinfo}}, stack,
                                                 &result, obj);

    if (unlikely(retcode == -1))
        goto __pyx_fail;

    return result;
__pyx_fail:
    result.memview = NULL;
    result.data = NULL;
    return result;
}
/////////////// MemviewSliceValidateAndInit.proto ///////////////

/* Validate a buffer-exporting object against dtype/ndim/axis specs and
   initialize a slice from it. Returns 0 on success, -1 on error. */
static int __Pyx_ValidateAndInit_memviewslice(
                int *axes_specs,
                int c_or_f_flag,
                int buf_flags,
                int ndim,
                __Pyx_TypeInfo *dtype,
                __Pyx_BufFmt_StackElem stack[],
                __Pyx_memviewslice *memviewslice,
                PyObject *original_obj);

/////////////// MemviewSliceValidateAndInit ///////////////
//@requires: Buffer.c::TypeInfoCompare
//@requires: Buffer.c::BufferFormatStructs
//@requires: Buffer.c::BufferFormatCheck
  131. static int
  132. __pyx_check_strides(Py_buffer *buf, int dim, int ndim, int spec)
  133. {
  134. if (buf->shape[dim] <= 1)
  135. return 1;
  136. if (buf->strides) {
  137. if (spec & __Pyx_MEMVIEW_CONTIG) {
  138. if (spec & (__Pyx_MEMVIEW_PTR|__Pyx_MEMVIEW_FULL)) {
  139. if (unlikely(buf->strides[dim] != sizeof(void *))) {
  140. PyErr_Format(PyExc_ValueError,
  141. "Buffer is not indirectly contiguous "
  142. "in dimension %d.", dim);
  143. goto fail;
  144. }
  145. } else if (unlikely(buf->strides[dim] != buf->itemsize)) {
  146. PyErr_SetString(PyExc_ValueError,
  147. "Buffer and memoryview are not contiguous "
  148. "in the same dimension.");
  149. goto fail;
  150. }
  151. }
  152. if (spec & __Pyx_MEMVIEW_FOLLOW) {
  153. Py_ssize_t stride = buf->strides[dim];
  154. if (stride < 0)
  155. stride = -stride;
  156. if (unlikely(stride < buf->itemsize)) {
  157. PyErr_SetString(PyExc_ValueError,
  158. "Buffer and memoryview are not contiguous "
  159. "in the same dimension.");
  160. goto fail;
  161. }
  162. }
  163. } else {
  164. if (unlikely(spec & __Pyx_MEMVIEW_CONTIG && dim != ndim - 1)) {
  165. PyErr_Format(PyExc_ValueError,
  166. "C-contiguous buffer is not contiguous in "
  167. "dimension %d", dim);
  168. goto fail;
  169. } else if (unlikely(spec & (__Pyx_MEMVIEW_PTR))) {
  170. PyErr_Format(PyExc_ValueError,
  171. "C-contiguous buffer is not indirect in "
  172. "dimension %d", dim);
  173. goto fail;
  174. } else if (unlikely(buf->suboffsets)) {
  175. PyErr_SetString(PyExc_ValueError,
  176. "Buffer exposes suboffsets but no strides");
  177. goto fail;
  178. }
  179. }
  180. return 1;
  181. fail:
  182. return 0;
  183. }
  184. static int
  185. __pyx_check_suboffsets(Py_buffer *buf, int dim, CYTHON_UNUSED int ndim, int spec)
  186. {
  187. // Todo: without PyBUF_INDIRECT we may not have suboffset information, i.e., the
  188. // ptr may not be set to NULL but may be uninitialized?
  189. if (spec & __Pyx_MEMVIEW_DIRECT) {
  190. if (unlikely(buf->suboffsets && buf->suboffsets[dim] >= 0)) {
  191. PyErr_Format(PyExc_ValueError,
  192. "Buffer not compatible with direct access "
  193. "in dimension %d.", dim);
  194. goto fail;
  195. }
  196. }
  197. if (spec & __Pyx_MEMVIEW_PTR) {
  198. if (unlikely(!buf->suboffsets || (buf->suboffsets[dim] < 0))) {
  199. PyErr_Format(PyExc_ValueError,
  200. "Buffer is not indirectly accessible "
  201. "in dimension %d.", dim);
  202. goto fail;
  203. }
  204. }
  205. return 1;
  206. fail:
  207. return 0;
  208. }
  209. static int
  210. __pyx_verify_contig(Py_buffer *buf, int ndim, int c_or_f_flag)
  211. {
  212. int i;
  213. if (c_or_f_flag & __Pyx_IS_F_CONTIG) {
  214. Py_ssize_t stride = 1;
  215. for (i = 0; i < ndim; i++) {
  216. if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) {
  217. PyErr_SetString(PyExc_ValueError,
  218. "Buffer not fortran contiguous.");
  219. goto fail;
  220. }
  221. stride = stride * buf->shape[i];
  222. }
  223. } else if (c_or_f_flag & __Pyx_IS_C_CONTIG) {
  224. Py_ssize_t stride = 1;
  225. for (i = ndim - 1; i >- 1; i--) {
  226. if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) {
  227. PyErr_SetString(PyExc_ValueError,
  228. "Buffer not C contiguous.");
  229. goto fail;
  230. }
  231. stride = stride * buf->shape[i];
  232. }
  233. }
  234. return 1;
  235. fail:
  236. return 0;
  237. }
/* Validate that `original_obj` exposes a buffer compatible with the
   requested dtype, dimensionality, per-axis specs and contiguity, and
   initialize `memviewslice` from it.
   Returns 0 on success, -1 on error (with a Python exception set). */
static int __Pyx_ValidateAndInit_memviewslice(
                int *axes_specs,
                int c_or_f_flag,
                int buf_flags,
                int ndim,
                __Pyx_TypeInfo *dtype,
                __Pyx_BufFmt_StackElem stack[],
                __Pyx_memviewslice *memviewslice,
                PyObject *original_obj)
{
    struct __pyx_memoryview_obj *memview, *new_memview;
    __Pyx_RefNannyDeclarations
    Py_buffer *buf;
    int i, spec = 0, retval = -1;
    __Pyx_BufFmt_Context ctx;
    int from_memoryview = __pyx_memoryview_check(original_obj);

    __Pyx_RefNannySetupContext("ValidateAndInit_memviewslice", 0);

    if (from_memoryview && __pyx_typeinfo_cmp(dtype, ((struct __pyx_memoryview_obj *)
                                                            original_obj)->typeinfo)) {
        /* We have a matching dtype, skip format parsing */
        /* Borrow the existing memoryview; new_memview == NULL flags "not ours". */
        memview = (struct __pyx_memoryview_obj *) original_obj;
        new_memview = NULL;
    } else {
        /* Wrap the object in a fresh memoryview; we own this reference. */
        memview = (struct __pyx_memoryview_obj *) __pyx_memoryview_new(
                                            original_obj, buf_flags, 0, dtype);
        new_memview = memview;
        if (unlikely(!memview))
            goto fail;
    }

    buf = &memview->view;
    if (unlikely(buf->ndim != ndim)) {
        PyErr_Format(PyExc_ValueError,
                "Buffer has wrong number of dimensions (expected %d, got %d)",
                ndim, buf->ndim);
        goto fail;
    }

    if (new_memview) {
        /* Only parse the format string when we could not reuse cached typeinfo. */
        __Pyx_BufFmt_Init(&ctx, stack, dtype);
        if (unlikely(!__Pyx_BufFmt_CheckString(&ctx, buf->format))) goto fail;
    }

    if (unlikely((unsigned) buf->itemsize != dtype->size)) {
        PyErr_Format(PyExc_ValueError,
                     "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "u byte%s) "
                     "does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "u byte%s)",
                     buf->itemsize,
                     (buf->itemsize > 1) ? "s" : "",
                     dtype->name,
                     dtype->size,
                     (dtype->size > 1) ? "s" : "");
        goto fail;
    }

    /* Check axes */
    if (buf->len > 0) {
        // 0-sized arrays do not undergo these checks since their strides are
        // irrelevant and they are always both C- and F-contiguous.
        for (i = 0; i < ndim; i++) {
            spec = axes_specs[i];
            if (unlikely(!__pyx_check_strides(buf, i, ndim, spec)))
                goto fail;
            if (unlikely(!__pyx_check_suboffsets(buf, i, ndim, spec)))
                goto fail;
        }

        /* Check contiguity */
        if (unlikely(buf->strides && !__pyx_verify_contig(buf, ndim, c_or_f_flag)))
            goto fail;
    }

    /* Initialize */
    if (unlikely(__Pyx_init_memviewslice(memview, ndim, memviewslice,
                                         new_memview != NULL) == -1)) {
        goto fail;
    }

    retval = 0;
    goto no_fail;

fail:
    /* Drop only the reference we created; a borrowed memview is untouched. */
    Py_XDECREF(new_memview);
    retval = -1;
no_fail:
    __Pyx_RefNannyFinishContext();
    return retval;
}
////////// MemviewSliceInit //////////

/* Fill `memviewslice` (which must be zero-initialized) from the buffer
   owned by `memview`: copy or synthesize strides, copy shape/suboffsets,
   and take one acquisition-count reference on the memoryview.
   Returns 0 on success, -1 on error (with a Python exception set). */
static int
__Pyx_init_memviewslice(struct __pyx_memoryview_obj *memview,
                        int ndim,
                        {{memviewslice_name}} *memviewslice,
                        int memview_is_new_reference)
{
    __Pyx_RefNannyDeclarations
    int i, retval=-1;
    Py_buffer *buf = &memview->view;
    __Pyx_RefNannySetupContext("init_memviewslice", 0);

    if (unlikely(memviewslice->memview || memviewslice->data)) {
        PyErr_SetString(PyExc_ValueError,
                        "memviewslice is already initialized!");
        goto fail;
    }

    if (buf->strides) {
        for (i = 0; i < ndim; i++) {
            memviewslice->strides[i] = buf->strides[i];
        }
    } else {
        /* NULL strides: treat the buffer as C-contiguous and synthesize
           the strides from the shape, innermost dimension last. */
        Py_ssize_t stride = buf->itemsize;
        for (i = ndim - 1; i >= 0; i--) {
            memviewslice->strides[i] = stride;
            stride *= buf->shape[i];
        }
    }

    for (i = 0; i < ndim; i++) {
        memviewslice->shape[i] = buf->shape[i];
        if (buf->suboffsets) {
            memviewslice->suboffsets[i] = buf->suboffsets[i];
        } else {
            memviewslice->suboffsets[i] = -1;  /* -1 marks a direct dimension */
        }
    }

    memviewslice->memview = memview;
    memviewslice->data = (char *)buf->buf;
    /* Only the first slice acquisition of a borrowed memoryview takes a
       new Python reference (the add macro returns the previous count). */
    if (__pyx_add_acquisition_count(memview) == 0 && !memview_is_new_reference) {
        Py_INCREF(memview);
    }
    retval = 0;
    goto no_fail;

fail:
    /* Don't decref, the memoryview may be borrowed. Let the caller do the cleanup */
    /* __Pyx_XDECREF(memviewslice->memview); */
    memviewslice->memview = 0;
    memviewslice->data = 0;
    retval = -1;
no_fail:
    __Pyx_RefNannyFinishContext();
    return retval;
}
  370. #ifndef Py_NO_RETURN
  371. // available since Py3.3
  372. #define Py_NO_RETURN
  373. #endif
  374. static void __pyx_fatalerror(const char *fmt, ...) Py_NO_RETURN {
  375. va_list vargs;
  376. char msg[200];
  377. #if PY_VERSION_HEX >= 0x030A0000 || defined(HAVE_STDARG_PROTOTYPES)
  378. va_start(vargs, fmt);
  379. #else
  380. va_start(vargs);
  381. #endif
  382. vsnprintf(msg, 200, fmt, vargs);
  383. va_end(vargs);
  384. Py_FatalError(msg);
  385. }
  386. static CYTHON_INLINE int
  387. __pyx_add_acquisition_count_locked(__pyx_atomic_int *acquisition_count,
  388. PyThread_type_lock lock)
  389. {
  390. int result;
  391. PyThread_acquire_lock(lock, 1);
  392. result = (*acquisition_count)++;
  393. PyThread_release_lock(lock);
  394. return result;
  395. }
  396. static CYTHON_INLINE int
  397. __pyx_sub_acquisition_count_locked(__pyx_atomic_int *acquisition_count,
  398. PyThread_type_lock lock)
  399. {
  400. int result;
  401. PyThread_acquire_lock(lock, 1);
  402. result = (*acquisition_count)--;
  403. PyThread_release_lock(lock);
  404. return result;
  405. }
/* Acquire one slice reference on the memoryview behind `memslice`.
   The first acquisition also takes a Python reference, grabbing the GIL
   for it when `have_gil` is false. `lineno` only appears in the
   fatal-error message. NULL/None memviews are ignored. */
static CYTHON_INLINE void
__Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno)
{
    int first_time;
    struct {{memview_struct_name}} *memview = memslice->memview;
    if (unlikely(!memview || (PyObject *) memview == Py_None))
        return; /* allow uninitialized memoryview assignment */

    /* A negative count means the bookkeeping is corrupted — abort hard. */
    if (unlikely(__pyx_get_slice_count(memview) < 0))
        __pyx_fatalerror("Acquisition count is %d (line %d)",
                         __pyx_get_slice_count(memview), lineno);

    /* The add macro returns the previous count: 0 means first acquisition. */
    first_time = __pyx_add_acquisition_count(memview) == 0;

    if (unlikely(first_time)) {
        if (have_gil) {
            Py_INCREF((PyObject *) memview);
        } else {
            PyGILState_STATE _gilstate = PyGILState_Ensure();
            Py_INCREF((PyObject *) memview);
            PyGILState_Release(_gilstate);
        }
    }
}
/* Release one slice reference; when the last one is dropped, also drop
   the Python reference (acquiring the GIL if `have_gil` is false).
   Always clears memslice->memview and memslice->data. */
static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW({{memviewslice_name}} *memslice,
                                             int have_gil, int lineno) {
    int last_time;
    struct {{memview_struct_name}} *memview = memslice->memview;

    if (unlikely(!memview || (PyObject *) memview == Py_None)) {
        // we do not ref-count None
        memslice->memview = NULL;
        return;
    }

    /* Releasing a slice that holds no acquisition is a fatal bug. */
    if (unlikely(__pyx_get_slice_count(memview) <= 0))
        __pyx_fatalerror("Acquisition count is %d (line %d)",
                         __pyx_get_slice_count(memview), lineno);

    /* The sub macro returns the previous count: 1 means last release. */
    last_time = __pyx_sub_acquisition_count(memview) == 1;
    memslice->data = NULL;

    if (unlikely(last_time)) {
        if (have_gil) {
            Py_CLEAR(memslice->memview);
        } else {
            PyGILState_STATE _gilstate = PyGILState_Ensure();
            Py_CLEAR(memslice->memview);
            PyGILState_Release(_gilstate);
        }
    } else {
        memslice->memview = NULL;
    }
}
////////// MemviewSliceCopyTemplate.proto //////////

/* Copy a slice into a new contiguous buffer and return a slice over it. */
static {{memviewslice_name}}
__pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs,
                                 const char *mode, int ndim,
                                 size_t sizeof_dtype, int contig_flag,
                                 int dtype_is_object);
////////// MemviewSliceCopyTemplate //////////

/* Copy `from_mvs` into a freshly allocated contiguous array of the given
   mode and return a new slice over it. Sources with indirect (suboffset)
   dimensions are rejected. On error, returns a slice with memview/data
   set to NULL and a Python exception set. */
static {{memviewslice_name}}
__pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs,
                                 const char *mode, int ndim,
                                 size_t sizeof_dtype, int contig_flag,
                                 int dtype_is_object)
{
    __Pyx_RefNannyDeclarations
    int i;
    __Pyx_memviewslice new_mvs = {{memslice_init}};
    struct __pyx_memoryview_obj *from_memview = from_mvs->memview;
    Py_buffer *buf = &from_memview->view;
    PyObject *shape_tuple = NULL;
    PyObject *temp_int = NULL;
    struct __pyx_array_obj *array_obj = NULL;
    struct __pyx_memoryview_obj *memview_obj = NULL;

    __Pyx_RefNannySetupContext("__pyx_memoryview_copy_new_contig", 0);

    /* A flat copy only supports direct dimensions. */
    for (i = 0; i < ndim; i++) {
        if (unlikely(from_mvs->suboffsets[i] >= 0)) {
            PyErr_Format(PyExc_ValueError, "Cannot copy memoryview slice with "
                                           "indirect dimensions (axis %d)", i);
            goto fail;
        }
    }

    /* Build the shape tuple for the new array. */
    shape_tuple = PyTuple_New(ndim);
    if (unlikely(!shape_tuple)) {
        goto fail;
    }
    __Pyx_GOTREF(shape_tuple);

    for(i = 0; i < ndim; i++) {
        temp_int = PyInt_FromSsize_t(from_mvs->shape[i]);
        if(unlikely(!temp_int)) {
            goto fail;
        } else {
            PyTuple_SET_ITEM(shape_tuple, i, temp_int);
            temp_int = NULL;  /* reference stolen by the tuple */
        }
    }

    /* Allocate the destination array and wrap it in a memoryview. */
    array_obj = __pyx_array_new(shape_tuple, sizeof_dtype, buf->format, (char *) mode, NULL);
    if (unlikely(!array_obj)) {
        goto fail;
    }
    __Pyx_GOTREF(array_obj);

    memview_obj = (struct __pyx_memoryview_obj *) __pyx_memoryview_new(
                                    (PyObject *) array_obj, contig_flag,
                                    dtype_is_object,
                                    from_mvs->memview->typeinfo);
    if (unlikely(!memview_obj))
        goto fail;

    /* initialize new_mvs */
    if (unlikely(__Pyx_init_memviewslice(memview_obj, ndim, &new_mvs, 1) < 0))
        goto fail;

    /* Copy the actual data from the source slice into the new buffer. */
    if (unlikely(__pyx_memoryview_copy_contents(*from_mvs, new_mvs, ndim, ndim,
                                                dtype_is_object) < 0))
        goto fail;

    goto no_fail;

fail:
    __Pyx_XDECREF(new_mvs.memview);
    new_mvs.memview = NULL;
    new_mvs.data = NULL;
no_fail:
    __Pyx_XDECREF(shape_tuple);
    __Pyx_XDECREF(temp_int);
    __Pyx_XDECREF(array_obj);
    __Pyx_RefNannyFinishContext();
    return new_mvs;
}
////////// CopyContentsUtility.proto /////////

/* Per-type shorthand: copy `slice` into a new contiguous buffer of the
   instantiated mode/ndim/dtype. */
#define {{func_cname}}(slice) \
        __pyx_memoryview_copy_new_contig(&slice, "{{mode}}", {{ndim}}, \
                                         sizeof({{dtype_decl}}), {{contig_flag}}, \
                                         {{dtype_is_object}})

////////// OverlappingSlices.proto //////////

static int __pyx_slices_overlap({{memviewslice_name}} *slice1,
                                {{memviewslice_name}} *slice2,
                                int ndim, size_t itemsize);
  535. ////////// OverlappingSlices //////////
  536. /* Based on numpy's core/src/multiarray/array_assign.c */
  537. /* Gets a half-open range [start, end) which contains the array data */
  538. static void
  539. __pyx_get_array_memory_extents({{memviewslice_name}} *slice,
  540. void **out_start, void **out_end,
  541. int ndim, size_t itemsize)
  542. {
  543. char *start, *end;
  544. int i;
  545. start = end = slice->data;
  546. for (i = 0; i < ndim; i++) {
  547. Py_ssize_t stride = slice->strides[i];
  548. Py_ssize_t extent = slice->shape[i];
  549. if (extent == 0) {
  550. *out_start = *out_end = start;
  551. return;
  552. } else {
  553. if (stride > 0)
  554. end += stride * (extent - 1);
  555. else
  556. start += stride * (extent - 1);
  557. }
  558. }
  559. /* Return a half-open range */
  560. *out_start = start;
  561. *out_end = end + itemsize;
  562. }
  563. /* Returns 1 if the arrays have overlapping data, 0 otherwise */
  564. static int
  565. __pyx_slices_overlap({{memviewslice_name}} *slice1,
  566. {{memviewslice_name}} *slice2,
  567. int ndim, size_t itemsize)
  568. {
  569. void *start1, *end1, *start2, *end2;
  570. __pyx_get_array_memory_extents(slice1, &start1, &end1, ndim, itemsize);
  571. __pyx_get_array_memory_extents(slice2, &start2, &end2, ndim, itemsize);
  572. return (start1 < end2) && (start2 < end1);
  573. }
////////// MemviewSliceCheckContig.proto //////////

/* Per-instantiation shorthand for the generic contiguity check below. */
#define __pyx_memviewslice_is_contig_{{contig_type}}{{ndim}}(slice) \
    __pyx_memviewslice_is_contig(slice, '{{contig_type}}', {{ndim}})

////////// MemviewSliceIsContig.proto //////////

static int __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim);/*proto*/

////////// MemviewSliceIsContig //////////
  580. static int
  581. __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim)
  582. {
  583. int i, index, step, start;
  584. Py_ssize_t itemsize = mvs.memview->view.itemsize;
  585. if (order == 'F') {
  586. step = 1;
  587. start = 0;
  588. } else {
  589. step = -1;
  590. start = ndim - 1;
  591. }
  592. for (i = 0; i < ndim; i++) {
  593. index = start + step * i;
  594. if (mvs.suboffsets[index] >= 0 || mvs.strides[index] != itemsize)
  595. return 0;
  596. itemsize *= mvs.shape[index];
  597. }
  598. return 1;
  599. }
  600. /////////////// MemviewSliceIndex ///////////////
  601. static CYTHON_INLINE char *
  602. __pyx_memviewslice_index_full(const char *bufp, Py_ssize_t idx,
  603. Py_ssize_t stride, Py_ssize_t suboffset)
  604. {
  605. bufp = bufp + idx * stride;
  606. if (suboffset >= 0) {
  607. bufp = *((char **) bufp) + suboffset;
  608. }
  609. return (char *) bufp;
  610. }
/////////////// MemviewDtypeToObject.proto ///////////////

{{if to_py_function}}
static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp); /* proto */
{{endif}}

{{if from_py_function}}
static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj); /* proto */
{{endif}}

/////////////// MemviewDtypeToObject ///////////////

{{#__pyx_memview_<dtype_name>_to_object}}

/* Convert a dtype to or from a Python object */

{{if to_py_function}}
/* Load one item from `itemp` and box it as a Python object. */
static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp) {
    return (PyObject *) {{to_py_function}}(*({{dtype}} *) itemp);
}
{{endif}}

{{if from_py_function}}
/* Unbox `obj` and store it at `itemp`.
   Returns 1 on success, 0 when the conversion failed. */
static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj) {
    {{dtype}} value = {{from_py_function}}(obj);
    if ({{error_condition}})
        return 0;
    *({{dtype}} *) itemp = value;
    return 1;
}
{{endif}}
/////////////// MemviewObjectToObject.proto ///////////////

/* Function callbacks (for memoryview object) for dtype object */
static PyObject *{{get_function}}(const char *itemp); /* proto */
static int {{set_function}}(const char *itemp, PyObject *obj); /* proto */

/////////////// MemviewObjectToObject ///////////////

/* Read the PyObject* stored at `itemp` and return a new reference to it. */
static PyObject *{{get_function}}(const char *itemp) {
    PyObject *result = *(PyObject **) itemp;
    Py_INCREF(result);
    return result;
}

/* Replace the PyObject* stored at `itemp` with `obj`
   (incref new before decref old). Always succeeds; returns 1. */
static int {{set_function}}(const char *itemp, PyObject *obj) {
    Py_INCREF(obj);
    Py_DECREF(*(PyObject **) itemp);
    *(PyObject **) itemp = obj;
    return 1;
}
////////// ToughSlice //////////

/* Dimension is indexed with 'start:stop:step' */
/* Delegate the general slicing case to the runtime helper, which updates
   {{dst}} in place and signals errors with a negative return value. */
if (unlikely(__pyx_memoryview_slice_memviewslice(
    &{{dst}},
    {{src}}.shape[{{dim}}], {{src}}.strides[{{dim}}], {{src}}.suboffsets[{{dim}}],
    {{dim}},
    {{new_ndim}},
    &{{get_suboffset_dim()}},
    {{start}},
    {{stop}},
    {{step}},
    {{int(have_start)}},
    {{int(have_stop)}},
    {{int(have_step)}},
    1) < 0))
{
    {{error_goto}}
}

////////// SimpleSlice //////////

/* Dimension is indexed with ':' only */
/* A full slice just carries the dimension's extent and stride over. */
{{dst}}.shape[{{new_ndim}}] = {{src}}.shape[{{dim}}];
{{dst}}.strides[{{new_ndim}}] = {{src}}.strides[{{dim}}];

{{if access == 'direct'}}
{{dst}}.suboffsets[{{new_ndim}}] = -1;
{{else}}
{{dst}}.suboffsets[{{new_ndim}}] = {{src}}.suboffsets[{{dim}}];
if ({{src}}.suboffsets[{{dim}}] >= 0)
    {{get_suboffset_dim()}} = {{new_ndim}};
{{endif}}
////////// SliceIndex //////////

// Dimension is indexed with an integer, we could use the ToughSlice
// approach, but this is faster
{
    Py_ssize_t __pyx_tmp_idx = {{idx}};

    {{if wraparound or boundscheck}}
    Py_ssize_t __pyx_tmp_shape = {{src}}.shape[{{dim}}];
    {{endif}}

    Py_ssize_t __pyx_tmp_stride = {{src}}.strides[{{dim}}];
    {{if wraparound}}
    // negative indices count from the end of the dimension
    if (__pyx_tmp_idx < 0)
        __pyx_tmp_idx += __pyx_tmp_shape;
    {{endif}}

    {{if boundscheck}}
    if (unlikely(!__Pyx_is_valid_index(__pyx_tmp_idx, __pyx_tmp_shape))) {
        {{if not have_gil}}
        // raising the IndexError needs the GIL; take it only on the error path
        #ifdef WITH_THREAD
        PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();
        #endif
        {{endif}}

        PyErr_SetString(PyExc_IndexError,
                        "Index out of bounds (axis {{dim}})");

        {{if not have_gil}}
        #ifdef WITH_THREAD
        PyGILState_Release(__pyx_gilstate_save);
        #endif
        {{endif}}

        {{error_goto}}
    }
    {{endif}}

    {{if all_dimensions_direct}}
    {{dst}}.data += __pyx_tmp_idx * __pyx_tmp_stride;
    {{else}}
    if ({{get_suboffset_dim()}} < 0) {
        {{dst}}.data += __pyx_tmp_idx * __pyx_tmp_stride;

        /* This dimension is the first dimension, or is preceded by */
        /* direct or indirect dimensions that are indexed away. */
        /* Hence suboffset_dim must be less than zero, and we can have */
        /* our data pointer refer to another block by dereferencing. */
        /* slice.data -> B -> C becomes slice.data -> C */

        {{if indirect}}
        {
            Py_ssize_t __pyx_tmp_suboffset = {{src}}.suboffsets[{{dim}}];

            {{if generic}}
            if (__pyx_tmp_suboffset >= 0)
            {{endif}}

                {{dst}}.data = *((char **) {{dst}}.data) + __pyx_tmp_suboffset;
        }
        {{endif}}

    } else {
        /* An indirect dimension precedes us: fold the offset into its
           suboffset instead of the data pointer. */
        {{dst}}.suboffsets[{{get_suboffset_dim()}}] += __pyx_tmp_idx * __pyx_tmp_stride;

        /* Note: dimension can not be indirect, the compiler will have */
        /* issued an error */
    }

    {{endif}}
}
  736. ////////// FillStrided1DScalar.proto //////////
  737. static void
  738. __pyx_fill_slice_{{dtype_name}}({{type_decl}} *p, Py_ssize_t extent, Py_ssize_t stride,
  739. size_t itemsize, void *itemp);
  740. ////////// FillStrided1DScalar //////////
  741. /* Fill a slice with a scalar value. The dimension is direct and strided or contiguous */
  742. /* This can be used as a callback for the memoryview object to efficienty assign a scalar */
  743. /* Currently unused */
  744. static void
  745. __pyx_fill_slice_{{dtype_name}}({{type_decl}} *p, Py_ssize_t extent, Py_ssize_t stride,
  746. size_t itemsize, void *itemp)
  747. {
  748. Py_ssize_t i;
  749. {{type_decl}} item = *(({{type_decl}} *) itemp);
  750. {{type_decl}} *endp;
  751. stride /= sizeof({{type_decl}});
  752. endp = p + stride * extent;
  753. while (p < endp) {
  754. *p = item;
  755. p += stride;
  756. }
  757. }