/* MemoryView_C.c — Cython memoryview utility-code templates. */
////////// MemviewSliceStruct.proto //////////
//@proto_block: utility_code_proto_before_types

/* memoryview slice struct */
// A lightweight, by-value view into a memoryview object: a base data
// pointer plus per-dimension shape/stride/suboffset arrays.  The slice is
// tracked via the acquisition-count machinery defined below (Atomics /
// MemviewSliceInit sections).
struct {{memview_struct_name}};

typedef struct {
    struct {{memview_struct_name}} *memview;  // owning memoryview (may be set to Py_None as a "no buffer" marker)
    char *data;                               // pointer to the first element
    Py_ssize_t shape[{{max_dims}}];
    Py_ssize_t strides[{{max_dims}}];         // strides are in bytes
    Py_ssize_t suboffsets[{{max_dims}}];      // < 0 marks a direct (non-indirect) dimension
} {{memviewslice_name}};

// used for "len(memviewslice)"
#define __Pyx_MemoryView_Len(m)  (m.shape[0])
  14. /////////// Atomics.proto /////////////
  15. //@proto_block: utility_code_proto_before_types
  16. #include <pythread.h>
  17. #ifndef CYTHON_ATOMICS
  18. #define CYTHON_ATOMICS 1
  19. #endif
  20. #define __pyx_atomic_int_type int
  21. // todo: Portland pgcc, maybe OS X's OSAtomicIncrement32,
  22. // libatomic + autotools-like distutils support? Such a pain...
  23. #if CYTHON_ATOMICS && __GNUC__ >= 4 && (__GNUC_MINOR__ > 1 || \
  24. (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL >= 2)) && \
  25. !defined(__i386__)
  26. /* gcc >= 4.1.2 */
  27. #define __pyx_atomic_incr_aligned(value, lock) __sync_fetch_and_add(value, 1)
  28. #define __pyx_atomic_decr_aligned(value, lock) __sync_fetch_and_sub(value, 1)
  29. #ifdef __PYX_DEBUG_ATOMICS
  30. #warning "Using GNU atomics"
  31. #endif
  32. #elif CYTHON_ATOMICS && defined(_MSC_VER) && 0
  33. /* msvc */
  34. #include <Windows.h>
  35. #undef __pyx_atomic_int_type
  36. #define __pyx_atomic_int_type LONG
  37. #define __pyx_atomic_incr_aligned(value, lock) InterlockedIncrement(value)
  38. #define __pyx_atomic_decr_aligned(value, lock) InterlockedDecrement(value)
  39. #ifdef __PYX_DEBUG_ATOMICS
  40. #pragma message ("Using MSVC atomics")
  41. #endif
  42. #elif CYTHON_ATOMICS && (defined(__ICC) || defined(__INTEL_COMPILER)) && 0
  43. #define __pyx_atomic_incr_aligned(value, lock) _InterlockedIncrement(value)
  44. #define __pyx_atomic_decr_aligned(value, lock) _InterlockedDecrement(value)
  45. #ifdef __PYX_DEBUG_ATOMICS
  46. #warning "Using Intel atomics"
  47. #endif
  48. #else
  49. #undef CYTHON_ATOMICS
  50. #define CYTHON_ATOMICS 0
  51. #ifdef __PYX_DEBUG_ATOMICS
  52. #warning "Not using atomics"
  53. #endif
  54. #endif
  55. typedef volatile __pyx_atomic_int_type __pyx_atomic_int;
  56. #if CYTHON_ATOMICS
  57. #define __pyx_add_acquisition_count(memview) \
  58. __pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock)
  59. #define __pyx_sub_acquisition_count(memview) \
  60. __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock)
  61. #else
  62. #define __pyx_add_acquisition_count(memview) \
  63. __pyx_add_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
  64. #define __pyx_sub_acquisition_count(memview) \
  65. __pyx_sub_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
  66. #endif
/////////////// ObjectToMemviewSlice.proto ///////////////

// Coerce a Python object to a memoryview slice.  `writable_flag` is OR-ed
// into the buffer request flags (presumably PyBUF_WRITABLE or 0 — confirm
// at the call sites generated by the compiler).
static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *, int writable_flag);
////////// MemviewSliceInit.proto //////////

#define __Pyx_BUF_MAX_NDIMS %(BUF_MAX_NDIMS)d

// Per-dimension axis access specs (stored in axes_specs arrays):
#define __Pyx_MEMVIEW_DIRECT   1
#define __Pyx_MEMVIEW_PTR      2
#define __Pyx_MEMVIEW_FULL     4
#define __Pyx_MEMVIEW_CONTIG   8
#define __Pyx_MEMVIEW_STRIDED  16
#define __Pyx_MEMVIEW_FOLLOW   32

// Overall contiguity requirement (c_or_f_flag):
#define __Pyx_IS_C_CONTIG 1
#define __Pyx_IS_F_CONTIG 2

// Fill a memviewslice from memview->view and register it in the
// memoryview's acquisition count.  Returns 0 on success, -1 on failure.
static int __Pyx_init_memviewslice(
                struct __pyx_memoryview_obj *memview,
                int ndim,
                __Pyx_memviewslice *memviewslice,
                int memview_is_new_reference);

// Lock-based fallbacks used when atomics are unavailable; both return the
// count's previous value.
static CYTHON_INLINE int __pyx_add_acquisition_count_locked(
    __pyx_atomic_int *acquisition_count, PyThread_type_lock lock);
static CYTHON_INLINE int __pyx_sub_acquisition_count_locked(
    __pyx_atomic_int *acquisition_count, PyThread_type_lock lock);

#define __pyx_get_slice_count_pointer(memview) (memview->acquisition_count_aligned_p)
#define __pyx_get_slice_count(memview) (*__pyx_get_slice_count_pointer(memview))
// INC/XDEC capture __LINE__ so corrupt acquisition counts can be reported
// with the offending source line (see __pyx_fatalerror).
#define __PYX_INC_MEMVIEW(slice, have_gil) __Pyx_INC_MEMVIEW(slice, have_gil, __LINE__)
#define __PYX_XDEC_MEMVIEW(slice, have_gil) __Pyx_XDEC_MEMVIEW(slice, have_gil, __LINE__)

static CYTHON_INLINE void __Pyx_INC_MEMVIEW({{memviewslice_name}} *, int, int);
static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW({{memviewslice_name}} *, int, int);
/////////////// MemviewSliceIndex.proto ///////////////

// Compute the address of one element along a "full" (possibly indirect)
// dimension; see the implementation for the dereference rule.
static CYTHON_INLINE char *__pyx_memviewslice_index_full(
    const char *bufp, Py_ssize_t idx, Py_ssize_t stride, Py_ssize_t suboffset);
/////////////// ObjectToMemviewSlice ///////////////
//@requires: MemviewSliceValidateAndInit

// Coerce `obj` to a {{ndim}}-dimensional slice of the expected dtype and
// axis layout.  On failure, returns a slice whose memview and data are
// both NULL, with a Python exception set.  None is passed through as a
// sentinel without refcounting.
static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *obj, int writable_flag) {
    {{memviewslice_name}} result = {{memslice_init}};
    __Pyx_BufFmt_StackElem stack[{{struct_nesting_depth}}];
    int axes_specs[] = { {{axes_specs}} };
    int retcode;

    if (obj == Py_None) {
        /* We don't bother to refcount None */
        result.memview = (struct __pyx_memoryview_obj *) Py_None;
        return result;
    }

    retcode = __Pyx_ValidateAndInit_memviewslice(axes_specs, {{c_or_f_flag}},
                                                 {{buf_flag}} | writable_flag, {{ndim}},
                                                 &{{dtype_typeinfo}}, stack,
                                                 &result, obj);

    if (unlikely(retcode == -1))
        goto __pyx_fail;

    return result;
__pyx_fail:
    result.memview = NULL;
    result.data = NULL;
    return result;
}
/////////////// MemviewSliceValidateAndInit.proto ///////////////

// Validate `original_obj`'s buffer against the expected ndim/dtype/axis
// specs and initialize `memviewslice` from it.  Returns 0 on success,
// -1 (with a Python exception set) on failure.
static int __Pyx_ValidateAndInit_memviewslice(
                int *axes_specs,
                int c_or_f_flag,
                int buf_flags,
                int ndim,
                __Pyx_TypeInfo *dtype,
                __Pyx_BufFmt_StackElem stack[],
                __Pyx_memviewslice *memviewslice,
                PyObject *original_obj);

/////////////// MemviewSliceValidateAndInit ///////////////
//@requires: Buffer.c::TypeInfoCompare
//@requires: Buffer.c::BufferFormatStructs
//@requires: Buffer.c::BufferFormatCheck
  135. static int
  136. __pyx_check_strides(Py_buffer *buf, int dim, int ndim, int spec)
  137. {
  138. if (buf->shape[dim] <= 1)
  139. return 1;
  140. if (buf->strides) {
  141. if (spec & __Pyx_MEMVIEW_CONTIG) {
  142. if (spec & (__Pyx_MEMVIEW_PTR|__Pyx_MEMVIEW_FULL)) {
  143. if (unlikely(buf->strides[dim] != sizeof(void *))) {
  144. PyErr_Format(PyExc_ValueError,
  145. "Buffer is not indirectly contiguous "
  146. "in dimension %d.", dim);
  147. goto fail;
  148. }
  149. } else if (unlikely(buf->strides[dim] != buf->itemsize)) {
  150. PyErr_SetString(PyExc_ValueError,
  151. "Buffer and memoryview are not contiguous "
  152. "in the same dimension.");
  153. goto fail;
  154. }
  155. }
  156. if (spec & __Pyx_MEMVIEW_FOLLOW) {
  157. Py_ssize_t stride = buf->strides[dim];
  158. if (stride < 0)
  159. stride = -stride;
  160. if (unlikely(stride < buf->itemsize)) {
  161. PyErr_SetString(PyExc_ValueError,
  162. "Buffer and memoryview are not contiguous "
  163. "in the same dimension.");
  164. goto fail;
  165. }
  166. }
  167. } else {
  168. if (unlikely(spec & __Pyx_MEMVIEW_CONTIG && dim != ndim - 1)) {
  169. PyErr_Format(PyExc_ValueError,
  170. "C-contiguous buffer is not contiguous in "
  171. "dimension %d", dim);
  172. goto fail;
  173. } else if (unlikely(spec & (__Pyx_MEMVIEW_PTR))) {
  174. PyErr_Format(PyExc_ValueError,
  175. "C-contiguous buffer is not indirect in "
  176. "dimension %d", dim);
  177. goto fail;
  178. } else if (unlikely(buf->suboffsets)) {
  179. PyErr_SetString(PyExc_ValueError,
  180. "Buffer exposes suboffsets but no strides");
  181. goto fail;
  182. }
  183. }
  184. return 1;
  185. fail:
  186. return 0;
  187. }
  188. static int
  189. __pyx_check_suboffsets(Py_buffer *buf, int dim, CYTHON_UNUSED int ndim, int spec)
  190. {
  191. // Todo: without PyBUF_INDIRECT we may not have suboffset information, i.e., the
  192. // ptr may not be set to NULL but may be uninitialized?
  193. if (spec & __Pyx_MEMVIEW_DIRECT) {
  194. if (unlikely(buf->suboffsets && buf->suboffsets[dim] >= 0)) {
  195. PyErr_Format(PyExc_ValueError,
  196. "Buffer not compatible with direct access "
  197. "in dimension %d.", dim);
  198. goto fail;
  199. }
  200. }
  201. if (spec & __Pyx_MEMVIEW_PTR) {
  202. if (unlikely(!buf->suboffsets || (buf->suboffsets[dim] < 0))) {
  203. PyErr_Format(PyExc_ValueError,
  204. "Buffer is not indirectly accessible "
  205. "in dimension %d.", dim);
  206. goto fail;
  207. }
  208. }
  209. return 1;
  210. fail:
  211. return 0;
  212. }
  213. static int
  214. __pyx_verify_contig(Py_buffer *buf, int ndim, int c_or_f_flag)
  215. {
  216. int i;
  217. if (c_or_f_flag & __Pyx_IS_F_CONTIG) {
  218. Py_ssize_t stride = 1;
  219. for (i = 0; i < ndim; i++) {
  220. if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) {
  221. PyErr_SetString(PyExc_ValueError,
  222. "Buffer not fortran contiguous.");
  223. goto fail;
  224. }
  225. stride = stride * buf->shape[i];
  226. }
  227. } else if (c_or_f_flag & __Pyx_IS_C_CONTIG) {
  228. Py_ssize_t stride = 1;
  229. for (i = ndim - 1; i >- 1; i--) {
  230. if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) {
  231. PyErr_SetString(PyExc_ValueError,
  232. "Buffer not C contiguous.");
  233. goto fail;
  234. }
  235. stride = stride * buf->shape[i];
  236. }
  237. }
  238. return 1;
  239. fail:
  240. return 0;
  241. }
// Validate that `original_obj` exposes a buffer matching the expected
// ndim/dtype/axis specs, then initialize `memviewslice` from it.
// If the object is already a memoryview with matching typeinfo, it is
// reused directly (borrowed); otherwise a new memoryview wrapper is
// created (and dropped again on failure).  Returns 0 on success, -1 on
// failure with a Python exception set.
static int __Pyx_ValidateAndInit_memviewslice(
                int *axes_specs,
                int c_or_f_flag,
                int buf_flags,
                int ndim,
                __Pyx_TypeInfo *dtype,
                __Pyx_BufFmt_StackElem stack[],
                __Pyx_memviewslice *memviewslice,
                PyObject *original_obj)
{
    struct __pyx_memoryview_obj *memview, *new_memview;
    __Pyx_RefNannyDeclarations
    Py_buffer *buf;
    int i, spec = 0, retval = -1;
    __Pyx_BufFmt_Context ctx;
    int from_memoryview = __pyx_memoryview_check(original_obj);

    __Pyx_RefNannySetupContext("ValidateAndInit_memviewslice", 0);

    if (from_memoryview && __pyx_typeinfo_cmp(dtype, ((struct __pyx_memoryview_obj *)
                                                    original_obj)->typeinfo)) {
        /* We have a matching dtype, skip format parsing */
        memview = (struct __pyx_memoryview_obj *) original_obj;
        new_memview = NULL;  // borrowed: nothing to clean up on failure
    } else {
        // Wrap the object in a fresh memoryview (acquires the buffer).
        memview = (struct __pyx_memoryview_obj *) __pyx_memoryview_new(
                                            original_obj, buf_flags, 0, dtype);
        new_memview = memview;
        if (unlikely(!memview))
            goto fail;
    }

    buf = &memview->view;
    if (unlikely(buf->ndim != ndim)) {
        PyErr_Format(PyExc_ValueError,
                "Buffer has wrong number of dimensions (expected %d, got %d)",
                ndim, buf->ndim);
        goto fail;
    }

    if (new_memview) {
        // Only parse the format string for freshly wrapped buffers; the
        // typeinfo comparison above already vetted existing memoryviews.
        __Pyx_BufFmt_Init(&ctx, stack, dtype);
        if (unlikely(!__Pyx_BufFmt_CheckString(&ctx, buf->format))) goto fail;
    }

    if (unlikely((unsigned) buf->itemsize != dtype->size)) {
        PyErr_Format(PyExc_ValueError,
                     "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "u byte%s) "
                     "does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "u byte%s)",
                     buf->itemsize,
                     (buf->itemsize > 1) ? "s" : "",
                     dtype->name,
                     dtype->size,
                     (dtype->size > 1) ? "s" : "");
        goto fail;
    }

    /* Check axes */
    if (buf->len > 0) {
        // 0-sized arrays do not undergo these checks since their strides are
        // irrelevant and they are always both C- and F-contiguous.
        for (i = 0; i < ndim; i++) {
            spec = axes_specs[i];
            if (unlikely(!__pyx_check_strides(buf, i, ndim, spec)))
                goto fail;
            if (unlikely(!__pyx_check_suboffsets(buf, i, ndim, spec)))
                goto fail;
        }

        /* Check contiguity */
        if (unlikely(buf->strides && !__pyx_verify_contig(buf, ndim, c_or_f_flag)))
            goto fail;
    }

    /* Initialize */
    if (unlikely(__Pyx_init_memviewslice(memview, ndim, memviewslice,
                                         new_memview != NULL) == -1)) {
        goto fail;
    }

    retval = 0;
    goto no_fail;

fail:
    // Only drop the memoryview we created ourselves (NULL-safe).
    Py_XDECREF(new_memview);
    retval = -1;
no_fail:
    __Pyx_RefNannyFinishContext();
    return retval;
}
////////// MemviewSliceInit //////////

// Populate `memviewslice` (shape/strides/suboffsets/data) from the buffer
// held by `memview` and register the slice in the memoryview's
// acquisition count.  `memview_is_new_reference` indicates the caller
// donated a fresh reference (so no extra INCREF is needed for the first
// slice).  Returns 0 on success, -1 on failure with an exception set.
static int
__Pyx_init_memviewslice(struct __pyx_memoryview_obj *memview,
                        int ndim,
                        {{memviewslice_name}} *memviewslice,
                        int memview_is_new_reference)
{
    __Pyx_RefNannyDeclarations
    int i, retval=-1;
    Py_buffer *buf = &memview->view;
    __Pyx_RefNannySetupContext("init_memviewslice", 0);

    if (unlikely(memviewslice->memview || memviewslice->data)) {
        PyErr_SetString(PyExc_ValueError,
            "memviewslice is already initialized!");
        goto fail;
    }

    if (buf->strides) {
        for (i = 0; i < ndim; i++) {
            memviewslice->strides[i] = buf->strides[i];
        }
    } else {
        // No stride info: the buffer is C-contiguous, so synthesize strides
        // from the shape, innermost dimension first.
        Py_ssize_t stride = buf->itemsize;
        for (i = ndim - 1; i >= 0; i--) {
            memviewslice->strides[i] = stride;
            stride *= buf->shape[i];
        }
    }

    for (i = 0; i < ndim; i++) {
        memviewslice->shape[i] = buf->shape[i];
        if (buf->suboffsets) {
            memviewslice->suboffsets[i] = buf->suboffsets[i];
        } else {
            memviewslice->suboffsets[i] = -1;  // mark dimension as direct
        }
    }

    memviewslice->memview = memview;
    memviewslice->data = (char *)buf->buf;
    // The first slice on a *borrowed* memoryview pins it with its own
    // reference; a donated new reference already covers that case.
    if (__pyx_add_acquisition_count(memview) == 0 && !memview_is_new_reference) {
        Py_INCREF(memview);
    }

    retval = 0;
    goto no_fail;

fail:
    /* Don't decref, the memoryview may be borrowed. Let the caller do the cleanup */
    /* __Pyx_XDECREF(memviewslice->memview); */
    memviewslice->memview = 0;
    memviewslice->data = 0;
    retval = -1;
no_fail:
    __Pyx_RefNannyFinishContext();
    return retval;
}
  374. #ifndef Py_NO_RETURN
  375. // available since Py3.3
  376. #define Py_NO_RETURN
  377. #endif
  378. static void __pyx_fatalerror(const char *fmt, ...) Py_NO_RETURN {
  379. va_list vargs;
  380. char msg[200];
  381. #ifdef HAVE_STDARG_PROTOTYPES
  382. va_start(vargs, fmt);
  383. #else
  384. va_start(vargs);
  385. #endif
  386. vsnprintf(msg, 200, fmt, vargs);
  387. va_end(vargs);
  388. Py_FatalError(msg);
  389. }
  390. static CYTHON_INLINE int
  391. __pyx_add_acquisition_count_locked(__pyx_atomic_int *acquisition_count,
  392. PyThread_type_lock lock)
  393. {
  394. int result;
  395. PyThread_acquire_lock(lock, 1);
  396. result = (*acquisition_count)++;
  397. PyThread_release_lock(lock);
  398. return result;
  399. }
  400. static CYTHON_INLINE int
  401. __pyx_sub_acquisition_count_locked(__pyx_atomic_int *acquisition_count,
  402. PyThread_type_lock lock)
  403. {
  404. int result;
  405. PyThread_acquire_lock(lock, 1);
  406. result = (*acquisition_count)--;
  407. PyThread_release_lock(lock);
  408. return result;
  409. }
// Register one more slice reference on `memslice`'s memoryview.  The very
// first acquisition also takes a Python-level reference, grabbing the GIL
// if the caller does not already hold it.  `lineno` is the caller's source
// line, used only for fatal-error reporting.
static CYTHON_INLINE void
__Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno)
{
    int first_time;
    struct {{memview_struct_name}} *memview = memslice->memview;
    if (unlikely(!memview || (PyObject *) memview == Py_None))
        return; /* allow uninitialized memoryview assignment */

    // A negative count means the bookkeeping is corrupt; this is
    // unrecoverable, so abort with a diagnostic.
    if (unlikely(__pyx_get_slice_count(memview) < 0))
        __pyx_fatalerror("Acquisition count is %d (line %d)",
                         __pyx_get_slice_count(memview), lineno);

    first_time = __pyx_add_acquisition_count(memview) == 0;

    if (unlikely(first_time)) {
        if (have_gil) {
            Py_INCREF((PyObject *) memview);
        } else {
            PyGILState_STATE _gilstate = PyGILState_Ensure();
            Py_INCREF((PyObject *) memview);
            PyGILState_Release(_gilstate);
        }
    }
}
// Drop one slice reference on `memslice`'s memoryview and clear the slice.
// The last release also drops the Python-level reference (grabbing the GIL
// if needed).  NULL/None memviews are tolerated and simply cleared.
// `lineno` is the caller's source line for fatal-error reporting.
static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW({{memviewslice_name}} *memslice,
                                             int have_gil, int lineno) {
    int last_time;
    struct {{memview_struct_name}} *memview = memslice->memview;

    if (unlikely(!memview || (PyObject *) memview == Py_None)) {
        // we do not ref-count None
        memslice->memview = NULL;
        return;
    }

    // Releasing more slices than were acquired means the bookkeeping is
    // corrupt; abort with a diagnostic rather than corrupt memory.
    if (unlikely(__pyx_get_slice_count(memview) <= 0))
        __pyx_fatalerror("Acquisition count is %d (line %d)",
                         __pyx_get_slice_count(memview), lineno);

    last_time = __pyx_sub_acquisition_count(memview) == 1;
    memslice->data = NULL;

    if (unlikely(last_time)) {
        if (have_gil) {
            Py_CLEAR(memslice->memview);
        } else {
            PyGILState_STATE _gilstate = PyGILState_Ensure();
            Py_CLEAR(memslice->memview);
            PyGILState_Release(_gilstate);
        }
    } else {
        memslice->memview = NULL;
    }
}
////////// MemviewSliceCopyTemplate.proto //////////

// Allocate a new contiguous array ('c' or 'fortran' per `mode`) and copy
// the contents of `from_mvs` into it, returning a slice over the copy.
static {{memviewslice_name}}
__pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs,
                                 const char *mode, int ndim,
                                 size_t sizeof_dtype, int contig_flag,
                                 int dtype_is_object);
////////// MemviewSliceCopyTemplate //////////

// Copy `from_mvs` into a freshly allocated contiguous array object and
// return a slice over the new array.  On failure the returned slice has
// memview == data == NULL and a Python exception is set.
static {{memviewslice_name}}
__pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs,
                                 const char *mode, int ndim,
                                 size_t sizeof_dtype, int contig_flag,
                                 int dtype_is_object)
{
    __Pyx_RefNannyDeclarations
    int i;
    __Pyx_memviewslice new_mvs = {{memslice_init}};
    struct __pyx_memoryview_obj *from_memview = from_mvs->memview;
    Py_buffer *buf = &from_memview->view;
    PyObject *shape_tuple = NULL;
    PyObject *temp_int = NULL;
    struct __pyx_array_obj *array_obj = NULL;
    struct __pyx_memoryview_obj *memview_obj = NULL;

    __Pyx_RefNannySetupContext("__pyx_memoryview_copy_new_contig", 0);

    // Indirect (pointer) dimensions cannot be flattened into a single
    // contiguous allocation.
    for (i = 0; i < ndim; i++) {
        if (unlikely(from_mvs->suboffsets[i] >= 0)) {
            PyErr_Format(PyExc_ValueError, "Cannot copy memoryview slice with "
                                           "indirect dimensions (axis %d)", i);
            goto fail;
        }
    }

    shape_tuple = PyTuple_New(ndim);
    if (unlikely(!shape_tuple)) {
        goto fail;
    }
    __Pyx_GOTREF(shape_tuple);

    for(i = 0; i < ndim; i++) {
        temp_int = PyInt_FromSsize_t(from_mvs->shape[i]);
        if(unlikely(!temp_int)) {
            goto fail;
        } else {
            // PyTuple_SET_ITEM steals the reference.
            PyTuple_SET_ITEM(shape_tuple, i, temp_int);
            temp_int = NULL;
        }
    }

    array_obj = __pyx_array_new(shape_tuple, sizeof_dtype, buf->format, (char *) mode, NULL);
    if (unlikely(!array_obj)) {
        goto fail;
    }
    __Pyx_GOTREF(array_obj);

    memview_obj = (struct __pyx_memoryview_obj *) __pyx_memoryview_new(
                                    (PyObject *) array_obj, contig_flag,
                                    dtype_is_object,
                                    from_mvs->memview->typeinfo);
    if (unlikely(!memview_obj))
        goto fail;

    /* initialize new_mvs */
    if (unlikely(__Pyx_init_memviewslice(memview_obj, ndim, &new_mvs, 1) < 0))
        goto fail;

    // Element-wise copy; handles object refcounts when dtype_is_object.
    if (unlikely(__pyx_memoryview_copy_contents(*from_mvs, new_mvs, ndim, ndim,
                                                dtype_is_object) < 0))
        goto fail;

    goto no_fail;

fail:
    __Pyx_XDECREF(new_mvs.memview);
    new_mvs.memview = NULL;
    new_mvs.data = NULL;
no_fail:
    __Pyx_XDECREF(shape_tuple);
    __Pyx_XDECREF(temp_int);
    __Pyx_XDECREF(array_obj);
    __Pyx_RefNannyFinishContext();
    return new_mvs;
}
////////// CopyContentsUtility.proto /////////

// Convenience wrapper: copy `slice` into a new {{mode}}-contiguous array.
#define {{func_cname}}(slice) \
        __pyx_memoryview_copy_new_contig(&slice, "{{mode}}", {{ndim}}, \
                                         sizeof({{dtype_decl}}), {{contig_flag}}, \
                                         {{dtype_is_object}})

////////// OverlappingSlices.proto //////////

// Returns 1 if the two slices' memory ranges overlap, 0 otherwise.
static int __pyx_slices_overlap({{memviewslice_name}} *slice1,
                                {{memviewslice_name}} *slice2,
                                int ndim, size_t itemsize);
  539. ////////// OverlappingSlices //////////
  540. /* Based on numpy's core/src/multiarray/array_assign.c */
  541. /* Gets a half-open range [start, end) which contains the array data */
  542. static void
  543. __pyx_get_array_memory_extents({{memviewslice_name}} *slice,
  544. void **out_start, void **out_end,
  545. int ndim, size_t itemsize)
  546. {
  547. char *start, *end;
  548. int i;
  549. start = end = slice->data;
  550. for (i = 0; i < ndim; i++) {
  551. Py_ssize_t stride = slice->strides[i];
  552. Py_ssize_t extent = slice->shape[i];
  553. if (extent == 0) {
  554. *out_start = *out_end = start;
  555. return;
  556. } else {
  557. if (stride > 0)
  558. end += stride * (extent - 1);
  559. else
  560. start += stride * (extent - 1);
  561. }
  562. }
  563. /* Return a half-open range */
  564. *out_start = start;
  565. *out_end = end + itemsize;
  566. }
  567. /* Returns 1 if the arrays have overlapping data, 0 otherwise */
  568. static int
  569. __pyx_slices_overlap({{memviewslice_name}} *slice1,
  570. {{memviewslice_name}} *slice2,
  571. int ndim, size_t itemsize)
  572. {
  573. void *start1, *end1, *start2, *end2;
  574. __pyx_get_array_memory_extents(slice1, &start1, &end1, ndim, itemsize);
  575. __pyx_get_array_memory_extents(slice2, &start2, &end2, ndim, itemsize);
  576. return (start1 < end2) && (start2 < end1);
  577. }
////////// MemviewSliceCheckContig.proto //////////

// Per-instantiation shortcut: check {{contig_type}}-contiguity for {{ndim}} dims.
#define __pyx_memviewslice_is_contig_{{contig_type}}{{ndim}}(slice) \
    __pyx_memviewslice_is_contig(slice, '{{contig_type}}', {{ndim}})

////////// MemviewSliceIsContig.proto //////////

// Returns 1 if `mvs` is contiguous in the given order ('C' or 'F'), else 0.
static int __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim);/*proto*/
  583. ////////// MemviewSliceIsContig //////////
  584. static int
  585. __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim)
  586. {
  587. int i, index, step, start;
  588. Py_ssize_t itemsize = mvs.memview->view.itemsize;
  589. if (order == 'F') {
  590. step = 1;
  591. start = 0;
  592. } else {
  593. step = -1;
  594. start = ndim - 1;
  595. }
  596. for (i = 0; i < ndim; i++) {
  597. index = start + step * i;
  598. if (mvs.suboffsets[index] >= 0 || mvs.strides[index] != itemsize)
  599. return 0;
  600. itemsize *= mvs.shape[index];
  601. }
  602. return 1;
  603. }
  604. /////////////// MemviewSliceIndex ///////////////
  605. static CYTHON_INLINE char *
  606. __pyx_memviewslice_index_full(const char *bufp, Py_ssize_t idx,
  607. Py_ssize_t stride, Py_ssize_t suboffset)
  608. {
  609. bufp = bufp + idx * stride;
  610. if (suboffset >= 0) {
  611. bufp = *((char **) bufp) + suboffset;
  612. }
  613. return (char *) bufp;
  614. }
/////////////// MemviewDtypeToObject.proto ///////////////

{{if to_py_function}}
static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp); /* proto */
{{endif}}

{{if from_py_function}}
static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj); /* proto */
{{endif}}

/////////////// MemviewDtypeToObject ///////////////

{{#__pyx_memview_<dtype_name>_to_object}}

/* Convert a dtype to or from a Python object */

{{if to_py_function}}
// Read one {{dtype}} item from itemp and box it as a Python object.
static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp) {
    return (PyObject *) {{to_py_function}}(*({{dtype}} *) itemp);
}
{{endif}}

{{if from_py_function}}
// Unbox `obj` into the {{dtype}} item at itemp.  Returns 1 on success,
// 0 on conversion error (with the Python exception left set).
static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj) {
    {{dtype}} value = {{from_py_function}}(obj);
    if ({{error_condition}})
        return 0;
    *({{dtype}} *) itemp = value;
    return 1;
}
{{endif}}
  639. /////////////// MemviewObjectToObject.proto ///////////////
  640. /* Function callbacks (for memoryview object) for dtype object */
  641. static PyObject *{{get_function}}(const char *itemp); /* proto */
  642. static int {{set_function}}(const char *itemp, PyObject *obj); /* proto */
  643. /////////////// MemviewObjectToObject ///////////////
  644. static PyObject *{{get_function}}(const char *itemp) {
  645. PyObject *result = *(PyObject **) itemp;
  646. Py_INCREF(result);
  647. return result;
  648. }
  649. static int {{set_function}}(const char *itemp, PyObject *obj) {
  650. Py_INCREF(obj);
  651. Py_DECREF(*(PyObject **) itemp);
  652. *(PyObject **) itemp = obj;
  653. return 1;
  654. }
/////////// ToughSlice //////////

/* Dimension is indexed with 'start:stop:step' */
// Inline template snippet: the general slicing case is delegated to
// __pyx_memoryview_slice_memviewslice(), which handles the start/stop/step
// logic for one dimension of {{src}} into {{dst}}.

if (unlikely(__pyx_memoryview_slice_memviewslice(
    &{{dst}},
    {{src}}.shape[{{dim}}], {{src}}.strides[{{dim}}], {{src}}.suboffsets[{{dim}}],
    {{dim}},
    {{new_ndim}},
    &{{get_suboffset_dim()}},
    {{start}},
    {{stop}},
    {{step}},
    {{int(have_start)}},
    {{int(have_stop)}},
    {{int(have_step)}},
    1) < 0))
{
    {{error_goto}}
}
////////// SimpleSlice //////////

/* Dimension is indexed with ':' only */
// Inline template snippet: a bare ':' keeps the dimension as-is, so just
// copy its extent and stride into the output slice.

{{dst}}.shape[{{new_ndim}}] = {{src}}.shape[{{dim}}];
{{dst}}.strides[{{new_ndim}}] = {{src}}.strides[{{dim}}];

{{if access == 'direct'}}
    {{dst}}.suboffsets[{{new_ndim}}] = -1;
{{else}}
    // Propagate indirection; remember the last indirect dimension so that
    // later integer indexing can adjust its suboffset (see SliceIndex).
    {{dst}}.suboffsets[{{new_ndim}}] = {{src}}.suboffsets[{{dim}}];
    if ({{src}}.suboffsets[{{dim}}] >= 0)
        {{get_suboffset_dim()}} = {{new_ndim}};
{{endif}}
////////// SliceIndex //////////

// Dimension is indexed with an integer, we could use the ToughSlice
// approach, but this is faster
{
    Py_ssize_t __pyx_tmp_idx = {{idx}};

    {{if wraparound or boundscheck}}
        Py_ssize_t __pyx_tmp_shape = {{src}}.shape[{{dim}}];
    {{endif}}

    Py_ssize_t __pyx_tmp_stride = {{src}}.strides[{{dim}}];
    {{if wraparound}}
        // Negative indices count from the end of the dimension.
        if (__pyx_tmp_idx < 0)
            __pyx_tmp_idx += __pyx_tmp_shape;
    {{endif}}

    {{if boundscheck}}
        if (unlikely(!__Pyx_is_valid_index(__pyx_tmp_idx, __pyx_tmp_shape))) {
            {{if not have_gil}}
                // Raising needs the GIL; acquire it when the caller runs nogil.
                #ifdef WITH_THREAD
                PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();
                #endif
            {{endif}}

            PyErr_SetString(PyExc_IndexError,
                            "Index out of bounds (axis {{dim}})");

            {{if not have_gil}}
                #ifdef WITH_THREAD
                PyGILState_Release(__pyx_gilstate_save);
                #endif
            {{endif}}

            {{error_goto}}
        }
    {{endif}}

    {{if all_dimensions_direct}}
        {{dst}}.data += __pyx_tmp_idx * __pyx_tmp_stride;
    {{else}}
        if ({{get_suboffset_dim()}} < 0) {
            {{dst}}.data += __pyx_tmp_idx * __pyx_tmp_stride;

            /* This dimension is the first dimension, or is preceded by */
            /* direct or indirect dimensions that are indexed away. */
            /* Hence suboffset_dim must be less than zero, and we can have */
            /* our data pointer refer to another block by dereferencing. */
            /* slice.data -> B -> C becomes slice.data -> C */

            {{if indirect}}
              {
                Py_ssize_t __pyx_tmp_suboffset = {{src}}.suboffsets[{{dim}}];

                {{if generic}}
                    if (__pyx_tmp_suboffset >= 0)
                {{endif}}

                    {{dst}}.data = *((char **) {{dst}}.data) + __pyx_tmp_suboffset;
              }
            {{endif}}

        } else {
            // An earlier indirect dimension remains: fold the offset into
            // its suboffset instead of moving the data pointer.
            {{dst}}.suboffsets[{{get_suboffset_dim()}}] += __pyx_tmp_idx * __pyx_tmp_stride;

            /* Note: dimension can not be indirect, the compiler will have */
            /* issued an error */
        }

    {{endif}}
}
  740. ////////// FillStrided1DScalar.proto //////////
  741. static void
  742. __pyx_fill_slice_{{dtype_name}}({{type_decl}} *p, Py_ssize_t extent, Py_ssize_t stride,
  743. size_t itemsize, void *itemp);
  744. ////////// FillStrided1DScalar //////////
  745. /* Fill a slice with a scalar value. The dimension is direct and strided or contiguous */
  746. /* This can be used as a callback for the memoryview object to efficienty assign a scalar */
  747. /* Currently unused */
  748. static void
  749. __pyx_fill_slice_{{dtype_name}}({{type_decl}} *p, Py_ssize_t extent, Py_ssize_t stride,
  750. size_t itemsize, void *itemp)
  751. {
  752. Py_ssize_t i;
  753. {{type_decl}} item = *(({{type_decl}} *) itemp);
  754. {{type_decl}} *endp;
  755. stride /= sizeof({{type_decl}});
  756. endp = p + stride * extent;
  757. while (p < endp) {
  758. *p = item;
  759. p += stride;
  760. }
  761. }