array.pxi 75 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465
  1. # Licensed to the Apache Software Foundation (ASF) under one
  2. # or more contributor license agreements. See the NOTICE file
  3. # distributed with this work for additional information
  4. # regarding copyright ownership. The ASF licenses this file
  5. # to you under the Apache License, Version 2.0 (the
  6. # "License"); you may not use this file except in compliance
  7. # with the License. You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing,
  12. # software distributed under the License is distributed on an
  13. # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
  14. # KIND, either express or implied. See the License for the
  15. # specific language governing permissions and limitations
  16. # under the License.
  17. import os
  18. import warnings
  19. cdef _sequence_to_array(object sequence, object mask, object size,
  20. DataType type, CMemoryPool* pool, c_bool from_pandas):
  21. cdef:
  22. int64_t c_size
  23. PyConversionOptions options
  24. shared_ptr[CChunkedArray] chunked
  25. if type is not None:
  26. options.type = type.sp_type
  27. if size is not None:
  28. options.size = size
  29. options.from_pandas = from_pandas
  30. options.ignore_timezone = os.environ.get('PYARROW_IGNORE_TIMEZONE', False)
  31. with nogil:
  32. chunked = GetResultValue(
  33. ConvertPySequence(sequence, mask, options, pool)
  34. )
  35. if chunked.get().num_chunks() == 1:
  36. return pyarrow_wrap_array(chunked.get().chunk(0))
  37. else:
  38. return pyarrow_wrap_chunked_array(chunked)
  39. cdef inline _is_array_like(obj):
  40. if isinstance(obj, np.ndarray):
  41. return True
  42. return pandas_api._have_pandas_internal() and pandas_api.is_array_like(obj)
  43. def _ndarray_to_arrow_type(object values, DataType type):
  44. return pyarrow_wrap_data_type(_ndarray_to_type(values, type))
cdef shared_ptr[CDataType] _ndarray_to_type(object values,
                                            DataType type) except *:
    """
    Map a numpy array's dtype to an Arrow C type.

    An explicit ``type`` always takes precedence over the dtype mapping.
    When ``type`` is None and the dtype is ``object``, a null shared_ptr
    is returned — element-wise type inference is left to the caller.
    """
    cdef shared_ptr[CDataType] c_type

    dtype = values.dtype

    # Only translate concrete numpy dtypes; object arrays carry no
    # usable type information at this level.
    if type is None and dtype != object:
        with nogil:
            check_status(NumPyDtypeToArrow(dtype, &c_type))

    # The explicit type overrides whatever the dtype suggested.
    if type is not None:
        c_type = type.sp_type

    return c_type
cdef _ndarray_to_array(object values, object mask, DataType type,
                       c_bool from_pandas, c_bool safe, CMemoryPool* pool):
    """
    Convert a numpy ndarray (plus optional boolean null mask) to an
    Arrow array, casting to ``type`` if given.

    Returns a ChunkedArray when the conversion produced more than one
    chunk (e.g. data overflowing a single array's storage), otherwise
    unwraps the single chunk to a plain Array.
    """
    cdef:
        shared_ptr[CChunkedArray] chunked_out
        shared_ptr[CDataType] c_type = _ndarray_to_type(values, type)
        # safe=True makes the cast raise on overflow / lossy conversion.
        CCastOptions cast_options = CCastOptions(safe)

    with nogil:
        check_status(NdarrayToArrow(pool, values, mask, from_pandas,
                                    c_type, cast_options, &chunked_out))

    if chunked_out.get().num_chunks() > 1:
        return pyarrow_wrap_chunked_array(chunked_out)
    else:
        return pyarrow_wrap_array(chunked_out.get().chunk(0))
  68. cdef _codes_to_indices(object codes, object mask, DataType type,
  69. MemoryPool memory_pool):
  70. """
  71. Convert the codes of a pandas Categorical to indices for a pyarrow
  72. DictionaryArray, taking into account missing values + mask
  73. """
  74. if mask is None:
  75. mask = codes == -1
  76. else:
  77. mask = mask | (codes == -1)
  78. return array(codes, mask=mask, type=type, memory_pool=memory_pool)
  79. def _handle_arrow_array_protocol(obj, type, mask, size):
  80. if mask is not None or size is not None:
  81. raise ValueError(
  82. "Cannot specify a mask or a size when passing an object that is "
  83. "converted with the __arrow_array__ protocol.")
  84. res = obj.__arrow_array__(type=type)
  85. if not isinstance(res, (Array, ChunkedArray)):
  86. raise TypeError("The object's __arrow_array__ method does not "
  87. "return a pyarrow Array or ChunkedArray.")
  88. return res
def array(object obj, type=None, mask=None, size=None, from_pandas=None,
          bint safe=True, MemoryPool memory_pool=None):
    """
    Create pyarrow.Array instance from a Python object.

    Parameters
    ----------
    obj : sequence, iterable, ndarray or Series
        If both type and size are specified may be a single use iterable. If
        not strongly-typed, Arrow type will be inferred for resulting array.
    type : pyarrow.DataType
        Explicit type to attempt to coerce to, otherwise will be inferred from
        the data.
    mask : array[bool], optional
        Indicate which values are null (True) or not null (False).
    size : int64, optional
        Size of the elements. If the input is larger than size bail at this
        length. For iterators, if size is larger than the input iterator this
        will be treated as a "max size", but will involve an initial allocation
        of size followed by a resize to the actual size (so if you know the
        exact size specifying it correctly will give you better performance).
    from_pandas : bool, default None
        Use pandas's semantics for inferring nulls from values in
        ndarray-like data. If passed, the mask takes precedence, but
        if a value is unmasked (not-null), but still null according to
        pandas semantics, then it is null. Defaults to False if not
        passed explicitly by user, or True if a pandas object is
        passed in.
    safe : bool, default True
        Check for overflows or other unsafe conversions.
    memory_pool : pyarrow.MemoryPool, optional
        If not passed, will allocate memory from the currently-set default
        memory pool.

    Returns
    -------
    array : pyarrow.Array or pyarrow.ChunkedArray
        A ChunkedArray instead of an Array is returned if:

        - the object data overflowed binary storage.
        - the object's ``__arrow_array__`` protocol method returned a chunked
          array.

    Notes
    -----
    Localized timestamps will currently be returned as UTC (pandas's native
    representation). Timezone-naive data will be implicitly interpreted as
    UTC.

    Converting to dictionary array will promote to a wider integer type for
    indices if the number of distinct values cannot be represented, even if
    the index type was explicitly set. This means that if there are more than
    127 values the returned dictionary array's index type will be at least
    pa.int16() even if pa.int8() was passed to the function. Note that an
    explicit index type will not be demoted even if it is wider than required.

    Examples
    --------
    >>> import pandas as pd
    >>> import pyarrow as pa
    >>> pa.array(pd.Series([1, 2]))
    <pyarrow.lib.Int64Array object at 0x7f674e4c0e10>
    [
      1,
      2
    ]

    >>> pa.array(["a", "b", "a"], type=pa.dictionary(pa.int8(), pa.string()))
    <pyarrow.lib.DictionaryArray object at 0x7feb288d9040>
    -- dictionary:
    [
      "a",
      "b"
    ]
    -- indices:
    [
      0,
      1,
      0
    ]

    >>> import numpy as np
    >>> pa.array(pd.Series([1, 2]), mask=np.array([0, 1], dtype=bool))
    <pyarrow.lib.Int64Array object at 0x7f9019e11208>
    [
      1,
      null
    ]

    >>> arr = pa.array(range(1024), type=pa.dictionary(pa.int8(), pa.int64()))
    >>> arr.type.index_type
    DataType(int16)
    """
    cdef:
        CMemoryPool* pool = maybe_unbox_memory_pool(memory_pool)
        bint is_pandas_object = False
        bint c_from_pandas

    type = ensure_type(type, allow_none=True)

    # from_pandas defaults to False, but is flipped to True below when a
    # pandas object is detected and the user did not pass it explicitly.
    if from_pandas is None:
        c_from_pandas = False
    else:
        c_from_pandas = from_pandas

    if hasattr(obj, '__arrow_array__'):
        # The object knows how to convert itself; delegate entirely.
        return _handle_arrow_array_protocol(obj, type, mask, size)
    elif _is_array_like(obj):
        if mask is not None:
            # out argument unused
            mask = get_values(mask, &is_pandas_object)

        values = get_values(obj, &is_pandas_object)
        if is_pandas_object and from_pandas is None:
            c_from_pandas = True

        if isinstance(values, np.ma.MaskedArray):
            if mask is not None:
                raise ValueError("Cannot pass a numpy masked array and "
                                 "specify a mask at the same time")
            else:
                # don't use shrunken masks
                mask = None if values.mask is np.ma.nomask else values.mask
                values = values.data

        # Validate the mask before handing it to the C++ layer.
        if mask is not None:
            if mask.dtype != np.bool_:
                raise TypeError("Mask must be boolean dtype")
            if mask.ndim != 1:
                raise ValueError("Mask must be 1D array")
            if len(values) != len(mask):
                raise ValueError(
                    "Mask is a different length from sequence being converted")

        if hasattr(values, '__arrow_array__'):
            # The extracted values (e.g. a pandas ExtensionArray) may
            # themselves implement the protocol.
            return _handle_arrow_array_protocol(values, type, mask, size)
        elif pandas_api.is_categorical(values):
            if type is not None:
                if type.id != Type_DICTIONARY:
                    # A non-dictionary type was requested for categorical
                    # input: densify and convert as a plain ndarray.
                    return _ndarray_to_array(
                        np.asarray(values), mask, type, c_from_pandas, safe,
                        pool)
                index_type = type.index_type
                value_type = type.value_type
                if values.ordered != type.ordered:
                    warnings.warn(
                        "The 'ordered' flag of the passed categorical values "
                        "does not match the 'ordered' of the specified type. "
                        "Using the flag of the values, but in the future this "
                        "mismatch will raise a ValueError.",
                        FutureWarning, stacklevel=2)
            else:
                index_type = None
                value_type = None

            indices = _codes_to_indices(
                values.codes, mask, index_type, memory_pool)
            try:
                dictionary = array(
                    values.categories.values, type=value_type,
                    memory_pool=memory_pool)
            except TypeError:
                # TODO when removing the deprecation warning, this whole
                # try/except can be removed (to bubble the TypeError of
                # the first array(..) call)
                if value_type is not None:
                    warnings.warn(
                        "The dtype of the 'categories' of the passed "
                        "categorical values ({0}) does not match the "
                        "specified type ({1}). For now ignoring the specified "
                        "type, but in the future this mismatch will raise a "
                        "TypeError".format(
                            values.categories.dtype, value_type),
                        FutureWarning, stacklevel=2)
                    dictionary = array(
                        values.categories.values, memory_pool=memory_pool)
                else:
                    raise

            return DictionaryArray.from_arrays(
                indices, dictionary, ordered=values.ordered, safe=safe)
        else:
            if pandas_api.have_pandas:
                # Extract a tz-aware timestamp type from pandas metadata
                # when applicable.
                values, type = pandas_api.compat.get_datetimetz_type(
                    values, obj.dtype, type)
            return _ndarray_to_array(values, mask, type, c_from_pandas, safe,
                                     pool)
    else:
        # ConvertPySequence does strict conversion if type is explicitly passed
        return _sequence_to_array(obj, mask, size, type, pool, c_from_pandas)
  261. def asarray(values, type=None):
  262. """
  263. Convert to pyarrow.Array, inferring type if not provided.
  264. Parameters
  265. ----------
  266. values : array-like
  267. This can be a sequence, numpy.ndarray, pyarrow.Array or
  268. pyarrow.ChunkedArray. If a ChunkedArray is passed, the output will be
  269. a ChunkedArray, otherwise the output will be a Array.
  270. type : string or DataType
  271. Explicitly construct the array with this type. Attempt to cast if
  272. indicated type is different.
  273. Returns
  274. -------
  275. arr : Array or ChunkedArray
  276. """
  277. if isinstance(values, (Array, ChunkedArray)):
  278. if type is not None and not values.type.equals(type):
  279. values = values.cast(type)
  280. return values
  281. else:
  282. return array(values, type=type)
def nulls(size, type=None, MemoryPool memory_pool=None):
    """
    Create a strongly-typed Array instance with all elements null.

    Parameters
    ----------
    size : int
        Array length.
    type : pyarrow.DataType, default None
        Explicit type for the array. By default use NullType.
    memory_pool : MemoryPool, default None
        Arrow MemoryPool to use for allocations. Uses the default memory
        pool if not passed.

    Returns
    -------
    arr : Array

    Examples
    --------
    >>> import pyarrow as pa
    >>> pa.nulls(10)
    <pyarrow.lib.NullArray object at 0x7ffaf04c2e50>
    10 nulls

    >>> pa.nulls(3, pa.uint32())
    <pyarrow.lib.UInt32Array object at 0x7ffaf04c2e50>
    [
      null,
      null,
      null
    ]
    """
    cdef:
        CMemoryPool* pool = maybe_unbox_memory_pool(memory_pool)
        int64_t length = size
        shared_ptr[CDataType] ty
        shared_ptr[CArray] arr

    type = ensure_type(type, allow_none=True)
    # Without an explicit type, produce a NullType array.
    if type is None:
        type = null()

    ty = pyarrow_unwrap_data_type(type)
    with nogil:
        arr = GetResultValue(MakeArrayOfNull(ty, length, pool))

    return pyarrow_wrap_array(arr)
def repeat(value, size, MemoryPool memory_pool=None):
    """
    Create an Array instance whose slots are the given scalar.

    Parameters
    ----------
    value : Scalar-like object
        Either a pyarrow.Scalar or any python object coercible to a Scalar.
    size : int
        Number of times to repeat the scalar in the output Array.
    memory_pool : MemoryPool, default None
        Arrow MemoryPool to use for allocations. Uses the default memory
        pool if not passed.

    Returns
    -------
    arr : Array

    Examples
    --------
    >>> import pyarrow as pa
    >>> pa.repeat(10, 3)
    <pyarrow.lib.Int64Array object at 0x7ffac03a2750>
    [
      10,
      10,
      10
    ]

    >>> pa.repeat([1, 2], 2)
    <pyarrow.lib.ListArray object at 0x7ffaf04c2e50>
    [
      [
        1,
        2
      ],
      [
        1,
        2
      ]
    ]

    >>> pa.repeat("string", 3)
    <pyarrow.lib.StringArray object at 0x7ffac03a2750>
    [
      "string",
      "string",
      "string"
    ]

    >>> pa.repeat(pa.scalar({'a': 1, 'b': [1, 2]}), 2)
    <pyarrow.lib.StructArray object at 0x7ffac03a2750>
    -- is_valid: all not null
    -- child 0 type: int64
    [
      1,
      1
    ]
    -- child 1 type: list<item: int64>
    [
      [
        1,
        2
      ],
      [
        1,
        2
      ]
    ]
    """
    cdef:
        CMemoryPool* pool = maybe_unbox_memory_pool(memory_pool)
        int64_t length = size
        shared_ptr[CArray] c_array
        shared_ptr[CScalar] c_scalar

    # Coerce arbitrary Python values to a pyarrow Scalar first.
    if not isinstance(value, Scalar):
        value = scalar(value, memory_pool=memory_pool)

    c_scalar = (<Scalar> value).unwrap()
    with nogil:
        c_array = GetResultValue(
            MakeArrayFromScalar(deref(c_scalar), length, pool)
        )

    return pyarrow_wrap_array(c_array)
def infer_type(values, mask=None, from_pandas=False):
    """
    Attempt to infer Arrow data type that can hold the passed Python
    sequence type in an Array object

    Parameters
    ----------
    values : array-like
        Sequence to infer type from.
    mask : ndarray (bool type), optional
        Optional exclusion mask where True marks null, False non-null.
    from_pandas : bool, default False
        Use pandas's NA/null sentinel values for type inference.

    Returns
    -------
    type : DataType
    """
    cdef:
        shared_ptr[CDataType] out
        c_bool use_pandas_sentinels = from_pandas

    # The C++ inference expects an ndarray mask; coerce other sequences.
    if mask is not None and not isinstance(mask, np.ndarray):
        mask = np.array(mask, dtype=bool)

    out = GetResultValue(InferArrowType(values, mask, use_pandas_sentinels))
    return pyarrow_wrap_data_type(out)
  424. def _normalize_slice(object arrow_obj, slice key):
  425. """
  426. Slices with step not equal to 1 (or None) will produce a copy
  427. rather than a zero-copy view
  428. """
  429. cdef:
  430. Py_ssize_t start, stop, step
  431. Py_ssize_t n = len(arrow_obj)
  432. start = key.start or 0
  433. if start < 0:
  434. start += n
  435. if start < 0:
  436. start = 0
  437. elif start >= n:
  438. start = n
  439. stop = key.stop if key.stop is not None else n
  440. if stop < 0:
  441. stop += n
  442. if stop < 0:
  443. stop = 0
  444. elif stop >= n:
  445. stop = n
  446. step = key.step or 1
  447. if step != 1:
  448. if step < 0:
  449. # Negative steps require some special handling
  450. if key.start is None:
  451. start = n - 1
  452. if key.stop is None:
  453. stop = -1
  454. indices = np.arange(start, stop, step)
  455. return arrow_obj.take(indices)
  456. else:
  457. length = max(stop - start, 0)
  458. return arrow_obj.slice(start, length)
  459. cdef Py_ssize_t _normalize_index(Py_ssize_t index,
  460. Py_ssize_t length) except -1:
  461. if index < 0:
  462. index += length
  463. if index < 0:
  464. raise IndexError("index out of bounds")
  465. elif index >= length:
  466. raise IndexError("index out of bounds")
  467. return index
cdef wrap_datum(const CDatum& datum):
    """
    Wrap a C++ Datum in the matching Python object (Array, ChunkedArray,
    RecordBatch, Table or Scalar) based on its kind tag.
    """
    if datum.kind() == DatumType_ARRAY:
        # ArrayData must be materialized into a CArray before wrapping.
        return pyarrow_wrap_array(MakeArray(datum.array()))
    elif datum.kind() == DatumType_CHUNKED_ARRAY:
        return pyarrow_wrap_chunked_array(datum.chunked_array())
    elif datum.kind() == DatumType_RECORD_BATCH:
        return pyarrow_wrap_batch(datum.record_batch())
    elif datum.kind() == DatumType_TABLE:
        return pyarrow_wrap_table(datum.table())
    elif datum.kind() == DatumType_SCALAR:
        return pyarrow_wrap_scalar(datum.scalar())
    else:
        raise ValueError("Unable to wrap Datum in a Python object")
cdef _append_array_buffers(const CArrayData* ad, list res):
    """
    Recursively append Buffer wrappers from *ad* and its children.
    """
    cdef size_t i, n
    assert ad != NULL
    n = ad.buffers.size()
    for i in range(n):
        buf = ad.buffers[i]
        # Absent buffer slots are kept as None so positions stay aligned
        # with the ArrayData buffer layout.
        res.append(pyarrow_wrap_buffer(buf)
                   if buf.get() != NULL else None)
    # Depth-first over nested (child) arrays.
    n = ad.child_data.size()
    for i in range(n):
        _append_array_buffers(ad.child_data[i].get(), res)
cdef _reduce_array_data(const CArrayData* ad):
    """
    Recursively dissect ArrayData to (picklable) tuples.

    The resulting tuple is
    (type, length, null_count, offset, buffers, children, dictionary)
    and is consumed by _reconstruct_array_data.
    """
    cdef size_t i, n
    assert ad != NULL

    n = ad.buffers.size()
    buffers = []
    for i in range(n):
        buf = ad.buffers[i]
        # None preserves the position of absent buffers (e.g. no
        # validity bitmap).
        buffers.append(pyarrow_wrap_buffer(buf)
                       if buf.get() != NULL else None)

    children = []
    n = ad.child_data.size()
    for i in range(n):
        children.append(_reduce_array_data(ad.child_data[i].get()))

    # Dictionary-encoded arrays carry their dictionary as nested
    # ArrayData; reduce it recursively too.
    if ad.dictionary.get() != NULL:
        dictionary = _reduce_array_data(ad.dictionary.get())
    else:
        dictionary = None

    return pyarrow_wrap_data_type(ad.type), ad.length, ad.null_count, \
        ad.offset, buffers, children, dictionary
cdef shared_ptr[CArrayData] _reconstruct_array_data(data):
    """
    Reconstruct CArrayData objects from the tuple structure generated
    by _reduce_array_data.
    """
    cdef:
        int64_t length, null_count, offset, i
        DataType dtype
        Buffer buf
        vector[shared_ptr[CBuffer]] c_buffers
        vector[shared_ptr[CArrayData]] c_children
        shared_ptr[CArrayData] c_dictionary

    dtype, length, null_count, offset, buffers, children, dictionary = data

    for i in range(len(buffers)):
        buf = buffers[i]
        if buf is None:
            # A None entry stands for an absent buffer; keep the slot
            # with an empty shared_ptr to preserve the layout.
            c_buffers.push_back(shared_ptr[CBuffer]())
        else:
            c_buffers.push_back(buf.buffer)

    # Children (nested arrays) were reduced recursively; rebuild them
    # the same way.
    for i in range(len(children)):
        c_children.push_back(_reconstruct_array_data(children[i]))

    if dictionary is not None:
        c_dictionary = _reconstruct_array_data(dictionary)

    return CArrayData.MakeWithChildrenAndDictionary(
        dtype.sp_type,
        length,
        c_buffers,
        c_children,
        c_dictionary,
        null_count,
        offset)
def _restore_array(data):
    """
    Reconstruct an Array from pickled ArrayData.

    ``data`` is the tuple produced by _reduce_array_data.
    """
    cdef shared_ptr[CArrayData] ad = _reconstruct_array_data(data)
    return pyarrow_wrap_array(MakeArray(ad))
cdef class _PandasConvertible(_Weakrefable):
    """
    Mixin providing the public ``to_pandas`` entry point; subclasses
    implement the actual conversion in ``_to_pandas``.
    """

    def to_pandas(
        self,
        memory_pool=None,
        categories=None,
        bint strings_to_categorical=False,
        bint zero_copy_only=False,
        bint integer_object_nulls=False,
        bint date_as_object=True,
        bint timestamp_as_object=False,
        bint use_threads=True,
        bint deduplicate_objects=True,
        bint ignore_metadata=False,
        bint safe=True,
        bint split_blocks=False,
        bint self_destruct=False,
        types_mapper=None
    ):
        """
        Convert to a pandas-compatible NumPy array or DataFrame, as appropriate

        Parameters
        ----------
        memory_pool : MemoryPool, default None
            Arrow MemoryPool to use for allocations. Uses the default memory
            pool if not passed.
        strings_to_categorical : bool, default False
            Encode string (UTF8) and binary types to pandas.Categorical.
        categories : list, default empty
            List of fields that should be returned as pandas.Categorical. Only
            applies to table-like data structures.
        zero_copy_only : bool, default False
            Raise an ArrowException if this function call would require copying
            the underlying data.
        integer_object_nulls : bool, default False
            Cast integers with nulls to objects
        date_as_object : bool, default True
            Cast dates to objects. If False, convert to datetime64[ns] dtype.
        timestamp_as_object : bool, default False
            Cast non-nanosecond timestamps (np.datetime64) to objects. This is
            useful if you have timestamps that don't fit in the normal date
            range of nanosecond timestamps (1678 CE-2262 CE).
            If False, all timestamps are converted to datetime64[ns] dtype.
        use_threads : bool, default True
            Whether to parallelize the conversion using multiple threads.
        deduplicate_objects : bool, default True
            Do not create multiple copies Python objects when created, to save
            on memory use. Conversion will be slower.
        ignore_metadata : bool, default False
            If True, do not use the 'pandas' metadata to reconstruct the
            DataFrame index, if present
        safe : bool, default True
            For certain data types, a cast is needed in order to store the
            data in a pandas DataFrame or Series (e.g. timestamps are always
            stored as nanoseconds in pandas). This option controls whether it
            is a safe cast or not.
        split_blocks : bool, default False
            If True, generate one internal "block" for each column when
            creating a pandas.DataFrame from a RecordBatch or Table. While this
            can temporarily reduce memory note that various pandas operations
            can trigger "consolidation" which may balloon memory use.
        self_destruct : bool, default False
            EXPERIMENTAL: If True, attempt to deallocate the originating Arrow
            memory while converting the Arrow object to pandas. If you use the
            object after calling to_pandas with this option it will crash your
            program.
            Note that you may not see always memory usage improvements. For
            example, if multiple columns share an underlying allocation,
            memory can't be freed until all columns are converted.
        types_mapper : function, default None
            A function mapping a pyarrow DataType to a pandas ExtensionDtype.
            This can be used to override the default pandas type for conversion
            of built-in pyarrow types or in absence of pandas_metadata in the
            Table schema. The function receives a pyarrow DataType and is
            expected to return a pandas ExtensionDtype or ``None`` if the
            default conversion should be used for that type. If you have
            a dictionary mapping, you can pass ``dict.get`` as function.

        Returns
        -------
        pandas.Series or pandas.DataFrame depending on type of object
        """
        # Bundle the scalar conversion flags into one options dict;
        # categories / ignore_metadata / types_mapper are forwarded
        # separately to the subclass hook.
        options = dict(
            pool=memory_pool,
            strings_to_categorical=strings_to_categorical,
            zero_copy_only=zero_copy_only,
            integer_object_nulls=integer_object_nulls,
            date_as_object=date_as_object,
            timestamp_as_object=timestamp_as_object,
            use_threads=use_threads,
            deduplicate_objects=deduplicate_objects,
            safe=safe,
            split_blocks=split_blocks,
            self_destruct=self_destruct
        )
        return self._to_pandas(options, categories=categories,
                               ignore_metadata=ignore_metadata,
                               types_mapper=types_mapper)
  650. cdef PandasOptions _convert_pandas_options(dict options):
  651. cdef PandasOptions result
  652. result.pool = maybe_unbox_memory_pool(options['pool'])
  653. result.strings_to_categorical = options['strings_to_categorical']
  654. result.zero_copy_only = options['zero_copy_only']
  655. result.integer_object_nulls = options['integer_object_nulls']
  656. result.date_as_object = options['date_as_object']
  657. result.timestamp_as_object = options['timestamp_as_object']
  658. result.use_threads = options['use_threads']
  659. result.deduplicate_objects = options['deduplicate_objects']
  660. result.safe_cast = options['safe']
  661. result.split_blocks = options['split_blocks']
  662. result.self_destruct = options['self_destruct']
  663. result.ignore_timezone = os.environ.get('PYARROW_IGNORE_TIMEZONE', False)
  664. return result
cdef class Array(_PandasConvertible):
    """
    The base class for all Arrow arrays.

    Do not instantiate directly: use ``pyarrow.array`` or one of the
    ``Array.from_*`` factory methods.
    """

    def __init__(self):
        # Direct construction is forbidden; the wrapper must be bound to an
        # existing C++ array through the factory functions.
        raise TypeError("Do not call {}'s constructor directly, use one of "
                        "the `pyarrow.Array.from_*` functions instead."
                        .format(self.__class__.__name__))

    cdef void init(self, const shared_ptr[CArray]& sp_array) except *:
        # Bind this wrapper to a C++ array: keep the shared_ptr alive,
        # cache the raw pointer for fast access, and wrap the data type.
        self.sp_array = sp_array
        self.ap = sp_array.get()
        self.type = pyarrow_wrap_data_type(self.sp_array.get().type())

    def _debug_print(self):
        # Dump the array via the C++ DebugPrint helper (debugging aid only).
        with nogil:
            check_status(DebugPrint(deref(self.ap), 0))

    def diff(self, Array other):
        """
        Compare contents of this array against another one.

        Return string containing the result of arrow::Diff comparing contents
        of this array against the other array.
        """
        cdef c_string result
        with nogil:
            result = self.ap.Diff(deref(other.ap))
        return frombytes(result, safe=True)

    def cast(self, object target_type, safe=True):
        """
        Cast array values to another data type.

        See pyarrow.compute.cast for usage.
        """
        return _pc().cast(self, target_type, safe=safe)

    def view(self, object target_type):
        """
        Return zero-copy "view" of array as another data type.

        The data types must have compatible columnar buffer layouts.

        Parameters
        ----------
        target_type : DataType
            Type to construct view as.

        Returns
        -------
        view : Array
        """
        cdef DataType type = ensure_type(target_type)
        cdef shared_ptr[CArray] result
        with nogil:
            result = GetResultValue(self.ap.View(type.sp_type))
        return pyarrow_wrap_array(result)

    def sum(self, **kwargs):
        """
        Sum the values in a numerical array.

        Parameters
        ----------
        **kwargs
            Options forwarded to pyarrow.compute.ScalarAggregateOptions.
        """
        options = _pc().ScalarAggregateOptions(**kwargs)
        return _pc().call_function('sum', [self], options)

    def unique(self):
        """
        Compute distinct elements in array.
        """
        return _pc().call_function('unique', [self])

    def dictionary_encode(self, null_encoding='mask'):
        """
        Compute dictionary-encoded representation of array.

        Parameters
        ----------
        null_encoding : str, default 'mask'
            Forwarded to pyarrow.compute.DictionaryEncodeOptions.
        """
        options = _pc().DictionaryEncodeOptions(null_encoding)
        return _pc().call_function('dictionary_encode', [self], options)

    def value_counts(self):
        """
        Compute counts of unique elements in array.

        Returns
        -------
        An array of <input type "Values", int64_t "Counts"> structs
        """
        return _pc().call_function('value_counts', [self])

    @staticmethod
    def from_pandas(obj, mask=None, type=None, bint safe=True,
                    MemoryPool memory_pool=None):
        """
        Convert pandas.Series to an Arrow Array.

        This method uses Pandas semantics about what values indicate
        nulls. See pyarrow.array for more general conversion from arrays or
        sequences to Arrow arrays.

        Parameters
        ----------
        obj : ndarray, pandas.Series, array-like
            The values to convert.
        mask : array (boolean), optional
            Indicate which values are null (True) or not null (False).
        type : pyarrow.DataType
            Explicit type to attempt to coerce to, otherwise will be inferred
            from the data.
        safe : bool, default True
            Check for overflows or other unsafe conversions.
        memory_pool : pyarrow.MemoryPool, optional
            If not passed, will allocate memory from the currently-set default
            memory pool.

        Notes
        -----
        Localized timestamps will currently be returned as UTC (pandas's native
        representation). Timezone-naive data will be implicitly interpreted as
        UTC.

        Returns
        -------
        array : pyarrow.Array or pyarrow.ChunkedArray
            ChunkedArray is returned if object data overflows binary buffer.
        """
        return array(obj, mask=mask, type=type, safe=safe, from_pandas=True,
                     memory_pool=memory_pool)

    def __reduce__(self):
        # Pickle support: reduce to the underlying ArrayData.
        return _restore_array, \
            (_reduce_array_data(self.sp_array.get().data().get()),)

    @staticmethod
    def from_buffers(DataType type, length, buffers, null_count=-1, offset=0,
                     children=None):
        """
        Construct an Array from a sequence of buffers.

        The concrete type returned depends on the datatype.

        Parameters
        ----------
        type : DataType
            The value type of the array.
        length : int
            The number of values in the array.
        buffers : List[Buffer]
            The buffers backing this array.
        null_count : int, default -1
            The number of null entries in the array. Negative value means that
            the null count is not known.
        offset : int, default 0
            The array's logical offset (in values, not in bytes) from the
            start of each buffer.
        children : List[Array], default None
            Nested type children with length matching type.num_fields.

        Returns
        -------
        array : Array

        Raises
        ------
        ValueError
            If the number of children or buffers does not match the type.
        """
        cdef:
            Buffer buf
            Array child
            vector[shared_ptr[CBuffer]] c_buffers
            vector[shared_ptr[CArrayData]] c_child_data
            shared_ptr[CArrayData] array_data

        children = children or []

        if type.num_fields != len(children):
            raise ValueError("Type's expected number of children "
                             "({0}) did not match the passed number "
                             "({1}).".format(type.num_fields, len(children)))

        if type.num_buffers != len(buffers):
            raise ValueError("Type's expected number of buffers "
                             "({0}) did not match the passed number "
                             "({1}).".format(type.num_buffers, len(buffers)))

        for buf in buffers:
            # None will produce a null buffer pointer
            c_buffers.push_back(pyarrow_unwrap_buffer(buf))

        for child in children:
            c_child_data.push_back(child.ap.data())

        array_data = CArrayData.MakeWithChildren(type.sp_type, length,
                                                 c_buffers, c_child_data,
                                                 null_count, offset)
        cdef Array result = pyarrow_wrap_array(MakeArray(array_data))
        # Cheap structural validation; raises ArrowInvalid on malformed input.
        result.validate()
        return result

    @property
    def null_count(self):
        """
        Number of null entries in the array.
        """
        return self.sp_array.get().null_count()

    @property
    def nbytes(self):
        """
        Total number of bytes consumed by the elements of the array.
        """
        size = 0
        for buf in self.buffers():
            if buf is not None:
                size += buf.size
        return size

    def __sizeof__(self):
        # Python object size plus the Arrow buffer memory.
        return super(Array, self).__sizeof__() + self.nbytes

    def __iter__(self):
        # Yield each element wrapped as a pyarrow Scalar.
        for i in range(len(self)):
            yield self.getitem(i)

    def __repr__(self):
        type_format = object.__repr__(self)
        return '{0}\n{1}'.format(type_format, str(self))

    def to_string(self, int indent=0, int window=10):
        """
        Render a "pretty-printed" string representation of the Array.

        Parameters
        ----------
        indent : int, default 0
            Indentation of the output, forwarded to C++ PrettyPrintOptions.
        window : int, default 10
            Forwarded to C++ PrettyPrintOptions (roughly, how many elements
            to show before eliding the middle — see Arrow's pretty_print).
        """
        cdef:
            c_string result
        with nogil:
            check_status(
                PrettyPrint(
                    deref(self.ap),
                    PrettyPrintOptions(indent, window),
                    &result
                )
            )
        return frombytes(result, safe=True)

    def format(self, **kwargs):
        """
        DEPRECATED: use ``Array.to_string`` instead.
        """
        import warnings
        warnings.warn('Array.format is deprecated, use Array.to_string')
        return self.to_string(**kwargs)

    def __str__(self):
        return self.to_string()

    def __eq__(self, other):
        try:
            return self.equals(other)
        except TypeError:
            # This also handles comparing with None
            # as Array.equals(None) raises a TypeError.
            return NotImplemented

    def equals(Array self, Array other not None):
        """
        Return True if the two arrays have equal contents.
        """
        return self.ap.Equals(deref(other.ap))

    def __len__(self):
        return self.length()

    cdef int64_t length(self):
        # A wrapper that was never bound to a C++ array reports length 0.
        if self.sp_array.get():
            return self.sp_array.get().length()
        else:
            return 0

    def is_null(self):
        """
        Return BooleanArray indicating the null values.
        """
        return _pc().is_null(self)

    def is_valid(self):
        """
        Return BooleanArray indicating the non-null values.
        """
        return _pc().is_valid(self)

    def fill_null(self, fill_value):
        """
        See pyarrow.compute.fill_null for usage.
        """
        return _pc().fill_null(self, fill_value)

    def __getitem__(self, key):
        """
        Slice or return value at given index.

        Parameters
        ----------
        key : integer or slice
            Slices with step not equal to 1 (or None) will produce a copy
            rather than a zero-copy view.

        Returns
        -------
        value : Scalar (index) or Array (slice)
        """
        if PySlice_Check(key):
            return _normalize_slice(self, key)

        return self.getitem(_normalize_index(key, self.length()))

    cdef getitem(self, int64_t i):
        # i must already be normalized to [0, len); see __getitem__.
        return Scalar.wrap(GetResultValue(self.ap.GetScalar(i)))

    def slice(self, offset=0, length=None):
        """
        Compute zero-copy slice of this array.

        Parameters
        ----------
        offset : int, default 0
            Offset from start of array to slice.
        length : int, default None
            Length of slice (default is until end of Array starting from
            offset).

        Returns
        -------
        sliced : Array

        Raises
        ------
        IndexError
            If ``offset`` is negative.
        ValueError
            If ``length`` is negative.
        """
        cdef:
            shared_ptr[CArray] result

        if offset < 0:
            raise IndexError('Offset must be non-negative')

        # Clamp offset to the array length so over-long offsets yield an
        # empty slice rather than an error.
        offset = min(len(self), offset)
        if length is None:
            result = self.ap.Slice(offset)
        else:
            if length < 0:
                raise ValueError('Length must be non-negative')
            result = self.ap.Slice(offset, length)

        return pyarrow_wrap_array(result)

    def take(self, object indices):
        """
        Select values from an array. See pyarrow.compute.take for full usage.
        """
        return _pc().take(self, indices)

    def filter(self, Array mask, null_selection_behavior='drop'):
        """
        Select values from an array. See pyarrow.compute.filter for full usage.
        """
        return _pc().filter(self, mask, null_selection_behavior)

    def index(self, value, start=None, end=None, *, memory_pool=None):
        """
        Find the first index of a value.

        See pyarrow.compute.index for full usage.
        """
        return _pc().index(self, value, start, end, memory_pool=memory_pool)

    def _to_pandas(self, options, **kwargs):
        # Table-level kwargs (categories, ignore_metadata, types_mapper)
        # are accepted for interface compatibility but not used for arrays.
        return _array_like_to_pandas(self, options)

    def __array__(self, dtype=None):
        values = self.to_numpy(zero_copy_only=False)
        if dtype is None:
            return values
        return values.astype(dtype)

    def to_numpy(self, zero_copy_only=True, writable=False):
        """
        Return a NumPy view or copy of this array (experimental).

        By default, tries to return a view of this array. This is only
        supported for primitive arrays with the same memory layout as NumPy
        (i.e. integers, floating point, ..) and without any nulls.

        Parameters
        ----------
        zero_copy_only : bool, default True
            If True, an exception will be raised if the conversion to a numpy
            array would require copying the underlying data (e.g. in presence
            of nulls, or for non-primitive types).
        writable : bool, default False
            For numpy arrays created with zero copy (view on the Arrow data),
            the resulting array is not writable (Arrow data is immutable).
            By setting this to True, a copy of the array is made to ensure
            it is writable.

        Returns
        -------
        array : numpy.ndarray

        Raises
        ------
        ValueError
            If both ``zero_copy_only`` and ``writable`` are requested.
        """
        cdef:
            PyObject* out
            PandasOptions c_options
            object values

        if zero_copy_only and writable:
            raise ValueError(
                "Cannot return a writable array if asking for zero-copy")

        # If there are nulls and the array is a DictionaryArray
        # decoding the dictionary will make sure nulls are correctly handled.
        # Decoding a dictionary does imply a copy by the way,
        # so it can't be done if the user requested a zero_copy.
        c_options.decode_dictionaries = not zero_copy_only
        c_options.zero_copy_only = zero_copy_only

        with nogil:
            check_status(ConvertArrayToPandas(c_options, self.sp_array,
                                              self, &out))

        # wrap_array_output uses pandas to convert to Categorical, here
        # always convert to numpy array without pandas dependency
        array = PyObject_to_object(out)

        if isinstance(array, dict):
            # Dictionary-encoded result without pandas: materialize values.
            array = np.take(array['dictionary'], array['indices'])

        if writable and not array.flags.writeable:
            # if the conversion above already needed a copy, the result is
            # writable and this extra copy is skipped
            array = array.copy()

        return array

    def to_pylist(self):
        """
        Convert to a list of native Python objects.

        Returns
        -------
        lst : list
        """
        return [x.as_py() for x in self]

    def tolist(self):
        """
        Alias of to_pylist for compatibility with NumPy.
        """
        return self.to_pylist()

    def validate(self, *, full=False):
        """
        Perform validation checks. An exception is raised if validation fails.

        By default only cheap validation checks are run. Pass `full=True`
        for thorough validation checks (potentially O(n)).

        Parameters
        ----------
        full: bool, default False
            If True, run expensive checks, otherwise cheap checks only.

        Raises
        ------
        ArrowInvalid
        """
        if full:
            with nogil:
                check_status(self.ap.ValidateFull())
        else:
            with nogil:
                check_status(self.ap.Validate())

    @property
    def offset(self):
        """
        A relative position into another array's data.

        The purpose is to enable zero-copy slicing. This value defaults to zero
        but must be applied on all operations with the physical storage
        buffers.
        """
        return self.sp_array.get().offset()

    def buffers(self):
        """
        Return a list of Buffer objects pointing to this array's physical
        storage.

        To correctly interpret these buffers, you need to also apply the offset
        multiplied with the size of the stored data type.
        """
        res = []
        _append_array_buffers(self.sp_array.get().data().get(), res)
        return res

    def _export_to_c(self, uintptr_t out_ptr, uintptr_t out_schema_ptr=0):
        """
        Export to a C ArrowArray struct, given its pointer.

        If a C ArrowSchema struct pointer is also given, the array type
        is exported to it at the same time.

        Parameters
        ----------
        out_ptr: int
            The raw pointer to a C ArrowArray struct.
        out_schema_ptr: int (optional)
            The raw pointer to a C ArrowSchema struct.

        Be careful: if you don't pass the ArrowArray struct to a consumer,
        array memory will leak. This is a low-level function intended for
        expert users.
        """
        with nogil:
            check_status(ExportArray(deref(self.sp_array),
                                     <ArrowArray*> out_ptr,
                                     <ArrowSchema*> out_schema_ptr))

    @staticmethod
    def _import_from_c(uintptr_t in_ptr, type):
        """
        Import Array from a C ArrowArray struct, given its pointer
        and the imported array type.

        Parameters
        ----------
        in_ptr: int
            The raw pointer to a C ArrowArray struct.
        type: DataType or int
            Either a DataType object, or the raw pointer to a C ArrowSchema
            struct.

        This is a low-level function intended for expert users.
        """
        cdef:
            shared_ptr[CArray] c_array

        c_type = pyarrow_unwrap_data_type(type)
        if c_type == nullptr:
            # Not a DataType object, perhaps a raw ArrowSchema pointer
            type_ptr = <uintptr_t> type
            with nogil:
                c_array = GetResultValue(ImportArray(<ArrowArray*> in_ptr,
                                                     <ArrowSchema*> type_ptr))
        else:
            with nogil:
                c_array = GetResultValue(ImportArray(<ArrowArray*> in_ptr,
                                                     c_type))
        return pyarrow_wrap_array(c_array)
cdef _array_like_to_pandas(obj, options):
    """
    Convert an Array or ChunkedArray to a pandas Series.

    ``options`` is the plain dict built by _PandasConvertible.to_pandas;
    it is translated to the C-level PandasOptions struct here.
    """
    cdef:
        PyObject* out
        PandasOptions c_options = _convert_pandas_options(options)

    # Captured before conversion so the original Arrow type can drive the
    # dtype / timezone handling below.
    original_type = obj.type
    name = obj._name

    # ARROW-3789(wesm): Convert date/timestamp types to datetime64[ns]
    c_options.coerce_temporal_nanoseconds = True

    if isinstance(obj, Array):
        with nogil:
            check_status(ConvertArrayToPandas(c_options,
                                              (<Array> obj).sp_array,
                                              obj, &out))
    elif isinstance(obj, ChunkedArray):
        with nogil:
            check_status(libarrow.ConvertChunkedArrayToPandas(
                c_options,
                (<ChunkedArray> obj).sp_chunked_array,
                obj, &out))

    arr = wrap_array_output(out)

    if (isinstance(original_type, TimestampType) and
            options["timestamp_as_object"]):
        # ARROW-5359 - need to specify object dtype to avoid pandas to
        # coerce back to ns resolution
        dtype = "object"
    else:
        # Let pandas infer the dtype from the converted values.
        dtype = None

    result = pandas_api.series(arr, dtype=dtype, name=name)

    if (isinstance(original_type, TimestampType) and
            original_type.tz is not None and
            # can be object dtype for non-ns and timestamp_as_object=True
            result.dtype.kind == "M"):
        from pyarrow.pandas_compat import make_tz_aware
        result = make_tz_aware(result, original_type.tz)

    return result
cdef wrap_array_output(PyObject* output):
    """
    Convert the raw PyObject* produced by the C++ to-pandas conversion
    into a Python object, rebuilding dictionary-encoded results as a
    pandas Categorical.
    """
    cdef object obj = PyObject_to_object(output)

    if isinstance(obj, dict):
        # Dictionary-encoded result: the converter hands back a dict with
        # 'indices', 'dictionary' and 'ordered' entries.
        return pandas_api.categorical_type(obj['indices'],
                                           categories=obj['dictionary'],
                                           ordered=obj['ordered'],
                                           fastpath=True)
    else:
        return obj
cdef class NullArray(Array):
    """
    Concrete class for Arrow arrays of null data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class BooleanArray(Array):
    """
    Concrete class for Arrow arrays of boolean data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """

    @property
    def false_count(self):
        # Count of False values, as reported by the C++ BooleanArray.
        return (<CBooleanArray*> self.ap).false_count()

    @property
    def true_count(self):
        # Count of True values, as reported by the C++ BooleanArray.
        return (<CBooleanArray*> self.ap).true_count()
cdef class NumericArray(Array):
    """
    A base class for Arrow numeric arrays.

    Parent of IntegerArray, FloatingPointArray and the temporal array types.
    """
cdef class IntegerArray(NumericArray):
    """
    A base class for Arrow integer arrays.

    Parent of the concrete Int8Array..UInt64Array classes.
    """
cdef class FloatingPointArray(NumericArray):
    """
    A base class for Arrow floating-point arrays.

    Parent of HalfFloatArray, FloatArray and DoubleArray.
    """
cdef class Int8Array(IntegerArray):
    """
    Concrete class for Arrow arrays of int8 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class UInt8Array(IntegerArray):
    """
    Concrete class for Arrow arrays of uint8 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class Int16Array(IntegerArray):
    """
    Concrete class for Arrow arrays of int16 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class UInt16Array(IntegerArray):
    """
    Concrete class for Arrow arrays of uint16 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class Int32Array(IntegerArray):
    """
    Concrete class for Arrow arrays of int32 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class UInt32Array(IntegerArray):
    """
    Concrete class for Arrow arrays of uint32 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class Int64Array(IntegerArray):
    """
    Concrete class for Arrow arrays of int64 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class UInt64Array(IntegerArray):
    """
    Concrete class for Arrow arrays of uint64 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class Date32Array(NumericArray):
    """
    Concrete class for Arrow arrays of date32 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class Date64Array(NumericArray):
    """
    Concrete class for Arrow arrays of date64 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class TimestampArray(NumericArray):
    """
    Concrete class for Arrow arrays of timestamp data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class Time32Array(NumericArray):
    """
    Concrete class for Arrow arrays of time32 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class Time64Array(NumericArray):
    """
    Concrete class for Arrow arrays of time64 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class DurationArray(NumericArray):
    """
    Concrete class for Arrow arrays of duration data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class HalfFloatArray(FloatingPointArray):
    """
    Concrete class for Arrow arrays of float16 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class FloatArray(FloatingPointArray):
    """
    Concrete class for Arrow arrays of float32 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class DoubleArray(FloatingPointArray):
    """
    Concrete class for Arrow arrays of float64 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class FixedSizeBinaryArray(Array):
    """
    Concrete class for Arrow arrays of a fixed-size binary data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class Decimal128Array(FixedSizeBinaryArray):
    """
    Concrete class for Arrow arrays of decimal128 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class Decimal256Array(FixedSizeBinaryArray):
    """
    Concrete class for Arrow arrays of decimal256 data type.

    Do not instantiate directly: use ``pyarrow.array``.
    """
cdef class BaseListArray(Array):
    """
    A base class for list-like arrays.

    Shared by ListArray and LargeListArray.
    """

    def flatten(self):
        """
        Unnest this ListArray/LargeListArray by one level.

        The returned Array is logically a concatenation of all the sub-lists
        in this Array.

        Note that this method is different from ``self.values()`` in that
        it takes care of the slicing offset as well as null elements backed
        by non-empty sub-lists.

        Returns
        -------
        result : Array
        """
        return _pc().list_flatten(self)

    def value_parent_indices(self):
        """
        Return array of same length as list child values array where each
        output value is the index of the parent list array slot containing each
        child value.

        Examples
        --------
        >>> arr = pa.array([[1, 2, 3], [], None, [4]],
        ...                type=pa.list_(pa.int32()))
        >>> arr.value_parent_indices()
        <pyarrow.lib.Int32Array object at 0x7efc5db958a0>
        [
          0,
          0,
          0,
          3
        ]
        """
        return _pc().list_parent_indices(self)

    def value_lengths(self):
        """
        Return integers array with values equal to the respective length of
        each list element. Null list values are null in the output.

        Examples
        --------
        >>> arr = pa.array([[1, 2, 3], [], None, [4]],
        ...                type=pa.list_(pa.int32()))
        >>> arr.value_lengths()
        <pyarrow.lib.Int32Array object at 0x7efc5db95910>
        [
          3,
          0,
          null,
          1
        ]
        """
        return _pc().list_value_length(self)
cdef class ListArray(BaseListArray):
    """
    Concrete class for Arrow arrays of a list data type.
    """

    @staticmethod
    def from_arrays(offsets, values, MemoryPool pool=None):
        """
        Construct ListArray from arrays of int32 offsets and values.

        Parameters
        ----------
        offsets : Array (int32 type)
        values : Array (any type)
        pool : MemoryPool, optional

        Returns
        -------
        list_array : ListArray

        Examples
        --------
        >>> values = pa.array([1, 2, 3, 4])
        >>> offsets = pa.array([0, 2, 4])
        >>> pa.ListArray.from_arrays(offsets, values)
        <pyarrow.lib.ListArray object at 0x7fbde226bf40>
        [
          [
            1,
            2
          ],
          [
            3,
            4
          ]
        ]

        # nulls in the offsets array become null lists
        >>> offsets = pa.array([0, None, 2, 4])
        >>> pa.ListArray.from_arrays(offsets, values)
        <pyarrow.lib.ListArray object at 0x7fbde226bf40>
        [
          [
            1,
            2
          ],
          null,
          [
            3,
            4
          ]
        ]
        """
        cdef:
            Array _offsets, _values
            shared_ptr[CArray] out
        cdef CMemoryPool* cpool = maybe_unbox_memory_pool(pool)

        # Coerce the offsets to the int32 type the C++ ListArray requires.
        _offsets = asarray(offsets, type='int32')
        _values = asarray(values)

        with nogil:
            out = GetResultValue(
                CListArray.FromArrays(_offsets.ap[0], _values.ap[0], cpool))
        cdef Array result = pyarrow_wrap_array(out)
        result.validate()
        return result

    @property
    def values(self):
        # Raw child values array; unlike flatten() this does not account
        # for the slicing offset or null list entries.
        cdef CListArray* arr = <CListArray*> self.ap
        return pyarrow_wrap_array(arr.values())

    @property
    def offsets(self):
        """
        Return the offsets as an int32 array.
        """
        return pyarrow_wrap_array((<CListArray*> self.ap).offsets())
cdef class LargeListArray(BaseListArray):
    """
    Concrete class for Arrow arrays of a large list data type.

    Identical to ListArray, but 64-bit offsets.
    """

    @staticmethod
    def from_arrays(offsets, values, MemoryPool pool=None):
        """
        Construct LargeListArray from arrays of int64 offsets and values.

        Parameters
        ----------
        offsets : Array (int64 type)
        values : Array (any type)
        pool : MemoryPool, optional

        Returns
        -------
        list_array : LargeListArray
        """
        cdef:
            Array _offsets, _values
            shared_ptr[CArray] out
        cdef CMemoryPool* cpool = maybe_unbox_memory_pool(pool)

        # Coerce the offsets to the int64 type the C++ LargeListArray requires.
        _offsets = asarray(offsets, type='int64')
        _values = asarray(values)

        with nogil:
            out = GetResultValue(
                CLargeListArray.FromArrays(_offsets.ap[0], _values.ap[0],
                                           cpool))
        cdef Array result = pyarrow_wrap_array(out)
        result.validate()
        return result

    @property
    def values(self):
        # Raw child values array; unlike flatten() this does not account
        # for the slicing offset or null list entries.
        cdef CLargeListArray* arr = <CLargeListArray*> self.ap
        return pyarrow_wrap_array(arr.values())

    @property
    def offsets(self):
        """
        Return the offsets as an int64 array.
        """
        return pyarrow_wrap_array((<CLargeListArray*> self.ap).offsets())
cdef class MapArray(Array):
    """
    Concrete class for Arrow arrays of a map data type.
    """

    @staticmethod
    def from_arrays(offsets, keys, items, MemoryPool pool=None):
        """
        Construct MapArray from arrays of int32 offsets and key, item arrays.

        Parameters
        ----------
        offsets : array-like or sequence (int32 type)
        keys : array-like or sequence (any type)
        items : array-like or sequence (any type)
        pool : MemoryPool, optional

        Returns
        -------
        map_array : MapArray
        """
        cdef:
            Array _offsets, _keys, _items
            shared_ptr[CArray] out
        cdef CMemoryPool* cpool = maybe_unbox_memory_pool(pool)

        # Coerce the offsets to the int32 type the C++ MapArray requires.
        _offsets = asarray(offsets, type='int32')
        _keys = asarray(keys)
        _items = asarray(items)

        with nogil:
            out = GetResultValue(
                CMapArray.FromArrays(_offsets.sp_array,
                                     _keys.sp_array,
                                     _items.sp_array, cpool))
        cdef Array result = pyarrow_wrap_array(out)
        result.validate()
        return result

    @property
    def keys(self):
        """The child array of map keys (C++ MapArray::keys)."""
        return pyarrow_wrap_array((<CMapArray*> self.ap).keys())

    @property
    def items(self):
        """The child array of map items (C++ MapArray::items)."""
        return pyarrow_wrap_array((<CMapArray*> self.ap).items())
cdef class FixedSizeListArray(Array):
    """
    Concrete class for Arrow arrays of a fixed size list data type.
    """

    @staticmethod
    def from_arrays(values, int32_t list_size):
        """
        Construct FixedSizeListArray from array of values and a list length.

        Parameters
        ----------
        values : Array (any type)
        list_size : int
            The fixed length of the lists.

        Returns
        -------
        FixedSizeListArray
        """
        cdef:
            Array _values
            CResult[shared_ptr[CArray]] c_result

        _values = asarray(values)

        with nogil:
            c_result = CFixedSizeListArray.FromArrays(
                _values.sp_array, list_size)
        cdef Array result = pyarrow_wrap_array(GetResultValue(c_result))
        result.validate()
        return result

    @property
    def values(self):
        # Alias of flatten().
        return self.flatten()

    def flatten(self):
        """
        Unnest this FixedSizeListArray by one level.

        Returns
        -------
        result : Array
        """
        cdef CFixedSizeListArray* arr = <CFixedSizeListArray*> self.ap
        return pyarrow_wrap_array(arr.values())
cdef class UnionArray(Array):
    """
    Concrete class for Arrow arrays of a Union data type.
    """

    def child(self, int pos):
        """
        DEPRECATED: use field() instead.

        Parameters
        ----------
        pos : int
            Index of the child field to return.
        """
        import warnings
        warnings.warn("child is deprecated, use field", FutureWarning)
        return self.field(pos)

    def field(self, int pos):
        """
        Return the given child field as an individual array.

        For sparse unions, the returned array has its offset, length,
        and null count adjusted.

        For dense unions, the returned array is unchanged.

        Raises
        ------
        KeyError
            If the C++ side returns no array for this position.
        """
        cdef shared_ptr[CArray] result
        result = (<CUnionArray*> self.ap).field(pos)
        if result != NULL:
            return pyarrow_wrap_array(result)
        raise KeyError("UnionArray does not have child {}".format(pos))

    @property
    def type_codes(self):
        """Get the type codes array."""
        buf = pyarrow_wrap_buffer((<CUnionArray*> self.ap).type_codes())
        # Wrap the raw codes buffer as an int8 array; None means
        # "no validity bitmap" (type codes are never null).
        return Array.from_buffers(int8(), len(self), [None, buf])

    @property
    def offsets(self):
        """
        Get the value offsets array (dense arrays only).

        Does not account for any slice offset.

        Raises
        ------
        ArrowTypeError
            If this is a sparse union (which has no offsets buffer).
        """
        if self.type.mode != "dense":
            raise ArrowTypeError("Can only get value offsets for dense arrays")
        cdef CDenseUnionArray* dense = <CDenseUnionArray*> self.ap
        buf = pyarrow_wrap_buffer(dense.value_offsets())
        return Array.from_buffers(int32(), len(self), [None, buf])

    @staticmethod
    def from_dense(Array types, Array value_offsets, list children,
                   list field_names=None, list type_codes=None):
        """
        Construct dense UnionArray from arrays of int8 types, int32 offsets and
        children arrays

        Parameters
        ----------
        types : Array (int8 type)
        value_offsets : Array (int32 type)
        children : list
        field_names : list
        type_codes : list

        Returns
        -------
        union_array : UnionArray
        """
        cdef:
            shared_ptr[CArray] out
            vector[shared_ptr[CArray]] c
            Array child
            vector[c_string] c_field_names
            vector[int8_t] c_type_codes

        # Collect the C++ counterparts of the Python-level inputs.
        for child in children:
            c.push_back(child.sp_array)
        if field_names is not None:
            for x in field_names:
                c_field_names.push_back(tobytes(x))
        if type_codes is not None:
            for x in type_codes:
                c_type_codes.push_back(x)

        with nogil:
            out = GetResultValue(CDenseUnionArray.Make(
                deref(types.ap), deref(value_offsets.ap), c, c_field_names,
                c_type_codes))

        cdef Array result = pyarrow_wrap_array(out)
        result.validate()
        return result

    @staticmethod
    def from_sparse(Array types, list children, list field_names=None,
                    list type_codes=None):
        """
        Construct sparse UnionArray from arrays of int8 types and children
        arrays

        Parameters
        ----------
        types : Array (int8 type)
        children : list
        field_names : list
        type_codes : list

        Returns
        -------
        union_array : UnionArray
        """
        cdef:
            shared_ptr[CArray] out
            vector[shared_ptr[CArray]] c
            Array child
            vector[c_string] c_field_names
            vector[int8_t] c_type_codes

        # Same marshalling as from_dense, minus the offsets array.
        for child in children:
            c.push_back(child.sp_array)
        if field_names is not None:
            for x in field_names:
                c_field_names.push_back(tobytes(x))
        if type_codes is not None:
            for x in type_codes:
                c_type_codes.push_back(x)

        with nogil:
            out = GetResultValue(CSparseUnionArray.Make(
                deref(types.ap), c, c_field_names, c_type_codes))

        cdef Array result = pyarrow_wrap_array(out)
        result.validate()
        return result
  1603. cdef class StringArray(Array):
  1604. """
  1605. Concrete class for Arrow arrays of string (or utf8) data type.
  1606. """
  1607. @staticmethod
  1608. def from_buffers(int length, Buffer value_offsets, Buffer data,
  1609. Buffer null_bitmap=None, int null_count=-1,
  1610. int offset=0):
  1611. """
  1612. Construct a StringArray from value_offsets and data buffers.
  1613. If there are nulls in the data, also a null_bitmap and the matching
  1614. null_count must be passed.
  1615. Parameters
  1616. ----------
  1617. length : int
  1618. value_offsets : Buffer
  1619. data : Buffer
  1620. null_bitmap : Buffer, optional
  1621. null_count : int, default 0
  1622. offset : int, default 0
  1623. Returns
  1624. -------
  1625. string_array : StringArray
  1626. """
  1627. return Array.from_buffers(utf8(), length,
  1628. [null_bitmap, value_offsets, data],
  1629. null_count, offset)
cdef class LargeStringArray(Array):
    """
    Concrete class for Arrow arrays of large string (or utf8) data type.
    """

    @staticmethod
    def from_buffers(int length, Buffer value_offsets, Buffer data,
                     Buffer null_bitmap=None, int null_count=-1,
                     int offset=0):
        """
        Construct a LargeStringArray from value_offsets and data buffers.

        If there are nulls in the data, also a null_bitmap and the matching
        null_count must be passed.

        Parameters
        ----------
        length : int
        value_offsets : Buffer
        data : Buffer
        null_bitmap : Buffer, optional
        null_count : int, default 0
        offset : int, default 0

        Returns
        -------
        large_string_array : LargeStringArray
        """
        # Identical layout to StringArray, but with large_utf8 (64-bit
        # offsets) as the type.
        return Array.from_buffers(large_utf8(), length,
                                  [null_bitmap, value_offsets, data],
                                  null_count, offset)
cdef class BinaryArray(Array):
    """
    Concrete class for Arrow arrays of variable-sized binary data type.
    """

    @property
    def total_values_length(self):
        """
        The number of bytes from beginning to end of the data buffer addressed
        by the offsets of this BinaryArray.
        """
        return (<CBinaryArray*> self.ap).total_values_length()
cdef class LargeBinaryArray(Array):
    """
    Concrete class for Arrow arrays of large variable-sized binary data type.
    """

    @property
    def total_values_length(self):
        """
        The number of bytes from beginning to end of the data buffer addressed
        by the offsets of this LargeBinaryArray.
        """
        return (<CLargeBinaryArray*> self.ap).total_values_length()
cdef class DictionaryArray(Array):
    """
    Concrete class for dictionary-encoded Arrow arrays.
    """

    def dictionary_encode(self):
        """
        Return this array unchanged, as it is already dictionary-encoded.
        """
        return self

    def dictionary_decode(self):
        """
        Decodes the DictionaryArray to an Array.
        """
        # Gather the dictionary values at the stored indices, producing a
        # plain (non-encoded) array.
        return self.dictionary.take(self.indices)

    @property
    def dictionary(self):
        # The dictionary of values; wrapped lazily and cached on the
        # instance in self._dictionary.
        cdef CDictionaryArray* darr = <CDictionaryArray*>(self.ap)

        if self._dictionary is None:
            self._dictionary = pyarrow_wrap_array(darr.dictionary())

        return self._dictionary

    @property
    def indices(self):
        # The integer indices into the dictionary; wrapped lazily and
        # cached on the instance in self._indices.
        cdef CDictionaryArray* darr = <CDictionaryArray*>(self.ap)

        if self._indices is None:
            self._indices = pyarrow_wrap_array(darr.indices())

        return self._indices

    @staticmethod
    def from_arrays(indices, dictionary, mask=None, bint ordered=False,
                    bint from_pandas=False, bint safe=True,
                    MemoryPool memory_pool=None):
        """
        Construct a DictionaryArray from indices and values.

        Parameters
        ----------
        indices : pyarrow.Array, numpy.ndarray or pandas.Series, int type
            Non-negative integers referencing the dictionary values by zero
            based index.
        dictionary : pyarrow.Array, ndarray or pandas.Series
            The array of values referenced by the indices.
        mask : ndarray or pandas.Series, bool type
            True values indicate that indices are actually null.
        ordered : bool, default False
            Set to True if the category values are ordered.
        from_pandas : bool, default False
            If True, the indices should be treated as though they originated in
            a pandas.Categorical (null encoded as -1).
        safe : bool, default True
            If True, check that the dictionary indices are in range.
        memory_pool : MemoryPool, default None
            For memory allocations, if required, otherwise uses default pool.

        Returns
        -------
        dict_array : DictionaryArray

        Raises
        ------
        NotImplementedError
            If a mask is passed together with an Arrow array of indices.
        ValueError
            If the indices are not of an integer type.
        """
        cdef:
            Array _indices, _dictionary
            shared_ptr[CDataType] c_type
            shared_ptr[CArray] c_result

        if isinstance(indices, Array):
            if mask is not None:
                raise NotImplementedError(
                    "mask not implemented with Arrow array inputs yet")
            _indices = indices
        else:
            if from_pandas:
                # pandas.Categorical codes encode null as -1.
                _indices = _codes_to_indices(indices, mask, None, memory_pool)
            else:
                _indices = array(indices, mask=mask, memory_pool=memory_pool)

        if isinstance(dictionary, Array):
            _dictionary = dictionary
        else:
            _dictionary = array(dictionary, memory_pool=memory_pool)

        if not isinstance(_indices, IntegerArray):
            raise ValueError('Indices must be integer type')

        cdef c_bool c_ordered = ordered

        c_type.reset(new CDictionaryType(_indices.type.sp_type,
                                         _dictionary.sp_array.get().type(),
                                         c_ordered))

        if safe:
            # FromArrays validates that indices are in range for the
            # dictionary.
            with nogil:
                c_result = GetResultValue(
                    CDictionaryArray.FromArrays(c_type, _indices.sp_array,
                                                _dictionary.sp_array))
        else:
            # Construct directly, skipping the bounds check.
            c_result.reset(new CDictionaryArray(c_type, _indices.sp_array,
                                                _dictionary.sp_array))

        cdef Array result = pyarrow_wrap_array(c_result)
        result.validate()
        return result
  1765. cdef class StructArray(Array):
  1766. """
  1767. Concrete class for Arrow arrays of a struct data type.
  1768. """
  1769. def field(self, index):
  1770. """
  1771. Retrieves the child array belonging to field.
  1772. Parameters
  1773. ----------
  1774. index : Union[int, str]
  1775. Index / position or name of the field.
  1776. Returns
  1777. -------
  1778. result : Array
  1779. """
  1780. cdef:
  1781. CStructArray* arr = <CStructArray*> self.ap
  1782. shared_ptr[CArray] child
  1783. if isinstance(index, (bytes, str)):
  1784. child = arr.GetFieldByName(tobytes(index))
  1785. if child == nullptr:
  1786. raise KeyError(index)
  1787. elif isinstance(index, int):
  1788. child = arr.field(
  1789. <int>_normalize_index(index, self.ap.num_fields()))
  1790. else:
  1791. raise TypeError('Expected integer or string index')
  1792. return pyarrow_wrap_array(child)
  1793. def flatten(self, MemoryPool memory_pool=None):
  1794. """
  1795. Return one individual array for each field in the struct.
  1796. Parameters
  1797. ----------
  1798. memory_pool : MemoryPool, default None
  1799. For memory allocations, if required, otherwise use default pool.
  1800. Returns
  1801. -------
  1802. result : List[Array]
  1803. """
  1804. cdef:
  1805. vector[shared_ptr[CArray]] arrays
  1806. CMemoryPool* pool = maybe_unbox_memory_pool(memory_pool)
  1807. CStructArray* sarr = <CStructArray*> self.ap
  1808. with nogil:
  1809. arrays = GetResultValue(sarr.Flatten(pool))
  1810. return [pyarrow_wrap_array(arr) for arr in arrays]
  1811. @staticmethod
  1812. def from_arrays(arrays, names=None, fields=None, mask=None,
  1813. memory_pool=None):
  1814. """
  1815. Construct StructArray from collection of arrays representing
  1816. each field in the struct.
  1817. Either field names or field instances must be passed.
  1818. Parameters
  1819. ----------
  1820. arrays : sequence of Array
  1821. names : List[str] (optional)
  1822. Field names for each struct child.
  1823. fields : List[Field] (optional)
  1824. Field instances for each struct child.
  1825. mask : pyarrow.Array[bool] (optional)
  1826. Indicate which values are null (True) or not null (False).
  1827. memory_pool : MemoryPool (optional)
  1828. For memory allocations, if required, otherwise uses default pool.
  1829. Returns
  1830. -------
  1831. result : StructArray
  1832. """
  1833. cdef:
  1834. shared_ptr[CArray] c_array
  1835. shared_ptr[CBuffer] c_mask
  1836. vector[shared_ptr[CArray]] c_arrays
  1837. vector[c_string] c_names
  1838. vector[shared_ptr[CField]] c_fields
  1839. CResult[shared_ptr[CArray]] c_result
  1840. ssize_t num_arrays
  1841. ssize_t length
  1842. ssize_t i
  1843. Field py_field
  1844. DataType struct_type
  1845. if names is None and fields is None:
  1846. raise ValueError('Must pass either names or fields')
  1847. if names is not None and fields is not None:
  1848. raise ValueError('Must pass either names or fields, not both')
  1849. if mask is None:
  1850. c_mask = shared_ptr[CBuffer]()
  1851. elif isinstance(mask, Array):
  1852. if mask.type.id != Type_BOOL:
  1853. raise ValueError('Mask must be a pyarrow.Array of type bool')
  1854. if mask.null_count != 0:
  1855. raise ValueError('Mask must not contain nulls')
  1856. inverted_mask = _pc().invert(mask, memory_pool=memory_pool)
  1857. c_mask = pyarrow_unwrap_buffer(inverted_mask.buffers()[1])
  1858. else:
  1859. raise ValueError('Mask must be a pyarrow.Array of type bool')
  1860. arrays = [asarray(x) for x in arrays]
  1861. for arr in arrays:
  1862. c_array = pyarrow_unwrap_array(arr)
  1863. if c_array == nullptr:
  1864. raise TypeError(f"Expected Array, got {arr.__class__}")
  1865. c_arrays.push_back(c_array)
  1866. if names is not None:
  1867. for name in names:
  1868. c_names.push_back(tobytes(name))
  1869. else:
  1870. for item in fields:
  1871. if isinstance(item, tuple):
  1872. py_field = field(*item)
  1873. else:
  1874. py_field = item
  1875. c_fields.push_back(py_field.sp_field)
  1876. if (c_arrays.size() == 0 and c_names.size() == 0 and
  1877. c_fields.size() == 0):
  1878. # The C++ side doesn't allow this
  1879. return array([], struct([]))
  1880. if names is not None:
  1881. # XXX Cannot pass "nullptr" for a shared_ptr<T> argument:
  1882. # https://github.com/cython/cython/issues/3020
  1883. c_result = CStructArray.MakeFromFieldNames(
  1884. c_arrays, c_names, c_mask, -1, 0)
  1885. else:
  1886. c_result = CStructArray.MakeFromFields(
  1887. c_arrays, c_fields, c_mask, -1, 0)
  1888. cdef Array result = pyarrow_wrap_array(GetResultValue(c_result))
  1889. result.validate()
  1890. return result
cdef class ExtensionArray(Array):
    """
    Concrete class for Arrow extension arrays.
    """

    @property
    def storage(self):
        """The underlying storage array, stripped of the extension type."""
        cdef:
            CExtensionArray* ext_array = <CExtensionArray*>(self.ap)

        return pyarrow_wrap_array(ext_array.storage())

    @staticmethod
    def from_storage(BaseExtensionType typ, Array storage):
        """
        Construct ExtensionArray from type and storage array.

        Parameters
        ----------
        typ : DataType
            The extension type for the result array.
        storage : Array
            The underlying storage for the result array.

        Returns
        -------
        ext_array : ExtensionArray

        Raises
        ------
        TypeError
            If the storage array's type does not match typ.storage_type.
        """
        cdef:
            shared_ptr[CExtensionArray] ext_array

        if storage.type != typ.storage_type:
            raise TypeError("Incompatible storage type {0} "
                            "for extension type {1}".format(storage.type, typ))

        ext_array = make_shared[CExtensionArray](typ.sp_type, storage.sp_array)
        cdef Array result = pyarrow_wrap_array(<shared_ptr[CArray]> ext_array)
        result.validate()
        return result

    def _to_pandas(self, options, **kwargs):
        # Prefer a pandas ExtensionDtype provided by the extension type;
        # fall back to converting the storage array.
        pandas_dtype = None
        try:
            pandas_dtype = self.type.to_pandas_dtype()
        except NotImplementedError:
            pass

        # pandas ExtensionDtype that implements conversion from pyarrow
        if hasattr(pandas_dtype, '__from_arrow__'):
            arr = pandas_dtype.__from_arrow__(self)
            return pandas_api.series(arr)

        # otherwise convert the storage array with the base implementation
        return Array._to_pandas(self.storage, options, **kwargs)

    def to_numpy(self, **kwargs):
        """
        Convert extension array to a numpy ndarray.

        See Also
        --------
        Array.to_numpy
        """
        return self.storage.to_numpy(**kwargs)
# Mapping from Arrow type id to the concrete Array subclass used to wrap
# arrays of that type (see get_array_class_from_type below).
cdef dict _array_classes = {
    _Type_NA: NullArray,
    _Type_BOOL: BooleanArray,
    _Type_UINT8: UInt8Array,
    _Type_UINT16: UInt16Array,
    _Type_UINT32: UInt32Array,
    _Type_UINT64: UInt64Array,
    _Type_INT8: Int8Array,
    _Type_INT16: Int16Array,
    _Type_INT32: Int32Array,
    _Type_INT64: Int64Array,
    _Type_DATE32: Date32Array,
    _Type_DATE64: Date64Array,
    _Type_TIMESTAMP: TimestampArray,
    _Type_TIME32: Time32Array,
    _Type_TIME64: Time64Array,
    _Type_DURATION: DurationArray,
    _Type_HALF_FLOAT: HalfFloatArray,
    _Type_FLOAT: FloatArray,
    _Type_DOUBLE: DoubleArray,
    _Type_LIST: ListArray,
    _Type_LARGE_LIST: LargeListArray,
    _Type_MAP: MapArray,
    _Type_FIXED_SIZE_LIST: FixedSizeListArray,
    # Both union modes share the UnionArray wrapper.
    _Type_SPARSE_UNION: UnionArray,
    _Type_DENSE_UNION: UnionArray,
    _Type_BINARY: BinaryArray,
    _Type_STRING: StringArray,
    _Type_LARGE_BINARY: LargeBinaryArray,
    _Type_LARGE_STRING: LargeStringArray,
    _Type_DICTIONARY: DictionaryArray,
    _Type_FIXED_SIZE_BINARY: FixedSizeBinaryArray,
    _Type_DECIMAL128: Decimal128Array,
    _Type_DECIMAL256: Decimal256Array,
    _Type_STRUCT: StructArray,
    _Type_EXTENSION: ExtensionArray,
}
# Return the Python Array subclass used to wrap arrays of the given C++
# data type. Extension types may provide their own wrapper class via
# __arrow_ext_class__(); everything else is looked up in _array_classes.
cdef object get_array_class_from_type(
        const shared_ptr[CDataType]& sp_data_type):
    cdef CDataType* data_type = sp_data_type.get()
    if data_type == NULL:
        raise ValueError('Array data type was NULL')

    if data_type.id() == _Type_EXTENSION:
        py_ext_data_type = pyarrow_wrap_data_type(sp_data_type)
        return py_ext_data_type.__arrow_ext_class__()
    else:
        return _array_classes[data_type.id()]
# Extract the underlying values from a pandas Series/Index, an ndarray, or
# any other sequence (converted via pandas). The out-parameter is_series is
# set to True only for pandas Series/Index inputs.
cdef object get_values(object obj, bint* is_series):
    if pandas_api.is_series(obj) or pandas_api.is_index(obj):
        result = pandas_api.get_values(obj)
        is_series[0] = True
    elif isinstance(obj, np.ndarray):
        result = obj
        is_series[0] = False
    else:
        # Fall back to pandas to coerce arbitrary sequences to an ndarray.
        result = pandas_api.series(obj).values
        is_series[0] = False

    return result
def concat_arrays(arrays, MemoryPool memory_pool=None):
    """
    Concatenate the given arrays.

    The contents of the input arrays are copied into the returned array.

    Parameters
    ----------
    arrays : iterable of pyarrow.Array
        Arrays to concatenate, must be identically typed.
    memory_pool : MemoryPool, default None
        For memory allocations. If None, the default pool is used.

    Returns
    -------
    Array

    Raises
    ------
    ArrowInvalid : if not all of the arrays have the same type.
    TypeError : if the iterable contains anything other than Array objects.
    """
    cdef:
        vector[shared_ptr[CArray]] c_arrays
        shared_ptr[CArray] c_concatenated
        CMemoryPool* pool = maybe_unbox_memory_pool(memory_pool)

    for array in arrays:
        if not isinstance(array, Array):
            raise TypeError("Iterable should contain Array objects, "
                            "got {0} instead".format(type(array)))
        c_arrays.push_back(pyarrow_unwrap_array(array))

    # Concatenate is pure C++; release the GIL while copying.
    with nogil:
        c_concatenated = GetResultValue(Concatenate(c_arrays, pool))

    return pyarrow_wrap_array(c_concatenated)
  2027. def _empty_array(DataType type):
  2028. """
  2029. Create empty array of the given type.
  2030. """
  2031. if type.id == Type_DICTIONARY:
  2032. arr = DictionaryArray.from_arrays(
  2033. _empty_array(type.index_type), _empty_array(type.value_type),
  2034. ordered=type.ordered)
  2035. else:
  2036. arr = array([], type=type)
  2037. return arr