# shape_base.py

__all__ = ['atleast_1d', 'atleast_2d', 'atleast_3d', 'block', 'hstack',
           'stack', 'vstack']

import functools
import itertools
import operator
import warnings

from . import numeric as _nx
from . import overrides
from ._asarray import array, asanyarray
from .multiarray import normalize_axis_index
from . import fromnumeric as _from_nx


array_function_dispatch = functools.partial(
    overrides.array_function_dispatch, module='numpy')


def _atleast_1d_dispatcher(*arys):
    return arys


@array_function_dispatch(_atleast_1d_dispatcher)
def atleast_1d(*arys):
    """
    Convert inputs to arrays with at least one dimension.

    Scalar inputs are converted to 1-dimensional arrays, whilst
    higher-dimensional inputs are preserved.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more input arrays.

    Returns
    -------
    ret : ndarray
        An array, or list of arrays, each with ``a.ndim >= 1``.
        Copies are made only if necessary.

    See Also
    --------
    atleast_2d, atleast_3d

    Examples
    --------
    >>> np.atleast_1d(1.0)
    array([1.])

    >>> x = np.arange(9.0).reshape(3,3)
    >>> np.atleast_1d(x)
    array([[0., 1., 2.],
           [3., 4., 5.],
           [6., 7., 8.]])
    >>> np.atleast_1d(x) is x
    True

    >>> np.atleast_1d(1, [3, 4])
    [array([1]), array([3, 4])]

    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1)
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res


def _atleast_2d_dispatcher(*arys):
    return arys


@array_function_dispatch(_atleast_2d_dispatcher)
def atleast_2d(*arys):
    """
    View inputs as arrays with at least two dimensions.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more array-like sequences. Non-array inputs are converted
        to arrays. Arrays that already have two or more dimensions are
        preserved.

    Returns
    -------
    res1, res2, ... : ndarray
        An array, or list of arrays, each with ``a.ndim >= 2``.
        Copies are avoided where possible, and views with two or more
        dimensions are returned.

    See Also
    --------
    atleast_1d, atleast_3d

    Examples
    --------
    >>> np.atleast_2d(3.0)
    array([[3.]])

    >>> x = np.arange(3.0)
    >>> np.atleast_2d(x)
    array([[0., 1., 2.]])
    >>> np.atleast_2d(x).base is x
    True

    >>> np.atleast_2d(1, [1, 2], [[1, 2]])
    [array([[1]]), array([[1, 2]]), array([[1, 2]])]

    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1, 1)
        elif ary.ndim == 1:
            result = ary[_nx.newaxis, :]
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res


def _atleast_3d_dispatcher(*arys):
    return arys


@array_function_dispatch(_atleast_3d_dispatcher)
def atleast_3d(*arys):
    """
    View inputs as arrays with at least three dimensions.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more array-like sequences. Non-array inputs are converted to
        arrays. Arrays that already have three or more dimensions are
        preserved.

    Returns
    -------
    res1, res2, ... : ndarray
        An array, or list of arrays, each with ``a.ndim >= 3``. Copies are
        avoided where possible, and views with three or more dimensions are
        returned. For example, a 1-D array of shape ``(N,)`` becomes a view
        of shape ``(1, N, 1)``, and a 2-D array of shape ``(M, N)`` becomes a
        view of shape ``(M, N, 1)``.

    See Also
    --------
    atleast_1d, atleast_2d

    Examples
    --------
    >>> np.atleast_3d(3.0)
    array([[[3.]]])

    >>> x = np.arange(3.0)
    >>> np.atleast_3d(x).shape
    (1, 3, 1)

    >>> x = np.arange(12.0).reshape(4,3)
    >>> np.atleast_3d(x).shape
    (4, 3, 1)
    >>> np.atleast_3d(x).base is x.base  # x is a reshape, so not base itself
    True

    >>> for arr in np.atleast_3d([1, 2], [[1, 2]], [[[1, 2]]]):
    ...     print(arr, arr.shape)  # doctest: +SKIP
    ...
    [[[1]
      [2]]] (1, 2, 1)
    [[[1]
      [2]]] (1, 2, 1)
    [[[1 2]]] (1, 1, 2)

    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1, 1, 1)
        elif ary.ndim == 1:
            result = ary[_nx.newaxis, :, _nx.newaxis]
        elif ary.ndim == 2:
            result = ary[:, :, _nx.newaxis]
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res


def _arrays_for_stack_dispatcher(arrays, stacklevel=4):
    if not hasattr(arrays, '__getitem__') and hasattr(arrays, '__iter__'):
        warnings.warn('arrays to stack must be passed as a "sequence" type '
                      'such as list or tuple. Support for non-sequence '
                      'iterables such as generators is deprecated as of '
                      'NumPy 1.16 and will raise an error in the future.',
                      FutureWarning, stacklevel=stacklevel)
        return ()
    return arrays
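

# Illustrative note (not part of the original module): a generator defines
# ``__iter__`` but not ``__getitem__``, so the check above fires, a
# FutureWarning is emitted, and an empty tuple is handed to the dispatch
# machinery instead of the generator's elements. For example (hypothetical
# inputs):
#
#     >>> a, b = np.ones(3), np.zeros(3)
#     >>> np.vstack(x for x in (a, b))   # warns; generators are deprecated
#     >>> np.vstack([a, b])              # preferred: pass a list or tuple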


def _vhstack_dispatcher(tup):
    return _arrays_for_stack_dispatcher(tup)


@array_function_dispatch(_vhstack_dispatcher)
def vstack(tup):
    """
    Stack arrays in sequence vertically (row wise).

    This is equivalent to concatenation along the first axis after 1-D arrays
    of shape `(N,)` have been reshaped to `(1,N)`. Rebuilds arrays divided by
    `vsplit`.

    This function makes most sense for arrays with up to 3 dimensions. For
    instance, for pixel-data with a height (first axis), width (second axis),
    and r/g/b channels (third axis). The functions `concatenate`, `stack` and
    `block` provide more general stacking and concatenation operations.

    Parameters
    ----------
    tup : sequence of ndarrays
        The arrays must have the same shape along all but the first axis.
        1-D arrays must have the same length.

    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays; it will be at least 2-D.

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    stack : Join a sequence of arrays along a new axis.
    block : Assemble an nd-array from nested lists of blocks.
    hstack : Stack arrays in sequence horizontally (column wise).
    dstack : Stack arrays in sequence depth wise (along third axis).
    column_stack : Stack 1-D arrays as columns into a 2-D array.
    vsplit : Split an array into multiple sub-arrays vertically (row-wise).

    Examples
    --------
    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.vstack((a,b))
    array([[1, 2, 3],
           [2, 3, 4]])

    >>> a = np.array([[1], [2], [3]])
    >>> b = np.array([[2], [3], [4]])
    >>> np.vstack((a,b))
    array([[1],
           [2],
           [3],
           [2],
           [3],
           [4]])

    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(tup, stacklevel=2)
    arrs = atleast_2d(*tup)
    if not isinstance(arrs, list):
        arrs = [arrs]
    return _nx.concatenate(arrs, 0)


@array_function_dispatch(_vhstack_dispatcher)
def hstack(tup):
    """
    Stack arrays in sequence horizontally (column wise).

    This is equivalent to concatenation along the second axis, except for 1-D
    arrays where it concatenates along the first axis. Rebuilds arrays divided
    by `hsplit`.

    This function makes most sense for arrays with up to 3 dimensions. For
    instance, for pixel-data with a height (first axis), width (second axis),
    and r/g/b channels (third axis). The functions `concatenate`, `stack` and
    `block` provide more general stacking and concatenation operations.

    Parameters
    ----------
    tup : sequence of ndarrays
        The arrays must have the same shape along all but the second axis,
        except 1-D arrays which can be any length.

    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays.

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    stack : Join a sequence of arrays along a new axis.
    block : Assemble an nd-array from nested lists of blocks.
    vstack : Stack arrays in sequence vertically (row wise).
    dstack : Stack arrays in sequence depth wise (along third axis).
    column_stack : Stack 1-D arrays as columns into a 2-D array.
    hsplit : Split an array into multiple sub-arrays horizontally (column-wise).

    Examples
    --------
    >>> a = np.array((1,2,3))
    >>> b = np.array((2,3,4))
    >>> np.hstack((a,b))
    array([1, 2, 3, 2, 3, 4])
    >>> a = np.array([[1],[2],[3]])
    >>> b = np.array([[2],[3],[4]])
    >>> np.hstack((a,b))
    array([[1, 2],
           [2, 3],
           [3, 4]])

    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(tup, stacklevel=2)

    arrs = atleast_1d(*tup)
    if not isinstance(arrs, list):
        arrs = [arrs]
    # As a special case, dimension 0 of 1-dimensional arrays is "horizontal"
    if arrs and arrs[0].ndim == 1:
        return _nx.concatenate(arrs, 0)
    else:
        return _nx.concatenate(arrs, 1)


def _stack_dispatcher(arrays, axis=None, out=None):
    arrays = _arrays_for_stack_dispatcher(arrays, stacklevel=6)
    if out is not None:
        # optimize for the typical case where only arrays is provided
        arrays = list(arrays)
        arrays.append(out)
    return arrays


@array_function_dispatch(_stack_dispatcher)
def stack(arrays, axis=0, out=None):
    """
    Join a sequence of arrays along a new axis.

    The ``axis`` parameter specifies the index of the new axis in the
    dimensions of the result. For example, if ``axis=0`` it will be the first
    dimension and if ``axis=-1`` it will be the last dimension.

    .. versionadded:: 1.10.0

    Parameters
    ----------
    arrays : sequence of array_like
        Each array must have the same shape.
    axis : int, optional
        The axis in the result array along which the input arrays are stacked.
    out : ndarray, optional
        If provided, the destination to place the result. The shape must be
        correct, matching that of what stack would have returned if no
        out argument were specified.

    Returns
    -------
    stacked : ndarray
        The stacked array has one more dimension than the input arrays.

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    block : Assemble an nd-array from nested lists of blocks.
    split : Split array into a list of multiple sub-arrays of equal size.

    Examples
    --------
    >>> arrays = [np.random.randn(3, 4) for _ in range(10)]
    >>> np.stack(arrays, axis=0).shape
    (10, 3, 4)

    >>> np.stack(arrays, axis=1).shape
    (3, 10, 4)

    >>> np.stack(arrays, axis=2).shape
    (3, 4, 10)

    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.stack((a, b))
    array([[1, 2, 3],
           [2, 3, 4]])

    >>> np.stack((a, b), axis=-1)
    array([[1, 2],
           [2, 3],
           [3, 4]])

    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(arrays, stacklevel=2)

    arrays = [asanyarray(arr) for arr in arrays]
    if not arrays:
        raise ValueError('need at least one array to stack')

    shapes = {arr.shape for arr in arrays}
    if len(shapes) != 1:
        raise ValueError('all input arrays must have the same shape')

    result_ndim = arrays[0].ndim + 1
    axis = normalize_axis_index(axis, result_ndim)

    sl = (slice(None),) * axis + (_nx.newaxis,)
    expanded_arrays = [arr[sl] for arr in arrays]
    return _nx.concatenate(expanded_arrays, axis=axis, out=out)
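

# Illustrative sketch (not part of the original module) of the index trick in
# ``stack`` above: ``sl = (slice(None),) * axis + (np.newaxis,)`` views each
# input with a new length-1 axis at position ``axis`` before concatenation.
#
#     >>> a = np.zeros((3, 4)); b = np.ones((3, 4))   # hypothetical inputs
#     >>> a[(slice(None),) * 1 + (np.newaxis,)].shape
#     (3, 1, 4)
#     >>> np.stack((a, b), axis=1).shape
#     (3, 2, 4)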


# Internal functions to eliminate the overhead of repeated dispatch in one of
# the two possible paths inside np.block.
# Use getattr to protect against __array_function__ being disabled.
_size = getattr(_from_nx.size, '__wrapped__', _from_nx.size)
_ndim = getattr(_from_nx.ndim, '__wrapped__', _from_nx.ndim)
_concatenate = getattr(_from_nx.concatenate, '__wrapped__',
                       _from_nx.concatenate)


def _block_format_index(index):
    """
    Convert a list of indices ``[0, 1, 2]`` into ``"arrays[0][1][2]"``.
    """
    idx_str = ''.join('[{}]'.format(i) for i in index if i is not None)
    return 'arrays' + idx_str


def _block_check_depths_match(arrays, parent_index=[]):
    """
    Recursive function checking that the depths of nested lists in `arrays`
    all match. Mismatch raises a ValueError as described in the block
    docstring below.

    The entire index (rather than just the depth) needs to be calculated
    for each innermost list, in case an error needs to be raised, so that
    the index of the offending list can be printed as part of the error.

    Parameters
    ----------
    arrays : nested list of arrays
        The arrays to check
    parent_index : list of int
        The full index of `arrays` within the nested lists passed to
        `_block_check_depths_match` at the top of the recursion.

    Returns
    -------
    first_index : list of int
        The full index of an element from the bottom of the nesting in
        `arrays`. If any element at the bottom is an empty list, this will
        refer to it, and the last index along the empty axis will be None.
    max_arr_ndim : int
        The maximum of the ndims of the arrays nested in `arrays`.
    final_size : int
        The number of elements in the final array. This is used to motivate
        the choice of algorithm, based on benchmarking wisdom.
    """
    if type(arrays) is tuple:
        # not strictly necessary, but saves us from:
        #  - more than one way to do things - no point treating tuples like
        #    lists
        #  - horribly confusing behaviour that results when tuples are
        #    treated like ndarray
        raise TypeError(
            '{} is a tuple. '
            'Only lists can be used to arrange blocks, and np.block does '
            'not allow implicit conversion from tuple to ndarray.'.format(
                _block_format_index(parent_index)
            )
        )
    elif type(arrays) is list and len(arrays) > 0:
        idxs_ndims = (_block_check_depths_match(arr, parent_index + [i])
                      for i, arr in enumerate(arrays))

        first_index, max_arr_ndim, final_size = next(idxs_ndims)
        for index, ndim, size in idxs_ndims:
            final_size += size
            if ndim > max_arr_ndim:
                max_arr_ndim = ndim
            if len(index) != len(first_index):
                raise ValueError(
                    "List depths are mismatched. First element was at depth "
                    "{}, but there is an element at depth {} ({})".format(
                        len(first_index),
                        len(index),
                        _block_format_index(index)
                    )
                )
            # propagate our flag that indicates an empty list at the bottom
            if index[-1] is None:
                first_index = index

        return first_index, max_arr_ndim, final_size
    elif type(arrays) is list and len(arrays) == 0:
        # We've 'bottomed out' on an empty list
        return parent_index + [None], 0, 0
    else:
        # We've 'bottomed out' - arrays is either a scalar or an array
        size = _size(arrays)
        return parent_index, _ndim(arrays), size
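

# Illustrative sketch (not part of the original module), assuming two
# hypothetical 2-D blocks ``A`` and ``B``:
#
#     >>> A = np.ones((2, 2)); B = np.ones((2, 3))
#     >>> _block_check_depths_match([[A, B], [A, B]])
#     ([0, 0], 2, 20)     # index of the first leaf, max ndim, total element count
#     >>> _block_check_depths_match([[A, B], A])
#     Traceback (most recent call last):
#         ...
#     ValueError: List depths are mismatched. ...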


def _atleast_nd(a, ndim):
    # Ensures `a` has at least `ndim` dimensions by prepending
    # ones to `a.shape` as necessary
    return array(a, ndmin=ndim, copy=False, subok=True)
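

# For example (a hypothetical call, not part of the original module):
# ``_atleast_nd(np.ones((2, 3)), 4)`` has shape (1, 1, 2, 3) -- length-1 axes
# are prepended until the requested number of dimensions is reached, avoiding
# a copy where possible.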


def _accumulate(values):
    return list(itertools.accumulate(values))


def _concatenate_shapes(shapes, axis):
    """Given array shapes, return the resulting shape and slices prefixes.

    These help in nested concatenation.

    Returns
    -------
    shape: tuple of int
        This tuple satisfies:
        ```
        shape, _ = _concatenate_shapes([arr.shape for arr in arrs], axis)
        shape == concatenate(arrs, axis).shape
        ```
    slice_prefixes: tuple of (slice(start, end), )
        For a list of arrays being concatenated, this returns the slice
        in the larger array at axis that needs to be sliced into.

        For example, the following holds:
        ```
        ret = concatenate([a, b, c], axis)
        _, (sl_a, sl_b, sl_c) = _concatenate_shapes([a.shape, b.shape, c.shape], axis)

        ret[(slice(None),) * axis + sl_a] == a
        ret[(slice(None),) * axis + sl_b] == b
        ret[(slice(None),) * axis + sl_c] == c
        ```
        These are called slice prefixes since they are used in the recursive
        blocking algorithm to compute the left-most slices during the
        recursion. Therefore, they must be prepended to rest of the slice
        that was computed deeper in the recursion.

        These are returned as tuples to ensure that they can quickly be added
        to existing slice tuple without creating a new tuple every time.
    """
    # Cache a result that will be reused.
    shape_at_axis = [shape[axis] for shape in shapes]

    # Take a shape, any shape
    first_shape = shapes[0]
    first_shape_pre = first_shape[:axis]
    first_shape_post = first_shape[axis+1:]

    if any(shape[:axis] != first_shape_pre or
           shape[axis+1:] != first_shape_post for shape in shapes):
        raise ValueError(
            'Mismatched array shapes in block along axis {}.'.format(axis))

    shape = (first_shape_pre + (sum(shape_at_axis),) + first_shape[axis+1:])

    offsets_at_axis = _accumulate(shape_at_axis)
    slice_prefixes = [(slice(start, end),)
                      for start, end in zip([0] + offsets_at_axis,
                                            offsets_at_axis)]
    return shape, slice_prefixes
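

# A worked example (hypothetical, not part of the original module): for shapes
# (2, 3) and (2, 4) concatenated along axis 1,
#
#     >>> _concatenate_shapes([(2, 3), (2, 4)], axis=1)
#     ((2, 7), [(slice(0, 3),), (slice(3, 7),)])
#
# i.e. the result has shape (2, 7); the first block fills columns 0:3 and the
# second fills columns 3:7.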


def _block_info_recursion(arrays, max_depth, result_ndim, depth=0):
    """
    Returns the shape of the final array, along with a list
    of slices and a list of arrays that can be used for assignment inside the
    new array

    Parameters
    ----------
    arrays : nested list of arrays
        The arrays to check
    max_depth : int
        The number of nested lists
    result_ndim : int
        The number of dimensions in the final array.

    Returns
    -------
    shape : tuple of int
        The shape that the final array will take on.
    slices : list of tuple of slices
        The slices into the full array required for assignment. These are
        required to be prepended with ``(Ellipsis, )`` to obtain the correct
        final index.
    arrays : list of ndarray
        The data to assign to each slice of the full array
    """
    if depth < max_depth:
        shapes, slices, arrays = zip(
            *[_block_info_recursion(arr, max_depth, result_ndim, depth+1)
              for arr in arrays])

        axis = result_ndim - max_depth + depth
        shape, slice_prefixes = _concatenate_shapes(shapes, axis)

        # Prepend the slice prefix and flatten the slices
        slices = [slice_prefix + the_slice
                  for slice_prefix, inner_slices in zip(slice_prefixes, slices)
                  for the_slice in inner_slices]

        # Flatten the array list
        arrays = functools.reduce(operator.add, arrays)

        return shape, slices, arrays
    else:
        # We've 'bottomed out' - arrays is either a scalar or an array
        # type(arrays) is not list
        # Return the slice and the array inside a list to be consistent with
        # the recursive case.
        arr = _atleast_nd(arrays, result_ndim)
        return arr.shape, [()], [arr]
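

# A worked example (hypothetical, not part of the original module): for a flat
# block list ``[A, B]`` with ``A.shape == (2, 2)`` and ``B.shape == (2, 3)``,
# ``_block_info_recursion([A, B], max_depth=1, result_ndim=2)`` returns the
# final shape ``(2, 5)``, the slices ``[(slice(0, 2),), (slice(2, 5),)]`` and
# the flattened list ``[A, B]``; ``_block_slicing`` then performs
# ``result[..., 0:2] = A`` and ``result[..., 2:5] = B``.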


def _block(arrays, max_depth, result_ndim, depth=0):
    """
    Internal implementation of block based on repeated concatenation.
    `arrays` is the argument passed to
    block. `max_depth` is the depth of nested lists within `arrays` and
    `result_ndim` is the greatest of the dimensions of the arrays in
    `arrays` and the depth of the lists in `arrays` (see block docstring
    for details).
    """
    if depth < max_depth:
        arrs = [_block(arr, max_depth, result_ndim, depth+1)
                for arr in arrays]
        return _concatenate(arrs, axis=-(max_depth-depth))
    else:
        # We've 'bottomed out' - arrays is either a scalar or an array
        # type(arrays) is not list
        return _atleast_nd(arrays, result_ndim)
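

# Unrolled sketch (hypothetical, not part of the original module): for a
# two-level nesting with blocks that are already 2-D,
# ``_block([[A, B], [C, D]], max_depth=2, result_ndim=2)`` is equivalent to
#
#     np.concatenate([np.concatenate([A, B], axis=-1),
#                     np.concatenate([C, D], axis=-1)], axis=-2)
#
# i.e. the innermost lists are joined along the last axis first, then the
# outer list along the second-to-last axis.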


def _block_dispatcher(arrays):
    # Use type(...) is list to match the behavior of np.block(), which special
    # cases list specifically rather than allowing for generic iterables or
    # tuple. Also, we know that list.__array_function__ will never exist.
    if type(arrays) is list:
        for subarrays in arrays:
            yield from _block_dispatcher(subarrays)
    else:
        yield arrays
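

# For example (hypothetical, not part of the original module):
# ``list(_block_dispatcher([[a, b], [c]]))`` yields the leaves ``[a, b, c]``,
# which is exactly what ``__array_function__`` dispatch needs to inspect.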


@array_function_dispatch(_block_dispatcher)
def block(arrays):
    """
    Assemble an nd-array from nested lists of blocks.

    Blocks in the innermost lists are concatenated (see `concatenate`) along
    the last dimension (-1), then these are concatenated along the
    second-last dimension (-2), and so on until the outermost list is reached.

    Blocks can be of any dimension, but will not be broadcasted using the normal
    rules. Instead, leading axes of size 1 are inserted, to make ``block.ndim``
    the same for all blocks. This is primarily useful for working with scalars,
    and means that code like ``np.block([v, 1])`` is valid, where
    ``v.ndim == 1``.

    When the nested list is two levels deep, this allows block matrices to be
    constructed from their components.

    .. versionadded:: 1.13.0

    Parameters
    ----------
    arrays : nested list of array_like or scalars (but not tuples)
        If passed a single ndarray or scalar (a nested list of depth 0), this
        is returned unmodified (and not copied).

        Element shapes must match along the appropriate axes (without
        broadcasting), but leading 1s will be prepended to the shape as
        necessary to make the dimensions match.

    Returns
    -------
    block_array : ndarray
        The array assembled from the given blocks.

        The dimensionality of the output is equal to the greatest of:

        * the dimensionality of all the inputs
        * the depth to which the input list is nested

    Raises
    ------
    ValueError
        * If list depths are mismatched - for instance, ``[[a, b], c]`` is
          illegal, and should be spelt ``[[a, b], [c]]``
        * If lists are empty - for instance, ``[[a, b], []]``

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    stack : Join a sequence of arrays along a new axis.
    vstack : Stack arrays in sequence vertically (row wise).
    hstack : Stack arrays in sequence horizontally (column wise).
    dstack : Stack arrays in sequence depth wise (along third axis).
    column_stack : Stack 1-D arrays as columns into a 2-D array.
    vsplit : Split an array into multiple sub-arrays vertically (row-wise).

    Notes
    -----
    When called with only scalars, ``np.block`` is equivalent to an ndarray
    call. So ``np.block([[1, 2], [3, 4]])`` is equivalent to
    ``np.array([[1, 2], [3, 4]])``.

    This function does not enforce that the blocks lie on a fixed grid.
    ``np.block([[a, b], [c, d]])`` is not restricted to arrays of the form::

        AAAbb
        AAAbb
        cccDD

    But is also allowed to produce, for some ``a, b, c, d``::

        AAAbb
        AAAbb
        cDDDD

    Since concatenation happens along the last axis first, `block` is _not_
    capable of producing the following directly::

        AAAbb
        cccbb
        cccDD

    Matlab's "square bracket stacking", ``[A, B, ...; p, q, ...]``, is
    equivalent to ``np.block([[A, B, ...], [p, q, ...]])``.

    Examples
    --------
    The most common use of this function is to build a block matrix

    >>> A = np.eye(2) * 2
    >>> B = np.eye(3) * 3
    >>> np.block([
    ...     [A,               np.zeros((2, 3))],
    ...     [np.ones((3, 2)), B               ]
    ... ])
    array([[2., 0., 0., 0., 0.],
           [0., 2., 0., 0., 0.],
           [1., 1., 3., 0., 0.],
           [1., 1., 0., 3., 0.],
           [1., 1., 0., 0., 3.]])

    With a list of depth 1, `block` can be used as `hstack`

    >>> np.block([1, 2, 3])              # hstack([1, 2, 3])
    array([1, 2, 3])

    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.block([a, b, 10])             # hstack([a, b, 10])
    array([ 1,  2,  3,  2,  3,  4, 10])

    >>> A = np.ones((2, 2), int)
    >>> B = 2 * A
    >>> np.block([A, B])                 # hstack([A, B])
    array([[1, 1, 2, 2],
           [1, 1, 2, 2]])

    With a list of depth 2, `block` can be used in place of `vstack`:

    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.block([[a], [b]])             # vstack([a, b])
    array([[1, 2, 3],
           [2, 3, 4]])

    >>> A = np.ones((2, 2), int)
    >>> B = 2 * A
    >>> np.block([[A], [B]])             # vstack([A, B])
    array([[1, 1],
           [1, 1],
           [2, 2],
           [2, 2]])

    It can also be used in place of `atleast_1d` and `atleast_2d`

    >>> a = np.array(0)
    >>> b = np.array([1])
    >>> np.block([a])                    # atleast_1d(a)
    array([0])
    >>> np.block([b])                    # atleast_1d(b)
    array([1])

    >>> np.block([[a]])                  # atleast_2d(a)
    array([[0]])
    >>> np.block([[b]])                  # atleast_2d(b)
    array([[1]])

    """
    arrays, list_ndim, result_ndim, final_size = _block_setup(arrays)

    # It was found through benchmarking that making an array of final size
    # around 256x256 was faster by straight concatenation on a
    # i7-7700HQ processor and dual channel ram 2400MHz.
    # It didn't seem to matter heavily on the dtype used.
    #
    # A 2D array using repeated concatenation requires 2 copies of the array.
    #
    # The fastest algorithm will depend on the ratio of CPU power to memory
    # speed.
    # One can monitor the results of the benchmark
    # https://pv.github.io/numpy-bench/#bench_shape_base.Block2D.time_block2d
    # to tune this parameter until a C version of the `_block_info_recursion`
    # algorithm is implemented which would likely be faster than the python
    # version.
    if list_ndim * final_size > (2 * 512 * 512):
        return _block_slicing(arrays, list_ndim, result_ndim)
    else:
        return _block_concatenate(arrays, list_ndim, result_ndim)


# These helper functions are mostly used for testing.
# They allow us to write tests that directly call `_block_slicing`
# or `_block_concatenate` without blocking large arrays to force the wisdom
# to trigger the desired path.
def _block_setup(arrays):
    """
    Returns
    (`arrays`, list_ndim, result_ndim, final_size)
    """
    bottom_index, arr_ndim, final_size = _block_check_depths_match(arrays)
    list_ndim = len(bottom_index)
    if bottom_index and bottom_index[-1] is None:
        raise ValueError(
            'List at {} cannot be empty'.format(
                _block_format_index(bottom_index)
            )
        )
    result_ndim = max(arr_ndim, list_ndim)
    return arrays, list_ndim, result_ndim, final_size


def _block_slicing(arrays, list_ndim, result_ndim):
    shape, slices, arrays = _block_info_recursion(
        arrays, list_ndim, result_ndim)
    dtype = _nx.result_type(*[arr.dtype for arr in arrays])

    # Test preferring F only in the case that all input arrays are F
    F_order = all(arr.flags['F_CONTIGUOUS'] for arr in arrays)
    C_order = all(arr.flags['C_CONTIGUOUS'] for arr in arrays)
    order = 'F' if F_order and not C_order else 'C'

    result = _nx.empty(shape=shape, dtype=dtype, order=order)
    # Note: In a c implementation, the function
    # PyArray_CreateMultiSortedStridePerm could be used for more advanced
    # guessing of the desired order.

    for the_slice, arr in zip(slices, arrays):
        result[(Ellipsis,) + the_slice] = arr
    return result


def _block_concatenate(arrays, list_ndim, result_ndim):
    result = _block(arrays, list_ndim, result_ndim)
    if list_ndim == 0:
        # Catch an edge case where _block returns a view because
        # `arrays` is a single numpy array and not a list of numpy arrays.
        # This might copy scalars or lists twice, but this isn't a likely
        # usecase for those interested in performance
        result = result.copy()
    return result