
__all__ = ['atleast_1d', 'atleast_2d', 'atleast_3d', 'block', 'hstack',
           'stack', 'vstack']

import functools
import itertools
import operator
import warnings

from . import numeric as _nx
from . import overrides
from ._asarray import array, asanyarray
from .multiarray import normalize_axis_index
from . import fromnumeric as _from_nx


array_function_dispatch = functools.partial(
    overrides.array_function_dispatch, module='numpy')

def _atleast_1d_dispatcher(*arys):
    return arys


@array_function_dispatch(_atleast_1d_dispatcher)
def atleast_1d(*arys):
    """
    Convert inputs to arrays with at least one dimension.

    Scalar inputs are converted to 1-dimensional arrays, whilst
    higher-dimensional inputs are preserved.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more input arrays.

    Returns
    -------
    ret : ndarray
        An array, or list of arrays, each with ``a.ndim >= 1``.
        Copies are made only if necessary.

    See Also
    --------
    atleast_2d, atleast_3d

    Examples
    --------
    >>> np.atleast_1d(1.0)
    array([1.])

    >>> x = np.arange(9.0).reshape(3,3)
    >>> np.atleast_1d(x)
    array([[0., 1., 2.],
           [3., 4., 5.],
           [6., 7., 8.]])
    >>> np.atleast_1d(x) is x
    True

    >>> np.atleast_1d(1, [3, 4])
    [array([1]), array([3, 4])]

    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1)
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res

def _atleast_2d_dispatcher(*arys):
    return arys


@array_function_dispatch(_atleast_2d_dispatcher)
def atleast_2d(*arys):
    """
    View inputs as arrays with at least two dimensions.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more array-like sequences. Non-array inputs are converted
        to arrays. Arrays that already have two or more dimensions are
        preserved.

    Returns
    -------
    res1, res2, ... : ndarray
        An array, or list of arrays, each with ``a.ndim >= 2``.
        Copies are avoided where possible, and views with two or more
        dimensions are returned.

    See Also
    --------
    atleast_1d, atleast_3d

    Examples
    --------
    >>> np.atleast_2d(3.0)
    array([[3.]])

    >>> x = np.arange(3.0)
    >>> np.atleast_2d(x)
    array([[0., 1., 2.]])
    >>> np.atleast_2d(x).base is x
    True

    >>> np.atleast_2d(1, [1, 2], [[1, 2]])
    [array([[1]]), array([[1, 2]]), array([[1, 2]])]

    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1, 1)
        elif ary.ndim == 1:
            result = ary[_nx.newaxis, :]
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res

def _atleast_3d_dispatcher(*arys):
    return arys


@array_function_dispatch(_atleast_3d_dispatcher)
def atleast_3d(*arys):
    """
    View inputs as arrays with at least three dimensions.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more array-like sequences. Non-array inputs are converted to
        arrays. Arrays that already have three or more dimensions are
        preserved.

    Returns
    -------
    res1, res2, ... : ndarray
        An array, or list of arrays, each with ``a.ndim >= 3``. Copies are
        avoided where possible, and views with three or more dimensions are
        returned. For example, a 1-D array of shape ``(N,)`` becomes a view
        of shape ``(1, N, 1)``, and a 2-D array of shape ``(M, N)`` becomes a
        view of shape ``(M, N, 1)``.

    See Also
    --------
    atleast_1d, atleast_2d

    Examples
    --------
    >>> np.atleast_3d(3.0)
    array([[[3.]]])

    >>> x = np.arange(3.0)
    >>> np.atleast_3d(x).shape
    (1, 3, 1)

    >>> x = np.arange(12.0).reshape(4,3)
    >>> np.atleast_3d(x).shape
    (4, 3, 1)
    >>> np.atleast_3d(x).base is x.base  # x is a reshape, so not base itself
    True

    >>> for arr in np.atleast_3d([1, 2], [[1, 2]], [[[1, 2]]]):
    ...     print(arr, arr.shape)  # doctest: +SKIP
    ...
    [[[1]
      [2]]] (1, 2, 1)
    [[[1]
      [2]]] (1, 2, 1)
    [[[1 2]]] (1, 1, 2)

    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1, 1, 1)
        elif ary.ndim == 1:
            result = ary[_nx.newaxis, :, _nx.newaxis]
        elif ary.ndim == 2:
            result = ary[:, :, _nx.newaxis]
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res

def _arrays_for_stack_dispatcher(arrays, stacklevel=4):
    if not hasattr(arrays, '__getitem__') and hasattr(arrays, '__iter__'):
        warnings.warn('arrays to stack must be passed as a "sequence" type '
                      'such as list or tuple. Support for non-sequence '
                      'iterables such as generators is deprecated as of '
                      'NumPy 1.16 and will raise an error in the future.',
                      FutureWarning, stacklevel=stacklevel)
        return ()
    return arrays
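# Editor's sketch (not part of NumPy): a minimal illustration of how the
# deprecation above behaves. Passing a generator instead of a list or tuple
# makes the helper emit the FutureWarning and return an empty tuple, so that
# dispatch sees no array arguments. The function below is hypothetical and
# exists only for illustration.
def _sketch_generator_deprecation():
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # A generator has __iter__ but no __getitem__, which triggers the
        # warning branch above.
        result = _arrays_for_stack_dispatcher(x for x in (1, 2, 3))
    assert result == ()
    assert any(issubclass(w.category, FutureWarning) for w in caught)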

def _vhstack_dispatcher(tup):
    return _arrays_for_stack_dispatcher(tup)


@array_function_dispatch(_vhstack_dispatcher)
def vstack(tup):
    """
    Stack arrays in sequence vertically (row wise).

    This is equivalent to concatenation along the first axis after 1-D arrays
    of shape `(N,)` have been reshaped to `(1,N)`. Rebuilds arrays divided by
    `vsplit`.

    This function makes most sense for arrays with up to 3 dimensions. For
    instance, for pixel-data with a height (first axis), width (second axis),
    and r/g/b channels (third axis). The functions `concatenate`, `stack` and
    `block` provide more general stacking and concatenation operations.

    Parameters
    ----------
    tup : sequence of ndarrays
        The arrays must have the same shape along all but the first axis.
        1-D arrays must have the same length.

    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays; it will be at least
        2-D.

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    stack : Join a sequence of arrays along a new axis.
    block : Assemble an nd-array from nested lists of blocks.
    hstack : Stack arrays in sequence horizontally (column wise).
    dstack : Stack arrays in sequence depth wise (along third axis).
    column_stack : Stack 1-D arrays as columns into a 2-D array.
    vsplit : Split an array into multiple sub-arrays vertically (row-wise).

    Examples
    --------
    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.vstack((a,b))
    array([[1, 2, 3],
           [2, 3, 4]])

    >>> a = np.array([[1], [2], [3]])
    >>> b = np.array([[2], [3], [4]])
    >>> np.vstack((a,b))
    array([[1],
           [2],
           [3],
           [2],
           [3],
           [4]])

    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(tup, stacklevel=2)
    arrs = atleast_2d(*tup)
    if not isinstance(arrs, list):
        arrs = [arrs]
    return _nx.concatenate(arrs, 0)

@array_function_dispatch(_vhstack_dispatcher)
def hstack(tup):
    """
    Stack arrays in sequence horizontally (column wise).

    This is equivalent to concatenation along the second axis, except for 1-D
    arrays where it concatenates along the first axis. Rebuilds arrays divided
    by `hsplit`.

    This function makes most sense for arrays with up to 3 dimensions. For
    instance, for pixel-data with a height (first axis), width (second axis),
    and r/g/b channels (third axis). The functions `concatenate`, `stack` and
    `block` provide more general stacking and concatenation operations.

    Parameters
    ----------
    tup : sequence of ndarrays
        The arrays must have the same shape along all but the second axis,
        except 1-D arrays which can be any length.

    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays.

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    stack : Join a sequence of arrays along a new axis.
    block : Assemble an nd-array from nested lists of blocks.
    vstack : Stack arrays in sequence vertically (row wise).
    dstack : Stack arrays in sequence depth wise (along third axis).
    column_stack : Stack 1-D arrays as columns into a 2-D array.
    hsplit : Split an array into multiple sub-arrays horizontally (column-wise).

    Examples
    --------
    >>> a = np.array((1,2,3))
    >>> b = np.array((2,3,4))
    >>> np.hstack((a,b))
    array([1, 2, 3, 2, 3, 4])
    >>> a = np.array([[1],[2],[3]])
    >>> b = np.array([[2],[3],[4]])
    >>> np.hstack((a,b))
    array([[1, 2],
           [2, 3],
           [3, 4]])

    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(tup, stacklevel=2)

    arrs = atleast_1d(*tup)
    if not isinstance(arrs, list):
        arrs = [arrs]
    # As a special case, dimension 0 of 1-dimensional arrays is "horizontal"
    if arrs and arrs[0].ndim == 1:
        return _nx.concatenate(arrs, 0)
    else:
        return _nx.concatenate(arrs, 1)

def _stack_dispatcher(arrays, axis=None, out=None):
    arrays = _arrays_for_stack_dispatcher(arrays, stacklevel=6)
    if out is not None:
        # optimize for the typical case where only arrays is provided
        arrays = list(arrays)
        arrays.append(out)
    return arrays

@array_function_dispatch(_stack_dispatcher)
def stack(arrays, axis=0, out=None):
    """
    Join a sequence of arrays along a new axis.

    The ``axis`` parameter specifies the index of the new axis in the
    dimensions of the result. For example, if ``axis=0`` it will be the first
    dimension and if ``axis=-1`` it will be the last dimension.

    .. versionadded:: 1.10.0

    Parameters
    ----------
    arrays : sequence of array_like
        Each array must have the same shape.

    axis : int, optional
        The axis in the result array along which the input arrays are stacked.

    out : ndarray, optional
        If provided, the destination to place the result. The shape must be
        correct, matching that of what stack would have returned if no
        out argument were specified.

    Returns
    -------
    stacked : ndarray
        The stacked array has one more dimension than the input arrays.

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    block : Assemble an nd-array from nested lists of blocks.
    split : Split array into a list of multiple sub-arrays of equal size.

    Examples
    --------
    >>> arrays = [np.random.randn(3, 4) for _ in range(10)]
    >>> np.stack(arrays, axis=0).shape
    (10, 3, 4)

    >>> np.stack(arrays, axis=1).shape
    (3, 10, 4)

    >>> np.stack(arrays, axis=2).shape
    (3, 4, 10)

    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.stack((a, b))
    array([[1, 2, 3],
           [2, 3, 4]])

    >>> np.stack((a, b), axis=-1)
    array([[1, 2],
           [2, 3],
           [3, 4]])

    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(arrays, stacklevel=2)

    arrays = [asanyarray(arr) for arr in arrays]
    if not arrays:
        raise ValueError('need at least one array to stack')

    shapes = {arr.shape for arr in arrays}
    if len(shapes) != 1:
        raise ValueError('all input arrays must have the same shape')

    result_ndim = arrays[0].ndim + 1
    axis = normalize_axis_index(axis, result_ndim)

    sl = (slice(None),) * axis + (_nx.newaxis,)
    expanded_arrays = [arr[sl] for arr in arrays]
    return _nx.concatenate(expanded_arrays, axis=axis, out=out)
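# Editor's sketch (not part of NumPy): stack() is equivalent to inserting a
# new axis into every input and concatenating along it, which is exactly what
# the implementation above does. The function name and sample values below
# are illustrative only.
def _sketch_stack_equivalence():
    a = _nx.arange(6).reshape(2, 3)
    b = a + 10
    via_stack = stack([a, b], axis=1)
    # Insert the new axis by hand, then concatenate along it.
    via_concat = _nx.concatenate(
        [arr[:, _nx.newaxis, ...] for arr in (a, b)], axis=1)
    assert via_stack.shape == (2, 2, 3)
    assert (via_stack == via_concat).all()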

# Internal functions to eliminate the overhead of repeated dispatch in one of
# the two possible paths inside np.block.
# Use getattr to protect against __array_function__ being disabled.
_size = getattr(_from_nx.size, '__wrapped__', _from_nx.size)
_ndim = getattr(_from_nx.ndim, '__wrapped__', _from_nx.ndim)
_concatenate = getattr(_from_nx.concatenate, '__wrapped__', _from_nx.concatenate)

def _block_format_index(index):
    """
    Convert a list of indices ``[0, 1, 2]`` into ``"arrays[0][1][2]"``.
    """
    idx_str = ''.join('[{}]'.format(i) for i in index if i is not None)
    return 'arrays' + idx_str

def _block_check_depths_match(arrays, parent_index=[]):
    """
    Recursive function checking that the depths of nested lists in `arrays`
    all match. Mismatch raises a ValueError as described in the block
    docstring below.

    The entire index (rather than just the depth) needs to be calculated
    for each innermost list, in case an error needs to be raised, so that
    the index of the offending list can be printed as part of the error.

    Parameters
    ----------
    arrays : nested list of arrays
        The arrays to check
    parent_index : list of int
        The full index of `arrays` within the nested lists passed to
        `_block_check_depths_match` at the top of the recursion.

    Returns
    -------
    first_index : list of int
        The full index of an element from the bottom of the nesting in
        `arrays`. If any element at the bottom is an empty list, this will
        refer to it, and the last index along the empty axis will be None.
    max_arr_ndim : int
        The maximum of the ndims of the arrays nested in `arrays`.
    final_size : int
        The number of elements in the final array. This is used to motivate
        the choice of algorithm, based on benchmarking wisdom.

    """
    if type(arrays) is tuple:
        # not strictly necessary, but saves us from:
        #  - more than one way to do things - no point treating tuples like
        #    lists
        #  - horribly confusing behaviour that results when tuples are
        #    treated like ndarray
        raise TypeError(
            '{} is a tuple. '
            'Only lists can be used to arrange blocks, and np.block does '
            'not allow implicit conversion from tuple to ndarray.'.format(
                _block_format_index(parent_index)
            )
        )
    elif type(arrays) is list and len(arrays) > 0:
        idxs_ndims = (_block_check_depths_match(arr, parent_index + [i])
                      for i, arr in enumerate(arrays))

        first_index, max_arr_ndim, final_size = next(idxs_ndims)
        for index, ndim, size in idxs_ndims:
            final_size += size
            if ndim > max_arr_ndim:
                max_arr_ndim = ndim
            if len(index) != len(first_index):
                raise ValueError(
                    "List depths are mismatched. First element was at depth "
                    "{}, but there is an element at depth {} ({})".format(
                        len(first_index),
                        len(index),
                        _block_format_index(index)
                    )
                )
            # propagate our flag that indicates an empty list at the bottom
            if index[-1] is None:
                first_index = index

        return first_index, max_arr_ndim, final_size
    elif type(arrays) is list and len(arrays) == 0:
        # We've 'bottomed out' on an empty list
        return parent_index + [None], 0, 0
    else:
        # We've 'bottomed out' - arrays is either a scalar or an array
        size = _size(arrays)
        return parent_index, _ndim(arrays), size
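# Editor's sketch (not part of NumPy): a worked example of the values the
# depth check reports. For a 2x2 grid of 2x2 blocks, the first leaf lives at
# index [0][0], the deepest array is 2-D, and the assembled result will hold
# 16 elements. The function name is hypothetical, for illustration only.
def _sketch_block_check_depths_match():
    a = _nx.ones((2, 2))
    first_index, max_arr_ndim, final_size = _block_check_depths_match(
        [[a, a], [a, a]])
    assert first_index == [0, 0]
    assert max_arr_ndim == 2
    assert final_size == 16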

def _atleast_nd(a, ndim):
    # Ensures `a` has at least `ndim` dimensions by prepending
    # ones to `a.shape` as necessary
    return array(a, ndmin=ndim, copy=False, subok=True)


def _accumulate(values):
    return list(itertools.accumulate(values))

def _concatenate_shapes(shapes, axis):
    """Given array shapes, return the resulting shape and slice prefixes.

    These help in nested concatenation.

    Returns
    -------
    shape : tuple of int
        This tuple satisfies:
        ```
        shape, _ = _concatenate_shapes([arr.shape for arr in arrs], axis)
        shape == concatenate(arrs, axis).shape
        ```

    slice_prefixes : tuple of (slice(start, end), )
        For a list of arrays being concatenated, this returns the slice
        in the larger array at axis that needs to be sliced into.

        For example, the following holds:
        ```
        ret = concatenate([a, b, c], axis)
        _, (sl_a, sl_b, sl_c) = _concatenate_shapes(
            [a.shape, b.shape, c.shape], axis)

        ret[(slice(None),) * axis + sl_a] == a
        ret[(slice(None),) * axis + sl_b] == b
        ret[(slice(None),) * axis + sl_c] == c
        ```

        These are called slice prefixes since they are used in the recursive
        blocking algorithm to compute the left-most slices during the
        recursion. Therefore, they must be prepended to the rest of the slice
        that was computed deeper in the recursion.

        These are returned as tuples to ensure that they can quickly be added
        to an existing slice tuple without creating a new tuple every time.

    """
    # Cache a result that will be reused.
    shape_at_axis = [shape[axis] for shape in shapes]

    # Take a shape, any shape
    first_shape = shapes[0]
    first_shape_pre = first_shape[:axis]
    first_shape_post = first_shape[axis+1:]

    if any(shape[:axis] != first_shape_pre or
           shape[axis+1:] != first_shape_post for shape in shapes):
        raise ValueError(
            'Mismatched array shapes in block along axis {}.'.format(axis))

    shape = (first_shape_pre + (sum(shape_at_axis),) + first_shape[axis+1:])

    offsets_at_axis = _accumulate(shape_at_axis)
    slice_prefixes = [(slice(start, end),)
                      for start, end in zip([0] + offsets_at_axis,
                                            offsets_at_axis)]
    return shape, slice_prefixes
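# Editor's sketch (not part of NumPy): a concrete instance of the contract
# described in the docstring above. Concatenating shapes (2, 3) and (2, 4)
# along axis 1 yields a (2, 7) result, with one slice prefix per input along
# that axis. The function name is hypothetical, for illustration only.
def _sketch_concatenate_shapes():
    shape, slice_prefixes = _concatenate_shapes([(2, 3), (2, 4)], axis=1)
    assert shape == (2, 7)
    assert slice_prefixes == [(slice(0, 3),), (slice(3, 7),)]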

def _block_info_recursion(arrays, max_depth, result_ndim, depth=0):
    """
    Returns the shape of the final array, along with a list
    of slices and a list of arrays that can be used for assignment inside the
    new array

    Parameters
    ----------
    arrays : nested list of arrays
        The arrays to check
    max_depth : int
        The number of nested lists
    result_ndim : int
        The number of dimensions in the final array.

    Returns
    -------
    shape : tuple of int
        The shape that the final array will take on.
    slices : list of tuple of slices
        The slices into the full array required for assignment. These are
        required to be prepended with ``(Ellipsis, )`` to obtain the correct
        final index.
    arrays : list of ndarray
        The data to assign to each slice of the full array

    """
    if depth < max_depth:
        shapes, slices, arrays = zip(
            *[_block_info_recursion(arr, max_depth, result_ndim, depth+1)
              for arr in arrays])

        axis = result_ndim - max_depth + depth
        shape, slice_prefixes = _concatenate_shapes(shapes, axis)

        # Prepend the slice prefix and flatten the slices
        slices = [slice_prefix + the_slice
                  for slice_prefix, inner_slices in zip(slice_prefixes, slices)
                  for the_slice in inner_slices]

        # Flatten the array list
        arrays = functools.reduce(operator.add, arrays)

        return shape, slices, arrays
    else:
        # We've 'bottomed out' - arrays is either a scalar or an array
        # type(arrays) is not list
        # Return the slice and the array inside a list to be consistent with
        # the recursive case.
        arr = _atleast_nd(arrays, result_ndim)
        return arr.shape, [()], [arr]
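# Editor's sketch (not part of NumPy): what the recursion above returns for a
# flat (depth-1) list of two 1-D arrays. The shape is the concatenated shape,
# and there is one slice per leaf, ready to be prefixed with ``(Ellipsis,)``
# for assignment. The function name is hypothetical, for illustration only.
def _sketch_block_info_recursion():
    a = _nx.arange(2)
    b = _nx.arange(3)
    shape, slices, leaves = _block_info_recursion(
        [a, b], max_depth=1, result_ndim=1)
    assert shape == (5,)
    assert slices == [(slice(0, 2),), (slice(2, 5),)]
    assert len(leaves) == 2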

def _block(arrays, max_depth, result_ndim, depth=0):
    """
    Internal implementation of block based on repeated concatenation.
    `arrays` is the argument passed to block. `max_depth` is the depth of
    nested lists within `arrays` and `result_ndim` is the greatest of the
    dimensions of the arrays in `arrays` and the depth of the lists in
    `arrays` (see block docstring for details).
    """
    if depth < max_depth:
        arrs = [_block(arr, max_depth, result_ndim, depth+1)
                for arr in arrays]
        return _concatenate(arrs, axis=-(max_depth-depth))
    else:
        # We've 'bottomed out' - arrays is either a scalar or an array
        # type(arrays) is not list
        return _atleast_nd(arrays, result_ndim)
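# Editor's sketch (not part of NumPy): the concatenation path above joins the
# innermost lists along the last axis first, then works outward toward the
# leading axes. For a depth-2 list of 2x2 blocks, the inner lists become
# (2, 4) rows that are then stacked into a (4, 4) result. The function name
# is hypothetical, for illustration only.
def _sketch_block_recursion():
    a = _nx.ones((2, 2))
    out = _block([[a, a], [a, a]], max_depth=2, result_ndim=2)
    assert out.shape == (4, 4)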

def _block_dispatcher(arrays):
    # Use type(...) is list to match the behavior of np.block(), which special
    # cases list specifically rather than allowing for generic iterables or
    # tuple. Also, we know that list.__array_function__ will never exist.
    if type(arrays) is list:
        for subarrays in arrays:
            yield from _block_dispatcher(subarrays)
    else:
        yield arrays

@array_function_dispatch(_block_dispatcher)
def block(arrays):
    """
    Assemble an nd-array from nested lists of blocks.

    Blocks in the innermost lists are concatenated (see `concatenate`) along
    the last dimension (-1), then these are concatenated along the
    second-last dimension (-2), and so on until the outermost list is reached.

    Blocks can be of any dimension, but will not be broadcast using the normal
    rules. Instead, leading axes of size 1 are inserted, to make ``block.ndim``
    the same for all blocks. This is primarily useful for working with scalars,
    and means that code like ``np.block([v, 1])`` is valid, where
    ``v.ndim == 1``.

    When the nested list is two levels deep, this allows block matrices to be
    constructed from their components.

    .. versionadded:: 1.13.0

    Parameters
    ----------
    arrays : nested list of array_like or scalars (but not tuples)
        If passed a single ndarray or scalar (a nested list of depth 0), this
        is returned unmodified (and not copied).

        Element shapes must match along the appropriate axes (without
        broadcasting), but leading 1s will be prepended to the shape as
        necessary to make the dimensions match.

    Returns
    -------
    block_array : ndarray
        The array assembled from the given blocks.

        The dimensionality of the output is equal to the greatest of:
        * the dimensionality of all the inputs
        * the depth to which the input list is nested

    Raises
    ------
    ValueError
        * If list depths are mismatched - for instance, ``[[a, b], c]`` is
          illegal, and should be spelt ``[[a, b], [c]]``
        * If lists are empty - for instance, ``[[a, b], []]``

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    stack : Join a sequence of arrays along a new axis.
    vstack : Stack arrays in sequence vertically (row wise).
    hstack : Stack arrays in sequence horizontally (column wise).
    dstack : Stack arrays in sequence depth wise (along third axis).
    column_stack : Stack 1-D arrays as columns into a 2-D array.
    vsplit : Split an array into multiple sub-arrays vertically (row-wise).

    Notes
    -----

    When called with only scalars, ``np.block`` is equivalent to an ndarray
    call. So ``np.block([[1, 2], [3, 4]])`` is equivalent to
    ``np.array([[1, 2], [3, 4]])``.

    This function does not enforce that the blocks lie on a fixed grid.
    ``np.block([[a, b], [c, d]])`` is not restricted to arrays of the form::

        AAAbb
        AAAbb
        cccDD

    But is also allowed to produce, for some ``a, b, c, d``::

        AAAbb
        AAAbb
        cDDDD

    Since concatenation happens along the last axis first, `block` is _not_
    capable of producing the following directly::

        AAAbb
        cccbb
        cccDD

    Matlab's "square bracket stacking", ``[A, B, ...; p, q, ...]``, is
    equivalent to ``np.block([[A, B, ...], [p, q, ...]])``.

    Examples
    --------
    The most common use of this function is to build a block matrix

    >>> A = np.eye(2) * 2
    >>> B = np.eye(3) * 3
    >>> np.block([
    ...     [A, np.zeros((2, 3))],
    ...     [np.ones((3, 2)), B]
    ... ])
    array([[2., 0., 0., 0., 0.],
           [0., 2., 0., 0., 0.],
           [1., 1., 3., 0., 0.],
           [1., 1., 0., 3., 0.],
           [1., 1., 0., 0., 3.]])

    With a list of depth 1, `block` can be used as `hstack`

    >>> np.block([1, 2, 3])              # hstack([1, 2, 3])
    array([1, 2, 3])

    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.block([a, b, 10])             # hstack([a, b, 10])
    array([ 1,  2,  3,  2,  3,  4, 10])

    >>> A = np.ones((2, 2), int)
    >>> B = 2 * A
    >>> np.block([A, B])                 # hstack([A, B])
    array([[1, 1, 2, 2],
           [1, 1, 2, 2]])

    With a list of depth 2, `block` can be used in place of `vstack`:

    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.block([[a], [b]])             # vstack([a, b])
    array([[1, 2, 3],
           [2, 3, 4]])

    >>> A = np.ones((2, 2), int)
    >>> B = 2 * A
    >>> np.block([[A], [B]])             # vstack([A, B])
    array([[1, 1],
           [1, 1],
           [2, 2],
           [2, 2]])

    It can also be used in place of `atleast_1d` and `atleast_2d`

    >>> a = np.array(0)
    >>> b = np.array([1])
    >>> np.block([a])                    # atleast_1d(a)
    array([0])
    >>> np.block([b])                    # atleast_1d(b)
    array([1])

    >>> np.block([[a]])                  # atleast_2d(a)
    array([[0]])
    >>> np.block([[b]])                  # atleast_2d(b)
    array([[1]])


    """
    arrays, list_ndim, result_ndim, final_size = _block_setup(arrays)

    # It was found through benchmarking that making an array of final size
    # around 256x256 was faster by straight concatenation on an
    # i7-7700HQ processor with dual-channel RAM at 2400 MHz.
    # The dtype used did not seem to matter much.
    #
    # A 2D array using repeated concatenation requires 2 copies of the array.
    #
    # The fastest algorithm will depend on the ratio of CPU power to memory
    # speed.
    # One can monitor the results of the benchmark
    # https://pv.github.io/numpy-bench/#bench_shape_base.Block2D.time_block2d
    # to tune this parameter until a C version of the `_block_info_recursion`
    # algorithm is implemented which would likely be faster than the Python
    # version.
    if list_ndim * final_size > (2 * 512 * 512):
        return _block_slicing(arrays, list_ndim, result_ndim)
    else:
        return _block_concatenate(arrays, list_ndim, result_ndim)

# These helper functions are mostly used for testing.
# They allow us to write tests that directly call `_block_slicing`
# or `_block_concatenate` without blocking large arrays to force the wisdom
# to trigger the desired path.
def _block_setup(arrays):
    """
    Returns
    (`arrays`, list_ndim, result_ndim, final_size)
    """
    bottom_index, arr_ndim, final_size = _block_check_depths_match(arrays)
    list_ndim = len(bottom_index)
    if bottom_index and bottom_index[-1] is None:
        raise ValueError(
            'List at {} cannot be empty'.format(
                _block_format_index(bottom_index)
            )
        )
    result_ndim = max(arr_ndim, list_ndim)
    return arrays, list_ndim, result_ndim, final_size
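# Editor's sketch (not part of NumPy): the values _block_setup reports for a
# 2x2 grid of 2x2 blocks, which block() then uses to pick between the slicing
# and concatenation paths. The function name is hypothetical, for
# illustration only.
def _sketch_block_setup():
    a = _nx.ones((2, 2))
    arrays, list_ndim, result_ndim, final_size = _block_setup([[a, a], [a, a]])
    assert (list_ndim, result_ndim, final_size) == (2, 2, 16)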

def _block_slicing(arrays, list_ndim, result_ndim):
    shape, slices, arrays = _block_info_recursion(
        arrays, list_ndim, result_ndim)
    dtype = _nx.result_type(*[arr.dtype for arr in arrays])

    # Test preferring F only in the case that all input arrays are F
    F_order = all(arr.flags['F_CONTIGUOUS'] for arr in arrays)
    C_order = all(arr.flags['C_CONTIGUOUS'] for arr in arrays)
    order = 'F' if F_order and not C_order else 'C'
    result = _nx.empty(shape=shape, dtype=dtype, order=order)
    # Note: In a C implementation, the function
    # PyArray_CreateMultiSortedStridePerm could be used for more advanced
    # guessing of the desired order.

    for the_slice, arr in zip(slices, arrays):
        result[(Ellipsis,) + the_slice] = arr
    return result

def _block_concatenate(arrays, list_ndim, result_ndim):
    result = _block(arrays, list_ndim, result_ndim)
    if list_ndim == 0:
        # Catch an edge case where _block returns a view because
        # `arrays` is a single numpy array and not a list of numpy arrays.
        # This might copy scalars or lists twice, but this isn't a likely
        # use case for those interested in performance
        result = result.copy()
    return result