Flattens a contiguous range of dims into a tensor. For use with nn_sequential.
Shape

Input: (*, S_start, ..., S_i, ..., S_end, *), where S_i is the size at dimension i and * means any number of dimensions including none.

Output: (*, S_start * ... * S_i * ... * S_end, *).
Examples
if (torch_is_installed()) {
  input <- torch_randn(32, 1, 5, 5)  # batch of 32 samples, each of shape 1 x 5 x 5
  m <- nn_flatten()                  # by default flattens dims 2 through -1
  m(input)                           # resulting shape: (32, 25)
}
#> torch_tensor
#> Columns 1 to 10
#> -0.3199 -0.5470 0.6900 -0.4410 0.7617 0.1433 -0.9380 0.3535 -1.2080 0.1720
#> 0.3672 0.3245 2.4987 0.0329 -0.2421 1.7749 -0.3858 0.0299 -1.5719 -1.0935
#> 0.1906 1.1518 -0.8113 0.7621 -0.2383 0.6764 -1.0068 0.3444 -0.9040 0.3299
#> 0.5682 1.0434 -1.4358 -0.6868 0.1968 0.9005 -0.0376 1.1348 -0.4826 -1.2142
#> -0.1885 1.0115 0.0010 0.3205 -0.9197 -2.8422 0.0036 -1.0949 -0.4103 -2.4228
#> -0.1616 0.9570 -0.9730 -0.5115 -0.7795 -0.6466 1.2477 1.9586 1.5921 0.3300
#> -0.2595 1.1271 -0.5637 -0.2949 -1.3485 0.6355 1.6048 0.0823 -1.3432 1.5985
#> -0.0718 -0.4439 1.4137 0.3199 0.4508 -1.1021 -0.7063 0.1385 0.1935 -0.0515
#> 0.4836 -1.0455 -0.0227 0.0323 2.5342 0.5683 0.6712 1.1591 -1.5967 0.7863
#> 0.0818 -1.7147 -0.4228 -0.6275 0.3657 -1.3283 0.2849 1.2881 0.0971 1.0030
#> -0.1574 0.3463 -0.1375 0.8296 -1.7232 -0.7998 -1.2898 0.7290 -0.3915 -0.0916
#> 1.4269 -1.1700 1.5849 1.6556 -0.2754 2.1463 -0.4575 0.1554 1.1298 -1.4076
#> -0.2696 -0.2911 1.5270 0.0218 -0.1632 0.6899 -0.5760 -0.4658 1.7844 -0.7699
#> 0.9222 -0.6399 -1.0585 1.7109 -0.5417 0.5917 -0.0074 1.2141 -0.0365 -0.6880
#> -0.5276 -0.2794 -1.1153 -1.7366 0.0878 2.1912 0.1281 -0.2582 0.3688 -0.6942
#> 0.1641 -1.8687 0.2645 -1.0567 0.8558 1.7184 0.1966 -0.4893 1.3731 -0.2916
#> -0.2796 -0.0801 -2.6216 1.2877 -0.7383 1.4559 -0.7376 -0.8191 0.9838 -0.2870
#> -0.5289 0.2997 0.3484 0.4236 2.0946 -0.5954 -0.9860 1.9025 1.1765 2.6436
#> -0.6257 0.7267 -1.3255 -1.3947 0.3862 0.4103 0.1376 -0.3802 -1.6978 -0.8255
#> 0.1994 1.1267 -0.5165 0.2751 0.3473 -0.2350 -2.8236 0.4303 0.1155 0.1129
#> -1.1642 -0.2489 0.1635 -1.0925 0.6888 -0.7258 -0.4762 -0.4694 -0.3709 -0.4298
#> 0.7046 -0.1109 -0.0571 0.2755 1.1823 -0.2505 0.4844 -0.0388 0.8161 0.0671
#> 0.1539 1.1621 -0.4768 -0.2405 1.7020 1.4349 1.3076 -0.0111 0.6385 1.2546
#> 0.2940 -0.5328 0.2567 -1.5142 -2.0561 -0.1865 -0.1219 0.9133 -0.2866 -0.7963
#> -1.6937 -0.0177 -0.7141 -0.9364 -1.3136 -0.8536 -0.8504 -0.5033 0.1461 0.9744
#> 0.5276 -0.4393 0.8308 1.0943 0.1384 -0.4463 0.8638 0.4720 0.5617 -0.6370
#> 0.4416 -0.0977 -0.4188 0.0910 0.4841 -1.3073 -0.7666 -0.2519 -1.9687 -0.2757
#> 1.4955 -0.4640 0.4704 0.0696 0.7688 2.1967 -0.0799 -1.0043 -1.7932 0.0867
#> -0.5049 -1.3557 0.7561 0.8408 -0.8754 0.2716 0.0061 0.0291 -0.7523 -1.2831
#> -0.1470 -1.1432 -0.5351 -0.7528 -0.4445 -0.5141 0.7747 1.7050 0.7797 -0.8719
#> ... [the output was truncated (use n=-1 to disable)]
#> [ CPUFloatType{32,25} ]
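The range of dimensions that gets collapsed can be adjusted via nn_flatten()'s start_dim and end_dim arguments (1-based in the R package, defaulting to 2 and -1). The sketch below is a minimal illustration under that assumption: it flattens only the spatial dimensions and keeps the channel dimension separate.

if (torch_is_installed()) {
  input <- torch_randn(32, 1, 5, 5)

  # Flatten only dimensions 3 and 4 (the 5 x 5 spatial grid), leaving the
  # batch and channel dimensions untouched.
  m <- nn_flatten(start_dim = 3, end_dim = 4)
  out <- m(input)
  out$shape  # expected: 32 1 25
}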