4.4 KiB
4.4 KiB
In [ ]:
import numpy as np
In [ ]:
# Build a synthetic collection of time series.
# In real life this data would come from an experiment or a simulation.
n_series = 30
len_one_series = 2**21  # ~2 million samples; 8 bytes * 2^21 / 2^20 = 16 MiB per series
time_series = [np.zeros((len_one_series, 1), dtype='float64')
               for _ in range(n_series)]
In [ ]:
# Report the memory footprint of one series and of the whole collection.
ts_size = time_series[0].nbytes / 2**20  # bytes -> MiB (2^20 bytes per MiB)
print('Size of one time series (MB):', ts_size)
print('Size of collection (MB):', n_series * ts_size)
In [ ]:
# Gather the collection into one big pre-allocated array, row-wise.
def load_data_row(x, time_series):
    """Copy each time series into one row of x and return x."""
    for idx, series in enumerate(time_series):
        x[idx, :] = series
    return x
In [ ]:
# Gather the collection into one big pre-allocated array, column-wise.
def load_data_column(x, time_series):
    """Copy each time series into one column of x and return x."""
    for idx, series in enumerate(time_series):
        x[:, idx] = series
    return x
In [ ]:
# Layout A: one series per row -> x has shape (n_series, len_one_series, 1).
# np.zeros allocates in C (row-major) order, so each row is one contiguous
# run of memory; the row-wise fill writes sequentially and should be the
# faster of the two layouts.
x = np.zeros((n_series, len_one_series, 1), dtype='float64')
%timeit load_data_row(x, time_series)
In [ ]:
# Layout B: one series per column -> x has shape (len_one_series, n_series, 1).
# With C (row-major) order, consecutive elements of a column are
# n_series elements apart in memory, so the column-wise fill writes with a
# stride and is expected to be slower than the row-wise version.
x = np.zeros((len_one_series, n_series, 1), dtype='float64')
%timeit load_data_column(x, time_series)
In [ ]: