@inproceedings{calandriello2016analysis,
  abstract = {Large-scale kernel ridge regression (KRR) is limited by the need to store a large kernel matrix $K_t$. To avoid storing the entire matrix $K_t$, Nyström methods subsample a subset of its columns and efficiently find an approximate KRR solution on the reconstructed $\widetilde{K}_t$. The chosen subsampling distribution in turn affects the statistical and computational tradeoffs. For KRR problems, [15, 1] show that a sampling distribution proportional to the ridge leverage scores (RLSs) provides strong reconstruction guarantees for $K_t$. While exact RLSs are as difficult to compute as a KRR solution, we may be able to approximate them well enough. In this paper, we study KRR problems in a sequential setting and introduce the INK-ESTIMATE algorithm, which incrementally computes estimates of the RLSs. INK-ESTIMATE maintains a small sketch of $K_t$ that, at each step, is used to compute an intermediate estimate of the RLSs. First, our sketch update does not require access to previously seen columns, so a single pass over the kernel matrix is sufficient. Second, the algorithm runs within a fixed, small space budget that depends only on the effective dimension of the kernel matrix. Finally, our sketch provides strong approximation guarantees on the distance $\|K_t - \widetilde{K}_t\|_2$ and on the statistical risk of the approximate KRR solution at any time, because all our guarantees hold at any intermediate step.},
  author = {Calandriello, Daniele and Lazaric, Alessandro and Valko, Michal},
  booktitle = {Uncertainty in Artificial Intelligence},
  title = {{Analysis of Nystr{\"{o}}m method with sequential ridge leverage scores}},
  year = {2016}
}