@inproceedings{valko2013stochastic,
  abstract   = {We study the problem of global maximization of a function f given a finite number of evaluations perturbed by noise. We consider a very weak assumption on the function, namely that it is locally smooth (in some precise sense) with respect to some semi-metric, around one of its global maxima. Compared to previous works on bandits in general spaces (Kleinberg et al., 2008; Bubeck et al., 2011a), our algorithm does not require the knowledge of this semi-metric. Our algorithm, StoSOO, follows an optimistic strategy to iteratively construct upper confidence bounds over the hierarchical partitions of the function domain to decide which point to sample next. A finite-time analysis of StoSOO shows that it performs almost as well as the best specifically-tuned algorithms even though the local smoothness of the function is not known.},
  author     = {Valko, Michal and Carpentier, Alexandra and Munos, R\'{e}mi},
  booktitle  = {30th International Conference on Machine Learning},
  title      = {{Stochastic Simultaneous Optimistic Optimization}},
  year       = {2013}
}