@techreport{publications48670,
  type        = {Working Paper},
  number      = {24-1505},
  series      = {TSE Working Paper},
  author      = {Cl{\'e}ment Lalanne and S{\'e}bastien Gadat},
  title       = {Privately Learning Smooth Distributions on the Hypercube by Projections},
  institution = {Universit{\'e} Toulouse Capitole},
  address     = {Toulouse},
  month       = {February},
  year        = {2024},
  url         = {https://publications.ut-capitole.fr/id/eprint/48670/},
  abstract    = {Fueled by the ever-increasing need for statistics that guarantee the privacy of their training sets, this article studies the centrally private estimation of Sobolev-smooth probability densities over the hypercube in dimension d. The contributions of this article are twofold. First, it generalizes the one-dimensional results of (Lalanne et al., 2023b) to non-integer levels of smoothness and to a high-dimensional setting, which is important for two reasons: it is better suited to modern learning tasks, and it allows an understanding of the relations between privacy, dimensionality, and smoothness, a central question in differential privacy. Second, this article presents a data-driven (usually referred to as adaptive in statistics) private estimation strategy that privately selects an estimator achieving a good bias-variance trade-off among a finite family of private projection estimators, without prior knowledge of the ground-truth smoothness {\ensuremath{\beta}}. This is achieved by adapting the Lepskii method to private selection, adding a new penalization term that makes the estimation privacy-aware.}
}