@article{JML-4-157,
  author   = {Ying, Lexing},
  title    = {Tangent Differential Privacy},
  journal  = {Journal of Machine Learning},
  year     = {2025},
  volume   = {4},
  number   = {3},
  pages    = {157--165},
  abstract = {Differential privacy is a framework for protecting the identity of individual data points in the decision-making process. In this note, we propose a new form of differential privacy, known as tangent differential privacy. Compared to the usual differential privacy, which is defined uniformly across data distributions, tangent differential privacy is tailored to a specific data distribution of interest. It also allows for general distribution distances such as total variation distance and Wasserstein distance. In the context of risk minimization, we demonstrate that entropic regularization ensures tangent differential privacy under relatively general conditions on the risk function.},
  issn     = {2790-2048},
  doi      = {10.4208/jml.240928},
  url      = {http://global-sci.org/intro/article_detail/jml/24378.html},
}