@article{1BCCGLPPRS_ACM2021,
author = {Bernardini, Giulia and Chen, Huiping and Conte, Alessio and Grossi, Roberto and Loukides, Grigorios and Pisanti, Nadia and Pissis, Solon P. and Rosone, Giovanna and Sweering, Michelle},
title = {Combinatorial Algorithms for String Sanitization},
year = {2021},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
volume = {15},
number = {1},
issn = {1556-4681},
url = {https://doi.org/10.1145/3418683},
doi = {10.1145/3418683},
abstract = {String data are often disseminated to support applications such as location-based service provision or DNA sequence analysis. This dissemination, however, may expose sensitive patterns that model confidential knowledge (e.g., trips to mental health clinics from a string representing a user's location history). In this article, we consider the problem of sanitizing a string by concealing the occurrences of sensitive patterns, while maintaining data utility, in two settings that are relevant to many common string processing tasks. In the first setting, we aim to generate the minimal-length string that preserves the order of appearance and frequency of all non-sensitive patterns. Such a string allows accurately performing tasks based on the sequential nature and pattern frequencies of the string. To construct such a string, we propose a time-optimal algorithm, TFS-ALGO. We also propose another time-optimal algorithm, PFS-ALGO, which preserves a partial order of appearance of non-sensitive patterns but produces a much shorter string that can be analyzed more efficiently. The strings produced by either of these algorithms are constructed by concatenating non-sensitive parts of the input string. However, it is possible to detect the sensitive patterns by ``reversing'' the concatenation operations. In response, we propose a heuristic, MCSR-ALGO, which replaces letters in the strings output by the algorithms with carefully selected letters, so that sensitive patterns are not reinstated, implausible patterns are not introduced, and occurrences of spurious patterns are prevented. In the second setting, we aim to generate a string that is at minimal edit distance from the original string, in addition to preserving the order of appearance and frequency of all non-sensitive patterns. To construct such a string, we propose an algorithm, ETFS-ALGO, based on solving specific instances of approximate regular expression matching. We implemented our sanitization approach that applies TFS-ALGO, PFS-ALGO, and then MCSR-ALGO, and experimentally show that it is effective and efficient. We also show that TFS-ALGO is nearly as effective at minimizing the edit distance as ETFS-ALGO, while being substantially more efficient than ETFS-ALGO.},
journal = {ACM Trans. Knowl. Discov. Data},
articleno = {8},
numpages = {34},
keywords = {data sanitization, data privacy, strings, sequences, sensitive knowledge, knowledge hiding},
note = {Open Access (MIUR). This work has been partly funded by the project CMACBioSeq (Combinatorial methods for analysis and compression of biological sequences), grant no. RBSI146R5L of the SIR 2014 program of MIUR.}
}