@inproceedings{sitchinava-esa19,
  author    = {Peyman Afshani and Rolf Fagerberg and David Hammer and Riko Jacob and Irina Kostitsyna and Ulrich Meyer and Manuel Penschuck and Nodari Sitchinava},
  title     = {Fragile complexity of comparison-based algorithms},
  booktitle = {Proceedings of the 27th Annual European Symposium on Algorithms},
  series    = {ESA '19},
  pages     = {2:1--2:19},
  year      = {2019},
  month     = sep,
  award     = {ESA Track A Best Paper Award},
  doi       = {10.4230/LIPIcs.ESA.2019.2},
  arxiv     = {https://arxiv.org/abs/1901.02857},
  abstract  = {We initiate a study of algorithms with a focus on the computational complexity of individual elements, and introduce the \emph{fragile complexity} of comparison-based algorithms as the maximal number of comparisons any individual element takes part in. We give a number of upper and lower bounds on the fragile complexity for fundamental problems, including \textsc{Minimum}, \textsc{Selection}, \textsc{Sorting} and \textsc{Heap Construction}. The results include both deterministic and randomized upper and lower bounds, and demonstrate a separation between the two settings for a number of problems. The depth of a comparator network is a straightforward upper bound on the worst-case fragile complexity of the corresponding fragile algorithm. We prove that fragile complexity is a different and strictly easier property than the depth of comparator networks, in the sense that for some problems a fragile complexity equal to the best network depth can be achieved with less total work, and that with randomization, even a lower fragile complexity is possible.}
}