summaryrefslogtreecommitdiff
path: root/malloc/memusage.c
diff options
context:
space:
mode:
authorAndreas Schwab <schwab@redhat.com>2009-10-29 11:25:20 -0700
committerUlrich Drepper <drepper@redhat.com>2009-10-29 11:25:20 -0700
commit22bc5239e1c7d97b0642af6c135af994586f8e82 (patch)
tree5a239a09214ad5b6a058e49f7a8c87f3136ad66a /malloc/memusage.c
parentd94760f944cebf05b239dd0b65c3b40a5577013b (diff)
downloadglibc-22bc5239e1c7d97b0642af6c135af994586f8e82.tar.gz
Fix wrap-around in memusage.
Diffstat (limited to 'malloc/memusage.c')
-rw-r--r--malloc/memusage.c9
1 file changed, 5 insertions, 4 deletions
diff --git a/malloc/memusage.c b/malloc/memusage.c
index fcd58dc684..382261c1c4 100644
--- a/malloc/memusage.c
+++ b/malloc/memusage.c
@@ -163,15 +163,16 @@ update_data (struct header *result, size_t len, size_t old_len)
if (fd != -1)
{
uatomic32_t idx = catomic_exchange_and_add (&buffer_cnt, 1);
- if (idx >= 2 * buffer_size)
+ if (idx + 1 >= 2 * buffer_size)
{
/* We try to reset the counter to the correct range. If
this fails because of another thread increasing the
counter it does not matter since that thread will take
care of the correction. */
- uatomic32_t reset = idx % (2 * buffer_size);
- catomic_compare_and_exchange_val_acq (&buffer_cnt, reset, idx);
- idx = reset;
+ uatomic32_t reset = (idx + 1) % (2 * buffer_size);
+ catomic_compare_and_exchange_val_acq (&buffer_cnt, reset, idx + 1);
+ if (idx >= 2 * buffer_size)
+ idx = reset - 1;
}
assert (idx < 2 * DEFAULT_BUFFER_SIZE);