Re: Statistical aggregate functions are not working with PARTIAL aggregation

Поиск
Список
Период
Сортировка
От Kyotaro HORIGUCHI
Тема Re: Statistical aggregate functions are not working with PARTIAL aggregation
Дата
Msg-id 20190509.141814.182954005.horiguchi.kyotaro@lab.ntt.co.jp
обсуждение исходный текст
Ответ на Re: Statistical aggregate functions are not working with PARTIAL aggregation  (Greg Stark <stark@mit.edu>)
Список pgsql-hackers
At Thu, 09 May 2019 11:17:46 +0900 (Tokyo Standard Time), Kyotaro HORIGUCHI <horiguchi.kyotaro@lab.ntt.co.jp> wrote in
<20190509.111746.217492977.horiguchi.kyotaro@lab.ntt.co.jp>
> Valgrind doesn't detect the overrunning read since the block
> doesn't have a 'MEMNOACCESS' region, since the requested size is
> just 64 bytes.
> 
> Thus the attached patch let valgrind detect the overrun.

So the attached patch makes palloc always attach the MEMNOACCESS
region and sentinel byte. The issue under discussion is detected
with this patch as well. (But in return memory usage gets larger.)

regards.

-- 
Kyotaro Horiguchi
NTT Open Source Software Center
From 5c25b69822fdd86d05871f61cf30e47f514853ae Mon Sep 17 00:00:00 2001
From: Kyotaro Horiguchi <horiguchi.kyotaro@lab.ntt.co.jp>
Date: Thu, 9 May 2019 13:18:33 +0900
Subject: [PATCH] Always put sentinel in allocated memory blocks.

Currently, allocated memory blocks don't have a sentinel byte and a
valgrind NOACCESS region if the requested size is equal to the chunk
size. This behavior largely diminishes the chance of overrun
detection. This patch makes allocated memory blocks always be armed
with these protections when MEMORY_CONTEXT_CHECKING is defined.
---
 src/backend/utils/mmgr/aset.c       | 51 +++++++++++++++++++++----------------
 src/backend/utils/mmgr/generation.c | 35 +++++++++++++++----------
 src/backend/utils/mmgr/slab.c       | 23 ++++++++++-------
 3 files changed, 64 insertions(+), 45 deletions(-)

diff --git a/src/backend/utils/mmgr/aset.c b/src/backend/utils/mmgr/aset.c
index 08aff333a4..15ef5c5afa 100644
--- a/src/backend/utils/mmgr/aset.c
+++ b/src/backend/utils/mmgr/aset.c
@@ -297,6 +297,13 @@ static const MemoryContextMethods AllocSetMethods = {
 #endif
 };
 
+/* to keep room for sentinel in allocated chunks */
+#ifdef MEMORY_CONTEXT_CHECKING
+#define REQUIRED_SIZE(x) ((x) + 1)
+#else
+#define REQUIRED_SIZE(x) (x)
+#endif
+
 /*
  * Table for AllocSetFreeIndex
  */
@@ -726,9 +733,9 @@ AllocSetAlloc(MemoryContext context, Size size)
      * If requested size exceeds maximum for chunks, allocate an entire block
      * for this request.
      */
-    if (size > set->allocChunkLimit)
+    if (REQUIRED_SIZE(size) > set->allocChunkLimit)
     {
-        chunk_size = MAXALIGN(size);
+        chunk_size = MAXALIGN(REQUIRED_SIZE(size));
         blksize = chunk_size + ALLOC_BLOCKHDRSZ + ALLOC_CHUNKHDRSZ;
         block = (AllocBlock) malloc(blksize);
         if (block == NULL)
@@ -742,8 +749,8 @@ AllocSetAlloc(MemoryContext context, Size size)
 #ifdef MEMORY_CONTEXT_CHECKING
         chunk->requested_size = size;
         /* set mark to catch clobber of "unused" space */
-        if (size < chunk_size)
-            set_sentinel(AllocChunkGetPointer(chunk), size);
+        Assert (size < chunk_size);
+        set_sentinel(AllocChunkGetPointer(chunk), size);
 #endif
 #ifdef RANDOMIZE_ALLOCATED_MEMORY
         /* fill the allocated space with junk */
@@ -787,7 +794,7 @@ AllocSetAlloc(MemoryContext context, Size size)
      * If one is found, remove it from the free list, make it again a member
      * of the alloc set and return its data address.
      */
-    fidx = AllocSetFreeIndex(size);
+    fidx = AllocSetFreeIndex(REQUIRED_SIZE(size));
     chunk = set->freelist[fidx];
     if (chunk != NULL)
     {
@@ -800,8 +807,8 @@ AllocSetAlloc(MemoryContext context, Size size)
 #ifdef MEMORY_CONTEXT_CHECKING
         chunk->requested_size = size;
         /* set mark to catch clobber of "unused" space */
-        if (size < chunk->size)
-            set_sentinel(AllocChunkGetPointer(chunk), size);
+        Assert (size < chunk->size);
+        set_sentinel(AllocChunkGetPointer(chunk), size);
 #endif
 #ifdef RANDOMIZE_ALLOCATED_MEMORY
         /* fill the allocated space with junk */
@@ -824,7 +831,7 @@ AllocSetAlloc(MemoryContext context, Size size)
      * Choose the actual chunk size to allocate.
      */
     chunk_size = (1 << ALLOC_MINBITS) << fidx;
-    Assert(chunk_size >= size);
+    Assert(chunk_size >= REQUIRED_SIZE(size));
 
     /*
      * If there is enough room in the active allocation block, we will put the
@@ -959,8 +966,8 @@ AllocSetAlloc(MemoryContext context, Size size)
 #ifdef MEMORY_CONTEXT_CHECKING
     chunk->requested_size = size;
     /* set mark to catch clobber of "unused" space */
-    if (size < chunk->size)
-        set_sentinel(AllocChunkGetPointer(chunk), size);
+    Assert (size < chunk->size);
+    set_sentinel(AllocChunkGetPointer(chunk), size);
 #endif
 #ifdef RANDOMIZE_ALLOCATED_MEMORY
     /* fill the allocated space with junk */
@@ -996,10 +1003,10 @@ AllocSetFree(MemoryContext context, void *pointer)
 
 #ifdef MEMORY_CONTEXT_CHECKING
     /* Test for someone scribbling on unused space in chunk */
-    if (chunk->requested_size < chunk->size)
-        if (!sentinel_ok(pointer, chunk->requested_size))
-            elog(WARNING, "detected write past chunk end in %s %p",
-                 set->header.name, chunk);
+    Assert (chunk->requested_size < chunk->size);
+    if (!sentinel_ok(pointer, chunk->requested_size))
+        elog(WARNING, "detected write past chunk end in %s %p",
+             set->header.name, chunk);
 #endif
 
     if (chunk->size > set->allocChunkLimit)
@@ -1078,10 +1085,10 @@ AllocSetRealloc(MemoryContext context, void *pointer, Size size)
 
 #ifdef MEMORY_CONTEXT_CHECKING
     /* Test for someone scribbling on unused space in chunk */
-    if (chunk->requested_size < oldsize)
-        if (!sentinel_ok(pointer, chunk->requested_size))
-            elog(WARNING, "detected write past chunk end in %s %p",
-                 set->header.name, chunk);
+    Assert (chunk->requested_size < oldsize);
+    if (!sentinel_ok(pointer, chunk->requested_size))
+        elog(WARNING, "detected write past chunk end in %s %p",
+             set->header.name, chunk);
 #endif
 
     /*
@@ -1089,7 +1096,7 @@ AllocSetRealloc(MemoryContext context, void *pointer, Size size)
      * allocated area already is >= the new size.  (In particular, we always
      * fall out here if the requested size is a decrease.)
      */
-    if (oldsize >= size)
+    if (oldsize >= REQUIRED_SIZE(size))
     {
 #ifdef MEMORY_CONTEXT_CHECKING
         Size        oldrequest = chunk->requested_size;
@@ -1157,7 +1164,7 @@ AllocSetRealloc(MemoryContext context, void *pointer, Size size)
             elog(ERROR, "could not find block containing chunk %p", chunk);
 
         /* Do the realloc */
-        chksize = MAXALIGN(size);
+        chksize = MAXALIGN(REQUIRED_SIZE(size));
         blksize = chksize + ALLOC_BLOCKHDRSZ + ALLOC_CHUNKHDRSZ;
         block = (AllocBlock) realloc(block, blksize);
         if (block == NULL)
@@ -1197,8 +1204,8 @@ AllocSetRealloc(MemoryContext context, void *pointer, Size size)
         chunk->requested_size = size;
 
         /* set mark to catch clobber of "unused" space */
-        if (size < chunk->size)
-            set_sentinel(pointer, size);
+        Assert (size < chunk->size);
+        set_sentinel(pointer, size);
 #else                            /* !MEMORY_CONTEXT_CHECKING */
 
         /*
diff --git a/src/backend/utils/mmgr/generation.c b/src/backend/utils/mmgr/generation.c
index 02a23694cb..344f634ecb 100644
--- a/src/backend/utils/mmgr/generation.c
+++ b/src/backend/utils/mmgr/generation.c
@@ -178,6 +178,13 @@ static const MemoryContextMethods GenerationMethods = {
 #endif
 };
 
+/* to keep room for sentinel in allocated chunks */
+#ifdef MEMORY_CONTEXT_CHECKING
+#define REQUIRED_SIZE(x) ((x) + 1)
+#else
+#define REQUIRED_SIZE(x) (x)
+#endif
+
 /* ----------
  * Debug macros
  * ----------
@@ -341,7 +348,7 @@ GenerationAlloc(MemoryContext context, Size size)
     GenerationContext *set = (GenerationContext *) context;
     GenerationBlock *block;
     GenerationChunk *chunk;
-    Size        chunk_size = MAXALIGN(size);
+    Size        chunk_size = MAXALIGN(REQUIRED_SIZE(size));
 
     /* is it an over-sized chunk? if yes, allocate special block */
     if (chunk_size > set->blockSize / 8)
@@ -368,8 +375,8 @@ GenerationAlloc(MemoryContext context, Size size)
 #ifdef MEMORY_CONTEXT_CHECKING
         chunk->requested_size = size;
         /* set mark to catch clobber of "unused" space */
-        if (size < chunk_size)
-            set_sentinel(GenerationChunkGetPointer(chunk), size);
+        Assert (size < chunk_size);
+        set_sentinel(GenerationChunkGetPointer(chunk), size);
 #endif
 #ifdef RANDOMIZE_ALLOCATED_MEMORY
         /* fill the allocated space with junk */
@@ -446,8 +453,8 @@ GenerationAlloc(MemoryContext context, Size size)
 #ifdef MEMORY_CONTEXT_CHECKING
     chunk->requested_size = size;
     /* set mark to catch clobber of "unused" space */
-    if (size < chunk->size)
-        set_sentinel(GenerationChunkGetPointer(chunk), size);
+    Assert (size < chunk->size);
+    set_sentinel(GenerationChunkGetPointer(chunk), size);
 #endif
 #ifdef RANDOMIZE_ALLOCATED_MEMORY
     /* fill the allocated space with junk */
@@ -485,10 +492,10 @@ GenerationFree(MemoryContext context, void *pointer)
 
 #ifdef MEMORY_CONTEXT_CHECKING
     /* Test for someone scribbling on unused space in chunk */
-    if (chunk->requested_size < chunk->size)
-        if (!sentinel_ok(pointer, chunk->requested_size))
-            elog(WARNING, "detected write past chunk end in %s %p",
-                 ((MemoryContext) set)->name, chunk);
+    Assert (chunk->requested_size < chunk->size);
+    if (!sentinel_ok(pointer, chunk->requested_size))
+        elog(WARNING, "detected write past chunk end in %s %p",
+             ((MemoryContext) set)->name, chunk);
 #endif
 
 #ifdef CLOBBER_FREED_MEMORY
@@ -546,10 +553,10 @@ GenerationRealloc(MemoryContext context, void *pointer, Size size)
 
 #ifdef MEMORY_CONTEXT_CHECKING
     /* Test for someone scribbling on unused space in chunk */
-    if (chunk->requested_size < oldsize)
-        if (!sentinel_ok(pointer, chunk->requested_size))
-            elog(WARNING, "detected write past chunk end in %s %p",
-                 ((MemoryContext) set)->name, chunk);
+    Assert (chunk->requested_size < oldsize);
+    if (!sentinel_ok(pointer, chunk->requested_size))
+        elog(WARNING, "detected write past chunk end in %s %p",
+             ((MemoryContext) set)->name, chunk);
 #endif
 
     /*
@@ -562,7 +569,7 @@ GenerationRealloc(MemoryContext context, void *pointer, Size size)
      *
      * XXX Perhaps we should annotate this condition with unlikely()?
      */
-    if (oldsize >= size)
+    if (oldsize >= REQUIRED_SIZE(size))
     {
 #ifdef MEMORY_CONTEXT_CHECKING
         Size        oldrequest = chunk->requested_size;
diff --git a/src/backend/utils/mmgr/slab.c b/src/backend/utils/mmgr/slab.c
index 33d69239d9..97dc07e73a 100644
--- a/src/backend/utils/mmgr/slab.c
+++ b/src/backend/utils/mmgr/slab.c
@@ -155,6 +155,13 @@ static const MemoryContextMethods SlabMethods = {
 #endif
 };
 
+/* to keep room for sentinel in allocated chunks */
+#ifdef MEMORY_CONTEXT_CHECKING
+#define REQUIRED_SIZE(x) ((x) + 1)
+#else
+#define REQUIRED_SIZE(x) (x)
+#endif
+
 /* ----------
  * Debug macros
  * ----------
@@ -209,7 +216,7 @@ SlabContextCreate(MemoryContext parent,
         chunkSize = sizeof(int);
 
     /* chunk, including SLAB header (both addresses nicely aligned) */
-    fullChunkSize = sizeof(SlabChunk) + MAXALIGN(chunkSize);
+    fullChunkSize = sizeof(SlabChunk) + MAXALIGN(REQUIRED_SIZE(chunkSize));
 
     /* Make sure the block can store at least one chunk. */
     if (blockSize < fullChunkSize + sizeof(SlabBlock))
@@ -465,14 +472,12 @@ SlabAlloc(MemoryContext context, Size size)
 
 #ifdef MEMORY_CONTEXT_CHECKING
     /* slab mark to catch clobber of "unused" space */
-    if (slab->chunkSize < (slab->fullChunkSize - sizeof(SlabChunk)))
-    {
-        set_sentinel(SlabChunkGetPointer(chunk), size);
-        VALGRIND_MAKE_MEM_NOACCESS(((char *) chunk) +
-                                   sizeof(SlabChunk) + slab->chunkSize,
-                                   slab->fullChunkSize -
-                                   (slab->chunkSize + sizeof(SlabChunk)));
-    }
+    Assert (slab->chunkSize < (slab->fullChunkSize - sizeof(SlabChunk)));
+    set_sentinel(SlabChunkGetPointer(chunk), size);
+    VALGRIND_MAKE_MEM_NOACCESS(((char *) chunk) +
+                               sizeof(SlabChunk) + slab->chunkSize,
+                               slab->fullChunkSize -
+                               (slab->chunkSize + sizeof(SlabChunk)));
 #endif
 #ifdef RANDOMIZE_ALLOCATED_MEMORY
     /* fill the allocated space with junk */
-- 
2.16.3


В списке pgsql-hackers по дате отправления:

Предыдущее
От: Masahiko Sawada
Дата:
Сообщение: Re: New vacuum option to do only freezing
Следующее
От: Michael Paquier
Дата:
Сообщение: Re: Fwd: Add tablespace tap test to pg_rewind