radix-tree: replace preallocated node array with linked list
authorKirill A. Shutemov <kirill.shutemov@linux.intel.com>
Thu, 25 Jun 2015 22:02:19 +0000 (15:02 -0700)
committerLinus Torvalds <torvalds@linux-foundation.org>
Fri, 26 Jun 2015 00:00:40 +0000 (17:00 -0700)
Currently we use a per-cpu array to hold pointers to preallocated nodes.
Let's replace it with a linked list.  On x86_64 it saves 256 bytes in the
per-cpu ELF section, which may translate into freeing up 2MB of memory for
NR_CPUS==8192.

[akpm@linux-foundation.org: fix comment, coding style]
Signed-off-by: Kirill A. Shutemov <kirill.shutemov@linux.intel.com>
Acked-by: Johannes Weiner <hannes@cmpxchg.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
lib/radix-tree.c

index 061550de77bc040878a1a62ef72816d85043b3a6..f9ebe1c82060ec330ac7ae7a9d1678f18058cf65 100644 (file)
@@ -65,7 +65,8 @@ static struct kmem_cache *radix_tree_node_cachep;
  */
 struct radix_tree_preload {
        int nr;
-       struct radix_tree_node *nodes[RADIX_TREE_PRELOAD_SIZE];
+       /* nodes->private_data points to next preallocated node */
+       struct radix_tree_node *nodes;
 };
 static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
 
@@ -197,8 +198,9 @@ radix_tree_node_alloc(struct radix_tree_root *root)
                 */
                rtp = this_cpu_ptr(&radix_tree_preloads);
                if (rtp->nr) {
-                       ret = rtp->nodes[rtp->nr - 1];
-                       rtp->nodes[rtp->nr - 1] = NULL;
+                       ret = rtp->nodes;
+                       rtp->nodes = ret->private_data;
+                       ret->private_data = NULL;
                        rtp->nr--;
                }
                /*
@@ -257,17 +259,20 @@ static int __radix_tree_preload(gfp_t gfp_mask)
 
        preempt_disable();
        rtp = this_cpu_ptr(&radix_tree_preloads);
-       while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
+       while (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
                preempt_enable();
                node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
                if (node == NULL)
                        goto out;
                preempt_disable();
                rtp = this_cpu_ptr(&radix_tree_preloads);
-               if (rtp->nr < ARRAY_SIZE(rtp->nodes))
-                       rtp->nodes[rtp->nr++] = node;
-               else
+               if (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
+                       node->private_data = rtp->nodes;
+                       rtp->nodes = node;
+                       rtp->nr++;
+               } else {
                        kmem_cache_free(radix_tree_node_cachep, node);
+               }
        }
        ret = 0;
 out:
@@ -1463,15 +1468,16 @@ static int radix_tree_callback(struct notifier_block *nfb,
 {
        int cpu = (long)hcpu;
        struct radix_tree_preload *rtp;
+       struct radix_tree_node *node;
 
        /* Free per-cpu pool of perloaded nodes */
        if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
                rtp = &per_cpu(radix_tree_preloads, cpu);
                while (rtp->nr) {
-                       kmem_cache_free(radix_tree_node_cachep,
-                                       rtp->nodes[rtp->nr-1]);
-                       rtp->nodes[rtp->nr-1] = NULL;
-                       rtp->nr--;
+                       node = rtp->nodes;
+                       rtp->nodes = node->private_data;
+                       kmem_cache_free(radix_tree_node_cachep, node);
+                       rtp->nr--;
                }
        }
        return NOTIFY_OK;