* needed for RCU lookups (because root->height is unreliable). The only
* time callers need worry about this is when doing a lookup_slot under
* RCU.
+ *
+ * The indirect pointer tag is also applied to the remaining slot (slot 0) of
+ * a node when the tree is shrunk, before the node is RCU-freed. See the
+ * shrink code for details; a stand-alone sketch of the tag encoding appears
+ * below.
*/
#define RADIX_TREE_INDIRECT_PTR 1
-#define RADIX_TREE_RETRY ((void *)-1UL)
-
-static inline void *radix_tree_ptr_to_indirect(void *ptr)
-{
- return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
-}
-static inline void *radix_tree_indirect_to_ptr(void *ptr)
-{
- return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
-}
#define radix_tree_indirect_to_ptr(ptr) \
radix_tree_indirect_to_ptr((void __force *)(ptr))
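
For reference, a stand-alone illustration (not part of the patch; the names here are hypothetical stand-ins for RADIX_TREE_INDIRECT_PTR and the ptr_to_indirect()/indirect_to_ptr() helpers added below) of how the single low bit of an aligned pointer carries both the "this is a node, not an item" marker and the stale-slot tag written at shrink time:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define INDIRECT_PTR 1UL		/* plays the role of RADIX_TREE_INDIRECT_PTR */

struct node { void *slots[4]; };	/* stand-in for struct radix_tree_node */

static void *tag_indirect(void *p)   { return (void *)((uintptr_t)p | INDIRECT_PTR); }
static void *strip_indirect(void *p) { return (void *)((uintptr_t)p & ~INDIRECT_PTR); }
static int   is_tagged(void *p)      { return (int)((uintptr_t)p & INDIRECT_PTR); }

int main(void)
{
	static int item = 42;
	struct node n = { { &item } };
	void *root;

	/* node allocations are at least pointer-aligned, so bit 0 is free */
	assert(((uintptr_t)&n & INDIRECT_PTR) == 0);

	/* normal case: the root pointer carries the tag to say "node below" */
	root = tag_indirect(&n);
	assert(is_tagged(root) && strip_indirect(root) == (void *)&n);

	/* shrink case: tag the orphaned slot so cached slot pointers are
	 * recognisably stale and force the caller to redo the lookup */
	n.slots[0] = tag_indirect(n.slots[0]);
	printf("stale slot forces retry: %d\n", is_tagged(n.slots[0]));
	return 0;
}
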
* removed.
*
* For use with radix_tree_lookup_slot(). Caller must hold tree at least read
- * locked across slot lookup and dereference. More likely, will be used with
- * radix_tree_replace_slot(), as well, so caller will hold tree write locked.
+ * locked across slot lookup and dereference. The read lock is not required
+ * if the write lock is held (ie. items cannot be concurrently inserted).
+ *
+ * radix_tree_deref_retry must be used to confirm validity of the returned
+ * pointer if only the read lock is held (see the caller sketch below).
*/
static inline void *radix_tree_deref_slot(void **pslot)
{
- void *ret = rcu_dereference(*pslot);
- if (unlikely(radix_tree_is_indirect_ptr(ret)))
- ret = RADIX_TREE_RETRY;
- return ret;
+ return rcu_dereference(*pslot);
}
+
+/**
+ * radix_tree_deref_retry - check if a radix_tree_deref_slot result is stale
+ * @arg: pointer returned by radix_tree_deref_slot
+ * Returns: 0 if no retry is required, non-zero if the lookup must be retried
+ *
+ * radix_tree_deref_retry must be used on the result of radix_tree_deref_slot
+ * when only the read lock is held, to detect slots made stale by concurrent
+ * tree modification.
+ */
+static inline int radix_tree_deref_retry(void *arg)
+{
+ return unlikely((unsigned long)arg & RADIX_TREE_INDIRECT_PTR);
+}
+
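
A minimal caller sketch of the contract documented above (the function name is hypothetical and not part of the patch; only rcu_read_lock(), radix_tree_lookup_slot() and the two helpers above are assumed): dereference the slot under the RCU read lock and redo the whole lookup whenever radix_tree_deref_retry() flags the result.

#include <linux/radix-tree.h>
#include <linux/rcupdate.h>

/* Hypothetical example: look up @index without taking the tree's lock. */
static void *rcu_lookup_item(struct radix_tree_root *root, unsigned long index)
{
	void **slot;
	void *item;

	rcu_read_lock();
repeat:
	item = NULL;
	slot = radix_tree_lookup_slot(root, index);
	if (slot) {
		item = radix_tree_deref_slot(slot);
		/* slot went stale (tree grown or shrunk): redo the lookup */
		if (radix_tree_deref_retry(item))
			goto repeat;
	}
	rcu_read_unlock();
	return item;
}

This is the same pattern the mm/filemap.c hunks below adopt for find_get_page() and the gang lookups.
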
/**
* radix_tree_replace_slot - replace item in a slot
* @pslot: pointer to slot, returned by radix_tree_lookup_slot
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
+static inline void *ptr_to_indirect(void *ptr)
+{
+ return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
+}
+
+static inline void *indirect_to_ptr(void *ptr)
+{
+ return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
+}
+
static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
return root->gfp_mask & __GFP_BITS_MASK;
return -ENOMEM;
/* Increase the height. */
- node->slots[0] = radix_tree_indirect_to_ptr(root->rnode);
+ node->slots[0] = indirect_to_ptr(root->rnode);
/* Propagate the aggregated tag info into the new root */
for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
newheight = root->height+1;
node->height = newheight;
node->count = 1;
- node = radix_tree_ptr_to_indirect(node);
+ node = ptr_to_indirect(node);
rcu_assign_pointer(root->rnode, node);
root->height = newheight;
} while (height > root->height);
return error;
}
- slot = radix_tree_indirect_to_ptr(root->rnode);
+ slot = indirect_to_ptr(root->rnode);
height = root->height;
shift = (height-1) * RADIX_TREE_MAP_SHIFT;
rcu_assign_pointer(node->slots[offset], slot);
node->count++;
} else
- rcu_assign_pointer(root->rnode,
- radix_tree_ptr_to_indirect(slot));
+ rcu_assign_pointer(root->rnode, ptr_to_indirect(slot));
}
/* Go a level down */
return NULL;
return is_slot ? (void *)&root->rnode : node;
}
- node = radix_tree_indirect_to_ptr(node);
+ node = indirect_to_ptr(node);
height = node->height;
if (index > radix_tree_maxindex(height))
height--;
} while (height > 0);
- return is_slot ? (void *)slot:node;
+ return is_slot ? (void *)slot : indirect_to_ptr(node);
}
/**
height = root->height;
BUG_ON(index > radix_tree_maxindex(height));
- slot = radix_tree_indirect_to_ptr(root->rnode);
+ slot = indirect_to_ptr(root->rnode);
shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
while (height > 0) {
shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
pathp->node = NULL;
- slot = radix_tree_indirect_to_ptr(root->rnode);
+ slot = indirect_to_ptr(root->rnode);
while (height > 0) {
int offset;
if (!radix_tree_is_indirect_ptr(node))
return (index == 0);
- node = radix_tree_indirect_to_ptr(node);
+ node = indirect_to_ptr(node);
height = node->height;
if (index > radix_tree_maxindex(height))
}
shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
- slot = radix_tree_indirect_to_ptr(root->rnode);
+ slot = indirect_to_ptr(root->rnode);
/*
* we fill the path from (root->height - 2) to 0, leaving the index at
results[0] = node;
return 1;
}
- node = radix_tree_indirect_to_ptr(node);
+ node = indirect_to_ptr(node);
max_index = radix_tree_maxindex(node->height);
slot = *(((void ***)results)[ret + i]);
if (!slot)
continue;
- results[ret + nr_found] = rcu_dereference_raw(slot);
+ results[ret + nr_found] =
+ indirect_to_ptr(rcu_dereference_raw(slot));
nr_found++;
}
ret += nr_found;
results[0] = (void **)&root->rnode;
return 1;
}
- node = radix_tree_indirect_to_ptr(node);
+ node = indirect_to_ptr(node);
max_index = radix_tree_maxindex(node->height);
results[0] = node;
return 1;
}
- node = radix_tree_indirect_to_ptr(node);
+ node = indirect_to_ptr(node);
max_index = radix_tree_maxindex(node->height);
slot = *(((void ***)results)[ret + i]);
if (!slot)
continue;
- results[ret + nr_found] = rcu_dereference_raw(slot);
+ results[ret + nr_found] =
+ indirect_to_ptr(rcu_dereference_raw(slot));
nr_found++;
}
ret += nr_found;
results[0] = (void **)&root->rnode;
return 1;
}
- node = radix_tree_indirect_to_ptr(node);
+ node = indirect_to_ptr(node);
max_index = radix_tree_maxindex(node->height);
void *newptr;
BUG_ON(!radix_tree_is_indirect_ptr(to_free));
- to_free = radix_tree_indirect_to_ptr(to_free);
+ to_free = indirect_to_ptr(to_free);
/*
* The candidate node has more than one child, or its child
/*
* We don't need rcu_assign_pointer(), since we are simply
- * moving the node from one part of the tree to another. If
- * it was safe to dereference the old pointer to it
+ * moving the node from one part of the tree to another: if it
+ * was safe to dereference the old pointer to it
* (to_free->slots[0]), it will be safe to dereference the new
- * one (root->rnode).
+ * one (root->rnode) as far as dependent read barriers go.
*/
newptr = to_free->slots[0];
if (root->height > 1)
- newptr = radix_tree_ptr_to_indirect(newptr);
+ newptr = ptr_to_indirect(newptr);
root->rnode = newptr;
root->height--;
+
+ /*
+ * We have a dilemma here. The node's slot[0] must not be
+ * NULLed in case there are concurrent lookups expecting to
+ * find the item. However, if this was a bottom-level node,
+ * callers may still hold a pointer to its slot and
+ * dereference it later. If the item corresponding to
+ * slot[0] is subsequently deleted, those callers would
+ * expect their slot to become empty sooner or later.
+ *
+ * For example, lockless pagecache will look up a slot, deref
+ * the page pointer, and if the page has a zero refcount it
+ * means it was concurrently deleted from pagecache, so the
+ * deref is retried. Fortunately there is already a
+ * requirement for logic to retry the entire slot lookup --
+ * the indirect pointer problem (replacing a direct root node
+ * with an indirect pointer also results in a stale slot). So
+ * tag the slot as indirect to force callers to retry; a
+ * reader-side sketch follows this function.
+ */
+ if (root->height == 0)
+ *((unsigned long *)&to_free->slots[0]) |=
+ RADIX_TREE_INDIRECT_PTR;
+
radix_tree_node_free(to_free);
}
}
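
To show the comment above from the reader's side, a hedged sketch (the function is hypothetical, not part of the patch): a caller that kept a slot pointer across a concurrent shrink re-dereferences it, radix_tree_deref_retry() catches the tag written above, and the caller falls back to a fresh lookup.

#include <linux/radix-tree.h>
#include <linux/rcupdate.h>

/*
 * Hypothetical example: re-dereference a previously obtained slot. If the
 * node holding @slot was shrunk away by radix_tree_shrink(), its slot[0]
 * now carries RADIX_TREE_INDIRECT_PTR (the node itself is only freed after
 * a grace period), so the deref is rejected and the lookup is redone.
 * Caller must hold rcu_read_lock().
 */
static void *deref_or_relookup(struct radix_tree_root *root, void **slot,
			       unsigned long index)
{
	void *item = radix_tree_deref_slot(slot);

	while (radix_tree_deref_retry(item)) {
		slot = radix_tree_lookup_slot(root, index);
		if (!slot)
			return NULL;
		item = radix_tree_deref_slot(slot);
	}
	return item;
}
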
root->rnode = NULL;
goto out;
}
- slot = radix_tree_indirect_to_ptr(slot);
+ slot = indirect_to_ptr(slot);
shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
pathp->node = NULL;
radix_tree_node_free(to_free);
if (pathp->node->count) {
- if (pathp->node ==
- radix_tree_indirect_to_ptr(root->rnode))
+ if (pathp->node == indirect_to_ptr(root->rnode))
radix_tree_shrink(root);
goto out;
}
pagep = radix_tree_lookup_slot(&mapping->page_tree, offset);
if (pagep) {
page = radix_tree_deref_slot(pagep);
- if (unlikely(!page || page == RADIX_TREE_RETRY))
+ if (unlikely(!page))
+ goto out;
+ if (radix_tree_deref_retry(page))
goto repeat;
if (!page_cache_get_speculative(page))
goto repeat;
}
}
+out:
rcu_read_unlock();
return page;
page = radix_tree_deref_slot((void **)pages[i]);
if (unlikely(!page))
continue;
- /*
- * this can only trigger if nr_found == 1, making livelock
- * a non issue.
- */
- if (unlikely(page == RADIX_TREE_RETRY))
+ if (radix_tree_deref_retry(page)) {
+ if (ret)
+ start = pages[ret-1]->index;
goto restart;
+ }
if (!page_cache_get_speculative(page))
goto repeat;
page = radix_tree_deref_slot((void **)pages[i]);
if (unlikely(!page))
continue;
- /*
- * this can only trigger if nr_found == 1, making livelock
- * a non issue.
- */
- if (unlikely(page == RADIX_TREE_RETRY))
+ if (radix_tree_deref_retry(page))
goto restart;
if (page->mapping == NULL || page->index != index)
page = radix_tree_deref_slot((void **)pages[i]);
if (unlikely(!page))
continue;
- /*
- * this can only trigger if nr_found == 1, making livelock
- * a non issue.
- */
- if (unlikely(page == RADIX_TREE_RETRY))
+ if (radix_tree_deref_retry(page))
goto restart;
if (!page_cache_get_speculative(page))