RB_CLEAR_NODE(&map->rb_node);
map->groups = NULL;
map->erange_warned = false;
- atomic_set(&map->refcnt, 1);
+ refcount_set(&map->refcnt, 1);
}
struct map *map__new(struct machine *machine, u64 start, u64 len,
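Every hunk here repeats the same one-for-one substitution: refcount_set() replaces atomic_set() wherever a map starts life holding a single reference (map__init() above, map__clone() below), refcount_dec_and_test() replaces atomic_dec_and_test() in map__put(), and map.h swaps the include, the type of the refcnt field, and the atomic_inc() in map__get().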
void map__put(struct map *map)
{
- if (map && atomic_dec_and_test(&map->refcnt))
+ if (map && refcount_dec_and_test(&map->refcnt))
map__delete(map);
}
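To make the get/put discipline concrete, here is a minimal standalone sketch of the same lifetime pattern, assuming the refcount_t helpers used in the patch (refcount_set(), refcount_inc(), refcount_dec_and_test()) and a hypothetical struct foo; it also assumes the userspace copy of linux/refcount.h that perf builds against, so it is meant to compile inside the tools tree, not as a bare program. It is an illustration, not part of the patch:

	#include <linux/refcount.h>
	#include <stdlib.h>

	struct foo {
		refcount_t refcnt;
		/* ... payload ... */
	};

	static struct foo *foo__new(void)
	{
		struct foo *foo = calloc(1, sizeof(*foo));

		if (foo)
			refcount_set(&foo->refcnt, 1);	/* caller owns the first reference */
		return foo;
	}

	static struct foo *foo__get(struct foo *foo)
	{
		if (foo)
			refcount_inc(&foo->refcnt);	/* take an extra reference */
		return foo;
	}

	static void foo__put(struct foo *foo)
	{
		if (foo && refcount_dec_and_test(&foo->refcnt))
			free(foo);			/* last reference gone, destroy */
	}

	int main(void)
	{
		struct foo *a = foo__new();	/* refcnt == 1 */
		struct foo *b = foo__get(a);	/* refcnt == 2, same object */

		foo__put(b);			/* refcnt == 1 */
		foo__put(a);			/* refcnt == 0, object is freed */
		return 0;
	}

map__get()/map__put() follow exactly this shape, with map__delete() in place of free().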
struct map *map = memdup(from, sizeof(*map));
if (map != NULL) {
- atomic_set(&map->refcnt, 1);
+ refcount_set(&map->refcnt, 1);
RB_CLEAR_NODE(&map->rb_node);
dso__get(map->dso);
map->groups = NULL;
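The rest of the change is the header side, in map.h: the linux/atomic.h include gives way to linux/refcount.h (for the perf build this presumably resolves to the userspace copy shipped under tools/include/), struct map's refcnt field changes type from atomic_t to refcount_t, and the inline map__get() takes its reference with refcount_inc():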
#ifndef __PERF_MAP_H
#define __PERF_MAP_H
-#include <linux/atomic.h>
+#include <linux/refcount.h>
#include <linux/compiler.h>
#include <linux/list.h>
#include <linux/rbtree.h>

struct dso *dso;
struct map_groups *groups;
- atomic_t refcnt;
+ refcount_t refcnt;
};
struct kmap {

static inline struct map *map__get(struct map *map)
{
if (map)
- atomic_inc(&map->refcnt);
+ refcount_inc(&map->refcnt);
return map;
}
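The fast-path behaviour of the get/put pair is unchanged; what refcount_t adds is checking. Under the kernel's refcount_t contract the count saturates instead of wrapping on overflow, and incrementing from zero or dropping below zero triggers a warning, so a use-after-free or double map__put() on map->refcnt becomes a loud diagnostic rather than silent corruption. How much of that checking is compiled in depends on the build configuration.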