Commit summary: in all three allocators (slab, slob, slub), kmem_cache_destroy() must wait for in-flight RCU callbacks on SLAB_DESTROY_BY_RCU caches before freeing the cache, so synchronize_rcu() is replaced by (or augmented with) rcu_barrier(); additionally, slab's lockdep annotation via init_lock_keys() is moved from kmem_cache_init() to kmem_cache_init_late(). Files changed: mm/slab.c (+4 −4), mm/slob.c (+2 −0), mm/slub.c (+2 −0). Full per-file diffs follow below.
--- a/mm/slab.c
+++ b/mm/slab.c
@@ -1544,9 +1544,6 @@ void __init kmem_cache_init(void)
 	}
 
 	g_cpucache_up = EARLY;
-
-	/* Annotate slab for lockdep -- annotate the malloc caches */
-	init_lock_keys();
 }
 
 void __init kmem_cache_init_late(void)
@@ -1563,6 +1560,9 @@ void __init kmem_cache_init_late(void)
 	/* Done! */
 	g_cpucache_up = FULL;
 
+	/* Annotate slab for lockdep -- annotate the malloc caches */
+	init_lock_keys();
+
 	/*
 	 * Register a cpu startup notifier callback that initializes
 	 * cpu_cache_get for all new cpus
@@ -2547,7 +2547,7 @@ void kmem_cache_destroy(struct kmem_cache *cachep)
 	}
 
 	if (unlikely(cachep->flags & SLAB_DESTROY_BY_RCU))
-		synchronize_rcu();
+		rcu_barrier();
 
 	__kmem_cache_destroy(cachep);
 	mutex_unlock(&cache_chain_mutex);
--- a/mm/slob.c
+++ b/mm/slob.c
@@ -595,6 +595,8 @@ EXPORT_SYMBOL(kmem_cache_create);
 void kmem_cache_destroy(struct kmem_cache *c)
 {
 	kmemleak_free(c);
+	if (c->flags & SLAB_DESTROY_BY_RCU)
+		rcu_barrier();
 	slob_free(c, sizeof(struct kmem_cache));
 }
 EXPORT_SYMBOL(kmem_cache_destroy);
--- a/mm/slub.c
+++ b/mm/slub.c
@@ -2595,6 +2595,8 @@ static inline int kmem_cache_close(struct kmem_cache *s)
  */
 void kmem_cache_destroy(struct kmem_cache *s)
 {
+	if (s->flags & SLAB_DESTROY_BY_RCU)
+		rcu_barrier();
 	down_write(&slub_lock);
 	s->refcount--;
 	if (!s->refcount) {