mem: provide thread-unsafe memseg list walk variant

Sometimes, user code needs to walk the memseg list while inside a
memory-related callback. Rather than making everyone copy the same
iteration code around and depend on DPDK internals, provide an
official way to do memseg_list_walk() inside callbacks.

Also, remove the existing reimplementation from the memalloc code and
use the new API instead.
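
As an illustration (not part of this commit), here is a minimal sketch of the
intended usage, assuming the rte_memory.h definitions of this era and a build
with -DALLOW_EXPERIMENTAL_API; the dump_msl/dump_layout* helpers below are
hypothetical names:

#include <stdio.h>
#include <inttypes.h>

#include <rte_memory.h>

/* Iterator invoked once per allocated memseg list; returning 0 continues
 * the walk, a positive value stops it, a negative value reports an error.
 */
static int
dump_msl(const struct rte_memseg_list *msl, void *arg)
{
	unsigned int *idx = arg;

	printf("memseg list %u: socket %d, page size 0x%" PRIx64 "\n",
			(*idx)++, msl->socket_id, msl->page_sz);
	return 0;
}

/* Regular context: use the locked variant, which takes the memory hotplug
 * lock around the walk.
 */
static void
dump_layout(void)
{
	unsigned int idx = 0;

	rte_memseg_list_walk(dump_msl, &idx);
}

/* Inside a memory-related callback, EAL already holds the hotplug lock, so
 * the locked variant would deadlock; the new thread-unsafe variant walks
 * the same lists without taking the lock.
 */
static void
dump_layout_from_callback(void)
{
	unsigned int idx = 0;

	rte_memseg_list_walk_thread_unsafe(dump_msl, &idx);
}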

Signed-off-by: Anatoly Burakov <anatoly.burakov@intel.com>
Authored by Anatoly Burakov on 2018-06-12 10:46:16 +01:00, committed by Thomas Monjalon
parent 7c790af08f
commit e26415428f
4 changed files with 43 additions and 42 deletions

lib/librte_eal/common/eal_common_memory.c

@@ -513,14 +513,11 @@ rte_memseg_walk(rte_memseg_walk_t func, void *arg)
 }
 
 int __rte_experimental
-rte_memseg_list_walk(rte_memseg_list_walk_t func, void *arg)
+rte_memseg_list_walk_thread_unsafe(rte_memseg_list_walk_t func, void *arg)
 {
 	struct rte_mem_config *mcfg = rte_eal_get_configuration()->mem_config;
 	int i, ret = 0;
 
-	/* do not allow allocations/frees/init while we iterate */
-	rte_rwlock_read_lock(&mcfg->memory_hotplug_lock);
-
 	for (i = 0; i < RTE_MAX_MEMSEG_LISTS; i++) {
 		struct rte_memseg_list *msl = &mcfg->memsegs[i];
 
@@ -528,17 +525,23 @@ rte_memseg_list_walk(rte_memseg_list_walk_t func, void *arg)
 			continue;
 
 		ret = func(msl, arg);
-		if (ret < 0) {
-			ret = -1;
-			goto out;
-		}
-		if (ret > 0) {
-			ret = 1;
-			goto out;
-		}
+		if (ret)
+			return ret;
 	}
-out:
+	return 0;
+}
+
+int __rte_experimental
+rte_memseg_list_walk(rte_memseg_list_walk_t func, void *arg)
+{
+	struct rte_mem_config *mcfg = rte_eal_get_configuration()->mem_config;
+	int ret = 0;
+
+	/* do not allow allocations/frees/init while we iterate */
+	rte_rwlock_read_lock(&mcfg->memory_hotplug_lock);
+	ret = rte_memseg_list_walk_thread_unsafe(func, arg);
 	rte_rwlock_read_unlock(&mcfg->memory_hotplug_lock);
 
 	return ret;
 }

lib/librte_eal/common/include/rte_memory.h

@@ -299,6 +299,24 @@ rte_memseg_walk_thread_unsafe(rte_memseg_walk_t func, void *arg);
 int __rte_experimental
 rte_memseg_contig_walk_thread_unsafe(rte_memseg_contig_walk_t func, void *arg);
 
+/**
+ * Walk each allocated memseg list without performing any locking.
+ *
+ * @note This function does not perform any locking, and is only safe to call
+ *       from within memory-related callback functions.
+ *
+ * @param func
+ *   Iterator function
+ * @param arg
+ *   Argument passed to iterator
+ * @return
+ *   0 if walked over the entire list
+ *   1 if stopped by the user
+ *   -1 if user function reported error
+ */
+int __rte_experimental
+rte_memseg_list_walk_thread_unsafe(rte_memseg_list_walk_t func, void *arg);
+
 /**
  * Dump the physical memory layout to a file.
  *
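
To illustrate the 0/1/-1 return contract documented above, a hypothetical
early-stop iterator (again not part of this commit; names are invented and
ALLOW_EXPERIMENTAL_API is assumed):

#include <stddef.h>

#include <rte_memory.h>

/* Hypothetical search state passed through the walk's arg pointer. */
struct socket_search {
	int socket_id;				/* input: socket to look for */
	const struct rte_memseg_list *msl;	/* output: first match, if any */
};

static int
find_socket_msl(const struct rte_memseg_list *msl, void *arg)
{
	struct socket_search *s = arg;

	if (msl->socket_id != s->socket_id)
		return 0;	/* keep walking */
	s->msl = msl;
	return 1;		/* match found: stop the walk */
}

/* Meant to be called from within a memory-related callback, where the
 * memory hotplug lock is already held.
 */
static const struct rte_memseg_list *
first_msl_on_socket(int socket_id)
{
	struct socket_search s = { .socket_id = socket_id, .msl = NULL };
	int ret = rte_memseg_list_walk_thread_unsafe(find_socket_msl, &s);

	if (ret < 0)
		return NULL;	/* iterator reported an error */
	if (ret == 0)
		return NULL;	/* walked every list, no match */
	return s.msl;		/* walk stopped early by returning 1 */
}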

lib/librte_eal/linuxapp/eal/eal_memalloc.c

@@ -172,32 +172,6 @@ get_file_size(int fd)
 	return st.st_size;
 }
 
-/* we cannot use rte_memseg_list_walk() here because we will be holding a
- * write lock whenever we enter every function in this file, however copying
- * the same iteration code everywhere is not ideal as well. so, use a lockless
- * copy of memseg list walk here.
- */
-static int
-memseg_list_walk_thread_unsafe(rte_memseg_list_walk_t func, void *arg)
-{
-	struct rte_mem_config *mcfg = rte_eal_get_configuration()->mem_config;
-	int i, ret = 0;
-
-	for (i = 0; i < RTE_MAX_MEMSEG_LISTS; i++) {
-		struct rte_memseg_list *msl = &mcfg->memsegs[i];
-
-		if (msl->base_va == NULL)
-			continue;
-
-		ret = func(msl, arg);
-		if (ret < 0)
-			return -1;
-		if (ret > 0)
-			return 1;
-	}
-	return 0;
-}
-
 /* returns 1 on successful lock, 0 on unsuccessful lock, -1 on error */
 static int lock(int fd, int type)
 {
@@ -900,7 +874,8 @@ eal_memalloc_alloc_seg_bulk(struct rte_memseg **ms, int n_segs, size_t page_sz,
 	wa.socket = socket;
 	wa.segs_allocated = 0;
 
-	ret = memseg_list_walk_thread_unsafe(alloc_seg_walk, &wa);
+	/* memalloc is locked, so it's safe to use thread-unsafe version */
+	ret = rte_memseg_list_walk_thread_unsafe(alloc_seg_walk, &wa);
 	if (ret == 0) {
 		RTE_LOG(ERR, EAL, "%s(): couldn't find suitable memseg_list\n",
 			__func__);
@@ -965,7 +940,10 @@ eal_memalloc_free_seg_bulk(struct rte_memseg **ms, int n_segs)
 		wa.ms = cur;
 		wa.hi = hi;
 
-		walk_res = memseg_list_walk_thread_unsafe(free_seg_walk, &wa);
+		/* memalloc is locked, so it's safe to use thread-unsafe version
+		 */
+		walk_res = rte_memseg_list_walk_thread_unsafe(free_seg_walk,
+				&wa);
 		if (walk_res == 1)
 			continue;
 		if (walk_res == 0)
@@ -1252,7 +1230,8 @@ eal_memalloc_sync_with_primary(void)
 	if (rte_eal_process_type() == RTE_PROC_PRIMARY)
 		return 0;
 
-	if (memseg_list_walk_thread_unsafe(sync_walk, NULL))
+	/* memalloc is locked, so it's safe to call thread-unsafe version */
+	if (rte_memseg_list_walk_thread_unsafe(sync_walk, NULL))
 		return -1;
 	return 0;
 }

lib/librte_eal/rte_eal_version.map

@@ -294,6 +294,7 @@ EXPERIMENTAL {
 	rte_memseg_contig_walk;
 	rte_memseg_contig_walk_thread_unsafe;
 	rte_memseg_list_walk;
+	rte_memseg_list_walk_thread_unsafe;
 	rte_memseg_walk;
 	rte_memseg_walk_thread_unsafe;
 	rte_mp_action_register;