Optimise allocation of USB DMA structures. By default, don't double map
allocations if only one element should be allocated per page cache. Make
one allocation per element compile-time configurable. Fix a comment while
at it.

Suggested by:	ian @
MFC after:	1 week
Hans Petter Selasky 2015-02-02 11:32:15 +00:00
parent add9e3e5d3
commit 8b1ce3297a
Notes: svn2git 2020-12-20 02:59:44 +00:00
svn path=/head/; revision=278074
2 changed files with 28 additions and 4 deletions
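Usage note (an assumption based on how the options hunk below registers the
new knob, not something stated in the commit itself): USB_DMA_SINGLE_ALLOC
becomes an ordinary opt_usb.h kernel option, so enabling the
one-allocation-per-element behaviour should amount to adding an options line
to a custom kernel configuration and rebuilding the kernel, roughly:

options 	USB_DMA_SINGLE_ALLOC	# example only: one USB DMA allocation/mapping per element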

@@ -651,6 +651,7 @@ USB_HOST_ALIGN opt_usb.h
 USB_REQ_DEBUG opt_usb.h
 USB_TEMPLATE opt_usb.h
 USB_VERBOSE opt_usb.h
+USB_DMA_SINGLE_ALLOC opt_usb.h
 USB_EHCI_BIG_ENDIAN_DESC opt_usb.h
 U3G_DEBUG opt_u3g.h
 UKBD_DFLT_KEYMAP opt_ukbd.h

@@ -237,7 +237,11 @@ usbd_transfer_setup_sub_malloc(struct usb_setup_params *parm,
 		n_obj = 1;
 	} else {
 		/* compute number of objects per page */
+#ifdef USB_DMA_SINGLE_ALLOC
+		n_obj = 1;
+#else
 		n_obj = (USB_PAGE_SIZE / size);
+#endif
 		/*
 		 * Compute number of DMA chunks, rounded up
 		 * to nearest one:
@@ -273,15 +277,33 @@ usbd_transfer_setup_sub_malloc(struct usb_setup_params *parm,
 		    &parm->curr_xfer->xroot->dma_parent_tag;
 	}
 
-	if (ppc) {
-		*ppc = parm->xfer_page_cache_ptr;
+	if (ppc != NULL) {
+		if (n_obj != 1)
+			*ppc = parm->xfer_page_cache_ptr;
+		else
+			*ppc = parm->dma_page_cache_ptr;
 	}
 	r = count;			/* set remainder count */
 	z = n_obj * size;		/* set allocation size */
 	pc = parm->xfer_page_cache_ptr;
 	pg = parm->dma_page_ptr;
 
-	for (x = 0; x != n_dma_pc; x++) {
+	if (n_obj == 1) {
+		/*
+		 * Avoid mapping memory twice if only a single object
+		 * should be allocated per page cache:
+		 */
+		for (x = 0; x != n_dma_pc; x++) {
+			if (usb_pc_alloc_mem(parm->dma_page_cache_ptr,
+			    pg, z, align)) {
+				return (1);	/* failure */
+			}
+			/* Make room for one DMA page cache and "n_dma_pg" pages */
+			parm->dma_page_cache_ptr++;
+			pg += n_dma_pg;
+		}
+	} else {
+	for (x = 0; x != n_dma_pc; x++) {
 
 		if (r < n_obj) {
 			/* compute last remainder */
@@ -294,7 +316,7 @@ usbd_transfer_setup_sub_malloc(struct usb_setup_params *parm,
 		}
 		/* Set beginning of current buffer */
 		buf = parm->dma_page_cache_ptr->buffer;
-		/* Make room for one DMA page cache and one page */
+		/* Make room for one DMA page cache and "n_dma_pg" pages */
 		parm->dma_page_cache_ptr++;
 		pg += n_dma_pg;
 
@@ -314,6 +336,7 @@ usbd_transfer_setup_sub_malloc(struct usb_setup_params *parm,
 			}
 			mtx_unlock(pc->tag_parent->mtx);
 		}
 	}
+	}
 
 	parm->xfer_page_cache_ptr = pc;
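To make the trade-off concrete, here is a minimal standalone C sketch
(userland code, hypothetical helper name n_parent_allocs, USB_PAGE_SIZE
assumed to be 4096; only the n_obj computation and the rounding mirror the
patched usbd_transfer_setup_sub_malloc(), everything else is illustrative).
By default, USB_PAGE_SIZE / size objects are packed into one parent
allocation and each object is then mapped a second time through an xfer
page cache; with USB_DMA_SINGLE_ALLOC every object gets its own page cache
and a single usb_pc_alloc_mem() call, trading more, smaller allocations for
a single mapping per object.

/*
 * Illustrative sketch only (not kernel code): how many parent DMA
 * allocations result for "count" objects of "size" bytes each
 * (size < USB_PAGE_SIZE), following the n_obj logic in the first
 * usbd_transfer_setup_sub_malloc() hunk above.
 */
#include <stdio.h>

#define	USB_PAGE_SIZE	4096		/* assumed page size */

static unsigned int
n_parent_allocs(unsigned int count, unsigned int size, int single_alloc)
{
	unsigned int n_obj;

	/* objects per page cache, as in the patched code */
	n_obj = single_alloc ? 1 : (USB_PAGE_SIZE / size);

	/* number of DMA chunks, rounded up to nearest one */
	return ((count + n_obj - 1) / n_obj);
}

int
main(void)
{
	/* e.g. 32 transfer descriptors of 64 bytes each */
	printf("default:              %u parent allocation(s)\n",
	    n_parent_allocs(32, 64, 0));
	printf("USB_DMA_SINGLE_ALLOC: %u parent allocation(s)\n",
	    n_parent_allocs(32, 64, 1));
	return (0);
}

For 32 objects of 64 bytes this prints 1 parent allocation for the default
layout and 32 for the single-allocation layout, which is the overhead the
new option accepts in exchange for not mapping each element twice.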