Use some features of sh(1) which I didn't know about until today ("read" can read two variables at once, and suffix pattern deletion) to make the extract command fork fewer processes. With the portsnap snapshot and the ports tree on swap-backed memory disks on my 1.4GHz laptop, this reduces an extract run from 178800 processes and 195/56/126 seconds of real/user/sys time to 44600 processes and 103/34/60 seconds.
commit 2a569ec900
parent f83695f5e9
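The first hunk below drops two command substitutions (echo piped into cut) per INDEX line by letting "read" split each line into both variables itself, after tr(1) turns the '|' separator into whitespace. A minimal before/after sketch of that idea, using a made-up INDEX line rather than a real snapshot:

    # Old shape: two extra forks per line just to split the fields
    # (the sample line below is invented for illustration)
    echo 'ports-mgmt/portsnap/|0123abcd' | while read LINE; do
        FILE=`echo ${LINE} | cut -f 1 -d '|'`
        HASH=`echo ${LINE} | cut -f 2 -d '|'`
        echo "${FILE} ${HASH}"
    done

    # New shape: one tr(1) for the whole stream, then "read" fills
    # both variables at once with no per-line forks
    echo 'ports-mgmt/portsnap/|0123abcd' | tr '|' ' ' | while read FILE HASH; do
        echo "${FILE} ${HASH}"
    done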
@@ -873,9 +873,7 @@ extract_run() {
                 grep -vE "${REFUSE}" ${WORKDIR}/INDEX
         else
                 cat ${WORKDIR}/INDEX
-        fi | while read LINE; do
-                FILE=`echo ${LINE} | cut -f 1 -d '|'`
-                HASH=`echo ${LINE} | cut -f 2 -d '|'`
+        fi | tr '|' ' ' | while read FILE HASH; do
                 echo ${PORTSDIR}/${FILE}
                 if ! [ -r "${WORKDIR}/files/${HASH}.gz" ]; then
                         echo "files/${HASH}.gz not found -- snapshot corrupt."
@@ -883,8 +881,7 @@ extract_run() {
                 fi
                 case ${FILE} in
                 */)
-                        DIR=`echo ${FILE} | sed -e 's|/$||'`
-                        rm -rf ${PORTSDIR}/${DIR}
+                        rm -rf ${PORTSDIR}/${FILE%/}
                         mkdir -p ${PORTSDIR}/${FILE}
                         tar -xzf ${WORKDIR}/files/${HASH}.gz \
                             -C ${PORTSDIR}/${FILE}
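The second hunk does the same for trailing-slash stripping: suffix pattern deletion (${FILE%/}) happens inside the shell, where the old code forked echo and sed for every directory entry, and the DIR variable disappears entirely. A rough equivalent, with a hypothetical path:

    # Hypothetical directory entry as it appears in the INDEX
    FILE='ports-mgmt/portsnap/'
    # Old: fork echo and sed just to drop the trailing slash
    DIR=`echo ${FILE} | sed -e 's|/$||'`
    # New: suffix pattern deletion, no fork at all
    DIR=${FILE%/}
    echo ${DIR}    # both approaches yield "ports-mgmt/portsnap"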