From: Ashley Whetter <[email protected]>

Signed-off-by: Ashley Whetter <[email protected]>
---
 scripts/.gitignore                 |   1 +
 scripts/Makefile.am                |   4 +
 scripts/libmakepkg/downloads.sh.in | 287 +++++++++++++++++++++++++++++++++++++
 scripts/libmakepkg/utils.sh.in     | 261 ---------------------------------
 scripts/makepkg.sh.in              |   1 +
 5 files changed, 293 insertions(+), 261 deletions(-)
 create mode 100644 scripts/libmakepkg/downloads.sh.in

diff --git a/scripts/.gitignore b/scripts/.gitignore
index ae17035..b15c7ac 100644
--- a/scripts/.gitignore
+++ b/scripts/.gitignore
@@ -8,4 +8,5 @@ pkgdelta
 repo-add
 repo-elephant
 repo-remove
+libmakepkg/downloads.sh
 libmakepkg/utils.sh
diff --git a/scripts/Makefile.am b/scripts/Makefile.am
index 6080508..979de54 100644
--- a/scripts/Makefile.am
+++ b/scripts/Makefile.am
@@ -37,6 +37,7 @@ LIBRARY = \
        library/term_colors.sh
 
 LIBMAKEPKG = \
+       libmakepkg/downloads.sh \
        libmakepkg/utils.sh
 
 # Files that should be removed, but which Automake does not know.
@@ -92,6 +93,7 @@ $(LIBMAKEPKG): Makefile
        @$(BASH_SHELL) -O extglob -n $@
 
 libmakepkg: \
+       $(srcdir)/libmakepkg/downloads.sh \
        $(srcdir)/libmakepkg/utils.sh \
        $(srcdir)/library/parseopts.sh
 
@@ -142,6 +144,7 @@ makepkg-wrapper: \
        Makefile \
        $(srcdir)/makepkg-wrapper.sh.in \
        $(srcdir)/makepkg.sh.in \
+       $(srcdir)/libmakepkg/downloads.sh \
        $(srcdir)/libmakepkg/utils.sh \
        $(srcdir)/library/parseopts.sh \
        | makepkg
@@ -160,6 +163,7 @@ install-data-hook:
                $(RM) makepkg makepkg-wrapper
        $(INSTALL) .lib/makepkg $(DESTDIR)$(bindir)/makepkg
        $(AM_V_at)$(MKDIR_P) $(DESTDIR)$(libmakepkgdir)
+       $(INSTALL) libmakepkg/downloads.sh $(DESTDIR)$(libmakepkgdir)/downloads.sh
        $(INSTALL) libmakepkg/utils.sh $(DESTDIR)$(libmakepkgdir)/utils.sh
        cd $(DESTDIR)$(bindir) && \
                $(RM) repo-elephant && \
diff --git a/scripts/libmakepkg/downloads.sh.in b/scripts/libmakepkg/downloads.sh.in
new file mode 100644
index 0000000..e05d7da
--- /dev/null
+++ b/scripts/libmakepkg/downloads.sh.in
@@ -0,0 +1,287 @@
+#
+#   downloads.sh
+#
+#   Copyright (c) 2013 Pacman Development Team <[email protected]>
+#
+#   This program is free software; you can redistribute it and/or modify
+#   it under the terms of the GNU General Public License as published by
+#   the Free Software Foundation; either version 2 of the License, or
+#   (at your option) any later version.
+#
+#   This program is distributed in the hope that it will be useful,
+#   but WITHOUT ANY WARRANTY; without even the implied warranty of
+#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#   GNU General Public License for more details.
+#
+#   You should have received a copy of the GNU General Public License
+#   along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+
+[ -n "$LIBMAKEPKG_DOWNLOADS_SH" ] && return || LIBMAKEPKG_DOWNLOADS_SH=1
+
+LIBRARY=${LIBRARY:-'@libmakepkgdir@'}
+
+source $LIBRARY/utils.sh
+
+download_local() {
+       local netfile=$1
+       local filepath=$(get_filepath "$netfile")
+
+       if [[ -n "$filepath" ]]; then
+               msg2 "$(gettext "Found %s")" "${filepath##*/}"
+               rm -f "$srcdir/${filepath##*/}"
+               ln -s "$filepath" "$srcdir/"
+       else
+               local filename=$(get_filename "$netfile")
+               error "$(gettext "%s was not found in the build directory and is not a URL.")" "$filename"
+               exit 1 # $E_MISSING_FILE
+       fi
+}
+
+download_file() {
+       local netfile=$1
+
+       local filepath=$(get_filepath "$netfile")
+       if [[ -n "$filepath" ]]; then
+               msg2 "$(gettext "Found %s")" "${filepath##*/}"
+               rm -f "$srcdir/${filepath##*/}"
+               ln -s "$filepath" "$srcdir/"
+               return
+       fi
+
+       local proto=$(get_protocol "$netfile")
+
+       # find the client we should use for this URL
+       local dlcmd
+       dlcmd=$(get_downloadclient "$proto") || exit $?
+
+       local filename=$(get_filename "$netfile")
+       local url=$(get_url "$netfile")
+
+       if [[ $proto = "scp" ]]; then
+               # scp downloads should not pass the protocol in the url
+               url="${url##*://}"
+       fi
+
+       msg2 "$(gettext "Downloading %s...")" "$filename"
+
+       # temporary download file, default to last component of the URL
+       local dlfile="${url##*/}"
+
+       # replace %o by the temporary dlfile if it exists
+       if [[ $dlcmd = *%o* ]]; then
+               dlcmd=${dlcmd//\%o/\"$filename.part\"}
+               dlfile="$filename.part"
+       fi
+       # add the URL, either in place of %u or at the end
+       if [[ $dlcmd = *%u* ]]; then
+               dlcmd=${dlcmd//\%u/\"$url\"}
+       else
+               dlcmd="$dlcmd \"$url\""
+       fi
+
+       local ret=0
+       eval "$dlcmd || ret=\$?"
+       if (( ret )); then
+               [[ ! -s $dlfile ]] && rm -f -- "$dlfile"
+               error "$(gettext "Failure while downloading %s")" "$filename"
+               plain "$(gettext "Aborting...")"
+               exit 1
+       fi
+
+       # rename the temporary download file to the final destination
+       if [[ $dlfile != "$filename" ]]; then
+               mv -f "$SRCDEST/$dlfile" "$SRCDEST/$filename"
+       fi
+
+       rm -f "$srcdir/$filename"
+       ln -s "$SRCDEST/$filename" "$srcdir/"
+}
+
+download_bzr() {
+       local netfile=$1
+
+       local url=$(get_url "$netfile")
+       url=${url##*bzr+}
+       url=${url%%#*}
+
+       local repo=$(get_filename "$netfile")
+       local displaylocation="$url"
+
+       local dir=$(get_filepath "$netfile")
+       [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
+
+       if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
+               msg2 "$(gettext "Branching %s ...")" "${displaylocation}"
+               if ! bzr branch "$url" "$dir" --no-tree --use-existing-dir; then
+                       error "$(gettext "Failure while branching %s")" "${displaylocation}"
+                       plain "$(gettext "Aborting...")"
+                       exit 1
+               fi
+       elif (( ! HOLDVER )); then
+               # Make sure we are fetching the right repo
+               local distant_url="$(bzr info $url 2> /dev/null | sed -n '/branch root/{s/  branch root: //p;q;}')"
+               local local_url="$(bzr config parent_location -d $dir)"
+               if [[ -n $distant_url ]]; then
+                       if [[ $distant_url != "$local_url" ]]; then
+                               error "$(gettext "%s is not a branch of %s")" "$dir" "$url"
+                               plain "$(gettext "Aborting...")"
+                               exit 1
+                       fi
+               else
+                       if [[ $url != "$local_url" ]] ; then
+                               error "$(gettext "%s is not a branch of %s")" "$dir" "$url"
+                               error "$(gettext "The local URL is %s")" "$local_url"
+                               plain "$(gettext "Aborting...")"
+                               exit 1
+                       fi
+               fi
+               msg2 "$(gettext "Pulling %s ...")" "${displaylocation}"
+               cd_safe "$dir"
+               if ! bzr pull "$url" --overwrite; then
+                       # only warn on failure to allow offline builds
+                       warning "$(gettext "Failure while pulling %s")" "${displaylocation}"
+               fi
+       fi
+}
+
+download_git() {
+       local netfile=$1
+
+       local dir=$(get_filepath "$netfile")
+       [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
+
+       local repo=$(get_filename "$netfile")
+
+       local url=$(get_url "$netfile")
+       url=${url##*git+}
+       url=${url%%#*}
+
+       if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
+               msg2 "$(gettext "Cloning %s %s repo...")" "${repo}" "git"
+               if ! git clone --mirror "$url" "$dir"; then
+                       error "$(gettext "Failure while downloading %s %s repo")" "${repo}" "git"
+                       plain "$(gettext "Aborting...")"
+                       exit 1
+               fi
+       elif (( ! HOLDVER )); then
+               cd_safe "$dir"
+               # Make sure we are fetching the right repo
+               if [[ "$url" != "$(git config --get remote.origin.url)" ]] ; then
+                       error "$(gettext "%s is not a clone of %s")" "$dir" "$url"
+                       plain "$(gettext "Aborting...")"
+                       exit 1
+               fi
+               msg2 "$(gettext "Updating %s %s repo...")" "${repo}" "git"
+               if ! git fetch --all -p; then
+                       # only warn on failure to allow offline builds
+                       warning "$(gettext "Failure while updating %s %s repo")" "${repo}" "git"
+               fi
+       fi
+}
+
+download_hg() {
+       local netfile=$1
+
+       local dir=$(get_filepath "$netfile")
+       [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
+
+       local repo=$(get_filename "$netfile")
+
+       local url=$(get_url "$netfile")
+       url=${url##*hg+}
+       url=${url%%#*}
+
+       if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
+               msg2 "$(gettext "Cloning %s %s repo...")" "${repo}" "hg"
+               if ! hg clone -U "$url" "$dir"; then
+                       error "$(gettext "Failure while downloading %s %s repo")" "${repo}" "hg"
+                       plain "$(gettext "Aborting...")"
+                       exit 1
+               fi
+       elif (( ! HOLDVER )); then
+               msg2 "$(gettext "Updating %s %s repo...")" "${repo}" "hg"
+               cd_safe "$dir"
+               if ! hg pull; then
+                       # only warn on failure to allow offline builds
+                       warning "$(gettext "Failure while updating %s %s repo")" "${repo}" "hg"
+               fi
+       fi
+}
+
+download_svn() {
+       local netfile=$1
+
+       local fragment=${netfile#*#}
+       if [[ $fragment = "$netfile" ]]; then
+               unset fragment
+       fi
+
+       local dir=$(get_filepath "$netfile")
+       [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
+
+       local repo=$(get_filename "$netfile")
+
+       local url=$(get_url "$netfile")
+       if [[ $url != svn+ssh* ]]; then
+               url=${url##*svn+}
+       fi
+       url=${url%%#*}
+
+       if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
+               msg2 "$(gettext "Cloning %s %s repo...")" "${repo}" "svn"
+               mkdir -p "$dir/.makepkg"
+               if ! svn checkout --config-dir "$dir/.makepkg" "$url" "$dir"; then
+                       error "$(gettext "Failure while downloading %s %s repo")" "${repo}" "svn"
+                       plain "$(gettext "Aborting...")"
+                       exit 1
+               fi
+       elif (( ! HOLDVER )); then
+               msg2 "$(gettext "Updating %s %s repo...")" "${repo}" "svn"
+               cd_safe "$dir"
+               if ! svn update; then
+                       # only warn on failure to allow offline builds
+                       warning "$(gettext "Failure while updating %s %s repo")" "${repo}" "svn"
+               fi
+       fi
+}
+
+download_sources() {
+       msg "$(gettext "Retrieving sources...")"
+
+       local GET_VCS=1
+       if [[ $1 == "fast" ]]; then
+               GET_VCS=0
+       fi
+
+       local netfile
+       for netfile in "${source[@]}"; do
+               pushd "$SRCDEST" &>/dev/null
+
+               local proto=$(get_protocol "$netfile")
+               case "$proto" in
+                       local)
+                               download_local "$netfile"
+                               ;;
+                       bzr*)
+                               (( GET_VCS )) && download_bzr "$netfile"
+                               ;;
+                       git*)
+                               (( GET_VCS )) && download_git "$netfile"
+                               ;;
+                       hg*)
+                               (( GET_VCS )) && download_hg "$netfile"
+                               ;;
+                       svn*)
+                               (( GET_VCS )) && download_svn "$netfile"
+                               ;;
+                       *)
+                               download_file "$netfile"
+                               ;;
+               esac
+
+               popd &>/dev/null
+       done
+}
+
+# vim: set ts=2 sw=2 noet:
diff --git a/scripts/libmakepkg/utils.sh.in b/scripts/libmakepkg/utils.sh.in
index f7df309..4eba0e6 100644
--- a/scripts/libmakepkg/utils.sh.in
+++ b/scripts/libmakepkg/utils.sh.in
@@ -236,81 +236,6 @@ get_downloadclient() {
        printf "%s\n" "$agent"
 }
 
-download_local() {
-       local netfile=$1
-       local filepath=$(get_filepath "$netfile")
-
-       if [[ -n "$filepath" ]]; then
-               msg2 "$(gettext "Found %s")" "${filepath##*/}"
-               rm -f "$srcdir/${filepath##*/}"
-               ln -s "$filepath" "$srcdir/"
-       else
-               local filename=$(get_filename "$netfile")
-               error "$(gettext "%s was not found in the build directory and is not a URL.")" "$filename"
-               exit 1 # $E_MISSING_FILE
-       fi
-}
-
-download_file() {
-       local netfile=$1
-
-       local filepath=$(get_filepath "$netfile")
-       if [[ -n "$filepath" ]]; then
-               msg2 "$(gettext "Found %s")" "${filepath##*/}"
-               rm -f "$srcdir/${filepath##*/}"
-               ln -s "$filepath" "$srcdir/"
-               return
-       fi
-
-       local proto=$(get_protocol "$netfile")
-
-       # find the client we should use for this URL
-       local dlcmd
-       dlcmd=$(get_downloadclient "$proto") || exit $?
-
-       local filename=$(get_filename "$netfile")
-       local url=$(get_url "$netfile")
-
-       if [[ $proto = "scp" ]]; then
-               # scp downloads should not pass the protocol in the url
-               url="${url##*://}"
-       fi
-
-       msg2 "$(gettext "Downloading %s...")" "$filename"
-
-       # temporary download file, default to last component of the URL
-       local dlfile="${url##*/}"
-
-       # replace %o by the temporary dlfile if it exists
-       if [[ $dlcmd = *%o* ]]; then
-               dlcmd=${dlcmd//\%o/\"$filename.part\"}
-               dlfile="$filename.part"
-       fi
-       # add the URL, either in place of %u or at the end
-       if [[ $dlcmd = *%u* ]]; then
-               dlcmd=${dlcmd//\%u/\"$url\"}
-       else
-               dlcmd="$dlcmd \"$url\""
-       fi
-
-       local ret=0
-       eval "$dlcmd || ret=\$?"
-       if (( ret )); then
-               [[ ! -s $dlfile ]] && rm -f -- "$dlfile"
-               error "$(gettext "Failure while downloading %s")" "$filename"
-               plain "$(gettext "Aborting...")"
-               exit 1
-       fi
-
-       # rename the temporary download file to the final destination
-       if [[ $dlfile != "$filename" ]]; then
-               mv -f "$SRCDEST/$dlfile" "$SRCDEST/$filename"
-       fi
-
-       rm -f "$srcdir/$filename"
-       ln -s "$SRCDEST/$filename" "$srcdir/"
-}
-
 extract_file() {
        local file=$1
        # do not rely on extension for file type
@@ -364,53 +289,6 @@ extract_file() {
        fi
 }
 
-download_bzr() {
-       local netfile=$1
-
-       local url=$(get_url "$netfile")
-       url=${url##*bzr+}
-       url=${url%%#*}
-
-       local repo=$(get_filename "$netfile")
-       local displaylocation="$url"
-
-       local dir=$(get_filepath "$netfile")
-       [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
-       if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
-               msg2 "$(gettext "Branching %s ...")" "${displaylocation}"
-               if ! bzr branch "$url" "$dir" --no-tree --use-existing-dir; then
-                       error "$(gettext "Failure while branching %s")" "${displaylocation}"
-                       plain "$(gettext "Aborting...")"
-                       exit 1
-               fi
-       elif (( ! HOLDVER )); then
-               # Make sure we are fetching the right repo
-               local distant_url="$(bzr info $url 2> /dev/null | sed -n '/branch root/{s/  branch root: //p;q;}')"
-               local local_url="$(bzr config parent_location -d $dir)"
-               if [[ -n $distant_url ]]; then
-                       if [[ $distant_url != "$local_url" ]]; then
-                               error "$(gettext "%s is not a branch of %s")" "$dir" "$url"
-                               plain "$(gettext "Aborting...")"
-                               exit 1
-                       fi
-               else
-                       if [[ $url != "$local_url" ]] ; then
-                               error "$(gettext "%s is not a branch of %s")" "$dir" "$url"
-                               error "$(gettext "The local URL is %s")" "$local_url"
-                               plain "$(gettext "Aborting...")"
-                               exit 1
-                       fi
-               fi
-               msg2 "$(gettext "Pulling %s ...")" "${displaylocation}"
-               cd_safe "$dir"
-               if ! bzr pull "$url" --overwrite; then
-                       # only warn on failure to allow offline builds
-                       warning "$(gettext "Failure while pulling %s")" "${displaylocation}"
-               fi
-       fi
-}
-
 extract_bzr() {
        local netfile=$1
 
@@ -450,41 +328,6 @@ extract_bzr() {
        popd &>/dev/null
 }
 
-download_git() {
-       local netfile=$1
-
-       local dir=$(get_filepath "$netfile")
-       [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
-       local repo=$(get_filename "$netfile")
-
-       local url=$(get_url "$netfile")
-       url=${url##*git+}
-       url=${url%%#*}
-
-       if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
-               msg2 "$(gettext "Cloning %s %s repo...")" "${repo}" "git"
-               if ! git clone --mirror "$url" "$dir"; then
-                       error "$(gettext "Failure while downloading %s %s repo")" "${repo}" "git"
-                       plain "$(gettext "Aborting...")"
-                       exit 1
-               fi
-       elif (( ! HOLDVER )); then
-               cd_safe "$dir"
-               # Make sure we are fetching the right repo
-               if [[ "$url" != "$(git config --get remote.origin.url)" ]] ; then
-                       error "$(gettext "%s is not a clone of %s")" "$dir" "$url"
-                       plain "$(gettext "Aborting...")"
-                       exit 1
-               fi
-               msg2 "$(gettext "Updating %s %s repo...")" "${repo}" "git"
-               if ! git fetch --all -p; then
-                       # only warn on failure to allow offline builds
-                       warning "$(gettext "Failure while updating %s %s repo")" "${repo}" "git"
-               fi
-       fi
-}
-
 extract_git() {
        local netfile=$1
 
@@ -539,35 +382,6 @@ extract_git() {
        popd &>/dev/null
 }
 
-download_hg() {
-       local netfile=$1
-
-       local dir=$(get_filepath "$netfile")
-       [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
-       local repo=$(get_filename "$netfile")
-
-       local url=$(get_url "$netfile")
-       url=${url##*hg+}
-       url=${url%%#*}
-
-       if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
-               msg2 "$(gettext "Cloning %s %s repo...")" "${repo}" "hg"
-               if ! hg clone -U "$url" "$dir"; then
-                       error "$(gettext "Failure while downloading %s %s repo")" "${repo}" "hg"
-                       plain "$(gettext "Aborting...")"
-                       exit 1
-               fi
-       elif (( ! HOLDVER )); then
-               msg2 "$(gettext "Updating %s %s repo...")" "${repo}" "hg"
-               cd_safe "$dir"
-               if ! hg pull; then
-                       # only warn on failure to allow offline builds
-                       warning "$(gettext "Failure while updating %s %s repo")" "${repo}" "hg"
-               fi
-       fi
-}
-
 extract_hg() {
        local netfile=$1
 
@@ -608,43 +422,6 @@ extract_hg() {
        popd &>/dev/null
 }
 
-download_svn() {
-       local netfile=$1
-
-       local fragment=${netfile#*#}
-       if [[ $fragment = "$netfile" ]]; then
-               unset fragment
-       fi
-
-       local dir=$(get_filepath "$netfile")
-       [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
-       local repo=$(get_filename "$netfile")
-
-       local url=$(get_url "$netfile")
-       if [[ $url != svn+ssh* ]]; then
-               url=${url##*svn+}
-       fi
-       url=${url%%#*}
-
-       if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
-               msg2 "$(gettext "Cloning %s %s repo...")" "${repo}" "svn"
-               mkdir -p "$dir/.makepkg"
-               if ! svn checkout --config-dir "$dir/.makepkg" "$url" "$dir"; then
-                       error "$(gettext "Failure while downloading %s %s repo")" "${repo}" "svn"
-                       plain "$(gettext "Aborting...")"
-                       exit 1
-               fi
-       elif (( ! HOLDVER )); then
-               msg2 "$(gettext "Updating %s %s repo...")" "${repo}" "svn"
-               cd_safe "$dir"
-               if ! svn update; then
-                       # only warn on failure to allow offline builds
-                       warning "$(gettext "Failure while updating %s %s repo")" "${repo}" "svn"
-               fi
-       fi
-}
-
 extract_svn() {
        local netfile=$1
 
@@ -689,44 +466,6 @@ extract_svn() {
        popd &>/dev/null
 }
 
-download_sources() {
-       msg "$(gettext "Retrieving sources...")"
-
-       local GET_VCS=1
-       if [[ $1 == "fast" ]]; then
-               GET_VCS=0
-       fi
-
-       local netfile
-       for netfile in "${source[@]}"; do
-               pushd "$SRCDEST" &>/dev/null
-
-               local proto=$(get_protocol "$netfile")
-               case "$proto" in
-                       local)
-                               download_local "$netfile"
-                               ;;
-                       bzr*)
-                               (( GET_VCS )) && download_bzr "$netfile"
-                               ;;
-                       git*)
-                               (( GET_VCS )) && download_git "$netfile"
-                               ;;
-                       hg*)
-                               (( GET_VCS )) && download_hg "$netfile"
-                               ;;
-                       svn*)
-                               (( GET_VCS )) && download_svn "$netfile"
-                               ;;
-                       *)
-                               download_file "$netfile"
-                               ;;
-               esac
-
-               popd &>/dev/null
-       done
-}
-
 # Automatically update pkgver variable if a pkgver() function is provided
 # Re-sources the PKGBUILD afterwards to allow for other variables that use $pkgver
 update_pkgver() {
diff --git a/scripts/makepkg.sh.in b/scripts/makepkg.sh.in
index 8d982ba..0c22774 100644
--- a/scripts/makepkg.sh.in
+++ b/scripts/makepkg.sh.in
@@ -95,6 +95,7 @@ PACMAN_OPTS=
 
 shopt -s extglob
 
+source $LIBRARY/downloads.sh
 source $LIBRARY/utils.sh
 
 # PROGRAM START
-- 
1.8.4


Reply via email to