Sorry for the delay in my reply, I've been busy with school.

The attached patch should make GSRC work just fine with make 3.82.

Now, instead of using
 $ make http//example.com/foo.tar.gz
it's
 $ make download/http/example.com/foo.tar.gz

While it's a little more... verbose, I like it because it seems
slightly less "magic"; protocols are foreign to make, directories aren't.

Happy hacking!
~ Luke Shumaker

At Mon, 15 Aug 2011 11:30:21 +0100,
Brian Gough wrote:
> 
> At Fri, 12 Aug 2011 19:15:32 -0400,
> Luke Shumaker wrote:
> > 
> > What specifically did 3.82 break?
> > 
> 
> Details here:
> http://lists.gnu.org/archive/html/bug-make/2010-12/msg00012.html
> http://savannah.gnu.org/bugs/?func=detailitem&item_id=31002
> 

=== modified file 'gar.lib.mk'
--- gar.lib.mk  2011-08-08 16:47:44 +0000
+++ gar.lib.mk  2011-10-17 03:22:09 +0000
@@ -52,16 +52,17 @@
 SIGFILE_URLS = $(foreach DIR,$(FILE_SITES) $(SIGFILE_DIRS) 
$(MASTER_DIRS),$(addprefix $(DIR),$(SIGFILES)))
 PATCHFILE_URLS = $(foreach DIR,$(FILE_SITES) $(PATCHFILE_DIRS) 
$(MASTER_DIRS),$(addprefix $(DIR),$(PATCHFILES)))
 
+ALL_URLS = $(DISTFILE_URLS) $(SIGFILE_URLS) $(PATCHFILE_URLS)
 # FIXME: doesn't handle colons that are actually in the URL.
 # Need to do some URI-encoding before we change the http:// to
 # http// etc.
-URLS = $(subst ://,//,$(DISTFILE_URLS) $(SIGFILE_URLS) $(PATCHFILE_URLS))
-
+URL_FILES = $(subst ://,/,$(ALL_URLS)) # This is where any encoding happens
+URLS = $(addprefix $(DOWNLOADDIR)/,$(URL_FILES))
 
 # Download the file if and only if it doesn't have a preexisting
 # checksum file.  Loop through available URLs and stop when you
 # get one that doesn't return an error code.
-$(DOWNLOADDIR)/%:  
+$(DOWNLOADDIR)/%:
        @if test -f $(COOKIEDIR)/checksum-$*; then : ; else \
                echo " ==> Grabbing $@"; \
                for i in $(filter %/$*,$(URLS)); do  \
@@ -84,23 +85,23 @@
 WGET_OPTS = -c --no-check-certificate --passive-ftp -U "GSRC/1.0"
 
 # download an http URL (colons omitted)
-http//%: 
+$(DOWNLOADDIR)/http/%:
        wget $(WGET_OPTS) -O $(DOWNLOADDIR)/$(notdir $*).partial http://$*
        mv $(DOWNLOADDIR)/$(notdir $*).partial $(DOWNLOADDIR)/$(notdir $*)
 
 # download an https URL (colons omitted)
-https//%: 
+$(DOWNLOADDIR)/https/%:
        wget $(WGET_OPTS) -O $(DOWNLOADDIR)/$(notdir $*).partial https://$*
        mv $(DOWNLOADDIR)/$(notdir $*).partial $(DOWNLOADDIR)/$(notdir $*)
 
 # download an ftp URL (colons omitted)
-ftp//%: 
+$(DOWNLOADDIR)/ftp/%:
        wget $(WGET_OPTS) -O $(DOWNLOADDIR)/$(notdir $*).partial ftp://$*
        mv $(DOWNLOADDIR)/$(notdir $*).partial $(DOWNLOADDIR)/$(notdir $*)
 
 # link to a local copy of the file
 # (absolute path)
-file///%: 
+$(DOWNLOADDIR)/file//%: 
        @if test -f /$*; then \
                ln -sf "/$*" $(DOWNLOADDIR)/$(notdir $*); \
        else \
@@ -109,7 +110,7 @@
 
 # link to a local copy of the file
 # (relative path)
-file//%: 
+$(DOWNLOADDIR)/file/%: 
        @if test -f $*; then \
                ln -sf $(CURDIR)/$* $(DOWNLOADDIR)/$(notdir $*); \
        else \
@@ -118,26 +119,26 @@
 
 # Using Jeff Waugh's rsync rule.
 # DOES NOT PRESERVE SYMLINKS!
-rsync//%: 
+$(DOWNLOADDIR)/rsync/%: 
        rsync -azvL --progress rsync://$* $(DOWNLOADDIR)/
 
 # Download a directory tree as a tarball.
 RSYNC_OPTS ?= -az
 RSYNC_PATH ?=
-rsynctree//%:
+$(DOWNLOADDIR)/rsynctree/%:
        mkdir -p $(DOWNLOADDIR)/rsync
        rsync -v --progress $(RSYNC_OPTS) $(RSYNC_PATH) $(DOWNLOADDIR)/rsync
        cd $(DOWNLOADDIR)/rsync && tar -czvf ../out *
        mv $(DOWNLOADDIR)/out $(DOWNLOADDIR)/$*
 
 # Using Jeff Waugh's scp rule
-scp//%:
+$(DOWNLOADDIR)/scp/%:
        scp -C $* $(DOWNLOADDIR)/
 
 # Check out source from CVS.
 CVS_CO_OPTS ?= -D$(GARVERSION) -P
 CVS_MODULE ?= $(GARNAME)
-cvs//%:
+$(DOWNLOADDIR)/cvs/%:
        mkdir -p $(DOWNLOADDIR)/cvs
        cd $(DOWNLOADDIR)/cvs && \
                cvs -d$(CVS_ROOT) login && \
@@ -147,7 +148,7 @@
 # Check out source from Subversion.
 SVN_REVISION ?= "{$(GARVERSION)}"
 SVN_CO_OPTS ?= -r $(SVN_REVISION)
-svnco//%:
+$(DOWNLOADDIR)/svnco/%:
        mkdir -p $(DOWNLOADDIR)/svn
        cd $(DOWNLOADDIR)/svn && \
                svn co $(SVN_CO_OPTS) $(SVN_PATH) $(DISTNAME) && \
@@ -155,7 +156,7 @@
 
 # Check out source from Darcs.
 DARCS_GET_OPTS ?= --partial --to-match "date $(GARVERSION)"
-darcs//%:
+$(DOWNLOADDIR)/darcs/%:
        mkdir -p $(DOWNLOADDIR)/darcs
        cd $(DOWNLOADDIR)/darcs && \
                darcs get $(DARCS_GET_OPTS) $(DARCS_PATH) $(DISTNAME) && \
@@ -163,7 +164,7 @@
 
 # Check out source from Git.
 GIT_REVISION ?= v$(GARVERSION)
-git//%:
+$(DOWNLOADDIR)/git/%:
        mkdir -p $(DOWNLOADDIR)/git
        cd $(DOWNLOADDIR)/git && \
                git clone $(GIT_PATH) $(DISTNAME) && \
@@ -173,7 +174,7 @@
 # Check out source from Mercurial.
 HG_REVISION ?= $(GARVERSION)
 HG_CLONE_OPTS ?= -r "$(HG_REVISION)"
-hg//%:
+$(DOWNLOADDIR)/hg/%:
        mkdir -p $(DOWNLOADDIR)/hg
        cd $(DOWNLOADDIR)/hg && \
                hg clone $(HG_CLONE_OPTS) $(HG_PATH) $(DISTNAME) && \
@@ -182,7 +183,7 @@
 # Check out source from Bazaar.
 BZR_REVISION ?= before:date:$(GARVERSION)
 BZR_CHECKOUT_OPTS ?= -r "$(BZR_REVISION)" --lightweight
-bzr//%:
+$(DOWNLOADDIR)/bzr/%:
        mkdir -p $(DOWNLOADDIR)/bzr
        cd $(DOWNLOADDIR)/bzr && \
                bzr checkout $(BZR_CHECKOUT_OPTS) $(BZR_PATH) $(DISTNAME) && \

Reply via email to