Hello community,

here is the log from the commit of package obs-service-download_files for 
openSUSE:Factory checked in at 2012-01-04 07:25:30
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/obs-service-download_files (Old)
 and      /work/SRC/openSUSE:Factory/.obs-service-download_files.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "obs-service-download_files", Maintainer is ""

Changes:
--------
--- /work/SRC/openSUSE:Factory/obs-service-download_files/obs-service-download_files.changes    2011-09-23 02:14:58.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.obs-service-download_files.new/obs-service-download_files.changes    2012-01-04 07:25:31.000000000 +0100
@@ -1,0 +2,35 @@
+Wed Nov 23 10:03:54 UTC 2011 - [email protected]
+
+- import fixes for recompress & enforcelocal case (by coolo)
+
+-------------------------------------------------------------------
+Tue Nov 22 13:51:58 UTC 2011 - [email protected]
+
+- silence during directory change
+
+-------------------------------------------------------------------
+Fri Aug 26 09:34:38 UTC 2011 - [email protected]
+
+- allow configuring a local cache directory
+
+-------------------------------------------------------------------
+Fri Aug 26 08:54:00 UTC 2011 - [email protected]
+
+- add two new options:
+    enforceupstream=yes: package turns into broken state when
+                         upstream hosts a different file.
+    enforcelocal=yes: package turns into broken state when
+                      the file was not part of the commit
+
+-------------------------------------------------------------------
+Wed Aug 24 08:27:04 UTC 2011 - [email protected]
+
+- support special download handling for sourceforge.net
+
+-------------------------------------------------------------------
+Thu Aug 18 09:39:28 UTC 2011 - [email protected]
+
+- use own user agent
+- handle --outdir . situation
+
+-------------------------------------------------------------------

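The enforceupstream and enforcelocal options mentioned in the changelog are ordinary service parameters, so a package would switch them on through its _service file. The snippet below is only an illustrative sketch of such a file; the parameter names come from download_files.service further down, while the heredoc and the file location are assumptions:

  cat > _service <<'EOF'
  <services>
    <service name="download_files">
      <param name="enforceupstream">yes</param>
      <param name="enforcelocal">yes</param>
    </service>
  </services>
  EOF
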
Old:
----
  _service:format_spec_file:obs-service-download_files.spec

New:
----
  download_files.rc

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ obs-service-download_files.spec ++++++
--- /var/tmp/diff_new_pack.2zs1iO/_old  2012-01-04 07:25:32.000000000 +0100
+++ /var/tmp/diff_new_pack.2zs1iO/_new  2012-01-04 07:25:32.000000000 +0100
@@ -21,10 +21,11 @@
 License:        GPL v2 or later
 Group:          Development/Tools/Building
 Summary:        An OBS source service: download files
-Version:        0.1
+Version:        0.3
 Release:        3
 Source:         download_files
 Source1:        download_files.service
+Source2:        download_files.rc
 Requires:       wget diffutils
 Requires:       build >= 2011.05.24
 BuildRoot:      %{_tmppath}/%{name}-%{version}-build
@@ -44,10 +45,25 @@
 mkdir -p $RPM_BUILD_ROOT/usr/lib/obs/service
 install -m 0755 %{SOURCE0} $RPM_BUILD_ROOT/usr/lib/obs/service
 install -m 0644 %{SOURCE1} $RPM_BUILD_ROOT/usr/lib/obs/service
+mkdir -p $RPM_BUILD_ROOT/etc/obs/services
+install -m 0644 %{SOURCE2} $RPM_BUILD_ROOT/etc/obs/services/download_files
+mkdir -p $RPM_BUILD_ROOT/var/cache/obs/download_files/file{,name}
+
+%pre
+/usr/sbin/groupadd -r obsrun 2> /dev/null || :
+/usr/sbin/useradd -r -o -s /bin/false -c "User for build service backend" -d /usr/lib/obs -g obsrun obsrun 2> /dev/null || :
 
 %files
 %defattr(-,root,root)
 %dir /usr/lib/obs
 /usr/lib/obs/service
+%dir /etc/obs
+%dir /etc/obs/services
+%config(noreplace) /etc/obs/services/*
+%dir /var/cache/obs
+%defattr(-,obsrun,obsrun)
+%dir /var/cache/obs/download_files
+%dir /var/cache/obs/download_files/file
+%dir /var/cache/obs/download_files/filename
 
 %changelog

++++++ download_files ++++++
--- /var/tmp/diff_new_pack.2zs1iO/_old  2012-01-04 07:25:32.000000000 +0100
+++ /var/tmp/diff_new_pack.2zs1iO/_new  2012-01-04 07:25:32.000000000 +0100
@@ -2,7 +2,18 @@
 
 # downloads files specified in spec files
 
+# config options for this host ?
+if [ -f /etc/obs/services/download_files ]; then
+  . /etc/obs/services/download_files
+fi
+# config options for this user ?
+if [ -f "$HOME"/.obs/download_files ]; then
+  . "$HOME"/.obs/download_files
+fi
+
 DORECOMPRESS=""
+ENFORCELOCAL=""
+ENFORCEUPSTREAM=""
 while test $# -gt 0; do
   case $1 in
     *-recompress)
@@ -11,6 +22,18 @@
       fi
       shift
     ;;
+    *-enforcelocal)
+      if [ "$2" == "yes" ]; then
+        ENFORCELOCAL="yes"
+      fi
+      shift
+    ;;
+    *-enforceupstream)
+      if [ "$2" == "yes" ]; then
+        ENFORCEUPSTREAM="yes"
+      fi
+      shift
+    ;;
     *-outdir)
       MYOUTDIR="$2"
       shift
@@ -53,18 +76,45 @@
   echo $BASENAME
 }
 
+WGET="/usr/bin/wget -4 --no-check-certificate -q --timeout=30 --tries=2 --no-directories"
+
 RETURN=0
 for i in *.spec; do
   [ "$i" == "*.spec" ] && exit 0
 
  for url in `perl -I/usr/lib/build -MBuild -e Build::show /usr/lib/build/configs/default.conf "$i" sources`; do
+    MYCACHEDIRECTORY="$CACHEDIRECTORY"
     PROTOCOL="${url%%:*}"
+    SAMEFILEAFTERCOMPRESSION=
    if [ "$PROTOCOL" != "http" -a "$PROTOCOL" != "https" -a "$PROTOCOL" != "ftp" ]; then
       continue
     fi
+
+    # Some web sites need a special user agent
+    if echo $url | egrep -q '^http://sourceforge.net/'; then
+      # default wget user agent required, but /download suffix must be added
+      :
+#      BN=`basename $url`
+#      WGET="$WGET -O $BN"
+#      urlextension="/download"
+    else
+      # We tell the server that we are an OBS tool by default
+      WGET="$WGET -U 'OBS-wget'"
+      urlextension=""
+    fi
+
     cd "$MYOUTDIR"
-    if [ -z "$DORECOMPRESS" ]; then
-      if ! /usr/bin/wget -4 --no-check-certificate -q "$url"; then
+
+    # check local cache if configured
+    HASH=`echo "$url" | sha256sum | cut -d\  -f 1`
+    if [ -n "$MYCACHEDIRECTORY" -a -f "$MYCACHEDIRECTORY/file/$HASH" ]; then
+      RECOMPRESS=""
+      FILE=`cat "$MYCACHEDIRECTORY/filename/$HASH"`
+      echo "INFO: Take file from local cache $FILE"
+      cp -a "$MYCACHEDIRECTORY/file/$HASH" ./"$FILE"
+      MYCACHEDIRECTORY="" # do not copy back
+    elif [ -z "$DORECOMPRESS" ]; then
+      if ! $WGET "$url$urlextension"; then
         echo "ERROR: Fail to download $url"
         exit 1
       fi
@@ -72,16 +122,16 @@
       FILE="${url##*/}"
     else
       FORMAT="${url##*\.}"
-      if /usr/bin/wget -4 --no-check-certificate -q "$url"; then
+      if $WGET "$url$urlextension"; then
         RECOMPRESS=""
         FILE="${url}"
-      elif /usr/bin/wget -4 --no-check-certificate -q "${url%$FORMAT}gz"; then
+      elif $WGET "${url%$FORMAT}gz$urlextension"; then
         RECOMPRESS="$FORMAT"
         FILE="${url%$FORMAT}gz"
-      elif /usr/bin/wget -4 --no-check-certificate -q "${url%$FORMAT}bz2"; then
+      elif $WGET "${url%$FORMAT}bz2$urlextension"; then
         RECOMPRESS="$FORMAT"
         FILE="${url%$FORMAT}bz2"
-      elif /usr/bin/wget -4 --no-check-certificate -q "${url%$FORMAT}xz"; then
+      elif $WGET "${url%$FORMAT}xz$urlextension"; then
         RECOMPRESS="$FORMAT"
         FILE="${url%$FORMAT}xz"
       else
@@ -91,8 +141,11 @@
       FILE="${FILE##*/}"
     fi
 
-    # remove all file files which are indendical to committed files
-    [ -f "$OLDPWD/$FILE" ] && cmp "$FILE" "$OLDPWD/$FILE" && rm "$FILE"
+    # fill local cache, if configured
+    if [ -n "$MYCACHEDIRECTORY" -a ! -f "$MYCACHEDIRECTORY/file/$HASH" ]; then
+      cp -a "$FILE" "$MYCACHEDIRECTORY/file/$HASH" && \
+      echo "$FILE" > "$MYCACHEDIRECTORY/filename/$HASH"
+    fi
 
     if [ -n "$RECOMPRESS" ]; then
       tempfile=`mktemp`
@@ -124,12 +177,35 @@
         # do the compression
         cat "$tempfile" | $COMPRESS > "$file_name$SUFFIX" || RETURN=1
         rm "$FILE" # remove downloaded file
+       FILE="$file_name$SUFFIX"
+      else
+        # original file name
+        FILE="${url##*/}"
+        SAMEFILEAFTERCOMPRESSION=1
       fi
 
       # cleanup
       rm -f "$tempfile" "$tempoldfile"
     fi
-    cd -
+
+    # remove all files which are identical to committed files, but not the same instance (when --outdir .)
+    if [ -f "$OLDPWD/$FILE" ]; then
+       if [ ! "$FILE" -ef "$OLDPWD/$FILE" ]; then
+         if [ -z "$SAMEFILEAFTERCOMPRESSION" ]; then
+           if cmp "$FILE" "$OLDPWD/$FILE"; then
+              rm "$FILE"
+           elif [ -n "$ENFORCEUPSTREAM" ]; then
+             echo "ERROR: download_files is configured to fail when the upstream file differs from the committed file... this is the case!"
+             exit 1
+           fi
+         fi
+       fi
+    elif [ -n "$ENFORCELOCAL" ]; then
+      echo "ERROR: download_files is configured to fail when the file was not committed... this is the case!"
+      exit 1
+    fi
+
+    cd - > /dev/null
   done
 done
 

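To make the cache layout used above concrete: the script keys both cache subdirectories by the sha256 hash of the source URL, keeping the payload under file/ and the original file name under filename/. A minimal sketch for inspecting (or pre-seeding) a single cache entry by hand, assuming the packaged default CACHEDIRECTORY and a made-up URL:

  CACHEDIRECTORY=/var/cache/obs/download_files
  url=http://example.org/foo-1.0.tar.bz2           # hypothetical source URL
  HASH=`echo "$url" | sha256sum | cut -d\  -f 1`   # same hashing as download_files
  cat "$CACHEDIRECTORY/filename/$HASH"             # original file name, if cached
  cp -a "$CACHEDIRECTORY/file/$HASH" ./foo-1.0.tar.bz2
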
++++++ download_files.rc ++++++
#
# Define a cache directory here to avoid repeating downloads of the
# same file. This works on the server and on the client side.
# WARNING: you will not notice when the upstream file is changing!
# WARNING: you need to create two directories inside when changing from the default:
#          mkdir -p file{,name}
#
#CACHEDIRECTORY="/var/cache/obs/download_files/"
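The same setting also works per user, since the script sources "$HOME"/.obs/download_files as well. A sketch of a client-side setup, assuming a cache location under the user's home directory (the path itself is only an example):

  mkdir -p ~/.cache/download_files/file{,name}
  mkdir -p ~/.obs
  echo 'CACHEDIRECTORY="$HOME/.cache/download_files"' >> ~/.obs/download_files
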
++++++ download_files.service ++++++
--- /var/tmp/diff_new_pack.2zs1iO/_old  2012-01-04 07:25:32.000000000 +0100
+++ /var/tmp/diff_new_pack.2zs1iO/_new  2012-01-04 07:25:32.000000000 +0100
@@ -6,6 +6,14 @@
    <description>In case the right compression is not available on server, do recompress the file as needed.</description>
     <allowedvalue>yes</allowedvalue>
   </parameter>
+  <parameter name="enforceupstream">
+    <description>Fail when the upstream file differs from the locally committed file. Package state will become "broken".</description>
+    <allowedvalue>yes</allowedvalue>
+  </parameter>
+  <parameter name="enforcelocal">
+    <description>Fail when the file was not committed; the download will happen anyway to verify that the file is identical to upstream. Package state will become "broken".</description>
+    <allowedvalue>yes</allowedvalue>
+  </parameter>
 
 </service>
 

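Based on the option parsing in download_files above, the service can also be exercised directly inside a checked-out package to try the new parameters; the flag names follow the case patterns in the script, and the checkout path below is only a placeholder:

  cd ~/my-package-checkout        # hypothetical working copy containing a .spec file
  /usr/lib/obs/service/download_files --enforceupstream yes --enforcelocal yes --outdir .
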
-- 
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
