Hello community,
here is the log from the commit of package obs-service-download_files for
openSUSE:Factory checked in at 2013-06-25 17:15:41
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/obs-service-download_files (Old)
and /work/SRC/openSUSE:Factory/.obs-service-download_files.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "obs-service-download_files"
Changes:
--------
---
/work/SRC/openSUSE:Factory/obs-service-download_files/obs-service-download_files.changes
2013-06-24 09:35:22.000000000 +0200
+++
/work/SRC/openSUSE:Factory/.obs-service-download_files.new/obs-service-download_files.changes
2013-06-25 17:15:54.000000000 +0200
@@ -1,0 +2,9 @@
+Mon Jun 24 12:09:57 UTC 2013 - [email protected]
+
+- Fix download from github by explicitly specifying output file.
+- Use local scope where possible.
+- Add bash comparisons instead of posix ones as bash invokes them
+  faster.
+- Remove urlextension variable which was always empty.
+
+-------------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ obs-service-download_files.spec ++++++
--- /var/tmp/diff_new_pack.B5c7Cv/_old 2013-06-25 17:15:55.000000000 +0200
+++ /var/tmp/diff_new_pack.B5c7Cv/_new 2013-06-25 17:15:55.000000000 +0200
@@ -20,7 +20,7 @@
Summary: An OBS source service: download files
License: GPL-2.0+
Group: Development/Tools/Building
-Version: 0.4
+Version: 0.5
Release: 0
Source: download_files
Source1: download_files.service
++++++ download_files ++++++
--- /var/tmp/diff_new_pack.B5c7Cv/_old 2013-06-25 17:15:55.000000000 +0200
+++ /var/tmp/diff_new_pack.B5c7Cv/_new 2013-06-25 17:15:55.000000000 +0200
@@ -1,15 +1,12 @@
-#!/bin/bash
+#!/usr/bin/env bash
# downloads files specified in spec files
# config options for this host ?
-if [ -f /etc/obs/services/download_files ]; then
- . /etc/obs/services/download_files
-fi
+[[ -f /etc/obs/services/download_files ]] && . /etc/obs/services/download_files
+
# config options for this user ?
-if [ -f "$HOME"/.obs/download_files ]; then
- . "$HOME"/.obs/download_files
-fi
+[[ -f "$HOME"/.obs/download_files ]] && . "$HOME"/.obs/download_files
DORECOMPRESS=""
ENFORCELOCAL=""
@@ -17,19 +14,19 @@
while test $# -gt 0; do
case $1 in
*-recompress)
- if [ "$2" == "yes" ]; then
+ if [[ "$2" == "yes" ]]; then
DORECOMPRESS="yes"
fi
shift
;;
*-enforcelocal)
- if [ "$2" == "yes" ]; then
+ if [[ "$2" == "yes" ]]; then
ENFORCELOCAL="yes"
fi
shift
;;
*-enforceupstream)
- if [ "$2" == "yes" ]; then
+ if [[ "$2" == "yes" ]]; then
ENFORCEUPSTREAM="yes"
fi
shift
@@ -39,41 +36,41 @@
shift
;;
*)
- echo Unknown parameter $1.
- echo 'this service is not accepting parameters currently'
+ echo "Unknown parameter $1."
+ echo "This service is not accepting parameters currently."
exit 1
;;
esac
shift
done
-if [ ! -d "$MYOUTDIR" ]; then
- echo "ERROR: output directory does not exist"
+if [[ ! -d "${MYOUTDIR}" ]]; then
+ echo "ERROR: output directory \"${MYOUTDIR}\" does not exist"
exit 1
fi
function uncompress_file() {
local input=$1
local output=$2
+ local uncompress="cat"
+ local basename="${input}"
- UNCOMPRESS="cat"
- BASENAME="$input"
- if [ "${input%.gz}" != "$input" ]; then
- UNCOMPRESS="gunzip -c"
- BASENAME="${input%.gz}"
- elif [ "${input%.tgz}" != "$input" ]; then
- UNCOMPRESS="gunzip -c"
- BASENAME="${input%.tgz}.tar"
- elif [ "${input%.bz2}" != "$input" ]; then
- UNCOMPRESS="bunzip2 -c"
- BASENAME="${input%.bz2}"
- elif [ "${FILE%.xz}" != "$input" ]; then
- UNCOMPRESS="xz -dc"
- BASENAME="${input%.xz}"
+ if [[ "${input%.gz}" != "${input}" ]]; then
+ uncompress="gunzip -c"
+ basename="${input%.gz}"
+ elif [[ "${input%.tgz}" != "${input}" ]]; then
+ uncompress="gunzip -c"
+ basename="${input%.tgz}.tar"
+ elif [[ "${input%.bz2}" != "${input}" ]]; then
+ uncompress="bunzip2 -c"
+ basename="${input%.bz2}"
+ elif [[ "${FILE%.xz}" != "${input}" ]]; then
+ uncompress="xz -dc"
+ basename="${input%.xz}"
fi
- $UNCOMPRESS "$input" > "$output"
- echo $BASENAME
+ $uncompress "$input" > "$output"
+ echo $basename
}
@@ -86,17 +83,12 @@
MYCACHEDIRECTORY="$CACHEDIRECTORY"
PROTOCOL="${url%%:*}"
SAMEFILEAFTERCOMPRESSION=
- if [ "$PROTOCOL" != "http" -a "$PROTOCOL" != "https" -a "$PROTOCOL" !=
"ftp" ]; then
- continue
- fi
+ [[ "${PROTOCOL}" != "http" && "${PROTOCOL}" != "https" && "${PROTOCOL}" !=
"ftp" ]] && continue
# Some web sites need a special user agent
if echo $url | egrep -q '^http://sourceforge.net/'; then
- # default wget user agent required, but /download suffix must be added
+ # default wget user agent required
:
-# BN=`basename $url`
-# WGET="$WGET -O $BN"
-# urlextension="/download"
else
# determine intended filename from URL fragment, e.g.
# https://www.example.com/gpgkeys.asc#/%{name}.keyring
@@ -110,54 +102,52 @@
# We tell the server that we are an OBS tool by default
WGET="$WGET -U 'OBS-wget'"
- urlextension=""
fi
cd "$MYOUTDIR"
# check local cache if configured
- HASH=`echo "$url" | sha256sum | cut -d\ -f 1`
+ HASH=$(echo "$url" | sha256sum | cut -d\ -f 1)
if [ -n "$MYCACHEDIRECTORY" -a -f "$MYCACHEDIRECTORY/file/$HASH" ]; then
RECOMPRESS=""
- FILE=`cat "$MYCACHEDIRECTORY/filename/$HASH"`
+ FILE=$(cat "$MYCACHEDIRECTORY/filename/$HASH")
echo "INFO: Taking file from local cache $FILE"
cp -a "$MYCACHEDIRECTORY/file/$HASH" ./"$FILE"
- MYCACHEDIRECTORY="" # do not copy back
elif [ -z "$DORECOMPRESS" ]; then
- if ! $WGET "$url$urlextension"; then
- echo "ERROR: Failed to download $url"
+ if ! $WGET "$url$urlextension" -O "${url##*/}"; then
+ echo "ERROR: Failed to download \"$url\""
exit 1
fi
RECOMPRESS=""
FILE="${url##*/}"
else
FORMAT="${url##*\.}"
- if $WGET "$url$urlextension"; then
+ if $WGET "$url" -O "${url}"; then
RECOMPRESS=""
FILE="${url}"
- elif $WGET "${url%$FORMAT}gz$urlextension"; then
+ elif $WGET "${url%$FORMAT}gz" -O "${url%$FORMAT}gz"; then
RECOMPRESS="$FORMAT"
FILE="${url%$FORMAT}gz"
- elif $WGET "${url%$FORMAT}bz2$urlextension"; then
+ elif $WGET "${url%$FORMAT}bz2" -O "${url%$FORMAT}bz2"; then
RECOMPRESS="$FORMAT"
FILE="${url%$FORMAT}bz2"
- elif $WGET "${url%$FORMAT}xz$urlextension"; then
+ elif $WGET "${url%$FORMAT}xz" -O "${url%$FORMAT}xz"; then
RECOMPRESS="$FORMAT"
FILE="${url%$FORMAT}xz"
else
- echo "ERROR: Fail to download $url or any other compression format"
+ echo "ERROR: Failed to download $url or any other compression format"
exit 1
fi
FILE="${FILE##*/}"
fi
# fill local cache, if configured
- if [ -n "$MYCACHEDIRECTORY" -a ! -f "$MYCACHEDIRECTORY/file/$HASH" ]; then
+ if [[ -n "$MYCACHEDIRECTORY" && ! -f "$MYCACHEDIRECTORY/file/$HASH" ]];
then
cp -a "$FILE" "$MYCACHEDIRECTORY/file/$HASH" && \
echo "$FILE" > "$MYCACHEDIRECTORY/filename/$HASH"
fi
- if [ -n "$RECOMPRESS" ]; then
+ if [[ -n "$RECOMPRESS" ]]; then
tempfile=`mktemp`
file_name=`uncompress_file "$FILE" "$tempfile"`
@@ -180,7 +170,7 @@
COMPRESS="cat -"
SUFFIX=""
else
- echo "ERROR: Unknown compression $RECOMPRESS"
+ echo "ERROR: Unknown compression \"$RECOMPRESS\""
RETURN=1
fi
--
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]