V3 merge for most python scripts. fmod inclusion/linkage/packaging changes.

This commit is contained in:
Shyotl
2014-12-18 18:01:41 -06:00
parent 32706065ac
commit 44f8f17763
24 changed files with 1045 additions and 1735 deletions

View File

@@ -1,472 +0,0 @@
#!/bin/sh
# This is the build script used by Linden Lab's automated build system.
#
set -x
export INSTALL_USE_HTTP_FOR_SCP=true
export PATH=/bin:/usr/bin:$PATH
arch=`uname | cut -b-6`
here=`echo $0 | sed 's:[^/]*$:.:'`
# Hack: in the case of Snowglobe 1.x trunk and releases, we continue to use 2009 as the year so as to separate them from Snowglobe 2.x trunk and releases
#year=`date +%Y`
year="2009"
branch=`svn info | grep '^URL:' | sed 's:.*/::'`
revision=`svn info | grep '^Revision:' | sed 's/.*: //'`
top=`cd "$here/../../.." && pwd`
[ x"$WGET_CACHE" = x ] && export WGET_CACHE=/var/tmp/parabuild/wget
[ x"$S3GET_URL" = x ] && export S3GET_URL=http://viewer-source-downloads.s3.amazonaws.com/$year
[ x"$S3PUT_URL" = x ] && export S3PUT_URL=https://s3.amazonaws.com/viewer-source-downloads/$year
[ x"$S3SYMBOL_URL" = x ] && export S3SYMBOL_URL=https://s3.amazonaws.com/automated-builds-secondlife-com/binaries
[ x"$PUBLIC_URL" = x ] && export PUBLIC_URL=http://secondlife.com/developers/opensource/downloads/$year
[ x"$PUBLIC_EMAIL" = x ] && export PUBLIC_EMAIL=sldev-commits@lists.secondlife.com
# Make sure a command worked and bail out if not, reporting the failure to parabuild
fail()
{
echo "BUILD FAILED" $@
exit 1
}
pass()
{
echo "BUILD SUCCESSFUL"
exit 0
}
# Locking to avoid contention with u-s-c (update-system-config)
LOCK_PROCESS=
locking_available()
{
test -n "$LOCK_CREATE" -a -x "$LOCK_CREATE"\
-a -n "$LOCK_TOUCH" -a -x "$LOCK_TOUCH"\
-a -n "$LOCK_REMOVE" -a -x "$LOCK_REMOVE"
}
acquire_lock()
{
if locking_available
then
if "$LOCK_CREATE" /var/lock/update-system-config --retry 99
then
"$LOCK_TOUCH" /var/lock/update-system-config &
LOCK_PROCESS="$!"
else
fail acquire lock
fi
else
true
fi
}
release_lock()
{
if locking_available
then
if test x"$LOCK_PROCESS" != x
then
kill "$LOCK_PROCESS"
"$LOCK_REMOVE" /var/lock/update-system-config
else
echo No Lock Acquired >&2
fi
else
true
fi
}
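# Note: acquire_lock keeps the lock fresh by running "$LOCK_TOUCH" in the
# background and remembering its PID in LOCK_PROCESS; release_lock kills that
# process and removes the lock file. When the LOCK_* helpers are not
# installed, both functions are no-ops and the build proceeds unlocked.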
get_asset()
{
mkdir -p "$WGET_CACHE" || fail creating WGET_CACHE
local tarball=`basename "$1"`
test -r "$WGET_CACHE/$tarball" || ( cd "$WGET_CACHE" && curl --location --remote-name "$1" || fail getting $1 )
case "$tarball" in
*.zip) unzip -qq -d "$top" -o "$WGET_CACHE/$tarball" || fail unzip $tarball ;;
*.tar.gz|*.tgz) tar -C "$top" -xzf "$WGET_CACHE/$tarball" || fail untar $tarball ;;
*) fail unrecognized filetype: $tarball ;;
esac
}
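# Archives are cached in $WGET_CACHE and downloaded only when missing;
# the archive is (re-)extracted into $top on every call.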
s3_available()
{
test -x "$helpers/hg/bin/s3get.sh" -a -x "$helpers/hg/bin/s3put.sh" -a -r "$helpers/hg/bin/s3curl.py"
}
build_dir_Darwin()
{
echo build-darwin-i386
}
build_dir_Linux()
{
echo viewer-linux-i686-`echo $1 | tr A-Z a-z`
}
build_dir_CYGWIN()
{
echo build-vc80
}
installer_Darwin()
{
ls -1td "$(build_dir_Darwin Release)/newview/"*.dmg 2>/dev/null | sed 1q
}
installer_Linux()
{
ls -1td "$(build_dir_Linux Release)/newview/"*.tar.bz2 2>/dev/null | sed 1q
}
installer_CYGWIN()
{
d=$(build_dir_CYGWIN Release)
p=$(sed 's:.*=::' "$d/newview/Release/touched.bat")
echo "$d/newview/Release/$p"
}
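# These per-platform helpers are dispatched dynamically later in the script,
# e.g. build_dir=`build_dir_$arch $variant` and package=`installer_$arch`,
# so supporting a new platform means adding matching build_dir_<arch> and
# installer_<arch> functions plus a case branch below.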
# Deal with aborts, interrupts, etc.
trap fail 1 2 3 14 15
# Check location
cd "$here/../.."
test -x ../linden/scripts/automated_build_scripts/opensrc-build.sh\
|| fail 'The parent dir of your checkout needs to be named "linden"'
. doc/asset_urls.txt
get_asset "$SLASSET_ART"
update_version_files=
# Set up platform specific stuff
case "$arch" in
# Note that we can only build the "Release" variant for Darwin, because of a compiler bug:
# ld: bl out of range (-16777272 max is +/-16M)
# from __static_initialization_and_destruction_0(int, int)at 0x033D319C
# in __StaticInit of
# indra/build-darwin-universal/newview/SecondLife.build/Debug/Second Life.build/Objects-normal/ppc/llvoicevisualizer.o
# to ___cxa_atexit$island_2 at 0x023D50F8
# in __text of
# indra/build-darwin-universal/newview/SecondLife.build/Debug/Second Life.build/Objects-normal/ppc/Second Life
# in __static_initialization_and_destruction_0(int, int)
# from indra/build-darwin-universal/newview/SecondLife.build/Debug/Second Life.build/Objects-normal/ppc/llvoicevisualizer.o
Darwin)
helpers=/usr/local/buildscripts/shared/latest
variants="Release"
cmake_generator="Xcode"
fmod=fmodapi375mac
fmod_tar="$fmod.zip"
fmod_so=libfmod.a
fmod_lib=lib
target_dirs="libraries/universal-darwin/lib_debug
libraries/universal-darwin/lib_release
libraries/universal-darwin/lib_release_client"
other_archs="$S3GET_URL/$branch/$revision/CYGWIN $S3GET_URL/$branch/$revision/Linux"
symbolfiles=
mail="$helpers"/mail.py
all_done="$helpers"/all_done.py
test -r "$helpers/update_version_files.py" && update_version_files="$helpers/update_version_files.py"
libs_asset="$SLASSET_LIBS_DARWIN"
s3put="$helpers"/hg/bin/s3put.sh
;;
CYGWIN)
helpers=/cygdrive/c/buildscripts/shared/latest
variants="Debug RelWithDebInfo Release ReleaseSSE2"
#variants="Release"
cmake_generator="vc80"
fmod=fmodapi375win
fmod_tar=fmodapi375win.zip
fmod_so=fmodvc.lib
fmod_lib=lib
target_dirs="libraries/i686-win32/lib/debug
libraries/i686-win32/lib/release"
other_archs="$S3GET_URL/$branch/$revision/Darwin $S3GET_URL/$branch/$revision/Linux"
symbolfiles="newview/Release/secondlife-bin.pdb newview/Release/secondlife-bin.map newview/Release/secondlife-bin.exe"
export PATH="/cygdrive/c/Python25:/cygdrive/c/Program Files/Cmake 2.6/bin":$PATH
export PERL="/cygdrive/c/Perl/bin/perl.exe"
export S3CURL="C:\\buildscripts\\shared\\latest\\hg\\bin\\s3curl.py"
export SIGN_PY="C:\\buildscripts\\shared\\latest\\code-signing\\sign.py"
export CURL="C:\\cygwin\\bin\\curl.exe"
mail="C:\\buildscripts\\shared\\latest\\mail.py"
all_done="C:\\buildscripts\\shared\\latest\\all_done.py"
test -r "$helpers/update_version_files.py" && update_version_files="C:\\buildscripts\\shared\\latest\\update_version_files.py"
libs_asset="$SLASSET_LIBS_WIN32"
s3put="$helpers"/hg/bin/s3put.sh
;;
Linux)
helpers=/var/opt/parabuild/buildscripts/shared/latest
if [ x"$CXX" = x ]
then
if test -x /usr/bin/g++-4.1
then
if test -x /usr/bin/distcc
then
export CXX="/usr/bin/distcc /usr/bin/g++-4.1"
else
export CXX=/usr/bin/g++-4.1
fi
fi
fi
variants="Debug RelWithDebInfo Release ReleaseSSE2"
#variants="Release"
cmake_generator="Unix Makefiles"
fmod=fmodapi375linux
fmod_tar="$fmod".tar.gz
fmod_so=libfmod-3.75.so
fmod_lib=.
target_dirs="libraries/i686-linux/lib_debug
libraries/i686-linux/lib_release
libraries/i686-linux/lib_release_client"
other_archs="$S3GET_URL/$branch/$revision/Darwin $S3GET_URL/$branch/$revision/CYGWIN"
symbolfiles=
mail="$helpers"/mail.py
all_done="$helpers"/all_done.py
test -r "$helpers/update_version_files.py" && update_version_files="$helpers/update_version_files.py"
# Change the DISTCC_DIR to be somewhere that the parabuild process can write to
if test -r /etc/debian_version
then
[ x"$DISTCC_DIR" = x ] && export DISTCC_DIR=/var/tmp/parabuild
#case `cat /etc/debian_version` in
#3.*) [ x"$DISTCC_HOSTS" = x ]\
# && export DISTCC_HOSTS="build-linux-1/3
# station30/2,lzo" ;;
#4.*) [ x"$DISTCC_HOSTS" = x ]\
# && export DISTCC_HOSTS="build-linux-6/2,lzo
# build-linux-2/2,lzo
# build-linux-3/2,lzo
# build-linux-4/2,lzo
# build-linux-5/2,lzo
# build-linux-7/2,lzo
# build-linux-8/2,lzo
# build-linux-9/2,lzo" ;;
#esac
# Temp fix for Linux so that parabuild passes: use the new Linux build farm
export hostname=`hostname -f`
export phx_DISTCC_HOSTS="build-linux0.phx.lindenlab.com/2 build-linux1.phx.lindenlab.com/2 build-linux2.phx.lindenlab.com/2 build-linux3.phx.lindenlab.com/2 build-linux5.phx.lindenlab.com/2 build-linux5.phx.lindenlab.com/2 build-linux6.phx.lindenlab.com/2 "
export dfw_DISTCC_HOSTS="build-linux7.dfw.lindenlab.com/2 build-linux8.dfw.lindenlab.com/2 build-linux9.dfw.lindenlab.com/2 build-linux10.dfw.lindenlab.com/2 build-linux11.dfw.lindenlab.com/2 build-linux12.dfw.lindenlab.com/2 build-linux13.dfw.lindenlab.com/2 build-linux14.dfw.lindenlab.com/2 build-linux15.dfw.lindenlab.com/2"
case "$hostname" in
*.dfw.*) export DISTCC_HOSTS="$dfw_DISTCC_HOSTS" ;;
*.phx.*) export DISTCC_HOSTS="$phx_DISTCC_HOSTS" ;;
esac
fi
libs_asset="$SLASSET_LIBS_LINUXI386"
s3put="$helpers"/hg/bin/s3put.sh
;;
*) fail undefined $arch ;;
esac
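# Per-platform variables set above:
#   fmod, fmod_tar    - FMOD 3.75 package name and archive to download
#   fmod_so, fmod_lib - library file name and its subdirectory under api/
#   target_dirs       - library directories the FMOD library is copied into
#   other_archs       - token-file URLs for the other platforms, passed to all_done.py
#   symbolfiles       - symbol files uploaded to S3 after a successful build (only set for Windows here)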
acquire_lock
trap release_lock EXIT
get_asset "http://www.fmod.org/files/fmod3/$fmod_tar"
case "$arch" in
Darwin)
# Create fat binary on Mac...
if lipo -create -output "../$fmod"/api/$fmod_lib/libfmod-universal.a\
"../$fmod"/api/$fmod_lib/libfmod.a\
"../$fmod"/api/$fmod_lib/libfmodx86.a
then
mv "../$fmod"/api/$fmod_lib/libfmod.a "../$fmod"/api/$fmod_lib/libfmodppc.a
mv "../$fmod"/api/$fmod_lib/libfmod-universal.a "../$fmod"/api/$fmod_lib/libfmod.a
echo Created fat binary
else
fail running lipo
fi
;;
CYGWIN)
# Install QuickTime. This will fail outside of Linden's network
scripts/install.py quicktime
;;
esac
# Only run this if the script exists
if test x"$update_version_files" = x
then
echo "Private Build..." > indra/build.log
[ x"$VIEWER_CHANNEL" = x ] && export VIEWER_CHANNEL="CommunityDeveloper"
else
# By rights, this should be in the branched source tree, but for now it is one of the helper scripts
python "$update_version_files" --verbose --src-root=. --viewer > indra/build.log
[ x"$VIEWER_CHANNEL" = x ] && export VIEWER_CHANNEL="Snowglobe Test Build"
fi
# First, go into the directory where the code was checked out by Parabuild
cd indra
# This is the way it works now, but it will soon work in a variant-dependent way
for target_dir in $target_dirs
do
mkdir -p "../$target_dir"
cp -f "../../$fmod/api/$fmod_lib/$fmod_so" "../$target_dir"
done
mkdir -p "../libraries/include"
cp -f "../../$fmod/api/inc/"* "../libraries/include"
# Special Windows case
test -r "../../$fmod/api/fmod.dll" && cp -f "../../$fmod/api/fmod.dll" newview
# Now run the build command over all variants
succeeded=true
### TEST CODE - remove when done
### variants=
### echo "Artificial build failure to test notifications" > build.log
### succeeded=false
### END TEST CODE
for variant in $variants
do
build_dir=`build_dir_$arch $variant`
rm -rf "$build_dir"
get_asset "$libs_asset" # This plunks stuff into the build dir, so have to restore it now.
# SNOW-713 : hack around a Darwin lib 1.23.4.0 tarball issue introduced by the move from universal to i386
# Should be removed when libs are rebuilt cleanly
if test -r build-darwin-universal-Release
then
mv build-darwin-universal-Release/ "$build_dir/"
fi
# End SNOW-713 hack
# This is the way it will work in future
#for target_dir in $target_dirs
#do
# mkdir -p "$build_dir/$target_dir"
# cp "../../$fmod/api/$fmod_lib/$fmod_so" "$build_dir/$target_dir"
#done
#mkdir -p "$build_dir/libraries/include"
#cp "../../$fmod/api/inc/"* "$build_dir/libraries/include"
echo "==== $variant ====" >> build.log
if ./develop.py \
--unattended \
--incredibuild \
-t $variant \
-G "$cmake_generator" \
configure \
-DVIEWER_CHANNEL:STRING="$VIEWER_CHANNEL"\
-DVIEWER_LOGIN_CHANNEL:STRING="$VIEWER_CHANNEL"\
-DPACKAGE:BOOL=ON >>build.log 2>&1
then
if ./develop.py\
--unattended\
--incredibuild \
-t $variant\
-G "$cmake_generator" \
build prepare >>build.log 2>&1
then
if ./develop.py\
--unattended\
--incredibuild \
-t $variant\
-G "$cmake_generator" \
build package >>build.log 2>&1
then
# run tests if needed
true
else
succeeded=false
fi
else
succeeded=false
fi
else
succeeded=false
fi
done
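# Each variant runs three develop.py steps - configure, "build prepare",
# and "build package" - all appending to build.log; a failure at any step
# sets succeeded=false, but the remaining variants are still attempted.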
# Check status and upload results to S3
subject=
if $succeeded
then
package=`installer_$arch`
test -r "$package" || fail not found: $package
package_file=`echo $package | sed 's:.*/::'`
if s3_available
then
# Create an empty token file and populate it with the usable URLs: this will be emailed when all_done...
cp /dev/null "$arch"
echo "$PUBLIC_URL/$branch/$revision/$package_file" >> "$arch"
echo "$PUBLIC_URL/$branch/$revision/good-build.$arch" >> "$arch"
"$s3put" "$package" "$S3PUT_URL/$branch/$revision/$package_file" binary/octet-stream public-read\
|| fail Uploading "$package"
"$s3put" build.log "$S3PUT_URL/$branch/$revision/good-build.$arch" text/plain public-read\
|| fail Uploading build.log
"$s3put" "$arch" "$S3PUT_URL/$branch/$revision/$arch" text/plain public-read\
|| fail Uploading token file
for symbolfile in $symbolfiles
do
targetfile="`echo $symbolfile | sed 's:.*/::'`"
"$s3put" "$build_dir/$symbolfile" "$S3SYMBOL_URL/$revision/$targetfile" binary/octet-stream public-read\
|| fail Uploading "$symbolfile"
done
if python "$all_done"\
curl\
"$S3GET_URL/$branch/$revision/$arch"\
$other_archs > message
then
subject="Successful Build for $year/$branch ($revision)"
fi
else
true s3 is not available
fi
else
if s3_available
then
"$s3put" build.log "$S3PUT_URL/$branch/$revision/failed-build.$arch" text/plain public-read\
|| fail Uploading build.log
subject="Failed Build for $year/$branch ($revision) on $arch"
cat >message <<EOF
Build for $branch ($revision) failed for $arch.
Please see the build log for details:
$PUBLIC_URL/$branch/$revision/failed-build.$arch
EOF
else
true s3 is not available
fi
fi
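# S3 upload layout: the installer and a per-arch token file go under
# $S3PUT_URL/$branch/$revision/, the build log is stored there as
# good-build.$arch on success or failed-build.$arch on failure, and any
# symbol files go to $S3SYMBOL_URL/$revision/.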
# We have something to say...
if [ x"$subject" != x ]
then
# Extract change list since last build
if [ x"$PARABUILD_CHANGE_LIST_NUMBER" = x ]
then
echo "No change information available" >> message
elif [ x"$PARABUILD_PREVIOUS_CHANGE_LIST_NUMBER" = x ]
then
( cd .. && svn log --verbose --stop-on-copy --limit 50 ) >> message
else
if [ "$PARABUILD_PREVIOUS_CHANGE_LIST_NUMBER" -lt "$PARABUILD_CHANGE_LIST_NUMBER" ]
then
range=`expr 1 + "$PARABUILD_PREVIOUS_CHANGE_LIST_NUMBER"`:"$PARABUILD_CHANGE_LIST_NUMBER"
else
range="$PARABUILD_CHANGE_LIST_NUMBER"
fi
( cd .. && svn log --verbose -r"$range" ) >> message
fi
# $PUBLIC_EMAIL can be a list, so no quotes
python "$mail" "$subject" $PUBLIC_EMAIL < message
fi
if $succeeded
then
pass
else
fail
fi

View File

@@ -1,280 +0,0 @@
#!/usr/bin/python
"""\
@file public_fetch_tarballs.py
@author Rob Lanphier
@date 2009-05-30
@brief Fetch + extract tarballs and zipfiles listed in doc/asset_urls.txt
$LicenseInfo:firstyear=2009&license=viewergpl$
Copyright (c) 2009, Linden Research, Inc.
Second Life Viewer Source Code
The source code in this file ("Source Code") is provided by Linden Lab
to you under the terms of the GNU General Public License, version 2.0
("GPL"), unless you have obtained a separate licensing agreement
("Other License"), formally executed by you and Linden Lab. Terms of
the GPL can be found in doc/GPL-license.txt in this distribution, or
online at http://secondlifegrid.net/programs/open_source/licensing/gplv2
There are special exceptions to the terms and conditions of the GPL as
it is applied to this Source Code. View the full text of the exception
in the file doc/FLOSS-exception.txt in this software distribution, or
online at
http://secondlifegrid.net/programs/open_source/licensing/flossexception
By copying, modifying or distributing this software, you acknowledge
that you have read and understood your obligations described above,
and agree to abide by those obligations.
ALL LINDEN LAB SOURCE CODE IS PROVIDED "AS IS." LINDEN LAB MAKES NO
WARRANTIES, EXPRESS, IMPLIED OR OTHERWISE, REGARDING ITS ACCURACY,
COMPLETENESS OR PERFORMANCE.
$/LicenseInfo$
"""
import sys
import os.path
# Look for indra/lib/python in all possible parent directories ...
# This is an improvement over the setup-path.py method used previously:
# * the script may be located anywhere inside the source tree
# * it doesn't depend on the current directory
# * it doesn't depend on another file being present.
def add_indra_lib_path():
root = os.path.realpath(__file__)
# always insert the directory of the script in the search path
dir = os.path.dirname(root)
if dir not in sys.path:
sys.path.insert(0, dir)
# Now go look for indra/lib/python in the parent dirs
while root != os.path.sep:
root = os.path.dirname(root)
dir = os.path.join(root, 'indra', 'lib', 'python')
if os.path.isdir(dir):
if dir not in sys.path:
sys.path.insert(0, dir)
return root
else:
print >>sys.stderr, "This script is not inside a valid installation."
sys.exit(1)
base_dir = add_indra_lib_path()
print base_dir
import os
import sys
import re
import urllib
import zipfile
import tarfile
import optparse
import tempfile
import indra.util.helpformatter
# load + parse doc/asset_urls.txt
def get_asset_urls():
asset_urls={}
f = open(os.path.join(base_dir,"doc", "asset_urls.txt"))
for line in f:
line=line.strip()
(name, value)=re.split("=", line, 1)
asset_urls[name]=value
return asset_urls
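# Note: doc/asset_urls.txt is assumed to be a flat list of NAME=URL lines
# (no spaces around "="), so it can be sourced by the shell build script
# (". doc/asset_urls.txt") as well as parsed here. Splitting on the first
# "=" keeps any later "=" in the URL intact, e.g. (placeholder URL):
#   re.split("=", "SLASSET_ART=http://host/art.tar.gz?v=1", 1)
#   # -> ['SLASSET_ART', 'http://host/art.tar.gz?v=1']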
# Derive the local filename for a URL, placed under targetdir
def get_asset_filename_from_url(asseturl, targetdir):
i = asseturl.rfind('/')
filename = os.path.join(targetdir, asseturl[i+1:])
return filename
# Extract .zip file to targetdir. Called by extract_archive_sans_linden.
def extract_zipfile_sans_linden(filename, targetdir):
archive = zipfile.ZipFile(filename, 'r')
names = archive.namelist()
for path in names:
if(path=="linden/"):
pass
target = os.path.join(targetdir, re.sub("linden/", "", path))
subdir = os.path.dirname(target)
if not os.path.exists(subdir):
os.makedirs(subdir)
if not os.path.exists(target):
fp = open(target, 'wb')
fp.write(archive.read(path))
fp.close()
archive.close()
# Extract .tar.gz file to targetdir. Called by extract_archive_sans_linden.
def extract_tarball_sans_linden(filename, targetdir):
archive = tarfile.TarFile.open(filename, 'r')
# get a series of TarInfo objects
tarentries=archive.getmembers()
for tarentry in tarentries:
target = re.sub(r'^(\./)?(linden/?)?', "", tarentry.name)
if(target==""):
continue
fulltarget=os.path.join(targetdir, target)
subdir = os.path.dirname(fulltarget)
if not os.path.exists(subdir):
os.makedirs(subdir)
if not os.path.exists(fulltarget):
# Reset the name property on the TarInfo object, so it writes the
# file exactly where we want it. It's hard to tell for sure whether this
# property is intended to be written to, but it works for now.
tarentry.name=fulltarget
# Calling TarFile.extract with the "path" parameter doesn't work as
# we might hope, because the path components in the tarball get
# appended to the "path" parameter. Hence we pass in the TarInfo
# object with the munged name property.
archive.extract(tarentry)
archive.close()
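# For illustration, the prefix-stripping above maps entries like
#   "./linden/indra/newview/foo.cpp" -> "indra/newview/foo.cpp"
#   "linden"                         -> ""   (skipped by the empty-target check)
# so the extracted tree lands directly under targetdir without a leading
# "linden" directory. (The example paths are placeholders.)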
# Extract either .tar.gz file or .zip file to targetdir, stripping off the
# leading "linden" directory, but leaving the directory structure otherwise
# intact.
def extract_archive_sans_linden(filename, targetdir):
if(filename.endswith('.tar.gz')):
extract_tarball_sans_linden(filename, targetdir)
elif(filename.endswith('.zip')):
extract_zipfile_sans_linden(filename, targetdir)
else:
raise Exception, "Unhandled archive type"
def get_assetnames_by_platform(platform):
assetnames=['SLASSET_ART']
if(platform=='linux' or platform=='all'):
assetnames.append('SLASSET_LIBS_LINUXI386')
if(platform=='darwin' or platform=='all'):
assetnames.append('SLASSET_LIBS_DARWIN')
if(platform=='windows' or platform=='all'):
assetnames.append('SLASSET_LIBS_WIN32')
return assetnames
# adapted from install.py
def _get_platform():
"Return appropriate platform packages for the environment."
platform_map = {
'darwin': 'darwin',
'linux2': 'linux',
'win32' : 'windows',
'cygwin' : 'windows',
'solaris' : 'solaris'
}
this_platform = platform_map[sys.platform]
return this_platform
# copied from install.py
def _default_installable_cache():
"""In general, the installable files do not change much, so find a
host/user specific location to cache files."""
user = _getuser()
cache_dir = "/var/tmp/%s/sg.install.cache" % user
if _get_platform() == 'windows':
cache_dir = os.path.join(tempfile.gettempdir(), \
'sg.install.cache.%s' % user)
return cache_dir
# For status messages (e.g. "Loading..."). May come in handy if
# we implement a "quiet" mode.
def _report(string):
print string
# copied from install.py
def _getuser():
"Get the user"
try:
# Unix-only.
import getpass
return getpass.getuser()
except ImportError:
import win32api
return win32api.GetUserName()
# adapted from install.py
def _parse_args():
parser = optparse.OptionParser(
usage="usage: %prog [options]",
formatter = indra.util.helpformatter.Formatter(),
description="""This script fetches and installs tarballs and \
zipfiles ("asset bundles") listed in doc/asset_urls.txt
If no asset bundles are specified on the command line, then the default \
behavior is to install all known asset bundles appropriate for the platform \
specified. You can specify more than one asset bundle on the command line.
Example:
%prog SLASSET_ART
This looks for the "SLASSET_ART" entry in doc/asset_urls.txt, and extracts
the corresponding asset bundle into your source tree.
""")
parser.add_option(
'-p', '--platform',
type='choice',
default=_get_platform(),
dest='platform',
choices=['windows', 'darwin', 'linux', 'solaris', 'all'],
help="""Override the automatically determined platform. \
You can specify 'all' to get assets for all platforms. Choices: windows, \
darwin, linux, solaris, or all. Default: autodetected (%s)""" % \
_get_platform())
parser.add_option(
'--cache-dir',
type='string',
default=_default_installable_cache(),
dest='cache_dir',
help='Where to download files. Default: %s'% \
(_default_installable_cache()))
parser.add_option(
'--install-dir',
type='string',
default=base_dir,
dest='install_dir',
help='Where to unpack the installed files. Default: %s' % base_dir)
return parser.parse_args()
def main(argv):
options, args = _parse_args()
# 1. prepare cache dir
if not os.path.exists(options.cache_dir):
os.makedirs(options.cache_dir)
# 2. read doc/asset_urls.txt
asseturls=get_asset_urls()
# 3. figure out which asset bundles we'll be downloading
if len(args)>0:
assetnames=args
else:
assetnames=get_assetnames_by_platform(options.platform)
# 4. download and extract each asset bundle
for asset in assetnames:
# 4a. get the URL for the asset bundle
try:
asseturl=asseturls[asset]
except:
print "No asset in doc/asset_urls.txt named %s" % asset
sys.exit(2)
# 4b. figure out where to put the downloaded asset bundle
filename=get_asset_filename_from_url(asseturl, options.cache_dir)
# 4c. see if we have it, and if not, get it
if os.path.exists(filename):
_report("Using already downloaded "+filename+" ...")
else:
_report("Downloading "+filename+" ...")
urllib.urlretrieve(asseturl, filename)
# 4d. extract it into the tree
extract_archive_sans_linden(filename, options.install_dir)
# execute main() only if invoked directly:
if __name__ == "__main__":
sys.exit(main(sys.argv))