I think I've got this working, but I just want to confirm I'm doing this
correctly or if there's a better way to do this.

 

Basic premise is: I want to be able to compile (once), then parallelize the
push/install. I also want to use roles so I can selectively decide which
environment to push to at runtime.

 

Also, is there a better way to test directory existence that I'm
missing?

 

Would be run with something like:

 

fab deploy:1.73 --roles=prd

 

from __future__ import with_statement

from fabric.api import *

from fabric.contrib.console import confirm

 

# Map role names to target hosts; pick the deploy target at runtime
# with fab's role selection (e.g. --roles=prd).
env.roledefs = {
    'prd': ['host1', 'host2'],
    'qa': ['qahost1', 'qahost2'],
    'dev': ['devhost1'],
}

 

def _package():
    """Build the deployable artifact locally (runs once, on the fab host).

    Eventually this will be a real compile step; for now it just tars up
    the code directory.
    """
    # BUG FIX: env.tarball already ends in ".tar" (set in deploy()), so the
    # old "%s.tar" format produced "X.tar.tar" — a file _push() would never
    # find when it calls put(env.tarball, ...). Use env.tarball as-is.
    local("tar -cvf %s %s" % (env.tarball, env.codedir))

    

@parallel
def _push():
    """Upload the tarball to each remote host (runs in parallel per host)."""
    # Use `test -d` rather than `ls`: it checks specifically for a
    # *directory* (ls would also succeed on a plain file) and produces no
    # output. warn_only keeps a non-zero exit from aborting immediately so
    # we can ask the user what to do.
    with settings(warn_only=True):
        result = run("test -d /opt/ldad/proserv")
    # NOTE: the original message string was wrapped mid-literal by the
    # mail client; it must be a single line to be valid Python.
    if result.failed and not confirm("Directory does not exist. Continue?"):
        abort("Aborting at user request")
    put(env.tarball, "/opt/ldad/proserv")

 

@task
@runs_once
def deploy(ver):
    """Compile/package once, then push to all hosts in the selected roles.

    Invoke as e.g.:  fab deploy:1.73 --roles=prd

    :param ver: version string used to locate the code directory
                (LdadIntegration-<ver>) and name the tarball.
    """
    # BUG FIX: the original `env.pkgver = ver` line was indented 3 spaces
    # while the rest of the body used 4 — an IndentationError at import.
    env.pkgver = ver
    env.workdir = "/Users/steve/work/ldadscript"
    env.codedir = "%s/LdadIntegration-%s" % (env.workdir, env.pkgver)
    env.tarball = "%s.tar" % env.codedir
    _package()
    # execute() fans _push out to every host in the active roles, and
    # honors its @parallel decorator.
    execute(_push)

_______________________________________________
Fab-user mailing list
[email protected]
https://lists.nongnu.org/mailman/listinfo/fab-user

Reply via email to