=== modified file 'download_agents.py'
--- download_agents.py	2016-02-18 17:37:30 +0000
+++ download_agents.py	2016-02-23 17:54:06 +0000
@@ -1,16 +1,18 @@
 #!/usr/bin/env python
 from argparse import ArgumentParser
+import errno
 from hashlib import sha256
 import os
 from shutil import rmtree
 import subprocess
 from tempfile import mkdtemp
 
-from sign_branch import (
+from utility import (
     acquire_log_dir,
     check_log,
     )
 
+
 def main():
     cd_release_juju = os.path.dirname(__file__)
     downloads = os.path.join(cd_release_juju, 'downloads-new-paths')

=== modified file 'release-juju.bash'
--- release-juju.bash	2016-02-18 17:53:20 +0000
+++ release-juju.bash	2016-02-23 17:54:06 +0000
@@ -16,6 +16,11 @@
     $scripts/download_agents.py $TOOLS_BASE/juju-release-tools \
         $STREAMS_OFFICIAL_DEST
     ;;
+  "update-streams" )
+    $scripts/update_streams.py $TOOLS_BASE/cloud-city/juju-qa.s3cfg \
+        $S3_ROOT $STREAMS_OFFICIAL_DEST/juju/tools $STREAMS_VERSION \
+        --timestamp "$STREAMS_TIMESTAMP" --poke $POKE
+    ;;
   "none" )
     exit 0
     ;;

=== modified file 'release.config'
--- release.config	2016-02-20 14:50:21 +0000
+++ release.config	2016-02-23 17:54:06 +0000
@@ -1,5 +1,5 @@
-# One of: "release", "sign-branch", "download-agents", "none"
-OPERATION="release"
+# One of: "release", "sign-branch", "download-agents", "update-streams", "none"
+OPERATION="none"
 POKE='0'
 SIGNING_KEY="6A157DB3"
 
@@ -14,3 +14,11 @@
 UNSIGNED_BRANCH="lp:~juju-qa/+junk/cpc-unsigned"
 REVISION_ID="curtis@hovey.name-20160220035019-mwt82mwv0vvivnpl"
 SIGNED_BRANCH="lp:~canonical-juju-qa/+junk/cpc-signed"
+
+# Streams update configuration
+# Preferably the value from index2.json, though this is not currently
+# enforced.
+STREAMS_TIMESTAMP="Wed, 17 Feb 2016 20:44:59 +0000"
+# Use "" when no agents need be downloaded, e.g. proposed -> released
+STREAMS_VERSION="2.0-alpha2"
+S3_ROOT="s3://temp-streams/new-scc"

=== modified file 'sign_branch.py'
--- sign_branch.py	2016-02-18 17:37:30 +0000
+++ sign_branch.py	2016-02-23 17:54:06 +0000
@@ -1,15 +1,15 @@
 #!/usr/bin/env python
 from argparse import ArgumentParser
-import datetime
-import errno
-import logging
 import os
-import shutil
 import subprocess
 import sys
 
 from sign_metadata import sign_metadata
-from utility import temp_dir
+from utility import (
+    acquire_log_dir,
+    check_log,
+    temp_dir,
+    )
 
 
 SIGN_BRANCH_LOG = 'sign-branch.log'
@@ -104,66 +104,12 @@
     return parser.parse_args(argv)
 
 
-def get_log_subdir(root_dir):
-    return os.path.join(root_dir, 'new-tools', 'juju-dist', 'tools')
-
-
-def acquire_log_dir():
-    """Return the path of the log dir, creating if need be."""
-    tools_base = os.environ.get('TOOLS_BASE')
-    if tools_base is None:
-        tools_base = os.getcwd()
-    log_dir = get_log_subdir(tools_base)
-    if not os.path.exists(log_dir):
-        os.makedirs(log_dir)
-    return log_dir
-
-
-def check_log(log_dir, parameters, log_basename=SIGN_BRANCH_LOG):
-    """Check for a previous entry with the same parameters in the log.
-
-    If one exists, return False.  Otherwise, log the parameters.
-
-    This is deliberately done before attempting the operation, to avoid
-    endless retries if the operation fails.
-    """
-    log_filename = os.path.join(log_dir, log_basename)
-    log_entry = ' '.join(parameters + [''])
-    try:
-        log_branch = open(log_filename)
-    except IOError as e:
-        if e.errno != errno.ENOENT:
-            raise
-    else:
-        with log_branch:
-            for line in log_branch:
-                if line.startswith(log_entry):
-                    return False
-    with open(log_filename, 'a') as log_branch:
-        now = datetime.datetime.utcnow().replace(microsecond=0)
-        strdate = now.isoformat(' ')
-        log_branch.write('{}{}\n'.format(log_entry, strdate))
-    official_dest = os.environ.get('STREAMS_OFFICIAL_DEST')
-    if official_dest is None:
-        logging.warning('STREAMS_OFFICIAL_DEST is not defined.')
-    else:
-        parent = get_log_subdir(official_dest)
-        log_dest = os.path.join(parent, SIGN_BRANCH_LOG)
-        try:
-            os.makedirs(parent)
-        except OSError as e:
-            if e.errno != errno.EEXIST:
-                raise
-        shutil.copy2(log_filename, log_dest)
-    return True
-
-
 def main():
     args = parse_args()
     if args.check_log is not None:
         parameters = [args.revision_id, args.unsigned, args.signed,
                       args.signing_key, str(args.check_log)]
-        if not check_log(acquire_log_dir(), parameters):
+        if not check_log(acquire_log_dir(), parameters, SIGN_BRANCH_LOG):
             sys.exit(0)
     with temp_dir() as temp_branch:
         sb = SignBranch(args.unsigned, args.revision_id, args.signed,

=== modified file 'tests/test_sign_branch.py'
--- tests/test_sign_branch.py	2016-02-05 15:23:07 +0000
+++ tests/test_sign_branch.py	2016-02-23 17:54:06 +0000
@@ -10,20 +10,18 @@
     )
 
 from sign_branch import (
-    acquire_log_dir,
-    check_log,
-    get_log_subdir,
     parse_args,
     RunBzr,
     SignBranch,
-    SIGN_BRANCH_LOG,
     )
 from tests.test_sign_metadata import (
     fake_gpg,
     gpg_header,
     gpg_footer,
     )
-from utility import temp_dir
+from utility import (
+    temp_dir,
+    )
 
 
 class TestBzr(TestCase):
@@ -39,69 +37,6 @@
             stderr=None, stdout=None)
 
 
-class TestAcquireLogDir(TestCase):
-
-    def test_cwd(self):
-        with temp_dir() as new_cwd:
-            old_cwd = os.getcwd()
-            os.chdir(new_cwd)
-            try:
-                log_dir = acquire_log_dir()
-                self.assertTrue(os.path.isdir(log_dir))
-            finally:
-                os.chdir(old_cwd)
-        expected = os.path.join(new_cwd, 'new-tools', 'juju-dist', 'tools')
-        self.assertEqual(expected, log_dir)
-
-    def test_tools_base(self):
-        with temp_dir() as tools_base:
-            os.chdir(tools_base)
-            with patch.dict(os.environ, {'TOOLS_BASE': tools_base}):
-                log_dir = acquire_log_dir()
-                self.assertTrue(os.path.isdir(log_dir))
-        expected = os.path.join(tools_base, 'new-tools', 'juju-dist', 'tools')
-        self.assertEqual(expected, log_dir)
-
-
-class TestCheckLog(TestCase):
-
-    def test_write_log(self):
-        with temp_dir() as log_dir:
-            with patch('logging.warning'):
-                check_log(log_dir, ['a', 'b', 'c', 'd'])
-                check_log(log_dir, ['z', 'y', 'x', 'w'])
-            with open(os.path.join(log_dir, 'sign-branch.log')) as log_file:
-                log_lines = log_file.readlines()
-        self.assertEqual(2, len(log_lines))
-        self.assertRegexpMatches(log_lines[0], '^a b c d ')
-        self.assertRegexpMatches(log_lines[1], '^z y x w ')
-
-    def test_false_for_repeat(self):
-        with temp_dir() as log_dir:
-            with patch('logging.warning'):
-                self.assertIs(True, check_log(log_dir, ['a', 'b', 'c', 'd']))
-                self.assertIs(True, check_log(log_dir, ['z', 'b', 'c', 'd']))
-                self.assertIs(False, check_log(log_dir, ['a', 'b', 'c', 'd']))
-                self.assertIs(False, check_log(log_dir, ['z', 'b', 'c', 'd']))
-
-    def test_copies_to_official_dest(self):
-        with temp_dir() as root:
-            tools = os.path.join(root, 'tools')
-            os.mkdir(tools)
-            dest = os.path.join(root, 'dest')
-            with patch.dict(os.environ, {'STREAMS_OFFICIAL_DEST': dest}):
-                check_log(tools, ['a', 'b', 'c', 'd'])
-                check_log(tools, ['z', 'b', 'c', 'd'])
-            tools_filename = os.path.join(tools, SIGN_BRANCH_LOG)
-            with open(tools_filename) as tools_file:
-                tools_content = tools_file.read()
-            dest_filename = os.path.join(get_log_subdir(dest),
-                                         SIGN_BRANCH_LOG)
-            with open(dest_filename) as dest_file:
-                dest_content = dest_file.read()
-        self.assertEqual(tools_content, dest_content)
-
-
 class TestParseArgs(TestCase):
 
     def test_minimum(self):

=== added file 'tests/test_update_streams.py'
--- tests/test_update_streams.py	1970-01-01 00:00:00 +0000
+++ tests/test_update_streams.py	2016-02-23 17:54:06 +0000
@@ -0,0 +1,281 @@
+from argparse import Namespace
+import os
+from textwrap import dedent
+from unittest import TestCase
+
+from mock import (
+    call,
+    patch,
+    )
+
+from update_streams import (
+    ensure_dirs,
+    parse_args,
+    Updater,
+    )
+from utility import temp_dir
+
+
+__metaclass__ = type
+
+
+class TestParseArgs(TestCase):
+
+    def test_default(self):
+        parsed = parse_args(['foo', 'bar', 'baz'])
+        expected = Namespace(
+            config_file='foo', s3_root='bar', dest='baz', version=None,
+            poke='0', timestamp=None)
+        self.assertEqual(expected, parsed)
+
+    def test_version(self):
+        parsed = parse_args(['foo', 'bar', 'baz', 'qux'])
+        self.assertEqual('qux', parsed.version)
+
+    def test_poke(self):
+        parsed = parse_args(['foo', 'bar', 'baz', '--poke', '123'])
+        self.assertEqual('123', parsed.poke)
+
+    def test_timestamp(self):
+        parsed = parse_args(['foo', 'bar', 'baz', '--timestamp',
+                             'Wed, 17 Feb 2016 20:44:59 +0000'])
+        self.assertEqual('Wed, 17 Feb 2016 20:44:59 +0000', parsed.timestamp)
+
+
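+# Stand-in for Updater.s3cmd('get', ...): it writes each file's basename as
+# the file's content, so tests can assert on what was "downloaded".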
+def mock_get(command, args):
+    url, dest = args
+    ensure_dirs(os.path.dirname(dest))
+    with open(dest, 'w') as f:
+        f.write(os.path.basename(dest))
+
+
+def path_content(path):
+    with open(os.path.join(*path)) as f:
+        return f.read()
+
+
+class TestUpdater(TestCase):
+
+    def test_from_args(self):
+        updater = Updater.from_args(Namespace(
+            config_file='foo', s3_root='bar', dest='baz', version='qux',
+            poke='123'), 'temp_dir1')
+        self.assertEqual(updater.config_file, 'foo')
+        self.assertEqual(updater.s3_root, 'bar')
+        self.assertEqual(updater.dest, 'baz')
+        self.assertEqual(updater.version, 'qux')
+        self.assertEqual(updater.temp_dir, 'temp_dir1')
+
+    def make_updater(self, download_dir='temp_dir1', dest_dir='dest1'):
+        return Updater('config1', 's3_root1', download_dir, dest_dir,
+                       'version1')
+
+    def test_s3cmd(self):
+        updater = self.make_updater()
+        with patch('subprocess.check_call') as cc_mock:
+            updater.s3cmd('sync', ['foo', 'bar'])
+        cc_mock.assert_called_once_with([
+            's3cmd', 'sync', '--config', 'config1', 'foo', 'bar'])
+
+    def test_get_path_hashes(self):
+        updater = self.make_updater()
+        with patch('subprocess.check_output', autospec=True) as cc_mock:
+            result = updater.get_path_hashes()
+        cc_mock.assert_called_once_with([
+            'sstream-query', 'temp_dir1/streams/v1/index2.json',
+            'version=version1', '--output-format=%(path)s %(sha256)s'])
+        self.assertIs(cc_mock.return_value, result)
+
+    def test_get_path_hashes_none_version(self):
+        updater = self.make_updater()
+        updater.version = None
+        with patch('subprocess.check_output', autospec=True) as cc_mock:
+            result = updater.get_path_hashes()
+        self.assertEqual(0, cc_mock.call_count)
+        self.assertEqual('', result)
+
+    def test_get_path_hash_dict(self):
+        updater = self.make_updater()
+        output = dedent("""\
+            foo asdf
+            bar sdf
+            baz fasd
+            bar sdf
+            """)
+        with patch.object(updater, 'get_path_hashes',
+                          return_value=output, autospec=True) as gph_mock:
+            result = updater.get_path_hash_dict()
+        gph_mock.assert_called_once_with()
+        self.assertEqual({
+            'bar': 'sdf',
+            'baz': 'fasd',
+            'foo': 'asdf',
+            }, result)
+
+    def test_get_path_hash_dict_conflicting_hash(self):
+        updater = self.make_updater()
+        output = dedent("""\
+            foo asdf
+            bar sdf
+            baz fasd
+            bar sde
+            """)
+        with patch.object(updater, 'get_path_hashes',
+                          return_value=output, autospec=True) as gph_mock:
+            with self.assertRaisesRegexp(ValueError,
+                                         'Conflicting hashes for "bar"'):
+                updater.get_path_hash_dict()
+        gph_mock.assert_called_once_with()
+
+    def test_download_and_verify(self):
+        with temp_dir() as download_dir:
+            updater = self.make_updater(download_dir=download_dir)
+            with patch.object(updater, 's3cmd', side_effect=mock_get,
+                              autospec=True) as s3cmd_mock:
+                updater.download_and_verify(
+                    'foo', '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f'
+                    '98a5e886266e7ae')
+        s3cmd_mock.assert_called_once_with(
+            'get', ['s3_root1/foo', os.path.join(download_dir, 'foo')])
+
+    def test_download_and_verify_fail(self):
+        with temp_dir() as download_dir:
+            updater = self.make_updater(download_dir=download_dir)
+            with patch.object(updater, 's3cmd', side_effect=mock_get,
+                              autospec=True):
+                with self.assertRaisesRegexp(Exception, 'Hashes differ.'):
+                    updater.download_and_verify(
+                        'foo', 'fc26b46b68ffc68ff99b453c1d30413413422d706483b'
+                        'fa0f98a5e886266e7ae')
+
+    def test_move_into_place(self):
+        with temp_dir() as root:
+            download_dir = os.path.join(root, 'download')
+            download_streams = os.path.join(download_dir, 'streams', 'v1')
+            os.makedirs(download_streams)
+            index_path = os.path.join(download_streams, 'index2.mason')
+            with open(index_path, 'w') as f:
+                f.write('Masonry is fun!')
+            download_agents = os.path.join(download_dir, 'agent', '2.5')
+            os.makedirs(download_agents)
+            agent_basename = '1.25-series-arch.tbd'
+            agent_path = os.path.join(download_agents, agent_basename)
+            with open(agent_path, 'w') as f:
+                f.write('Agency is essential!')
+            dest_dir = os.path.join(root, 'dest')
+            updater = self.make_updater(download_dir=download_dir,
+                                        dest_dir=dest_dir)
+            updater.move_into_place('agent')
+            dest_agent = os.path.join(dest_dir, 'agent', '2.5',
+                                      agent_basename)
+            with open(dest_agent) as f:
+                self.assertEqual('Agency is essential!', f.read())
+            dest_index = os.path.join(dest_dir, 'streams', 'v1',
+                                      'index2.mason')
+            self.assertFalse(os.path.exists(dest_index))
+            updater.move_into_place('streams/v1')
+            with open(dest_index) as f:
+                self.assertEqual('Masonry is fun!', f.read())
+
+    def test_move_into_place_replace_existing_file(self):
+        with temp_dir() as root:
+            download_dir = os.path.join(root, 'download')
+            download_streams = os.path.join(download_dir, 'streams', 'v1')
+            os.makedirs(download_streams)
+            index_path = os.path.join(download_streams, 'index2.mason')
+            with open(index_path, 'w') as f:
+                f.write('Masonry is fun!')
+            dest_dir = os.path.join(root, 'dest')
+            updater = self.make_updater(download_dir=download_dir,
+                                        dest_dir=dest_dir)
+            dest_index = os.path.join(dest_dir, 'streams', 'v1',
+                                      'index2.mason')
+            os.makedirs(os.path.dirname(dest_index))
+            with open(dest_index, 'w') as f:
+                f.write('old contents')
+            updater.move_into_place('streams/v1')
+            with open(dest_index) as f:
+                self.assertEqual('Masonry is fun!', f.read())
+
+    def test_s3_download(self):
+        path_hashes = dedent("""\
+            agent/foo/bar asdf
+            agent/foo/bar2 asdf
+            agent/baz/qux sdf
+            """)
+        with temp_dir() as root:
+            download_dir = os.path.join(root, 'download')
+            os.mkdir(download_dir)
+            dest_dir = os.path.join(root, 'dest')
+            updater = self.make_updater(download_dir=download_dir,
+                                        dest_dir=dest_dir)
+
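+            # Simulate 's3cmd sync' by writing a fake metadata file into the
+            # synced streams directory.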
+            def s3cmd_effect(command, args):
+                parent = os.path.join(updater.temp_streams, 'v1')
+                ensure_dirs(parent)
+                with open(os.path.join(parent, 'index.mason'), 'w') as f:
+                    f.write('index!')
+
+            def dv_effect(path, agent_hash):
+                mock_get('get', [None, os.path.join(download_dir, path)])
+
+            s3cmd = patch.object(updater, 's3cmd', side_effect=s3cmd_effect)
+            dl = patch.object(updater, 'download_and_verify',
+                              side_effect=dv_effect, autospec=True)
+            gph = patch.object(updater, 'get_path_hashes',
+                               return_value=path_hashes)
+            with s3cmd as s3cmd_mock, dl as dv_mock, gph as gph_mock:
+                updater.s3_download()
+            s3cmd_mock.assert_called_once_with(
+                'sync', ['s3_root1/streams/', updater.temp_streams])
+            gph_mock.assert_called_once_with()
+            self.assertEqual([
+                call('agent/baz/qux', 'sdf'),
+                call('agent/foo/bar', 'asdf'),
+                call('agent/foo/bar2', 'asdf'),
+                ], dv_mock.mock_calls)
+            self.assertEqual(
+                'qux', path_content([updater.dest, 'agent', 'baz', 'qux']))
+            self.assertEqual(
+                'bar', path_content([updater.dest, 'agent', 'foo', 'bar']))
+            self.assertEqual(
+                'bar2', path_content([updater.dest, 'agent', 'foo', 'bar2']))
+            self.assertEqual(
+                'index!',
+                path_content([updater.dest, 'streams', 'v1', 'index.mason']))
+
+    def test_s3_download_version_none(self):
+        with temp_dir() as root:
+            download_dir = os.path.join(root, 'download')
+            os.mkdir(download_dir)
+            dest_dir = os.path.join(root, 'dest')
+            updater = self.make_updater(download_dir=download_dir,
+                                        dest_dir=dest_dir)
+            updater.version = None
+
+            def s3cmd_effect(command, args):
+                parent = os.path.join(updater.temp_streams, 'v1')
+                ensure_dirs(parent)
+                with open(os.path.join(parent, 'index.mason'), 'w') as f:
+                    f.write('index!')
+
+            def dv_effect(path, agent_hash):
+                mock_get('get', [None, os.path.join(download_dir, path)])
+
+            s3cmd = patch.object(updater, 's3cmd', side_effect=s3cmd_effect)
+            dl = patch.object(updater, 'download_and_verify',
+                              side_effect=dv_effect, autospec=True)
+            with s3cmd as s3cmd_mock, dl as dv_mock:
+                updater.s3_download()
+            s3cmd_mock.assert_called_once_with(
+                'sync', ['s3_root1/streams/', updater.temp_streams])
+            self.assertEqual([], dv_mock.mock_calls)
+            self.assertIs(False,
+                          os.path.exists(os.path.join(updater.dest, 'agent')))
+            self.assertEqual(
+                'index!',
+                path_content([updater.dest, 'streams', 'v1', 'index.mason']))

=== added file 'tests/test_utility.py'
--- tests/test_utility.py	1970-01-01 00:00:00 +0000
+++ tests/test_utility.py	2016-02-23 17:54:06 +0000
@@ -0,0 +1,85 @@
+import os
+from unittest import TestCase
+
+from mock import patch
+
+from utility import (
+    acquire_log_dir,
+    check_log,
+    get_log_subdir,
+    temp_dir,
+    )
+
+
+EXAMPLE_LOG = 'example.log'
+
+
+class TestAcquireLogDir(TestCase):
+
+    def test_cwd(self):
+        with temp_dir() as new_cwd:
+            old_cwd = os.getcwd()
+            os.chdir(new_cwd)
+            try:
+                log_dir = acquire_log_dir()
+                self.assertTrue(os.path.isdir(log_dir))
+            finally:
+                os.chdir(old_cwd)
+        expected = os.path.join(new_cwd, 'new-tools', 'juju-dist', 'tools')
+        self.assertEqual(expected, log_dir)
+
+    def test_tools_base(self):
+        with temp_dir() as tools_base:
+            old_cwd = os.getcwd()
+            os.chdir(tools_base)
+            try:
+                with patch.dict(os.environ, {'TOOLS_BASE': tools_base}):
+                    log_dir = acquire_log_dir()
+                    self.assertTrue(os.path.isdir(log_dir))
+            finally:
+                os.chdir(old_cwd)
+        expected = os.path.join(tools_base, 'new-tools', 'juju-dist', 'tools')
+        self.assertEqual(expected, log_dir)
+
+
+class TestCheckLog(TestCase):
+
+    def test_write_log(self):
+        with temp_dir() as log_dir:
+            with patch('logging.warning'):
+                check_log(log_dir, ['a', 'b', 'c', 'd'], EXAMPLE_LOG)
+                check_log(log_dir, ['z', 'y', 'x', 'w'], EXAMPLE_LOG)
+            with open(os.path.join(log_dir, EXAMPLE_LOG)) as log_file:
+                log_lines = log_file.readlines()
+        self.assertEqual(2, len(log_lines))
+        self.assertRegexpMatches(log_lines[0], '^a b c d ')
+        self.assertRegexpMatches(log_lines[1], '^z y x w ')
+
+    def test_false_for_repeat(self):
+        with temp_dir() as log_dir:
+            with patch('logging.warning'):
+                self.assertIs(True, check_log(log_dir, ['a', 'b', 'c', 'd'],
+                                              EXAMPLE_LOG))
+                self.assertIs(True, check_log(log_dir, ['z', 'b', 'c', 'd'],
+                                              EXAMPLE_LOG))
+                self.assertIs(False, check_log(log_dir, ['a', 'b', 'c', 'd'],
+                                               EXAMPLE_LOG))
+                self.assertIs(False, check_log(log_dir, ['z', 'b', 'c', 'd'],
+                                               EXAMPLE_LOG))
+
+    def test_copies_to_official_dest(self):
+        with temp_dir() as root:
+            tools = os.path.join(root, 'tools')
+            os.mkdir(tools)
+            dest = os.path.join(root, 'dest')
+            with patch.dict(os.environ, {'STREAMS_OFFICIAL_DEST': dest}):
+                check_log(tools, ['a', 'b', 'c', 'd'], EXAMPLE_LOG)
+                check_log(tools, ['z', 'b', 'c', 'd'], EXAMPLE_LOG)
+            tools_filename = os.path.join(tools, EXAMPLE_LOG)
+            with open(tools_filename) as tools_file:
+                tools_content = tools_file.read()
+            dest_filename = os.path.join(get_log_subdir(dest),
+                                         EXAMPLE_LOG)
+            with open(dest_filename) as dest_file:
+                dest_content = dest_file.read()
+        self.assertEqual(tools_content, dest_content)

=== added file 'update_streams.py'
--- update_streams.py	1970-01-01 00:00:00 +0000
+++ update_streams.py	2016-02-23 17:54:06 +0000
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+from argparse import ArgumentParser
+import errno
+from hashlib import sha256
+import os
+from shutil import (
+    move,
+    )
+import subprocess
+
+from utility import (
+    acquire_log_dir,
+    check_log,
+    temp_dir,
+    )
+
+
+__metaclass__ = type
+
+
+UPDATE_STREAMS_LOG = 'update-streams.log'
+
+
+def parse_args(argv=None):
+    parser = ArgumentParser()
+    parser.add_argument('config_file')
+    parser.add_argument('s3_root')
+    parser.add_argument('dest')
+    parser.add_argument('version', nargs='?')
+    parser.add_argument('--timestamp')
+    parser.add_argument('--poke', default='0')
+    return parser.parse_args(argv)
+
+
+def ensure_dirs(path):
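+    """Create path and any missing parents; an existing dir is fine."""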
+    try:
+        os.makedirs(path)
+    except OSError as e:
+        if e.errno != errno.EEXIST:
+            raise
+
+
+class Updater:
+
+    def __init__(self, config_file, s3_root, temp_dir, dest, version):
+        self.config_file = config_file
+        self.s3_root = s3_root
+        self.temp_dir = temp_dir
+        self.dest = dest
+        self.version = version
+
+    @classmethod
+    def from_args(cls, args, temp_dir):
+        return cls(args.config_file, args.s3_root, temp_dir, args.dest,
+                   args.version)
+
+    def s3cmd(self, action, args):
+        full_args = ['s3cmd', action, '--config', self.config_file] + args
+        subprocess.check_call(full_args)
+
+    def get_path_hashes(self):
+        if self.version is None:
+            return ''
+        ver_filter = 'version={}'.format(self.version)
+        out_format = '--output-format=%(path)s %(sha256)s'
+        index2 = os.path.join(self.temp_streams, 'v1', 'index2.json')
+        return subprocess.check_output(
+            ['sstream-query', index2, ver_filter, out_format])
+
+    def get_path_hash_dict(self):
+        output = self.get_path_hashes()
+        path_hashes = {}
+        for line in output.splitlines():
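+            # A path could contain spaces, so split on the last space only.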
+            path, path_hash = line.rsplit(' ', 1)
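+            # Keep the first hash seen for a path; a later, differing hash
+            # for the same path is reported as a conflict below.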
+            path_hashes.setdefault(path, path_hash)
+            if path_hashes[path] != path_hash:
+                raise ValueError('Conflicting hashes for "{}"'.format(path))
+        return path_hashes
+
+    def s3_url(self, path):
+        return '{}/{}'.format(self.s3_root, path)
+
+    @property
+    def temp_streams(self):
+        return os.path.join(self.temp_dir, 'streams')
+
+    def download_and_verify(self, path, agent_hash):
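+        """Download path from s3_root, raising if its sha256 differs."""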
+        temp_path = os.path.join(self.temp_dir, path)
+        self.s3cmd('get', [self.s3_url(path), temp_path])
+        digest = calculate_sha256(temp_path)
+        if digest != agent_hash:
+            raise Exception(
+                'Hashes differ.  Expected: {}'
+                ' Actual: {}'.format(agent_hash, digest))
+
+    def move_into_place(self, parent):
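+        """Move files under parent into dest, replacing existing files."""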
+        dest_path = os.path.join(self.dest, parent)
+        temp_path = os.path.join(self.temp_dir, parent)
+        ensure_dirs(dest_path)
+        for subfile in os.listdir(temp_path):
+            move(os.path.join(temp_path, subfile),
+                 os.path.join(dest_path, subfile))
+
+    def s3_download(self):
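+        """Sync streams and verified agents from S3 into dest."""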
+        os.mkdir(self.temp_streams)
+        self.s3cmd('sync', [self.s3_url('streams/'), self.temp_streams])
+        path_hashes = self.get_path_hash_dict()
+        for path, agent_hash in sorted(path_hashes.items()):
+            self.download_and_verify(path, agent_hash)
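+        # Move agents into place before the metadata that references them.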
+        if path_hashes != {}:
+            self.move_into_place('agent')
+        self.move_into_place('streams/v1')
+
+
+def calculate_sha256(path):
+    hasher = sha256()
+    with open(path, 'rb') as f:
+        while True:
+            # Hash in 1MB chunks so large agent files are not read at once.
+            result = f.read(1000000)
+            hasher.update(result)
+            if not result:
+                break
+    return hasher.hexdigest()
+
+
+def main():
+    args = parse_args()
+    if args.timestamp is not None:
+        parameters = [args.timestamp, args.poke]
+        if not check_log(acquire_log_dir(), parameters, UPDATE_STREAMS_LOG):
+            return
+    with temp_dir() as download_dir:
+        updater = Updater.from_args(args, download_dir)
+        updater.s3_download()
+
+
+if __name__ == '__main__':
+    main()

=== modified file 'utility.py'
--- utility.py	2016-02-02 17:48:35 +0000
+++ utility.py	2016-02-23 17:54:06 +0000
@@ -1,4 +1,8 @@
 from contextlib import contextmanager
+import datetime
+import errno
+import logging
+import os
 import shutil
 from tempfile import mkdtemp
 
@@ -10,3 +14,58 @@
         yield dirname
     finally:
         shutil.rmtree(dirname)
+
+
+def acquire_log_dir():
+    """Return the path of the log dir, creating if need be."""
+    tools_base = os.environ.get('TOOLS_BASE')
+    if tools_base is None:
+        tools_base = os.getcwd()
+    log_dir = get_log_subdir(tools_base)
+    if not os.path.exists(log_dir):
+        os.makedirs(log_dir)
+    return log_dir
+
+
+def get_log_subdir(root_dir):
+    return os.path.join(root_dir, 'new-tools', 'juju-dist', 'tools')
+
+
+def check_log(log_dir, parameters, log_basename):
+    """Check for a previous entry with the same parameters in the log.
+
+    If one exists, return False.  Otherwise, log the parameters.
+
+    This should be done before attempting the operation, to avoid endless
+    retries if the operation fails.
+    """
+    log_filename = os.path.join(log_dir, log_basename)
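+    # The trailing '' yields a trailing space, so 'a b' cannot match 'a bc'.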
+    log_entry = ' '.join(parameters + [''])
+    try:
+        log_branch = open(log_filename)
+    except IOError as e:
+        if e.errno != errno.ENOENT:
+            raise
+    else:
+        with log_branch:
+            for line in log_branch:
+                if line.startswith(log_entry):
+                    return False
+    with open(log_filename, 'a') as log_branch:
+        now = datetime.datetime.utcnow().replace(microsecond=0)
+        strdate = now.isoformat(' ')
+        log_branch.write('{}{}\n'.format(log_entry, strdate))
+    official_dest = os.environ.get('STREAMS_OFFICIAL_DEST')
+    if official_dest is None:
+        logging.warning('STREAMS_OFFICIAL_DEST is not defined.')
+    else:
+        parent = get_log_subdir(official_dest)
+        log_dest = os.path.join(parent, log_basename)
+        try:
+            os.makedirs(parent)
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+        shutil.copy2(log_filename, log_dest)
+    return True

