Ubuntu Pastebin

Paste from Cory at Wed, 11 May 2016 16:43:25 +0000

from charms.reactive import when, when_not, when_any, set_state, remove_state
from charms.layer.apache_bigtop_base import Bigtop, get_fqdn
from charmhelpers.core import host, hookenv


@when('puppet.available')
@when_not('spark.installed')
def install_spark():
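    """Install Spark in standalone mode via Bigtop.

    Runs once Puppet is available and Spark is not yet installed. All
    Spark roles are placed on this unit; on success the spark.installed
    and spark.no-hadoop states are set, and the charm blocks if the
    Bigtop release fails checksum verification.
    """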
    bigtop = Bigtop()
    try:
        hookenv.status_set('maintenance', 'installing spark')
        bigtop.install(
            hosts={
                'spark': get_fqdn(),
            },
            roles=[
                'spark-worker',
                'spark-master',
                'spark-history-server',
                'spark-client',
            ],
        )
        hookenv.status_set('active', 'ready')
        set_state('spark.installed')
        set_state('spark.no-hadoop')
    except host.ChecksumError:
        hookenv.status_set('blocked',
                           'Unable to fetch BigTop release (checksum error)')


@when('spark.installed')
@when('hadoop.installed')
@when_any('spark.no-hadoop', 'config.changed.execution_mode')
def install_spark_hadoop(hadoop):
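    """Reconfigure Spark once Hadoop is available or execution_mode changes.

    Re-renders the Bigtop site yaml with the namenode and resourcemanager
    hosts, pointing the Spark master URL at YARN when execution_mode is a
    yarn variant (standalone otherwise), then re-runs Puppet to apply the
    change and clears the spark.no-hadoop state.
    """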
    bigtop = Bigtop()
    yarn_mode = hookenv.config('execution_mode').startswith('yarn')
    bigtop.render_site_yaml(
        hosts={
            'spark': get_fqdn(),
            'namenode': hadoop.namenodes(),
            'resourcemanager': hadoop.resourcemanagers(),
        },
        roles=[
            'spark-worker',
            'spark-master',
            'spark-history-server',
            'spark-client',
        ],
        overrides={
            'spark::common::master_url': (
                'yarn' if yarn_mode else 'spark://{}'.format(get_fqdn())
            ),
        },
    )
    bigtop.trigger_puppet()
    remove_state('spark.no-hadoop')