Ottomata has uploaded a new change for review. ( https://gerrit.wikimedia.org/r/404789 )
Change subject: No-op for refinery job camus to ease future analytics -> jumbo kafka
......................................................................
No-op for refinery job camus to ease future analytics -> jumbo kafka
Bug: T175461
Change-Id: I65385d2d6970aa6971436e6d0aebde678fbc5648
---
M modules/profile/manifests/analytics/refinery/job/camus.pp
1 file changed, 14 insertions(+), 3 deletions(-)
git pull ssh://gerrit.wikimedia.org:29418/operations/puppet refs/changes/89/404789/1
diff --git a/modules/profile/manifests/analytics/refinery/job/camus.pp b/modules/profile/manifests/analytics/refinery/job/camus.pp
index a7f923d..8f13a55 100644
--- a/modules/profile/manifests/analytics/refinery/job/camus.pp
+++ b/modules/profile/manifests/analytics/refinery/job/camus.pp
@@ -8,12 +8,17 @@
# to look up brokers from which Camus will import data. Default: analytics
#
class profile::analytics::refinery::job::camus(
-    $kafka_cluster_name = hiera('profile::analytics::refinery::job::camus::kafka_cluster_name', 'analytics')
+    $kafka_cluster_name = hiera('profile::analytics::refinery::job::camus::kafka_cluster_name', 'jumbo')
) {
require ::profile::hadoop::common
require ::profile::analytics::refinery
- $kafka_config = kafka_config($kafka_cluster_name)
+ $kafka_config = kafka_config($kafka_cluster_name)
+ $kafka_brokers = suffix($kafka_config['brokers']['array'], ':9092')
+
+ # Temporary while we migrate camus jobs over to new kafka cluster.
+ $kafka_config_analytics = kafka_config('analytics')
+    $kafka_brokers_analytics = suffix($kafka_config_analytics['brokers']['array'], ':9092')
# Make all uses of camus::job set default kafka_brokers and camus_jar.
# If you build a new camus or refinery, and you want to use it, you'll
@@ -22,7 +27,7 @@
# the camus::job declaration.
Camus::Job {
         script        => "export PYTHONPATH=\${PYTHONPATH}:${profile::analytics::refinery::path}/python && ${profile::analytics::refinery::path}/bin/camus",
-        kafka_brokers => suffix($kafka_config['brokers']['array'], ':9092'),
+        kafka_brokers => $kafka_brokers,
         camus_jar     => "${profile::analytics::refinery::path}/artifacts/org/wikimedia/analytics/camus-wmf/camus-wmf-0.1.0-wmf7.jar",
         check_jar     => "${profile::analytics::refinery::path}/artifacts/org/wikimedia/analytics/refinery/refinery-camus-0.0.35.jar",
template_variables => {
@@ -30,23 +35,27 @@
}
}
+
# Import webrequest_* topics into /wmf/data/raw/webrequest
# every 10 minutes, check runs and flag fully imported hours.
camus::job { 'webrequest':
check => true,
minute => '*/10',
+ kafka_brokers => $kafka_brokers_analytics,
}
# Import eventlogging_* topics into /wmf/data/raw/eventlogging
# once every hour.
camus::job { 'eventlogging':
minute => '5',
+ kafka_brokers => $kafka_brokers_analytics,
}
# Import eventbus topics into /wmf/data/raw/eventbus
# once every hour.
camus::job { 'eventbus':
minute => '5',
+ kafka_brokers => $kafka_brokers_analytics,
}
# Import mediawiki_* topics into /wmf/data/raw/mediawiki
@@ -57,11 +66,13 @@
# refinery-camus contains some custom decoder classes which
# are needed to import Avro binary data.
         libjars       => "${profile::analytics::refinery::path}/artifacts/org/wikimedia/analytics/refinery/refinery-camus-0.0.28.jar",
+ kafka_brokers => $kafka_brokers_analytics,
}
     # Import eventbus mediawiki.job queue topics into /wmf/data/raw/mediawiki_job
# once every hour.
camus::job { 'mediawiki_job':
minute => '10',
+ kafka_brokers => $kafka_brokers_analytics,
}
}
--
To view, visit https://gerrit.wikimedia.org/r/404789
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: newchange
Gerrit-Change-Id: I65385d2d6970aa6971436e6d0aebde678fbc5648
Gerrit-PatchSet: 1
Gerrit-Project: operations/puppet
Gerrit-Branch: production
Gerrit-Owner: Ottomata <[email protected]>
_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits