Giuseppe Lavagetto has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/175425

Change subject: mediawiki: allow toggling experimental hhvm settings
......................................................................

mediawiki: allow toggling experimental hhvm settings

We allow adding specific keys to the hhvm.ini fastcgi settings depending
on a configuration flag; this will allow us to have groups of hosts with
such features enabled without needing to copy the standard settings
around. We also add a deep_merge() function that allows recursive
merging of hashes.

Change-Id: I5d70d3aaddded11ea73f9842ef9beb241a074678
Signed-off-by: Giuseppe Lavagetto <glavage...@wikimedia.org>
---
M modules/mediawiki/manifests/hhvm.pp
M modules/wmflib/README.md
A modules/wmflib/lib/puppet/parser/functions/deep_merge.rb
3 files changed, 94 insertions(+), 11 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/operations/puppet refs/changes/25/175425/1

diff --git a/modules/mediawiki/manifests/hhvm.pp 
b/modules/mediawiki/manifests/hhvm.pp
index 6225a64..0b9c751 100644
--- a/modules/mediawiki/manifests/hhvm.pp
+++ b/modules/mediawiki/manifests/hhvm.pp
@@ -2,7 +2,10 @@
 #
 # Configures HHVM to serve MediaWiki in FastCGI mode.
 #
-class mediawiki::hhvm {
+# [*experimental_features*]
+#   Boolean (default: false). Can be enabled on single hosts to merge
+#   the experimental FastCGI settings into the standard ones.
+class mediawiki::hhvm($experimental_features = false) {
     requires_ubuntu('>= trusty')
 
     include ::hhvm::admin
@@ -19,21 +22,36 @@
         floor(to_bytes($::memorytotal) / to_bytes('120M')),
         $::processorcount*4)
 
+
+    $fcgi_standard_settings = {
+        error_handling => {
+            call_user_handler_on_fatals => true,
+        },
+        server         => {
+            source_root           => '/srv/mediawiki/docroot',
+            error_document500     => '/srv/mediawiki/hhvm-fatal-error.php',
+            error_document404     => '/srv/mediawiki/w/404.php',
+            request_init_document => '/srv/mediawiki/wmf-config/HHVMRequestInit.php',
+            thread_count          => $max_threads,
+        },
+    }
+
+    $experimental_settings = {}
+
+    if ($experimental_features) {
+        $fcgi_settings = deep_merge(
+            $fcgi_standard_settings,
+            $experimental_settings)
+    }
+    else {
+        $fcgi_settings = $fcgi_standard_settings
+    }
+
+
     class { '::hhvm':
         user          => 'apache',
         group         => 'apache',
         fcgi_settings => {
-            hhvm => {
-                error_handling => {
-                    call_user_handler_on_fatals => true,
-                },
-                server         => {
-                    source_root           => '/srv/mediawiki/docroot',
-                    error_document500     => '/srv/mediawiki/hhvm-fatal-error.php',
-                    error_document404     => '/srv/mediawiki/w/404.php',
-                    request_init_document => '/srv/mediawiki/wmf-config/HHVMRequestInit.php',
-                    thread_count          => $max_threads,
-                },
-            },
+            hhvm => $fcgi_settings,
         },
     }
diff --git a/modules/wmflib/README.md b/modules/wmflib/README.md
index 882a17a..144bce8 100644
--- a/modules/wmflib/README.md
+++ b/modules/wmflib/README.md
@@ -14,6 +14,23 @@
     $languages = [ 'finnish', 'french', 'greek', 'hebrew' ]
     $packages = apply_format('texlive-lang-%s', $languages)
 
+## deep_merge
+
+`deep_merge( hash $a, hash $b, ... )`
+
+Merges two or more hashes and returns the resulting hash.
+When there is a duplicate key, the value from the rightmost hash
+wins; however, if both values are hashes, they are in turn
+merged recursively.
+
+### Examples
+    $hash1 = {'one' => 1, 'two' => { 'numeral' => 2}}
+    $hash2 = {'two' => {'spanish' => 'dos'}, 'three' => 'tres'}
+    $merged_hash = deep_merge($hash1, $hash2)
+    # The resulting hash is equivalent to:
+    # $merged_hash =  {'one' => 1, 'two' => {'numeral' => 2,
+    #  'spanish' => 'dos'}, 'three' => 'tres'}
+
 
 ## ensure_directory
 
diff --git a/modules/wmflib/lib/puppet/parser/functions/deep_merge.rb 
b/modules/wmflib/lib/puppet/parser/functions/deep_merge.rb
new file mode 100644
index 0000000..df5d7a2
--- /dev/null
+++ b/modules/wmflib/lib/puppet/parser/functions/deep_merge.rb
@@ -0,0 +1,47 @@
+class Hash
+  # Merge +hash+ into self in place; nested hashes are merged recursively.
+  def deep_merge(hash)
+    hash.keys.each do |key|
+      if hash[key].is_a?(Hash) && self[key].is_a?(Hash)
+        self[key].deep_merge(hash[key])
+        next
+      end
+      self[key] = hash[key]
+    end
+  end
+end
+
+module Puppet::Parser::Functions
+  newfunction(:deep_merge, :type => :rvalue, :doc => <<-'ENDHEREDOC') do |args|
+    Merges two or more hashes together and returns the resulting hash.
+
+    For example:
+
+        $hash1 = {'one' => 1, 'two' => 2}
+        $hash2 = {'two' => 'dos', 'three' => 'tres'}
+        $merged_hash = deep_merge($hash1, $hash2)
+        # The resulting hash is equivalent to:
+        # $merged_hash =  {'one' => 1, 'two' => 'dos', 'three' => 'tres'}
+
+    When there is a duplicate key, the value from the rightmost hash wins;
+    however, if both values are hashes, they are in turn merged recursively.
+
+    ENDHEREDOC
+
+    if args.length < 2
+      raise Puppet::ParseError, "deep_merge(): wrong number of arguments (#{args.length}; must be at least 2)"
+    end
+
+    # The hash we accumulate into
+    accumulator = Hash.new
+    # Merge into the accumulator hash
+    args.each do |arg|
+      unless arg.is_a?(Hash)
+        raise Puppet::ParseError, "deep_merge(): unexpected argument type #{arg.class}, only expects hash arguments"
+      end
+      accumulator.deep_merge(arg)
+    end
+    # Return the fully merged hash
+    accumulator
+  end
+end
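
To illustrate the intended behaviour once $experimental_settings is
actually populated (the request_timeout_seconds key below is just an
example value, not part of this change), a host with the flag enabled
would get the standard settings plus the extra keys:

    $experimental_settings = {
        server => {
            request_timeout_seconds => 190,
        },
    }
    # deep_merge() keeps every standard key and adds the new one, e.g.:
    # $fcgi_settings['server']['thread_count']            == $max_threads
    # $fcgi_settings['server']['request_timeout_seconds'] == 190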

-- 
To view, visit https://gerrit.wikimedia.org/r/175425
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I5d70d3aaddded11ea73f9842ef9beb241a074678
Gerrit-PatchSet: 1
Gerrit-Project: operations/puppet
Gerrit-Branch: production
Gerrit-Owner: Giuseppe Lavagetto <glavage...@wikimedia.org>

_______________________________________________
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits
