Hello all,

I want to have 2 servers running varnish in parallel so that if one fails the other still contains all cacheable data and the backend servers won't be overloaded.
Could someone check whether I'm on the right track?

This is how I figure it should be working.
I don't know how large 'weight' can be, but with Varnish caching more than 90% of requests, the impact of the remaining misses on the backends would be affordable.
Regards,
Martin Boer


# Random director that strongly prefers the peer varnish instance
# (weight 9 vs. 1+1+1), falling back to the real backends if the
# peer keeps failing within the retry budget.
director via_other_varnish random {
  .retries = 5;
  {
    .backend = other_server;
    .weight  = 9;
  }
  # use the regular backends if the other varnish instance fails.
  {
    .backend = backend_1;
    .weight  = 1;
  }
  {
    .backend = backend_2;
    .weight  = 1;
  }
  {
    .backend = backend_3;
    .weight  = 1;
  }
}

# Random director over the real application backends, used once a
# request has already been routed through the peer varnish.
director via_backends random {
  {
    .backend = backend_1;
    .weight  = 1;
  }
  {
    .backend = backend_2;
    .weight  = 1;
  }
  {
    .backend = backend_3;
    .weight  = 1;
  }
}


sub vcl_recv {
  # The X-through-varnish request header marks requests that have
  # already passed through the peer varnish instance, so the two
  # caches do not bounce a request back and forth forever.
  # NOTE: in vcl_recv only the 'req' object exists — the original
  # used 'resp', which is not available here. Header values are
  # strings, so we test for the header's presence and set it to "1".
  if ( req.http.X-through-varnish ) {
     # the other varnish forwarded the request already,
     # so go straight to the real backends
     set req.backend = via_backends;
     remove req.http.X-through-varnish;
  } else {
     # first hop: mark the request and try the other varnish
     set req.http.X-through-varnish = "1";
     set req.backend = via_other_varnish;
  }
  # ... (rest of vcl_recv elided in the original message)


_______________________________________________
varnish-misc mailing list
[email protected]
http://lists.varnish-cache.org/mailman/listinfo/varnish-misc

Reply via email to