Github user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/4435#discussion_r29090268
  
    --- Diff: core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala ---
    @@ -14,22 +14,170 @@
      * See the License for the specific language governing permissions and
      * limitations under the License.
      */
    -
     package org.apache.spark.deploy.history
     
    -import javax.servlet.http.HttpServletRequest
    -
    -import scala.collection.mutable
    +import java.io.{File, FileInputStream, FileWriter, IOException}
    +import java.net.{HttpURLConnection, URL}
    +import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
     
    -import org.apache.hadoop.fs.Path
    -import org.mockito.Mockito.{when}
    -import org.scalatest.FunSuite
    -import org.scalatest.Matchers
    +import org.apache.commons.io.{FileUtils, IOUtils}
    +import org.mockito.Mockito.when
    +import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
     import org.scalatest.mock.MockitoSugar
     
    +import org.apache.spark.{JsonTestUtils, SecurityManager, SparkConf}
     import org.apache.spark.ui.SparkUI
     
    -class HistoryServerSuite extends FunSuite with Matchers with MockitoSugar {
    +class HistoryServerSuite extends FunSuite with BeforeAndAfter with Matchers with MockitoSugar
    +  with JsonTestUtils {
    +
    +  private val logDir = new File("src/test/resources/spark-events")
    +  private val expRoot = new File("src/test/resources/HistoryServerExpectations/")
    +
    +  private var provider: FsHistoryProvider = null
    +  private var server: HistoryServer = null
    +  private var port: Int = -1
    +
    +  def init(): Unit = {
    +    val conf = new SparkConf()
    +      .set("spark.history.fs.logDirectory", logDir.getAbsolutePath)
    +      .set("spark.history.fs.updateInterval", "0")
    +      .set("spark.testing", "true")
    +    provider = new FsHistoryProvider(conf)
    +    provider.checkForLogs()
    +    val securityManager = new SecurityManager(conf)
    +
    +    server = new HistoryServer(conf, provider, securityManager, 18080)
    +    server.initialize()
    +    server.bind()
    +    port = server.boundPort
    +  }
    +  def stop(): Unit = {
    +    server.stop()
    +  }
    +
    +  before {
    +    init()
    +  }
    +
    +  after{
    +    stop()
    +  }
    +
    +  val cases = Seq(
    +    "application list json" -> "applications",
    +    "completed app list json" -> "applications?status=completed",
    +    "running app list json" -> "applications?status=running",
    +    "minDate app list json" -> "applications?minDate=2015-02-10",
    +    "maxDate app list json" -> "applications?maxDate=2015-02-10",
    +    "maxDate2 app list json" -> 
"applications?maxDate=2015-02-03T10:42:40.000CST",
    +    "one app json" -> "applications/local-1422981780767",
    +    "job list json" -> "applications/local-1422981780767/jobs",
    +    "one job json" -> "applications/local-1422981780767/jobs/0",
    +    "succeeded job list json" -> 
"applications/local-1422981780767/jobs?status=succeeded",
    +    "succeeded&failed job list json" ->
    +      
"applications/local-1422981780767/jobs?status=succeeded&status=failed",
    +    "executor list json" -> "applications/local-1422981780767/executors",
    +    "stage list json" -> "applications/local-1422981780767/stages",
    +    "complete stage list json" -> 
"applications/local-1422981780767/stages?status=complete",
    +    "failed stage list json" -> 
"applications/local-1422981780767/stages?status=failed",
    +    "one stage json" -> "applications/local-1422981780767/stages/1",
    +    "one stage attempt json" -> 
"applications/local-1422981780767/stages/1/0",
    +
    +    "stage task summary" -> 
"applications/local-1427397477963/stages/20/0/taskSummary",
    +    "stage task summary w/ custom quantiles" ->
    +      
"applications/local-1427397477963/stages/20/0/taskSummary?quantiles=0.01,0.5,0.99",
    +
    +    "stage task list" -> 
"applications/local-1427397477963/stages/20/0/taskList",
    +    "stage task list w/ offset & length" ->
    +      
"applications/local-1427397477963/stages/20/0/taskList?offset=10&length=50",
    +    "stage task list w/ sortBy" ->
    +      
"applications/local-1427397477963/stages/20/0/taskList?sortBy=DecreasingRuntime",
    +    "stage task list w/ sortBy short names: -runtime" ->
    +      
"applications/local-1427397477963/stages/20/0/taskList?sortBy=-runtime",
    +    "stage task list w/ sortBy short names: runtime" ->
    +      
"applications/local-1427397477963/stages/20/0/taskList?sortBy=runtime",
    +
    +    "stage list with accumulable json" -> 
"applications/local-1426533911241/stages",
    +    "stage with accumulable json" -> 
"applications/local-1426533911241/stages/0/0",
    +    "rdd list storage json" -> 
"applications/local-1422981780767/storage/rdd",
    +    "one rdd storage json" -> 
"applications/local-1422981780767/storage/rdd/0"
    +    // TODO multi-attempt stages
    +  )
    +
    +  // run a bunch of characterization tests -- just verify the behavior is the same as what is saved
    +  // in the test resource folder
    +  cases.foreach { case (name, path) =>
    +      test(name) {
    +        val (code, jsonOpt, errOpt) = getContentAndCode(path)
    +        code should be (HttpServletResponse.SC_OK)
    +        jsonOpt should be ('defined)
    +        errOpt should be (None)
    +        val json = jsonOpt.get
    +        val exp = IOUtils.toString(new FileInputStream(
    +          new File(expRoot, path + "/json_expectation")))
    +        // compare the ASTs so formatting differences don't cause failures
    +        import org.json4s._
    +        import org.json4s.jackson.JsonMethods._
    +        val jsonAst = parse(json)
    +        val expAst = parse(exp)
    +        assertValidDataInJson(jsonAst, expAst)
    +      }
    +  }
    +
    +  test("security") {
    +    val conf = new SparkConf()
    +      .set("spark.history.fs.logDirectory", logDir.getAbsolutePath)
    +      .set("spark.history.fs.updateInterval", "0")
    +      .set("spark.acls.enable", "true")
    +      .set("spark.ui.view.acls", "user1")
    +    val securityManager = new SecurityManager(conf)
    +
    +    val securePort = port + 1
    +    val secureServer = new HistoryServer(conf, provider, securityManager, securePort)
    +    secureServer.initialize()
    +    secureServer.bind()
    +
    +    securityManager.checkUIViewPermissions("user1") should be (true)
    +    securityManager.checkUIViewPermissions("user2") should be (false)
    +
    +    try {
    +
    --- End diff --
    
    Lots of noisy blank lines here.
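    
    Something along these lines reads better (just a sketch -- the assertions inside the try are whatever the rest of this hunk does, and the finally cleanup is my assumption):
    
        try {
          // ... assertions against secureServer, with no padding blank lines ...
        } finally {
          secureServer.stop()  // assumed cleanup; keep whatever the actual finally block does
        }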

