Hello Team,

With the "extractDataBytes" route below (streaming the request entity to a file sink), I am unable to upload a file larger than about 1 KB. By contrast, the "fileUpload" route below successfully uploads a 700 MB file with the same akka-http server configuration. What is wrong with the extractDataBytes code? Is any specific configuration required for "extractDataBytes"? Please assist.
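For reference, this is a sketch of what I mean by "same akka-server configuration". The keys below are the akka-http settings I assume are relevant for large uploads; the values shown are, as far as I know, the library defaults rather than anything I have tuned:

import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory

object ServerConfigSketch {
  // Settings I assume matter for large uploads; values are (I believe) the akka-http defaults.
  val config = ConfigFactory.parseString(
    """
      |akka.http.server.parsing.max-content-length = 8m
      |akka.http.server.request-timeout = 20 s
      |akka.http.server.idle-timeout = 60 s
    """.stripMargin).withFallback(ConfigFactory.load())

  // Mirrors the "RepositoryService" system used by the routes below, created from this config.
  implicit val system: ActorSystem = ActorSystem("RepositoryService", config)
}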
*extractDataBytes:*
*curl -k -i -X PUT --header token -d @myfile700MB.tar https://abc/uploadContent*

put {
  implicit val system = ActorSystem("RepositoryService")
  implicit val materializer = ActorMaterializer()
  implicit val executionContext: ExecutionContextExecutor =
    system.dispatchers.lookup("akka.stream.default-blocking-io-dispatcher")
  withoutSizeLimit {
    (extractDataBytes & extractMaterializer) { (bytes, mat) =>
      val uploadTmp = File.createTempFile("uploadTMP", ".txt")
      logger.info("Temp absolute path is: " + Paths.get(uploadTmp.getAbsolutePath) + " " + Paths.get(uploadTmp.getAbsolutePath).toString)
      try {
        import scala.concurrent.duration._
        import akka.stream.scaladsl.FileIO
        val action = bytes.runWith(
          FileIO.toPath(uploadTmp.toPath)
            .withAttributes(ActorAttributes.dispatcher("akka.stream.default-blocking-io-dispatcher")))(mat).map {
          case ior if ior.wasSuccessful =>
            val end = System.currentTimeMillis
            logger.info(StatusCodes.OK + s"${ior.count}" + " bytes written")
            complete(StatusCodes.OK, "Successfully written to FILE. ")
          case ior =>
            complete(StatusCodes.EnhanceYourCalm, ior.getError.toString)
        }
        Await.result(action, 30.seconds)
      } catch {
        case ex: Throwable =>
          logger.info("Exception in writing to file: " + ex.getMessage)
          complete(StatusCodes.OK, "Exception in writing to file: ")
      }
    }
  }
}

*fileUpload:*
*curl -k -i -X PUT --header token --form "[email protected]" https://abc/uploadContent*

put {
  val uploadFile = File.createTempFile("uploadFile", ".txt")
  (extractRequestContext & extractMaterializer) { (ctx, mat) =>
    implicit val materializer = ctx.materializer
    implicit val ec = ctx.executionContext
    fileUpload("csv") {
      case (metadata, byteSource) =>
        try {
          import scala.concurrent.duration._
          import akka.stream.scaladsl.FileIO
          val action = byteSource.runWith(FileIO.toPath(uploadFile.toPath))(mat).map {
            case ior if ior.wasSuccessful =>
              val end = System.currentTimeMillis
              logger.info(StatusCodes.OK + s"${ior.count}" + " bytes written")
              complete(StatusCodes.OK, "Successfully written to FILE. ")
            case ior =>
              complete(StatusCodes.EnhanceYourCalm, ior.getError.toString)
          }
          Await.result(action, 30.seconds)
        } catch {
          case ex: Throwable =>
            logger.info("Exception in writing to file: " + ex.getMessage)
            // stream.write(s"Exception in writing to file: ${ex.getMessage}".getBytes)
            complete(StatusCodes.OK, "Exception in writing to file: ")
        }
    }
  }
}
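In case it is relevant: a non-blocking variant of the extractDataBytes route, which completes via onComplete instead of blocking with Await.result, would look roughly like this. This is an untested sketch with the same temp-file sink; it assumes an implicit materializer is in scope, as in the routes above:

import java.io.File
import scala.util.{Failure, Success}
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.stream.Materializer
import akka.stream.scaladsl.FileIO

// Untested sketch: same extractDataBytes + FileIO.toPath pipeline, but the route is
// completed from the Future via onComplete instead of blocking with Await.result.
class UploadRouteSketch(implicit mat: Materializer) {
  val route =
    put {
      withoutSizeLimit {
        extractDataBytes { bytes =>
          val uploadTmp = File.createTempFile("uploadTMP", ".txt")
          val written = bytes.runWith(FileIO.toPath(uploadTmp.toPath)) // Future[IOResult]
          onComplete(written) {
            case Success(ior) if ior.wasSuccessful =>
              complete(StatusCodes.OK, s"${ior.count} bytes written")
            case Success(ior) =>
              complete(StatusCodes.EnhanceYourCalm, ior.getError.toString)
            case Failure(ex) =>
              complete(StatusCodes.InternalServerError, "Exception in writing to file: " + ex.getMessage)
          }
        }
      }
    }
}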
