Hi,

I am currently working on a project which needs an RTSP server running under
Android and iOS.
I am able to play an audio file, but the audio is jerky. It's a simple MP3
file.

I didn't find much documentation about live555, nor about whether there is a
way to improve performance.
I don't think Android is at fault, because I built it on iOS too and got
some lags there as well.

I've attached my code; I hope you will see what I am missing. It's based on
the example from live555 for RTSP streaming.

Audric
#include <jni.h>

#include "liveMedia.hh"
#include "GroupsockHelper.hh"

#include "BasicUsageEnvironment.hh"

#include <android/log.h>

#define  LOG_TAG    "live555_wrapper"
#define  ALOG(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)



// To stream using 'ADUs' rather than raw MP3 frames, uncomment the following:
#define STREAM_USING_ADUS 1
// To also reorder ADUs before streaming, uncomment the following:
//#define INTERLEAVE_ADUS 1
// (For more information about ADUs and interleaving,
//  see <http://www.live555.com/rtp-mp3/>)

// To stream using "source-specific multicast" (SSM), uncomment the following:
//#define USE_SSM 1
// Runtime-visible mirror of the USE_SSM compile-time flag; passed to the
// ServerMediaSession and RTCPInstance createNew() calls below.
#ifdef USE_SSM
Boolean const isSSM = True;
#else
Boolean const isSSM = False;
#endif






#ifdef __cplusplus
extern "C" {
#endif




// A structure to hold the state of the current session.
// It is used in the "afterPlaying()" function to clean up the session.
// NOTE(review): there is a single global instance, so only ONE stream can be
// active at a time; a second nativeAddMedia() call overwrites these pointers.
struct sessionState_t {
  FramedSource* source;        // MP3 file source (possibly wrapped in an ADU filter)
  RTPSink* sink;               // RTP sink that packetizes and sends the audio
  RTCPInstance* rtcpInstance;  // RTCP reporting instance for the RTP session
  Groupsock* rtpGroupsock;     // multicast socket carrying RTP packets
  Groupsock* rtcpGroupsock;    // multicast socket carrying RTCP packets
} sessionState;





void afterPlaying(void* /*clientData*/) {
  ALOG("...done streaming\n");

  sessionState.sink->stopPlaying();
  // End this loop by closing the current source:
  Medium::close(sessionState.source);
}





// Creates a ServerMediaSession for an MP3 file on the given RTSP server and
// starts streaming it to the multicast group set up in nativeInitRtspServer().
// Returns 0 on success, 1 on failure.
//
// Fixes over the original:
//  * the UTF-8 copies obtained via GetStringUTFChars() are now released on
//    every path (they previously leaked on success AND on each early return),
//  * the ServerMediaSession::createNew() result is NULL-checked before use,
//  * the INTERLEAVE_ADUS section is a clean #ifdef instead of being disabled
//    with an unbalanced /* ... */ comment wrapped around its #endif.
JNIEXPORT jint JNICALL Java_com_athome_service_StreamerService_nativeAddMedia(
                  JNIEnv * jnienv, jobject obj, jlong live555env, 
                  jlong jrtspServer, jstring streamName, jstring fileName) {

  UsageEnvironment* env = (UsageEnvironment*) live555env;
  RTSPServer* rtspServer = (RTSPServer*) jrtspServer;

  if (rtspServer == NULL) {
    ALOG("Cannot add media: rtspServer is NULL!...\n");
    return 1;
  }

  const char* nativeStreamName = jnienv->GetStringUTFChars(streamName, 0);
  const char* nativefilename = jnienv->GetStringUTFChars(fileName, 0);
  jint result = 1; // pessimistic default; set to 0 once streaming has started

  do {
    // live555 copies the strings it is given here, so releasing our UTF
    // copies after these calls is safe.
    ServerMediaSession* sms = ServerMediaSession::createNew(*env, nativeStreamName, nativefilename,
      "Session streamed by \"testMP3Streamer\"", isSSM);
    if (sms == NULL) {
      ALOG("Unable to create a ServerMediaSession\n");
      break;
    }

    // sessionState.sink / rtcpInstance were created in nativeInitRtspServer().
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*sessionState.sink, sessionState.rtcpInstance));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms); // allocated with new[] by live555
    ALOG("Play this stream using the URL %s", url);
    delete[] url;

    // Open the file as a 'MP3 file source':
    sessionState.source = MP3FileSource::createNew(*env, nativefilename);
    if (sessionState.source == NULL) {
      ALOG("Unable to open '%s' as a MP3 file source\n", nativefilename);
      break;
    }

#ifdef STREAM_USING_ADUS
    // Add a filter that converts the source MP3s to ADUs:
    sessionState.source
      = ADUFromMP3Source::createNew(*env, sessionState.source);
    if (sessionState.source == NULL) {
      ALOG("Unable to create a MP3->ADU filter for the source\n");
      break;
    }

#ifdef INTERLEAVE_ADUS
    // Add another filter that interleaves the ADUs before packetizing them:
    unsigned char interleaveCycle[] = {0,2,1,3}; // or choose your own order...
    unsigned const interleaveCycleSize
      = (sizeof interleaveCycle)/(sizeof (unsigned char));
    Interleaving interleaving(interleaveCycleSize, interleaveCycle);
    sessionState.source
      = MP3ADUinterleaver::createNew(*env, interleaving, sessionState.source);
    if (sessionState.source == NULL) {
      ALOG("Unable to create an ADU interleaving filter for the source\n");
      break;
    }
#endif
#endif

    // Finally, start the streaming:
    ALOG("Beginning streaming...\n");
    sessionState.sink->startPlaying(*sessionState.source, afterPlaying, NULL);
    result = 0;
  } while (0);

  // FIX: these releases were commented out in the original, leaking the UTF
  // copies on every call.
  jnienv->ReleaseStringUTFChars(streamName, nativeStreamName);
  jnienv->ReleaseStringUTFChars(fileName, nativefilename);

  return result;
}



// Builds the live555 usage environment (task scheduler + environment) and
// returns it to Java as an opaque jlong handle for the other native calls.
JNIEXPORT jlong JNICALL Java_com_athome_service_StreamerService_nativeInitEnv(JNIEnv * jnienv, jobject obj) {
  TaskScheduler* const scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* const usageEnv = BasicUsageEnvironment::createNew(*scheduler);
  return (jlong) usageEnv;
}


// Sets up the whole streaming back-end in the global sessionState:
// RTP/RTCP groupsocks on a multicast address, the MP3 RTP sink, and the RTCP
// instance.  Then creates an RTSPServer listening on TCP port 55443 and
// returns it to Java as an opaque jlong handle (NULL/0 on failure — the Java
// side should check before passing it to nativeAddMedia()).
JNIEXPORT jlong JNICALL Java_com_athome_service_StreamerService_nativeInitRtspServer(JNIEnv * jnienv, jobject obj, jlong live555env) {
  UsageEnvironment* env = (UsageEnvironment*) live555env;
  // Create 'groupsocks' for RTP and RTCP:
  char const* destinationAddressStr
#ifdef USE_SSM
    = "232.255.42.42";
#else
    = "239.255.42.42";
  // Note: This is a multicast address.  If you wish to stream using
  // unicast instead, then replace this string with the unicast address
  // of the (single) destination.  (You may also need to make a similar
  // change to the receiver program.)
#endif
  // RTP uses the even port, RTCP the next odd port, per RTP convention.
  const unsigned short rtpPortNum = 6666;
  const unsigned short rtcpPortNum = rtpPortNum+1;
  const unsigned char ttl = 1; // low, in case routers don't admin scope

  struct in_addr destinationAddress;
  destinationAddress.s_addr = our_inet_addr(destinationAddressStr);
  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  sessionState.rtpGroupsock
    = new Groupsock(*env, destinationAddress, rtpPort, ttl);
  sessionState.rtcpGroupsock
    = new Groupsock(*env, destinationAddress, rtcpPort, ttl);
#ifdef USE_SSM
  // SSM senders never receive on these sockets.
  sessionState.rtpGroupsock->multicastSendOnly();
  sessionState.rtcpGroupsock->multicastSendOnly();
#endif

  // Create a 'MP3 RTP' sink from the RTP 'groupsock':
#ifdef STREAM_USING_ADUS
  unsigned char rtpPayloadFormat = 96; // A dynamic payload format code
  sessionState.sink
    = MP3ADURTPSink::createNew(*env, sessionState.rtpGroupsock,
             rtpPayloadFormat);
#else
  sessionState.sink
    = MPEG1or2AudioRTPSink::createNew(*env, sessionState.rtpGroupsock);
#endif

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 320; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
  // The RTCP instance starts periodic reporting as soon as it is created.
  sessionState.rtcpInstance
    = RTCPInstance::createNew(*env, sessionState.rtcpGroupsock,
            estimatedSessionBandwidth, CNAME,
            sessionState.sink, NULL /* we're a server */,
            isSSM);


  RTSPServer* rtspServer = RTSPServer::createNew(*env, 55443);
  if (rtspServer == NULL) {
    ALOG("Failed to create RTSP server: %s", env->getResultMsg());
  }
  else
    ALOG("RTSP Server creation: OK: %p", rtspServer);

  return (jlong) rtspServer;
}





// Runs the live555 event loop for the given environment.  This call blocks
// forever, so the Java side must invoke it from a dedicated worker thread.
JNIEXPORT void JNICALL Java_com_athome_service_StreamerService_nativeStartServe(JNIEnv * jnienv, jobject obj, jlong live555env) {
  ALOG("Starting serving rtsp....");
  UsageEnvironment& usageEnv = *((UsageEnvironment*) live555env);
  usageEnv.taskScheduler().doEventLoop(); // does not return
}

#ifdef __cplusplus
}
#endif
_______________________________________________
live-devel mailing list
[email protected]
http://lists.live555.com/mailman/listinfo/live-devel

Reply via email to