The Media Mixer provides a set of APIs that allow any developer to create RTC multimedia applications or services.

JSR 309 APIs

The Media Mixer server provides a JSR 309 driver, which is available here. The work is still under development, but most of the objects are already functional.

 

XMLRPC API

If you need more fine-grained control of the Media Mixer functionality, this API exposes the inner objects needed to manipulate the RTC flows directly. The API is accessed via XMLRPC over HTTP commands and is used internally by the JSR 309 driver. A Java wrapper is also available, so the Media Mixer can be used remotely by any Java application.

The main concept of this API is the joinable object, which represents a source of media that can be attached to other objects to build the desired media flow.
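
For instance, joining an Endpoint (an RTP leg) to a Player (a file source) makes the endpoint stream the file out over RTP. A minimal sketch with the Java wrapper is shown below; it assumes a client wrapper instance and an idSession created as in the example at the end of this page, wrapper methods that mirror the XMLRPC calls listed in the following sections with the session id prepended, and a VIDEO media type constant analogous to the AUDIO one used in that example:

//A file source and an RTP leg are both joinable objects
int idPlayer = client.PlayerCreate(idSession, "announcement");
int idEndpoint = client.EndpointCreate(idSession, "Caller", true, true, false);
client.PlayerOpen(idSession, idPlayer, "/media/welcome.mp4"); //placeholder file name

//Join them: the endpoint sends whatever the player produces
client.EndpointAttachToPlayer(idSession, idEndpoint, idPlayer, Codecs.MediaType.AUDIO);
client.EndpointAttachToPlayer(idSession, idEndpoint, idPlayer, Codecs.MediaType.VIDEO);
client.PlayerPlay(idSession, idPlayer);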


Endpoint


//Endpoint management
int EndpointCreate(std::wstring name,bool audioSupported,bool videoSupported,bool textSupported);
int EndpointDelete(int endpointId);
int EndpointSetLocalCryptoSDES(int id,MediaFrame::Type media,const char *suite,const char* key);
int EndpointSetRemoteCryptoSDES(int id,MediaFrame::Type media,const char *suite,const char* key);
int EndpointSetLocalSTUNCredentials(int id,MediaFrame::Type media,const char *username,const char* pwd);
int EndpointSetRemoteSTUNCredentials(int id,MediaFrame::Type media,const char *username,const char* pwd);
int EndpointSetRTPProperties(int id,MediaFrame::Type media,const Properties& properties);
//Endpoint media functionality
int EndpointStartSending(int endpointId,MediaFrame::Type media,char *ip,int port,RTPMap& rtpMap);
int EndpointStopSending(int endpointId,MediaFrame::Type media);
int EndpointStartReceiving(int endpointId,MediaFrame::Type media,RTPMap& rtpMap);
int EndpointStopReceiving(int endpointId,MediaFrame::Type media);
int EndpointRequestUpdate(int endpointId,MediaFrame::Type media);
//Attach input to other joinables
int EndpointAttachToPlayer(int endpointId,int playerId,MediaFrame::Type media);
int EndpointAttachToEndpoint(int endpointId,int sourceId,MediaFrame::Type media);
int EndpointAttachToAudioMixerPort(int endpointId,int mixerId,int portId);
int EndpointAttachToVideoMixerPort(int endpointId,int mixerId,int portId);
int EndpointAttachToVideoTranscoder(int endpointId,int transcoderId);
int EndpointDettach(int endpointId,MediaFrame::Type media);
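
For illustration, the sketch below sets up an SRTP endpoint and starts audio in both directions through the Java wrapper, under the same wrapper assumptions as above. It is only a sketch: the crypto suite, keys, rtpMap, address and port are placeholder values, and the return value of EndpointStartReceiving is assumed to be the local RTP port to signal to the peer:

//Create an audio+video endpoint
int idEndpoint = client.EndpointCreate(idSession, "Endpoint A", true, true, false);

//SRTP via SDES: the suite name and base64 keys are placeholders
String suite = "AES_CM_128_HMAC_SHA1_80";
client.EndpointSetLocalCryptoSDES(idSession, idEndpoint, Codecs.MediaType.AUDIO, suite, localKey);
client.EndpointSetRemoteCryptoSDES(idSession, idEndpoint, Codecs.MediaType.AUDIO, suite, remoteKey);

//rtpMap maps RTP payload types to codecs and is built beforehand; its Java type depends on the wrapper
int localPort = client.EndpointStartReceiving(idSession, idEndpoint, Codecs.MediaType.AUDIO, rtpMap);

//Start sending towards the remote party (address and port are placeholders)
client.EndpointStartSending(idSession, idEndpoint, Codecs.MediaType.AUDIO, "203.0.113.10", 5004, rtpMap);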

Player


//Player management
int PlayerCreate(std::wstring name);
int PlayerDelete(int playerId);
//Player functionality
int PlayerOpen(int playerId,const char* filename);
int PlayerPlay(int playerId);
int PlayerSeek(int playerId,QWORD time);
int PlayerStop(int playerId);
int PlayerClose(int playerId);

//Events
virtual void onEndOfFile(Player *player,void* param);
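
A possible usage pattern, under the same wrapper assumptions as the earlier sketches; the file path is a placeholder and the seek offset is assumed to be in milliseconds:

//Create the player and open a file
int idPlayer = client.PlayerCreate(idSession, "prompt");
client.PlayerOpen(idSession, idPlayer, "/recordings/prompt.mp4");

//Play, seek forward, then tear down
client.PlayerPlay(idSession, idPlayer);
client.PlayerSeek(idSession, idPlayer, 10000); //seek offset; milliseconds assumed
client.PlayerStop(idSession, idPlayer);
client.PlayerClose(idSession, idPlayer);
client.PlayerDelete(idSession, idPlayer);

The onEndOfFile event is raised when playback reaches the end of the opened file, so the application can, for example, close the player or queue the next file.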

Recorder


//Recorder management
int RecorderCreate(std::wstring tag);
int RecorderDelete(int recorderId);
//Recorder functionality
int RecorderRecord(int recorderId,const char* filename);
int RecorderStop(int recorderId);

//Join other objects
int RecorderAttachToEndpoint(int recorderId,int endpointId,MediaFrame::Type media);
int RecorderAttachToAudioMixerPort(int recorderId,int mixerId,int portId);
int RecorderAttachToVideoMixerPort(int recorderId,int mixerId,int portId);
int RecorderDettach(int recorderId,MediaFrame::Type media);
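
For example, to record the media an existing endpoint (idEndpoint) is receiving, again under the same wrapper assumptions and with a placeholder file name:

//Create a recorder and join it to the endpoint's audio and video
int idRecorder = client.RecorderCreate(idSession, "call-1234");
client.RecorderAttachToEndpoint(idSession, idRecorder, idEndpoint, Codecs.MediaType.AUDIO);
client.RecorderAttachToEndpoint(idSession, idRecorder, idEndpoint, Codecs.MediaType.VIDEO);

//Start writing to a file, then stop and clean up
client.RecorderRecord(idSession, idRecorder, "/recordings/call-1234.mp4");
client.RecorderStop(idSession, idRecorder);
client.RecorderDettach(idSession, idRecorder, Codecs.MediaType.AUDIO);
client.RecorderDettach(idSession, idRecorder, Codecs.MediaType.VIDEO);
client.RecorderDelete(idSession, idRecorder);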

Video Mixing


//Video Mixer management
int VideoMixerCreate(std::wstring tag);
int VideoMixerDelete(int mixerId);
//Video mixer port management
int VideoMixerPortCreate(int mixerId,std::wstring tag,int mosaicId);
int VideoMixerPortSetCodec(int mixerId,int portId,VideoCodec::Type codec,int size,int fps,int bitrate,int intraPeriod);
int VideoMixerPortDelete(int mixerId,int portId);
int VideoMixerPortAttachToEndpoint(int mixerId,int portId,int endpointId);
int VideoMixerPortAttachToPlayer(int mixerId,int portId,int playerId);
int VideoMixerPortDettach(int mixerId,int portId);
//Video mixer mosaic management
int VideoMixerMosaicCreate(int mixerId,Mosaic::Type comp,int size);
int VideoMixerMosaicDelete(int mixerId,int mosaicId);
int VideoMixerMosaicSetSlot(int mixerId,int mosaicId,int num,int portId);
int VideoMixerMosaicSetCompositionType(int mixerId,int mosaicId,Mosaic::Type comp,int size);
int VideoMixerMosaicSetOverlayPNG(int mixerId,int mosaicId,const char* overlay);
int VideoMixerMosaicResetSetOverlay(int mixerId,int mosaicId);
int VideoMixerMosaicAddPort(int mixerId,int mosaicId,int portId);
int VideoMixerMosaicRemovePort(int mixerId,int mosaicId,int portId);
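
A sketch of adding one participant (an existing endpoint idEndpointA) to a mixed video conference, under the same wrapper assumptions; the composition type, picture size, codec and encoder settings below are placeholders for whatever constants the wrapper defines:

//Create the video mixer and a mosaic to compose the participants
int idMixer = client.VideoMixerCreate(idSession, "conference");
int idMosaic = client.VideoMixerMosaicCreate(idSession, idMixer, mosaicComposition, mosaicSize);

//Create a mixer port for the participant and configure its encoder (codec, size, fps, bitrate, intra period)
int idPortA = client.VideoMixerPortCreate(idSession, idMixer, "A", idMosaic);
client.VideoMixerPortSetCodec(idSession, idMixer, idPortA, videoCodec, pictureSize, 25, 512, 0);

//Join both ways: the participant's video feeds the mosaic, and the endpoint sends back the mixed output
client.VideoMixerPortAttachToEndpoint(idSession, idMixer, idPortA, idEndpointA);
client.EndpointAttachToVideoMixerPort(idSession, idEndpointA, idMixer, idPortA);

//Show the participant in the mosaic and pin it to slot 0
client.VideoMixerMosaicAddPort(idSession, idMixer, idMosaic, idPortA);
client.VideoMixerMosaicSetSlot(idSession, idMixer, idMosaic, 0, idPortA);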

Audio Mixing


//AudioMixer management
int AudioMixerCreate(std::wstring name);
int AudioMixerDelete(int mixerId);
//AudioMixer port management
int AudioMixerPortCreate(int mixerId,AudioCodec::Type codec);
int AudioMixerPortDelete(int mixerId,int portId);
//Filters type: 0=pre 1=post
int AudioMixerPortAddFilter(int mixerId,int portId,int type,...);
int AudioMixerPortUpdatedFilter(int mixerId,int portId,...);
int AudioMixerPortDeleteFilter(int mixerId,int portId,int filterId);
int AudioMixerPortClearFilters(int mixerId,int portId);
//Attach port to other joinables
int AudioMixerPortAttachToEndpoint(int mixerId,int portId,int endpointId);
int AudioMixerPortAttachToPlayer(int mixerId,int portId,int playerId);
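
Similarly, a sketch joining two existing endpoints (idEndpointA and idEndpointB) through the audio mixer; the audio codec constant is a placeholder:

//Create the audio mixer and one port per participant
int idMixer = client.AudioMixerCreate(idSession, "conference");
int idPortA = client.AudioMixerPortCreate(idSession, idMixer, audioCodec);
int idPortB = client.AudioMixerPortCreate(idSession, idMixer, audioCodec);

//Join both ways for each participant: the endpoint feeds the mix and sends the mixed output back
client.AudioMixerPortAttachToEndpoint(idSession, idMixer, idPortA, idEndpointA);
client.EndpointAttachToAudioMixerPort(idSession, idEndpointA, idMixer, idPortA);
client.AudioMixerPortAttachToEndpoint(idSession, idMixer, idPortB, idEndpointB);
client.EndpointAttachToAudioMixerPort(idSession, idEndpointB, idMixer, idPortB);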

Video Transcoder


int VideoTranscoderCreate(std::wstring tag);
int VideoTranscoderDelete(int transcoderId);
int VideoTranscoderSetCodec(int transcoderId,VideoCodec::Type codec,int size,int fps,int bitrate,int intraPeriod);
int VideoTranscoderAttachToEndpoint(int transcoderId,int endpointId);
int VideoTranscoderDettach(int transcoderId);

Audio Transcoder


Example

Below is an example of a media bridge between RTP and SRTP with video transcoding (for example, between H.264 and VP8):


//Create session
idSession = client.MediaSessionCreate(tag, 0);

//Create endpoint A and its video transcoder
idEndpointA = client.EndpointCreate(idSession, "Endpoint A", audioSupported, videoSupported, false);
idTranscoderA = client.VideoTranscoderCreate(idSession, "vtx A");
client.EndpointAttachToVideoTranscoder(idSession, idEndpointA, idTranscoderA);

//Create endpoint B and its video transcoder
idEndpointB = client.EndpointCreate(idSession, "Endpoint B", audioSupported, videoSupported, false);
idTranscoderB = client.VideoTranscoderCreate(idSession, "vtx B");
client.EndpointAttachToVideoTranscoder(idSession, idEndpointB, idTranscoderB);

//Attach each video transcoder to the opposite endpoint
client.VideoTranscoderAttachToEndpoint(idSession, idTranscoderA, idEndpointB);
client.VideoTranscoderAttachToEndpoint(idSession, idTranscoderB, idEndpointA);

//Attach audio directly between the endpoints (no transcoding)
client.EndpointAttachToEndpoint(idSession, idEndpointA, idEndpointB, Codecs.MediaType.AUDIO);
client.EndpointAttachToEndpoint(idSession, idEndpointB, idEndpointA, Codecs.MediaType.AUDIO);