Chris,
Take a look at this module as a starting place. It converts SRT to onTextData, so it will have to be modified to use a TTML file instead (see the rough sketch after the module for one way the parse step might change).
package test;
import java.nio.charset.Charset;
import java.util.*;
import com.wowza.wms.amf.*;
import com.wowza.wms.application.IApplicationInstance;
import com.wowza.wms.livestreamrecord.model.*;
import com.wowza.wms.media.h264.H264SEIMessages;
import com.wowza.wms.media.model.*;
import com.wowza.wms.module.ModuleBase;
import com.wowza.wms.stream.*;
import com.wowza.wms.timedtext.model.*;
// Module created by Brian and Scott to:
// 1. read SRT data and inject it as CEA-608 data into a live stream created by Stream Demo Publisher
// 2. create a VOD asset (.mp4) with the CEA-608 data in it
public class ModulePublishSRTAsOnTextData extends ModuleBase
{
    public class MySEIListener implements IMediaStreamH264SEINotify
    {
        TimedTextEntry currCaption = null;

        public void onVideoH264Packet(IMediaStream stream, AMFPacket packet, H264SEIMessages seiMessages)
        {
            String text = null;
            boolean sendEvent = false;
            long currTime = packet.getAbsTimecode();

            if (!hasSrtFile())
                return;

            TimedTextEntry caption = getCaption(currTime);

            // set text to current active caption
            if (caption != null && caption != currCaption)
            {
                text = caption.getText();
                sendEvent = true;
            }

            // if we have an event, send it
            if (sendEvent)
            {
                sendTextDataMessage(stream, text);
                this.currCaption = caption;
                getLogger().info("------- packet Time="+currTime+" "+text);
            }
        }
    }
    public class MyMediaStreamListener implements IMediaStreamActionNotify3
    {
        private Map<String, ILiveStreamRecord> recorders = new HashMap<String, ILiveStreamRecord>();

        public void onPublish(IMediaStream stream, String streamName, boolean isRecord, boolean isAppend)
        {
            IApplicationInstance appInstance = stream.getStreams().getAppInstance();
            if (!stream.isTranscodeResult())
            {
                // read the .srt file for this stream if it exists
                List<TimedTextEntry> list = simpleSRTParse(appInstance, stream);
                setTimedTextList(list);
                if (hasSrtFile())
                    startRecording(stream, streamName);
            }
        }

        public void onUnPublish(IMediaStream stream, String streamName, boolean isRecord, boolean isAppend)
        {
            // clear the list
            setTimedTextList(null);
            stopRecording(stream, streamName);
        }

        public void onMetaData(IMediaStream stream, AMFPacket metaDataPacket)
        {
        }

        public void onPauseRaw(IMediaStream stream, boolean isPause, double location)
        {
        }

        public void onPause(IMediaStream stream, boolean isPause, double location)
        {
        }

        public void onPlay(IMediaStream stream, String streamName, double playStart, double playLen, int playReset)
        {
        }

        public void onSeek(IMediaStream stream, double location)
        {
        }

        public void onStop(IMediaStream stream)
        {
        }

        public void onCodecInfoVideo(IMediaStream stream, MediaCodecInfoVideo codecInfoVideo)
        {
        }

        public void onCodecInfoAudio(IMediaStream stream, MediaCodecInfoAudio codecInfoAudio)
        {
        }

        private void startRecording(IMediaStream stream, String streamName)
        {
            // create a livestreamrecord instance to create .mp4 files
            ILiveStreamRecord recorder = new LiveStreamRecorderMP4();
            recorder.setRecordData(false);
            recorder.setStartOnKeyFrame(true);
            recorder.setVersionFile(true);

            // add it to the recorders list
            synchronized (recorders)
            {
                ILiveStreamRecord prevRecorder = recorders.get(streamName);
                if (prevRecorder != null)
                    prevRecorder.stopRecording();
                recorders.put(streamName, recorder);
            }

            // start recording, create 60-minute segments using the default content path
            //System.out.println("--- startRecordingSegmentByDuration for 60 minutes");
            //recorder.startRecordingSegmentByDuration(stream, null, null, 60*60*1000);

            // start recording, create 1MB segments using the default content path
            //System.out.println("--- startRecordingSegmentBySize for 1MB");
            //recorder.startRecordingSegmentBySize(stream, null, null, 1024*1024);

            // start recording, create a new segment at 1:00am each day
            //System.out.println("--- startRecordingSegmentBySchedule every 0 1 * * * *");
            //recorder.startRecordingSegmentBySchedule(stream, null, null, "0 1 * * * *");

            // start recording to a hard-coded file path, do not append (i.e. overwrite if the file exists)
            getLogger().info("--- startRecording");
            String filePath = "C:\\temp\\"+streamName+"-cc.mp4";
            recorder.startRecording(stream, filePath, false);
            //recorder.startRecording(stream, false);

            // log where the recording is being written
            getLogger().info("startRecording[" + stream.getContextStr() + "]: new Recording started:" + recorder.getFilePath());
        }

        private void stopRecording(IMediaStream stream, String streamName)
        {
            ILiveStreamRecord recorder = null;
            synchronized (recorders)
            {
                recorder = recorders.remove(streamName);
            }
            if (recorder != null)
            {
                // grab the current path to the recorded file
                String filepath = recorder.getFilePath();
                // stop recording
                recorder.stopRecording();
                getLogger().info("stopRecording[" + stream.getContextStr() + "]: File Closed:" + filepath);
            }
            else
            {
                getLogger().info("stopRecording[" + stream.getContextStr() + "]: streamName:" + streamName + " stream recorder not found");
            }
        }
    }
    // local vars
    private List<TimedTextEntry> timedTextList = null;
    private boolean charsetTest = false;
    private final Charset UTF8_CHARSET = Charset.forName("UTF-8");
    private boolean foundSrt = false;

    // app startup processing
    public void onAppStart(IApplicationInstance appInstance)
    {
        getLogger().info("ModulePublishSRTAsOnTextData.onAppStart["+appInstance.getContextStr()+"]");

        // note: onTextDataFile and pathMap are not currently used elsewhere in this module
        String onTextDataFile = "${com.wowza.wms.context.VHostConfigHome}/content/ontextdata.txt";
        //publishInterval = appInstance.getProperties().getPropertyInt("publishOnTextDataPublishInterval", publishInterval);
        //onTextDataFile = appInstance.getProperties().getPropertyStr("publishOnTextDataFile", onTextDataFile);
        charsetTest = appInstance.getProperties().getPropertyBoolean("publishOnTextCharsetTest", charsetTest);

        Map<String, String> pathMap = new HashMap<String, String>();
        pathMap.put("com.wowza.wms.context.VHost", appInstance.getVHost().getName());
        pathMap.put("com.wowza.wms.context.VHostConfigHome", appInstance.getVHost().getHomePath());
        pathMap.put("com.wowza.wms.context.Application", appInstance.getApplication().getName());
        pathMap.put("com.wowza.wms.context.ApplicationInstance", appInstance.getName());
    }

    // hook up stream listeners
    public void onStreamCreate(IMediaStream stream)
    {
        stream.addClientListener(new MyMediaStreamListener());
        stream.addVideoH264SEIListener(new MySEIListener());
    }

    // save the timedTextList; clear the foundSrt flag when the list is cleared
    private void setTimedTextList(List<TimedTextEntry> list)
    {
        this.timedTextList = list;
        if (list == null)
            this.foundSrt = false;
    }
    // find and parse the .srt file for the specified stream
    private List<TimedTextEntry> simpleSRTParse(IApplicationInstance appInstance, IMediaStream stream)
    {
        List<TimedTextEntry> list = null;

        String extension = ITimedTextConstants.TIMED_TEXT_READER_EXTENSION_SRT;
        String fileName = stream.getName()+"."+extension;
        String contentPath = stream.getStreamFileForRead().getParent(); // get stream content path

        // create and configure a MediaReaderItem for use with TimedTextReaderFactory
        MediaReaderItem mri = new MediaReaderItem(ITimedTextConstants.TIMED_TEXT_READER_EXTENSION_SRT, ITimedTextConstants.DEFAULT_TIMED_TEXT_READER_SRT);
        mri.setFileExtension(ITimedTextConstants.TIMED_TEXT_READER_EXTENSION_SRT);

        // create a TimedTextReader for the .srt file associated with this stream
        ITimedTextReader reader = TimedTextReaderFactory.getInstance(appInstance, mri, contentPath, fileName, extension);
        if (reader != null)
        {
            reader.open();
            TimedTextRepresentation tt = reader.getTimedText();
            reader.close();
            if (tt != null)
            {
                TimedTextLanguageRendition rend = tt.getLanguageRendition(Locale.getDefault().getISO3Language());
                // get the list of TimedTextEntries
                list = rend.getTimedText();
                this.foundSrt = true;
            }
            else
            {
                getLogger().info("--- No srt file found for "+contentPath+"\\"+stream.getName());
            }
        }
        //dumpTimedTextList(list);
        return list;
    }
    // send onTextData event
    private void sendTextDataMessage(IMediaStream stream, String text)
    {
        try
        {
            AMFDataObj amfData = new AMFDataObj();
            amfData.put("text", new AMFDataItem(text));
            amfData.put("language", new AMFDataItem("eng"));
            amfData.put("trackid", new AMFDataItem(99));

            stream.sendDirect("onTextData", amfData);
        }
        catch(Exception e)
        {
            getLogger().error("ModulePublishSRTAsOnTextData.sendTextDataMessage["+stream.getContextStr()+"]: "+e.toString());
            e.printStackTrace();
        }
    }

    // get the caption active at the time passed in
    private TimedTextEntry getCaption(long time)
    {
        TimedTextEntry entry = null;
        Iterator<TimedTextEntry> itr = this.timedTextList.iterator();
        while(itr.hasNext())
        {
            TimedTextEntry tte = itr.next();
            if (tte.getStartTime() <= time && time < tte.getEndTime())
            {
                entry = tte;
                break;
            }
        }
        return entry;
    }

    private boolean hasSrtFile()
    {
        return this.foundSrt;
    }

    // dump the list of TimedTextEntries created from the .srt file
    private void dumpTimedTextList(List<TimedTextEntry> list)
    {
        Iterator<TimedTextEntry> itr = list.iterator();
        getLogger().info("--- TimedTextList ----");
        while(itr.hasNext())
        {
            TimedTextEntry tte = itr.next();
            getLogger().info("s:"+tte.getStartTime()+", "+tte.getText()+", e:"+tte.getEndTime());
        }
        getLogger().info("--- ------------ ----");
    }
}
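
For the TTML version, the main piece to swap out is simpleSRTParse(). Below is a rough, untested sketch of what the parse step could look like using the standard Java XML APIs. The SimpleTTMLParse and CaptionEntry names are just placeholders I made up, it assumes captions sit in <p begin="..." end="..."> elements with hh:mm:ss.fff clock times, and the resulting entries would still need to be mapped onto TimedTextEntry (or getCaption() reworked to use them) before dropping it into the module above.

package test;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

public class SimpleTTMLParse
{
    // simple placeholder holder; would need to be converted to TimedTextEntry
    // (or getCaption() changed to work on this type) to plug into the module
    public static class CaptionEntry
    {
        public long startTime;  // milliseconds
        public long endTime;    // milliseconds
        public String text;
    }

    // parse a .ttml/.dfxp file into a list of caption entries
    public static List<CaptionEntry> parse(String filePath) throws Exception
    {
        List<CaptionEntry> list = new ArrayList<CaptionEntry>();

        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        dbf.setNamespaceAware(true);
        Document doc = dbf.newDocumentBuilder().parse(new File(filePath));

        // assumes each caption is a <p begin="..." end="..."> element in the TTML body
        NodeList paragraphs = doc.getElementsByTagNameNS("*", "p");
        for (int i = 0; i < paragraphs.getLength(); i++)
        {
            Element p = (Element)paragraphs.item(i);
            CaptionEntry entry = new CaptionEntry();
            entry.startTime = parseClockTime(p.getAttribute("begin"));
            entry.endTime = parseClockTime(p.getAttribute("end"));
            entry.text = p.getTextContent().trim();
            list.add(entry);
        }
        return list;
    }

    // handles hh:mm:ss[.fraction] clock times only; TTML also allows offset times
    // like "12.5s" and frame-based times, which would need extra handling
    private static long parseClockTime(String time)
    {
        String[] parts = time.split(":");
        long millis = (long)(Double.parseDouble(parts[2]) * 1000);
        millis += Long.parseLong(parts[1]) * 60 * 1000;
        millis += Long.parseLong(parts[0]) * 60 * 60 * 1000;
        return millis;
    }
}

The rest of the module (the SEI listener, sendTextDataMessage, and the recording code) should be able to stay largely as-is once the caption list is built from TTML instead of SRT.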
Richard