org.jcodec.player.Player2
Pure Java implementation of video/audio codecs and formats
package org.jcodec.player;
import static org.jcodec.common.model.RationalLarge.R;
import static org.jcodec.player.util.ThreadUtil.joinForSure;
import static org.jcodec.player.util.ThreadUtil.sleepNoShit;
import static org.jcodec.player.util.ThreadUtil.surePut;
import static org.jcodec.player.util.ThreadUtil.take;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.sound.sampled.AudioFormat;
import org.jcodec.common.model.AudioFrame;
import org.jcodec.common.model.Frame;
import org.jcodec.common.model.Picture;
import org.jcodec.common.model.RationalLarge;
import org.jcodec.common.model.Size;
import org.jcodec.common.model.TapeTimecode;
import org.jcodec.common.tools.Debug;
import org.jcodec.player.filters.AudioOut;
import org.jcodec.player.filters.MediaInfo;
import org.jcodec.player.filters.MediaInfo.AudioInfo;
import org.jcodec.player.filters.VideoOutput;
import org.jcodec.player.filters.VideoSource;
import org.jcodec.player.filters.audio.AudioSource;
import org.jcodec.scale.ColorUtil;
/**
* This class is part of JCodec ( www.jcodec.org ). This software is distributed
* under FreeBSD License.
*
* Media player engine: video and audio are decoded on background threads and video
* presentation is slaved to the audio output clock.
*
* A usage sketch is appended at the end of this file.
*
* @author The JCodec project
*
*/
public class Player2 {
public enum Status {
STOPPED, PAUSED, BUFFERING, PLAYING
}
private static final int VIDEO_QUEUE_SIZE = 50;
private static final int AUDIO_QUEUE_SIZE = 50;
public static final int PACKETS_IN_BUFFER = 8;
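// Common timescale (ticks per second) in which frame and audio pts are compared to the
// wall clock; the wall clock value is converted into it as (wallClock * 96) / 1000.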
public static int TIMESCALE = 96000;
private VideoSource videoSource;
private AudioSource audioSource;
private VideoOutput vo;
private AudioOut ao;
AtomicBoolean pause = new AtomicBoolean();
private long wallClock;
private long maxDecodedVideoPts;
private long lastAudio;
private List<Frame> video = Collections.synchronizedList(new ArrayList<Frame>());
private BlockingQueue<int[][]> videoDrain = new LinkedBlockingQueue<int[][]>();
private volatile boolean stop;
private BlockingQueue<AudioFrame> audio = new LinkedBlockingQueue<AudioFrame>();
private BlockingQueue<ByteBuffer> audioDrain = new LinkedBlockingQueue<ByteBuffer>();
private AudioFormat af;
private Picture dst;
private Object audioSeekLock = new Object();
private Object pausedEvent = new Object();
private MediaInfo.VideoInfo mi;
private int audioPacketSize;
private List<Listener> listeners = new ArrayList<Listener>();
private static final ExecutorService executor = Executors.newSingleThreadExecutor();
private volatile boolean resume;
// private volatile boolean decodingLocked;
private Thread resumeThread;
private Thread videoPlaybackThread;
private Thread audioDecodeThread;
private Thread audioPlaybackThread;
private Thread videoDecodeThread;
public Player2(VideoSource videoSource, AudioSource audioSource, VideoOutput vo, AudioOut ao) throws IOException {
this.videoSource = videoSource;
this.audioSource = audioSource;
this.vo = vo;
this.ao = ao;
initPlayer();
}
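// Opens the audio output, pre-allocates the buffer pools (VIDEO_QUEUE_SIZE picture buffers
// into videoDrain, AUDIO_QUEUE_SIZE byte buffers into audioDrain) and starts the decode,
// playback and resume threads. The player starts out paused; call play() to begin playback.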
private void initPlayer() throws IOException {
Debug.println("Initializing player");
pause.set(true);
wallClock = 0;
videoDrain.clear();
audioDrain.clear();
video.clear();
audio.clear();
AudioInfo ai = audioSource.getAudioInfo();
af = ai.getFormat();
audioPacketSize = ai.getFramesPerPacket();
ao.open(af, audioPacketSize * PACKETS_IN_BUFFER);
mi = videoSource.getMediaInfo();
startAudioDecode();
lastAudio = ao.playedMs();
startAudioPlayback();
startVideoDecode();
for (int i = 0; i < VIDEO_QUEUE_SIZE; i++) {
surePut(videoDrain, createTarget());
}
for (int i = 0; i < AUDIO_QUEUE_SIZE; i++) {
surePut(audioDrain, ByteBuffer.allocate(af.getFrameSize() * (audioPacketSize + 10)));
}
startVideoPlayback();
startResumeThread();
}
/**
* Resumes player playback as soon as possible
*/
public void play() {
resume = true;
notifyStatus();
}
/**
* Pauses playback
*
* Waits until the player actually stops
*
* @return Whether playback was already paused
*/
public boolean pause() {
resume = false;
return pauseWait();
}
public boolean pauseWait() {
try {
if (!pause.getAndSet(true)) {
ao.pause();
synchronized (pausedEvent) {
sureWait(pausedEvent);
}
return false;
}
return true;
} finally {
notifyStatus();
}
}
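// Buffering policy: after play() sets the resume flag, playback is only actually unpaused
// once both the audio and the video queue are at least half full.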
private void startResumeThread() {
resumeThread = new Thread() {
public void run() {
while (!stop) {
if (resume && pause.get()) {
if (audio.size() >= AUDIO_QUEUE_SIZE / 2 && video.size() >= VIDEO_QUEUE_SIZE / 2) {
pause.set(false);
ao.resume();
notifyStatus();
}
}
sleepNoShit(500000);
}
Debug.println("Resume thread done");
}
};
resumeThread.setDaemon(true);
resumeThread.start();
}
private void startVideoPlayback() {
videoPlaybackThread = new Thread() {
public void run() {
Debug.println("Starting video playback");
try {
playVideo();
} catch (IOException e) {
e.printStackTrace();
}
Debug.println("Playing video done");
}
};
videoPlaybackThread.start();
}
Frame[] EMPTY = new Frame[0];
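// Video presentation loop. The wall clock is advanced by the audio output's playback
// position, so video stays slaved to the audio clock; the frame whose display interval
// covers the clock is shown and its buffer is recycled into videoDrain. When nothing is
// due and the queue is empty the player auto-pauses so the resume thread can rebuffer.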
private void playVideo() throws IOException {
while (!stop) {
if (!pause.get()) {
long newAudio = ao.playedMs();
wallClock += newAudio - lastAudio;
lastAudio = newAudio;
Frame selected = selectFrame((wallClock * 96) / 1000, maxDecodedVideoPts);
if (selected == null) {
if (video.size() > 0)
sleepNoShit(2000000);
else
pauseNoWait();
} else {
show(selected);
surePut(videoDrain, selected.getPic().getData());
}
} else {
synchronized (pausedEvent) {
pausedEvent.notifyAll();
}
sleepNoShit(200000);
}
}
}
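// Picks the queued frame whose display interval covers the given wall clock (in TIMESCALE
// ticks). Drop candidates are only actually purged when their pts is above
// lastDecodedVideoPts, i.e. when they were queued before seekVideo() reset that counter;
// purged frames have their picture buffers returned to videoDrain.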
private Frame selectFrame(long wallClock, long lastDecodedVideoPts) {
List<Frame> remove = new ArrayList<Frame>();
Frame selected = null;
for (Frame frame : video.toArray(EMPTY)) {
long framePts = (frame.getPts().getNum() * TIMESCALE) / frame.getPts().getDen();
long frameDuration = (frame.getDuration().getNum() * TIMESCALE) / frame.getDuration().getDen();
if (framePts + frameDuration < wallClock) {
// Frame's display interval has fully passed, it is a candidate for dropping
remove.add(frame);
} else if (framePts <= wallClock) {
// The wall clock falls inside this frame's display interval
selected = frame;
break;
}
// Frames starting after the wall clock stay queued for a later pass
}
Iterator<Frame> it = remove.iterator();
while (it.hasNext()) {
Frame frame = it.next();
if (frame.getPts().getNum() <= lastDecodedVideoPts) {
it.remove();
}
}
video.removeAll(remove);
for (Frame frame : remove) {
surePut(videoDrain, frame.getPic().getData());
}
if (selected != null)
video.remove(selected);
return selected;
}
private int[][] createTarget() {
Size dim = mi.getDim();
int sz = 2 * dim.getWidth() * dim.getHeight();
return new int[][] { new int[sz], new int[sz], new int[sz] };
}
private void startVideoDecode() {
videoDecodeThread = new Thread() {
public void run() {
Debug.println("Starting video decode");
try {
decodeVideo();
} catch (IOException e) {
e.printStackTrace();
}
Debug.println("Decoding video done");
}
};
videoDecodeThread.start();
}
private void startAudioDecode() {
audioDecodeThread = new Thread() {
public void run() {
Debug.println("Starting audio decode");
try {
decodeAudio();
} catch (IOException e) {
e.printStackTrace();
}
Debug.println("Decoding audio done");
}
};
audioDecodeThread.start();
}
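// Audio decode loop: borrows an empty buffer from audioDrain, fills it from the audio
// source and queues the resulting AudioFrame. Runs under audioSeekLock so it cannot
// interleave with seek(); a large jump in audio pts triggers a matching video seek.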
private void decodeAudio() throws IOException {
while (!stop) {
AudioFrame frame;
synchronized (audioSeekLock) {
ByteBuffer buf = take(audioDrain, 5);
if (buf == null)
continue;
buf.rewind();
frame = audioSource.getFrame(buf);
if (frame != null) {
seekVideoIfNeeded(frame);
surePut(audio, frame);
} else {
surePut(audioDrain, buf);
}
}
if (frame == null)
sleepNoShit(500000);
}
}
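// Re-seeks the video source when the decoded audio pts falls more than TIMESCALE / 4
// (a quarter of a second) outside the pts range currently held in the video queue.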
private void seekVideoIfNeeded(AudioFrame frame) throws IOException {
final long apts = (frame.getPts() * TIMESCALE) / frame.getTimescale();
Frame[] copy = video.toArray(EMPTY);
long minPts = Long.MAX_VALUE, maxPts = Long.MIN_VALUE;
for (Frame frame2 : copy) {
long vpts = (frame2.getPts().getNum() * TIMESCALE) / frame2.getPts().getDen();
if (vpts < minPts)
minPts = vpts;
if (vpts > maxPts)
maxPts = vpts;
}
if (apts > maxPts + TIMESCALE / 4 || apts < minPts - TIMESCALE / 4) {
seekVideo(frame.getPts(), frame.getTimescale());
}
}
private void seekVideo(long pts, long timescale) throws IOException {
if (videoSource.drySeek(R(pts, timescale))) {
maxDecodedVideoPts = 0;
videoSource.seek(R(pts, timescale));
} else
throw new RuntimeException("Handle this");
}
private void decodeVideo() throws IOException {
while (!stop) {
decodeJustOneFrame();
}
}
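// Borrows a picture buffer from videoDrain, decodes one frame into it and queues the
// result, tracking the highest decoded pts. If the source yields nothing, the buffer is
// returned and the thread naps briefly.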
private void decodeJustOneFrame() throws IOException {
int[][] buf = take(videoDrain, 5);
if (buf == null)
return;
Frame frame = videoSource.decode(buf);
if (frame != null) {
long pts = frame.getPts().getNum();
if (pts > maxDecodedVideoPts)
maxDecodedVideoPts = pts;
video.add(frame);
} else {
surePut(videoDrain, buf);
sleepNoShit(500000);
}
}
private void startAudioPlayback() {
audioPlaybackThread = new Thread() {
public void run() {
sleepNoShit(10000000);
playAudio();
}
};
audioPlaybackThread.start();
}
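// Audio playback loop: feeds decoded packets to the audio output and returns exhausted
// buffers to audioDrain. When a packet's pts deviates from the predicted continuation by
// more than TIMESCALE / 100 (about 10 ms), the output is drained and wallClock is re-based
// to the new audio position, which is how a seek resets the presentation clock.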
private void playAudio() {
long predPts = Long.MIN_VALUE;
Debug.println("Starting audio playback");
ByteBuffer pkt = null;
while (!stop) {
if (!pause.get()) {
if (pkt == null) {
AudioFrame frame = audio.poll();
if (frame == null) {
Debug.println("Audio queue empty");
pauseNoWait();
continue;
}
pkt = frame.getData();
long pts = (frame.getPts() * TIMESCALE) / frame.getTimescale();
if (Math.abs(predPts - pts) > TIMESCALE / 100) {
ao.drain();
lastAudio = ao.playedMs();
wallClock = (1000000 * frame.getPts()) / frame.getTimescale();
}
predPts = (frame.getPts() * TIMESCALE) / frame.getTimescale() + (frame.getDuration() * TIMESCALE)
/ frame.getTimescale();
}
ao.write(pkt);
if (pkt.remaining() == 0) {
surePut(audioDrain, pkt);
pkt = null;
}
} else {
sleepNoShit(500000);
}
}
Debug.println("Playing autio done");
}
private void pauseNoWait() {
try {
if (!pause.getAndSet(true)) {
ao.pause();
Debug.println("On pause: " + ao.playedMs());
}
} finally {
notifyStatus();
}
}
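// Converts the decoded picture to the output's color space if necessary, reusing a cached
// scratch Picture of the same size, and hands it to the video output together with the
// frame's pixel aspect ratio.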
private void show(Frame frame) {
Picture src = frame.getPic();
notifyTime(frame);
if (src.getColor() != vo.getColorSpace()) {
if (dst == null || dst.getWidth() != src.getWidth() || dst.getHeight() != src.getHeight())
dst = Picture.create(src.getWidth(), src.getHeight(), vo.getColorSpace());
ColorUtil.getTransform(src.getColor(), vo.getColorSpace()).transform(src, dst);
vo.show(dst, frame.getPixelAspect());
} else {
vo.show(src, frame.getPixelAspect());
}
}
public void seekAsync(final RationalLarge where) {
executor.submit(new Runnable() {
public void run() {
try {
seek(where.getNum(), where.getDen());
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
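// Seeking is audio-master: only the audio source is repositioned here (under
// audioSeekLock, so the audio decode loop cannot run mid-seek) and the audio output is
// flushed; the video source follows via seekVideoIfNeeded() once the next audio frame is
// decoded.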
private void seek(long clk, long timescale) throws IOException {
if (clk < 0 || !audioSource.drySeek(R(clk, timescale)))
return;
System.out.println("Seek requested: " + clk + "/" + timescale);
synchronized (audioSeekLock) {
System.out.println("Seek executing: " + clk + "/" + timescale);
// decodingLocked = true;
// drainAudio();
audioSource.seek(R(clk, timescale));
ao.flush();
// decodingLocked = false;
}
}
// private void drainVideo() {
// synchronized (video) {
// Frame[] copy = video.toArray(EMPTY);
// video.clear();
// for (Frame frame : copy) {
// surePut(videoDrain, frame.getPic().getData());
// }
// }
// }
// private void drainAudio() {
// List list = new LinkedList();
// audio.drainTo(list);
// for (Buffer frame : list) {
// audioDrain.add(frame.buffer);
// }
// }
private void sureWait(Object monitor) {
try {
monitor.wait();
} catch (InterruptedException e) {
}
}
public RationalLarge getPos() {
return new RationalLarge((wallClock * 96) / 1000, TIMESCALE);
}
public void destroy() {
stop = true;
joinForSure(videoDecodeThread);
joinForSure(audioDecodeThread);
joinForSure(videoPlaybackThread);
joinForSure(audioPlaybackThread);
joinForSure(resumeThread);
video = null;
audio = null;
videoDrain = null;
audioDrain = null;
Debug.println("Player destroyed");
}
private void notifyStatus() {
final Status status = pause.get() ? (resume ? Status.BUFFERING : Status.PAUSED) : Status.PLAYING;
executor.execute(new Runnable() {
public void run() {
for (Listener listener : listeners) {
try {
listener.statusChanged(status);
} catch (Throwable t) {
t.printStackTrace();
}
}
}
});
}
private void notifyTime(final Frame frame) {
executor.execute(new Runnable() {
public void run() {
for (Listener listener : listeners) {
try {
listener.timeChanged(frame.getPts(), frame.getFrameNo(), frame.getTapeTimecode());
} catch (Throwable t) {
t.printStackTrace();
}
}
}
});
}
public static interface Listener {
void timeChanged(RationalLarge pts, int frameNo, TapeTimecode tapeTimecode);
void statusChanged(Status status);
}
public void addListener(Listener listener) {
listeners.add(listener);
}
public VideoSource getVideoSource() {
return videoSource;
}
public AudioSource getAudioSources() {
return audioSource;
}
}
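// The sketch below is not part of the original Player2 source; it is a minimal example of
// wiring the engine together. It assumes the caller already has concrete VideoSource,
// AudioSource, VideoOutput and AudioOut implementations (how those are created depends on
// the container/codec filters in use and is not shown here).
class Player2Usage {
// Builds a player over the supplied filters, logs time/status callbacks and starts playback.
static Player2 startPlayback(VideoSource videoSource, AudioSource audioSource, VideoOutput vo,
AudioOut ao) throws IOException {
Player2 player = new Player2(videoSource, audioSource, vo, ao);
player.addListener(new Player2.Listener() {
public void timeChanged(RationalLarge pts, int frameNo, TapeTimecode tapeTimecode) {
Debug.println("Position: " + pts + ", frame " + frameNo);
}
public void statusChanged(Player2.Status status) {
Debug.println("Status: " + status);
}
});
player.play();
return player;
}
}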