Test analysis has become more sophisticated and time-consuming. Recording a video of a test failure makes debugging much easier. Below, I explain step by step how to record a video of your test failures.
<dependency>
<groupId>com.pojosontheweb</groupId>
<artifactId>monte-repack</artifactId>
<version>1.0</version>
</dependency>
Next, create the screen recorder class:
import org.monte.media.*;
import org.monte.media.FormatKeys.MediaType;
import org.monte.media.avi.AVIWriter;
import org.monte.media.beans.AbstractStateModel;
import org.monte.media.color.Colors;
import org.monte.media.converter.CodecChain;
import org.monte.media.converter.ScaleImageCodec;
import org.monte.media.image.Images;
import org.monte.media.math.Rational;
import org.monte.media.quicktime.QuickTimeWriter;

import javax.sound.sampled.*;
import javax.sound.sampled.AudioFormat.Encoding;
import javax.sound.sampled.BooleanControl.Type;
import javax.sound.sampled.DataLine.Info;
import javax.swing.*;
import java.awt.*;
import java.awt.event.AWTEventListener;
import java.awt.event.MouseEvent;
import java.awt.image.BufferedImage;
import java.awt.image.ImageObserver;
import java.io.File;
import java.io.IOException;
import java.nio.ByteOrder;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.concurrent.*;
/**
 * Records the screen, an optional mouse-cursor overlay, and optional audio
 * into a movie file using the Monte media library writers (AVI / QuickTime).
 * State transitions (DONE / RECORDING / FAILED) are reported to listeners
 * through {@link AbstractStateModel}.
 */
public class ScreenRecorder extends AbstractStateModel {
    // Supported cursor-overlay encodings for the mouse capture format.
    public static final String ENCODING_BLACK_CURSOR = "black";
    public static final String ENCODING_WHITE_CURSOR = "white";
    public static final String ENCODING_YELLOW_CURSOR = "yellow";
    // Shared lock object handed to the grabber threads.
    private final Object sync;
    protected Format mouseFormat;
    // Wall-clock bounds (ms since epoch) of the current recording session.
    protected long recordingStartTime;
    protected volatile long recordingStopTime;
    protected ScheduledThreadPoolExecutor mouseCaptureTimer;
    // Track indices inside the movie container (video first, audio second).
    protected int videoTrack;
    protected int audioTrack;
    protected ScreenRecorder.MouseGrabber mouseGrabber;
    protected ScheduledFuture mouseFuture;
    // Destination folder for generated movie files.
    protected File movieFolder;
    long counter;
    // Movie file created by createMovieWriter(String); deleted by stopWriter(false).
    File screenRecorderName;
    private ScreenRecorder.State state;
    private String stateMessage;
    // Formats describing the container, the screen video, and the audio stream.
    private Format fileFormat;
    private Format screenFormat;
    private Format audioFormat;
    private Rectangle captureArea;
    private MovieWriter w;
    // Timestamp when the current output file was opened (used for rollover checks).
    private long fileStartTime;
    private ArrayBlockingQueue<Buffer> mouseCaptures;
    private ScheduledThreadPoolExecutor screenCaptureTimer;
    private ScheduledThreadPoolExecutor audioCaptureTimer;
    private volatile Thread writerThread;
    // Cursor images drawn over captured frames (normal and button-pressed).
    private BufferedImage cursorImg;
    private BufferedImage cursorImgPressed;
    private Point cursorOffset;
    // Buffers waiting to be encoded/written by the writer thread.
    private ArrayBlockingQueue<Buffer> writerQueue;
    private Codec frameEncoder;
    // Next output timestamp and per-frame duration for fixed-frame-rate output.
    private Rational outputTime;
    private Rational ffrDuration;
    private ArrayList<File> recordedFiles;
    private GraphicsDevice captureDevice;
    private ScreenRecorder.AudioGrabber audioGrabber;
    private ScreenRecorder.ScreenGrabber screenGrabber;
    private ScheduledFuture audioFuture;
    private ScheduledFuture screenFuture;
    // Global listener used to observe mouse button presses.
    private AWTEventListener awtEventListener;
    // Limits checked in doWrite(); see setMaxRecordingTime / setMaxFileSize.
    private long maxRecordingTime;
    private long maxFileSize;
    private Mixer mixer;
    /**
     * Creates a recorder with sensible defaults: a QuickTime container,
     * 24-bit "Animation" (RLE) video at 15 fps covering the whole screen,
     * a black cursor overlay sampled at 30 fps, and 16-bit stereo big-endian
     * PCM ("twos") audio at 48 kHz.
     *
     * @param cfg the graphics configuration of the screen to capture
     */
    public ScreenRecorder(GraphicsConfiguration cfg) throws IOException, AWTException {
        this(cfg, (Rectangle) null, new Format(new Object[]{FormatKeys.MediaTypeKey, MediaType.FILE, FormatKeys.MimeTypeKey, "video/quicktime"}), new Format(new Object[]{FormatKeys.MediaTypeKey, MediaType.VIDEO, FormatKeys.EncodingKey, "rle ", VideoFormatKeys.CompressorNameKey, "Animation", VideoFormatKeys.DepthKey, Integer.valueOf(24), FormatKeys.FrameRateKey, new Rational(15L, 1L)}), new Format(new Object[]{FormatKeys.MediaTypeKey, MediaType.VIDEO, FormatKeys.EncodingKey, "black", FormatKeys.FrameRateKey, new Rational(30L, 1L)}), new Format(new Object[]{FormatKeys.MediaTypeKey, MediaType.AUDIO, FormatKeys.EncodingKey, "twos", FormatKeys.FrameRateKey, new Rational(48000L, 1L), AudioFormatKeys.SampleSizeInBitsKey, Integer.valueOf(16), AudioFormatKeys.ChannelsKey, Integer.valueOf(2), AudioFormatKeys.SampleRateKey, new Rational(48000L, 1L), AudioFormatKeys.SignedKey, Boolean.valueOf(true), AudioFormatKeys.ByteOrderKey, ByteOrder.BIG_ENDIAN}));
    }
    /**
     * Creates a recorder with explicit formats that captures the whole screen
     * of the given configuration.
     *
     * @param cfg          the graphics configuration of the screen to capture
     * @param fileFormat   container format (e.g. QuickTime or AVI)
     * @param screenFormat video track format
     * @param mouseFormat  cursor overlay format, or null to disable mouse capture
     * @param audioFormat  audio track format, or null to disable audio capture
     */
    public ScreenRecorder(GraphicsConfiguration cfg, Format fileFormat, Format screenFormat, Format mouseFormat, Format audioFormat) throws IOException, AWTException {
        this(cfg, (Rectangle) null, fileFormat, screenFormat, mouseFormat, audioFormat);
    }
public ScreenRecorder(GraphicsConfiguration cfg, Rectangle captureArea, Format fileFormat, Format screenFormat, Format mouseFormat, Format audioFormat) throws IOException, AWTException {
this(cfg, (Rectangle) null, fileFormat, screenFormat, mouseFormat, audioFormat, (File) null);
}
public ScreenRecorder(GraphicsConfiguration cfg, Rectangle captureArea, Format fileFormat, Format screenFormat, Format mouseFormat, Format audioFormat, File movieFolder) throws IOException, AWTException {
this.state = ScreenRecorder.State.DONE;
this.stateMessage = null;
this.sync = new Object();
this.videoTrack = 0;
this.audioTrack = 1;
this.maxRecordingTime = 3600000L;
this.maxFileSize = 9223372036854775807L;
this.counter = 0L;
this.fileFormat = fileFormat;
this.screenFormat = screenFormat;
this.mouseFormat = mouseFormat;
if (this.mouseFormat == null) {
this.mouseFormat = new Format(new Object[]{FormatKeys.FrameRateKey, new Rational(0L, 0L), FormatKeys.EncodingKey, "black"});
}
this.audioFormat = audioFormat;
this.recordedFiles = new ArrayList();
this.captureDevice = cfg.getDevice();
this.captureArea = captureArea == null ? cfg.getBounds() : captureArea;
if (mouseFormat != null && ((Rational) mouseFormat.get(FormatKeys.FrameRateKey)).intValue() > 0) {
this.mouseCaptures = new ArrayBlockingQueue(((Rational) mouseFormat.get(FormatKeys.FrameRateKey)).intValue() * 2);
if (((String) this.mouseFormat.get(FormatKeys.EncodingKey)).equals("black")) {
//Please provide these images location it is the screen pointers
String filelocation = System.getProperty("user.dir") + "/src/main/resources/images/Cursor.black.png";
String filelocation2 = System.getProperty("user.dir") + "/src/main/resources/images/Cursor.black.pressed.png";
this.cursorImg = Images.toBufferedImage(createImage(filelocation));
this.cursorImgPressed = Images.toBufferedImage(createImage(filelocation2));
} else if (((String) this.mouseFormat.get(FormatKeys.EncodingKey)).equals("yellow")) {
this.cursorImg = Images.toBufferedImage(Images.createImage(ScreenRecorder.class, "images/Cursor.yellow.png"));
this.cursorImgPressed = Images.toBufferedImage(Images.createImage(ScreenRecorder.class, "images/Cursor.yellow.pressed.png"));
} else {
this.cursorImg = Images.toBufferedImage(Images.createImage(ScreenRecorder.class, "images/Cursor.white.png"));
this.cursorImgPressed = Images.toBufferedImage(Images.createImage(ScreenRecorder.class, "images/Cursor.white.pressed.png"));
}
this.cursorOffset = new Point(this.cursorImg.getWidth() / -2, this.cursorImg.getHeight() / -2);
}
this.movieFolder = movieFolder;
if (this.movieFolder == null) {
this.movieFolder = new File(System.getProperty("java.io.tmpdir") + File.separator + "screenrecorder");
}
}
public static Image createImage(String resource) {
Image image = Toolkit.getDefaultToolkit().createImage(resource);
return image;
}
    /**
     * Creates the movie writer for a named (per-test) recording and configures
     * the video track, the optional audio track, and the frame encoder chain.
     *
     * @param testName prefix for the generated movie file name
     * @return the configured writer (also stored in {@code this.w})
     * @throws IOException if no encoder exists for the output format
     */
    protected MovieWriter createMovieWriter(String testName) throws IOException {
        // Remember the file so stopWriter(false) can delete it later.
        screenRecorderName = this.createMovieFile(this.fileFormat, testName);
        File f = screenRecorderName;
        this.recordedFiles.add(f);
        MovieWriter mw = this.w = Registry.getInstance().getWriter(this.fileFormat, f);
        // The video track runs at the faster of the screen and mouse rates.
        Rational videoRate = Rational.max((Rational) this.screenFormat.get(FormatKeys.FrameRateKey), (Rational) this.mouseFormat.get(FormatKeys.FrameRateKey));
        this.ffrDuration = videoRate.inverse();
        Format videoInputFormat = this.screenFormat.prepend(new Object[]{FormatKeys.MediaTypeKey, MediaType.VIDEO, FormatKeys.EncodingKey, "image", VideoFormatKeys.WidthKey, Integer.valueOf(this.captureArea.width), VideoFormatKeys.HeightKey, Integer.valueOf(this.captureArea.height), FormatKeys.FrameRateKey, videoRate});
        Format videoOutputFormat = this.screenFormat.prepend(new Object[]{FormatKeys.FrameRateKey, videoRate, FormatKeys.MimeTypeKey, this.fileFormat.get(FormatKeys.MimeTypeKey)}).append(new Object[]{VideoFormatKeys.WidthKey, Integer.valueOf(this.captureArea.width), VideoFormatKeys.HeightKey, Integer.valueOf(this.captureArea.height)});
        this.videoTrack = this.w.addTrack(videoOutputFormat);
        if (this.audioFormat != null) {
            this.audioTrack = this.w.addTrack(this.audioFormat);
        }
        Codec encoder = Registry.getInstance().getEncoder(this.w.getFormat(this.videoTrack));
        if (encoder == null) {
            throw new IOException("No encoder for format " + this.w.getFormat(this.videoTrack));
        } else {
            this.frameEncoder = encoder;
            this.frameEncoder.setInputFormat(videoInputFormat);
            this.frameEncoder.setOutputFormat(videoOutputFormat);
            if (this.frameEncoder.getOutputFormat() == null) {
                throw new IOException("Unable to encode video frames in this output format:\n" + videoOutputFormat);
            } else {
                // Insert a scaling codec when the capture size differs from
                // the output size.
                if (!videoInputFormat.intersectKeys(new FormatKey[]{VideoFormatKeys.WidthKey, VideoFormatKeys.HeightKey}).matches(videoOutputFormat.intersectKeys(new FormatKey[]{VideoFormatKeys.WidthKey, VideoFormatKeys.HeightKey}))) {
                    ScaleImageCodec qtw = new ScaleImageCodec();
                    qtw.setInputFormat(videoInputFormat);
                    qtw.setOutputFormat(videoOutputFormat.intersectKeys(new FormatKey[]{VideoFormatKeys.WidthKey, VideoFormatKeys.HeightKey}).append(videoInputFormat));
                    this.frameEncoder = new CodecChain(qtw, this.frameEncoder);
                }
                // 8-bit video requires an explicit palette / color table.
                if (((Integer) this.screenFormat.get(VideoFormatKeys.DepthKey)).intValue() == 8) {
                    if (this.w instanceof AVIWriter) {
                        AVIWriter qtw1 = (AVIWriter) this.w;
                        qtw1.setPalette(this.videoTrack, Colors.createMacColors());
                    } else if (this.w instanceof QuickTimeWriter) {
                        QuickTimeWriter qtw2 = (QuickTimeWriter) this.w;
                        qtw2.setVideoColorTable(this.videoTrack, Colors.createMacColors());
                    }
                }
                // Used by doWrite() to detect when maxRecordingTime is exceeded.
                this.fileStartTime = System.currentTimeMillis();
                return mw;
            }
        }
    }
protected MovieWriter createMovieWriter() throws IOException {
File f = this.createMovieFile(this.fileFormat);
this.recordedFiles.add(f);
MovieWriter mw = this.w = Registry.getInstance().getWriter(this.fileFormat, f);
Rational videoRate = Rational.max((Rational) this.screenFormat.get(FormatKeys.FrameRateKey), (Rational) this.mouseFormat.get(FormatKeys.FrameRateKey));
this.ffrDuration = videoRate.inverse();
Format videoInputFormat = this.screenFormat.prepend(new Object[]{FormatKeys.MediaTypeKey, MediaType.VIDEO, FormatKeys.EncodingKey, "image", VideoFormatKeys.WidthKey, Integer.valueOf(this.captureArea.width), VideoFormatKeys.HeightKey, Integer.valueOf(this.captureArea.height), FormatKeys.FrameRateKey, videoRate});
Format videoOutputFormat = this.screenFormat.prepend(new Object[]{FormatKeys.FrameRateKey, videoRate, FormatKeys.MimeTypeKey, this.fileFormat.get(FormatKeys.MimeTypeKey)}).append(new Object[]{VideoFormatKeys.WidthKey, Integer.valueOf(this.captureArea.width), VideoFormatKeys.HeightKey, Integer.valueOf(this.captureArea.height)});
this.videoTrack = this.w.addTrack(videoOutputFormat);
if (this.audioFormat != null) {
this.audioTrack = this.w.addTrack(this.audioFormat);
}
Codec encoder = Registry.getInstance().getEncoder(this.w.getFormat(this.videoTrack));
if (encoder == null) {
throw new IOException("No encoder for format " + this.w.getFormat(this.videoTrack));
} else {
this.frameEncoder = encoder;
this.frameEncoder.setInputFormat(videoInputFormat);
this.frameEncoder.setOutputFormat(videoOutputFormat);
if (this.frameEncoder.getOutputFormat() == null) {
throw new IOException("Unable to encode video frames in this output format:\n" + videoOutputFormat);
} else {
if (!videoInputFormat.intersectKeys(new FormatKey[]{VideoFormatKeys.WidthKey, VideoFormatKeys.HeightKey}).matches(videoOutputFormat.intersectKeys(new FormatKey[]{VideoFormatKeys.WidthKey, VideoFormatKeys.HeightKey}))) {
ScaleImageCodec qtw = new ScaleImageCodec();
qtw.setInputFormat(videoInputFormat);
qtw.setOutputFormat(videoOutputFormat.intersectKeys(new FormatKey[]{VideoFormatKeys.WidthKey, VideoFormatKeys.HeightKey}).append(videoInputFormat));
this.frameEncoder = new CodecChain(qtw, this.frameEncoder);
}
if (((Integer) this.screenFormat.get(VideoFormatKeys.DepthKey)).intValue() == 8) {
if (this.w instanceof AVIWriter) {
AVIWriter qtw1 = (AVIWriter) this.w;
qtw1.setPalette(this.videoTrack, Colors.createMacColors());
} else if (this.w instanceof QuickTimeWriter) {
QuickTimeWriter qtw2 = (QuickTimeWriter) this.w;
qtw2.setVideoColorTable(this.videoTrack, Colors.createMacColors());
}
}
this.fileStartTime = System.currentTimeMillis();
return mw;
}
}
}
public List<File> getCreatedMovieFiles() {
return Collections.unmodifiableList(this.recordedFiles);
}
protected File createMovieFile(Format fileFormat) throws IOException {
if (!this.movieFolder.exists()) {
this.movieFolder.mkdirs();
} else if (!this.movieFolder.isDirectory()) {
throw new IOException("\"" + this.movieFolder + "\" is not a directory.");
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd \'at\' HH.mm.ss");
File f = new File(this.movieFolder, "ScreenRecording " + dateFormat.format(new Date()) + "." + Registry.getInstance().getExtension(fileFormat));
return f;
}
protected File createMovieFile(Format fileFormat, String name) throws IOException {
if (!this.movieFolder.exists()) {
this.movieFolder.mkdirs();
} else if (!this.movieFolder.isDirectory()) {
throw new IOException("\"" + this.movieFolder + "\" is not a directory.");
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd \'at\' HH.mm.ss");
File f = new File(this.movieFolder, name + dateFormat.format(new Date()) + "." + Registry.getInstance().getExtension(fileFormat));
return f;
}
public ScreenRecorder.State getState() {
return this.state;
}
public String getStateMessage() {
return this.stateMessage;
}
private void setState(ScreenRecorder.State newValue, String msg) {
this.state = newValue;
this.stateMessage = msg;
this.fireStateChanged();
}
public long getStartTime() {
return this.recordingStartTime;
}
public void start() throws IOException {
this.stop();
this.recordedFiles.clear();
this.createMovieWriter();
try {
this.recordingStartTime = System.currentTimeMillis();
this.recordingStopTime = 9223372036854775807L;
this.outputTime = new Rational(0L, 0L);
this.startWriter();
IOException ioe;
try {
this.startScreenCapture();
} catch (AWTException var4) {
ioe = new IOException("Start screen capture failed");
ioe.initCause(var4);
this.stop();
throw ioe;
} catch (IOException var5) {
this.stop();
throw var5;
}
if (this.mouseFormat != null && ((Rational) this.mouseFormat.get(FormatKeys.FrameRateKey)).intValue() > 0) {
this.startMouseCapture();
}
if (this.audioFormat != null) {
try {
this.startAudioCapture();
} catch (LineUnavailableException var3) {
ioe = new IOException("Start audio capture failed");
ioe.initCause(var3);
this.stop();
throw ioe;
}
}
this.setState(ScreenRecorder.State.RECORDING, (String) null);
} catch (IOException var6) {
this.stop();
throw var6;
}
}
public void start(String testName) throws IOException {
this.stop();
this.recordedFiles.clear();
this.createMovieWriter(testName);
try {
this.recordingStartTime = System.currentTimeMillis();
this.recordingStopTime = 9223372036854775807L;
this.outputTime = new Rational(0L, 0L);
this.startWriter();
IOException ioe;
try {
this.startScreenCapture();
} catch (AWTException var4) {
ioe = new IOException("Start screen capture failed");
ioe.initCause(var4);
this.stop();
throw ioe;
} catch (IOException var5) {
this.stop();
throw var5;
}
if (this.mouseFormat != null && ((Rational) this.mouseFormat.get(FormatKeys.FrameRateKey)).intValue() > 0) {
this.startMouseCapture();
}
if (this.audioFormat != null) {
try {
this.startAudioCapture();
} catch (LineUnavailableException var3) {
ioe = new IOException("Start audio capture failed");
ioe.initCause(var3);
this.stop();
throw ioe;
}
}
this.setState(ScreenRecorder.State.RECORDING, (String) null);
} catch (IOException var6) {
this.stop();
throw var6;
}
}
private void startScreenCapture() throws AWTException, IOException {
this.screenCaptureTimer = new ScheduledThreadPoolExecutor(1);
int delay = Math.max(1, (int) (1000.0D / ((Rational) this.screenFormat.get(FormatKeys.FrameRateKey)).doubleValue()));
this.screenGrabber = new ScreenRecorder.ScreenGrabber(this, this.recordingStartTime);
this.screenFuture = this.screenCaptureTimer.scheduleAtFixedRate(this.screenGrabber, (long) delay, (long) delay, TimeUnit.MILLISECONDS);
this.screenGrabber.setFuture(this.screenFuture);
}
protected void startMouseCapture() throws IOException {
this.mouseCaptureTimer = new ScheduledThreadPoolExecutor(1);
int delay = Math.max(1, (int) (1000.0D / ((Rational) this.mouseFormat.get(FormatKeys.FrameRateKey)).doubleValue()));
this.mouseGrabber = new ScreenRecorder.MouseGrabber(this, this.recordingStartTime, this.mouseCaptureTimer);
this.mouseFuture = this.mouseCaptureTimer.scheduleAtFixedRate(this.mouseGrabber, (long) delay, (long) delay, TimeUnit.MILLISECONDS);
final ScreenRecorder.MouseGrabber mouseGrabberF = this.mouseGrabber;
this.awtEventListener = new AWTEventListener() {
public void eventDispatched(AWTEvent event) {
if (event.getID() == 501) {
mouseGrabberF.setMousePressed(true);
} else if (event.getID() == 502) {
mouseGrabberF.setMousePressed(false);
}
}
};
Toolkit.getDefaultToolkit().addAWTEventListener(this.awtEventListener, 16L);
this.mouseGrabber.setFuture(this.mouseFuture);
}
protected void stopMouseCapture() {
if (this.mouseCaptureTimer != null) {
this.mouseGrabber.setStopTime(this.recordingStopTime);
}
if (this.awtEventListener != null) {
Toolkit.getDefaultToolkit().removeAWTEventListener(this.awtEventListener);
this.awtEventListener = null;
}
}
protected void waitUntilMouseCaptureStopped() throws InterruptedException {
if (this.mouseCaptureTimer != null) {
try {
this.mouseFuture.get();
} catch (InterruptedException var2) {
;
} catch (CancellationException var3) {
;
} catch (ExecutionException var4) {
;
}
this.mouseCaptureTimer.shutdown();
this.mouseCaptureTimer.awaitTermination(5000L, TimeUnit.MILLISECONDS);
this.mouseCaptureTimer = null;
this.mouseGrabber.close();
this.mouseGrabber = null;
}
}
private void startAudioCapture() throws LineUnavailableException {
this.audioCaptureTimer = new ScheduledThreadPoolExecutor(1);
boolean delay = true;
this.audioGrabber = new ScreenRecorder.AudioGrabber(this.mixer, this.audioFormat, this.audioTrack, this.recordingStartTime, this.writerQueue);
this.audioFuture = this.audioCaptureTimer.scheduleWithFixedDelay(this.audioGrabber, 0L, 10L, TimeUnit.MILLISECONDS);
this.audioGrabber.setFuture(this.audioFuture);
}
public float getAudioLevelLeft() {
ScreenRecorder.AudioGrabber ag = this.audioGrabber;
return ag != null ? ag.getAudioLevelLeft() : -1.0F;
}
public float getAudioLevelRight() {
ScreenRecorder.AudioGrabber ag = this.audioGrabber;
return ag != null ? ag.getAudioLevelRight() : -1.0F;
}
private void startWriter() {
this.writerQueue = new ArrayBlockingQueue(Math.max(((Rational) this.screenFormat.get(FormatKeys.FrameRateKey)).intValue(), ((Rational) this.mouseFormat.get(FormatKeys.FrameRateKey)).intValue()) + 1);
this.writerThread = new Thread() {
public void run() {
try {
while (ScreenRecorder.this.writerThread == this || !ScreenRecorder.this.writerQueue.isEmpty()) {
try {
Buffer e = (Buffer) ScreenRecorder.this.writerQueue.take();
ScreenRecorder.this.doWrite(e);
} catch (InterruptedException var2) {
break;
}
}
} catch (Throwable var3) {
var3.printStackTrace();
ScreenRecorder.this.recordingFailed(var3.getMessage() == null ? var3.toString() : var3.getMessage());
}
}
};
this.writerThread.start();
}
private void recordingFailed(final String msg) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
try {
ScreenRecorder.this.stop();
ScreenRecorder.this.setState(ScreenRecorder.State.FAILED, msg);
} catch (IOException var2) {
var2.printStackTrace();
}
}
});
}
public void stop() throws IOException {
if (this.state == ScreenRecorder.State.RECORDING) {
this.recordingStopTime = System.currentTimeMillis();
this.stopMouseCapture();
if (this.screenCaptureTimer != null) {
this.screenGrabber.setStopTime(this.recordingStopTime);
}
if (this.audioCaptureTimer != null) {
this.audioGrabber.setStopTime(this.recordingStopTime);
}
try {
this.waitUntilMouseCaptureStopped();
if (this.screenCaptureTimer != null) {
try {
this.screenFuture.get();
} catch (InterruptedException var5) {
;
} catch (CancellationException var6) {
;
} catch (ExecutionException var7) {
;
}
this.screenCaptureTimer.shutdown();
this.screenCaptureTimer.awaitTermination(5000L, TimeUnit.MILLISECONDS);
this.screenCaptureTimer = null;
this.screenGrabber.close();
this.screenGrabber = null;
}
if (this.audioCaptureTimer != null) {
try {
this.audioFuture.get();
} catch (InterruptedException var2) {
;
} catch (CancellationException var3) {
;
} catch (ExecutionException var4) {
;
}
this.audioCaptureTimer.shutdown();
this.audioCaptureTimer.awaitTermination(5000L, TimeUnit.MILLISECONDS);
this.audioCaptureTimer = null;
this.audioGrabber.close();
this.audioGrabber = null;
}
} catch (InterruptedException var8) {
;
}
this.stopWriter();
this.setState(ScreenRecorder.State.DONE, (String) null);
}
}
public void stop(boolean writeFile) throws IOException {
if (this.state == ScreenRecorder.State.RECORDING) {
this.recordingStopTime = System.currentTimeMillis();
this.stopMouseCapture();
if (this.screenCaptureTimer != null) {
this.screenGrabber.setStopTime(this.recordingStopTime);
}
if (this.audioCaptureTimer != null) {
this.audioGrabber.setStopTime(this.recordingStopTime);
}
try {
this.waitUntilMouseCaptureStopped();
if (this.screenCaptureTimer != null) {
try {
this.screenFuture.get();
} catch (InterruptedException var5) {
;
} catch (CancellationException var6) {
;
} catch (ExecutionException var7) {
;
}
this.screenCaptureTimer.shutdown();
this.screenCaptureTimer.awaitTermination(5000L, TimeUnit.MILLISECONDS);
this.screenCaptureTimer = null;
this.screenGrabber.close();
this.screenGrabber = null;
}
if (this.audioCaptureTimer != null) {
try {
this.audioFuture.get();
} catch (InterruptedException var2) {
;
} catch (CancellationException var3) {
;
} catch (ExecutionException var4) {
;
}
this.audioCaptureTimer.shutdown();
this.audioCaptureTimer.awaitTermination(5000L, TimeUnit.MILLISECONDS);
this.audioCaptureTimer = null;
this.audioGrabber.close();
this.audioGrabber = null;
}
} catch (InterruptedException var8) {
;
}
this.stopWriter(writeFile);
this.setState(ScreenRecorder.State.DONE, (String) null);
}
}
private void stopWriter() throws IOException {
Thread pendingWriterThread = this.writerThread;
this.writerThread = null;
try {
if (pendingWriterThread != null) {
pendingWriterThread.interrupt();
pendingWriterThread.join();
}
} catch (InterruptedException var3) {
var3.printStackTrace();
}
if (this.w != null) {
this.w.close();
this.w = null;
}
}
private void stopWriter(boolean writeToFile) throws IOException {
Thread pendingWriterThread = this.writerThread;
this.writerThread = null;
try {
if (pendingWriterThread != null) {
pendingWriterThread.interrupt();
pendingWriterThread.join();
}
} catch (InterruptedException var3) {
var3.printStackTrace();
}
if (!writeToFile) {
try {
screenRecorderName.delete();
} catch (Exception e) {
System.out.print(e.getMessage());
}
w.close();
}
if (this.w != null) {
this.w.close();
this.w = null;
}
}
    /**
     * Encodes a captured buffer and enqueues it for the writer thread.
     * Video buffers either pass straight through the frame encoder (variable
     * frame rate) or are replicated once per fixed-frame-rate slot; buffers
     * of other tracks are deep-copied so the grabber can reuse its byte array.
     *
     * @param buf captured buffer (video frame or audio samples)
     * @throws IOException          if the codec cannot process a frame
     * @throws InterruptedException if interrupted while the queue is full
     */
    protected void write(Buffer buf) throws IOException, InterruptedException {
        MovieWriter writer = this.w;
        if (writer != null) {
            Buffer wbuf;
            if (buf.track == this.videoTrack) {
                if (!((Boolean) writer.getFormat(this.videoTrack).get(VideoFormatKeys.FixedFrameRateKey, Boolean.valueOf(false))).booleanValue()) {
                    // Variable frame rate: encode and enqueue as-is.
                    wbuf = new Buffer();
                    this.frameEncoder.process(buf, wbuf);
                    this.writerQueue.put(wbuf);
                } else {
                    // Fixed frame rate: emit one encoded frame per ffrDuration
                    // slot up to the end of this sample; repeats after the
                    // first are flagged SAME_DATA.
                    Rational wbuf2 = buf.timeStamp.add(buf.sampleDuration);
                    for (boolean isFirst = true; this.outputTime.compareTo(wbuf2) < 0; this.outputTime = this.outputTime.add(this.ffrDuration)) {
                        buf.timeStamp = this.outputTime;
                        buf.sampleDuration = this.ffrDuration;
                        if (isFirst) {
                            isFirst = false;
                        } else {
                            buf.setFlag(BufferFlag.SAME_DATA);
                        }
                        Buffer wbuf1 = new Buffer();
                        if (this.frameEncoder.process(buf, wbuf1) != 0) {
                            throw new IOException("Codec failed or could not process frame in a single step.");
                        }
                        this.writerQueue.put(wbuf1);
                    }
                }
            } else {
                // Non-video buffer: clone the data so the caller may reuse it.
                wbuf = new Buffer();
                wbuf.setMetaTo(buf);
                wbuf.data = ((byte[]) ((byte[]) buf.data)).clone();
                wbuf.length = buf.length;
                wbuf.offset = buf.offset;
                this.writerQueue.put(wbuf);
            }
        }
    }
private void doWrite(Buffer buf) throws IOException {
final MovieWriter mw = this.w;
long now = System.currentTimeMillis();
if (buf.track == this.videoTrack && buf.isFlag(BufferFlag.KEYFRAME) && (mw.isDataLimitReached() || now - this.fileStartTime > this.maxRecordingTime)) {
(new Thread() {
public void run() {
try {
mw.close();
} catch (IOException var2) {
var2.printStackTrace();
}
}
}).start();
// mw = this.createMovieWriter(); }
mw.write(buf.track, buf);
}
    /** Returns the maximum duration (ms) recorded into a single movie file. */
    public long getMaxRecordingTime() {
        return this.maxRecordingTime;
    }
    /** Sets the maximum duration (ms) recorded into a single movie file. */
    public void setMaxRecordingTime(long maxRecordingTime) {
        this.maxRecordingTime = maxRecordingTime;
    }
    // NOTE(review): maxFileSize is stored but not referenced by the visible
    // code; the writer's own data limit (isDataLimitReached) is what is checked.
    public long getMaxFileSize() {
        return this.maxFileSize;
    }
    public void setMaxFileSize(long maxFileSize) {
        this.maxFileSize = maxFileSize;
    }
    /** Returns the mixer used for audio capture, or null for the system default. */
    public Mixer getAudioMixer() {
        return this.mixer;
    }
    /** Sets the mixer used for audio capture; null selects the system default. */
    public void setAudioMixer(Mixer mixer) {
        this.mixer = mixer;
    }
public static enum State {
DONE,
FAILED,
RECORDING;
private State() {
}
}
    /**
     * Captures audio from a {@link TargetDataLine} and enqueues sample buffers
     * for the writer thread. Scheduled every 10 ms by the audio capture timer.
     * Also tracks per-channel RMS levels for UI feedback.
     */
    private static class AudioGrabber implements Runnable {
        private final TargetDataLine line;
        private final BlockingQueue<Buffer> queue;
        private final Format audioFormat;
        private final int audioTrack;
        private final long startTime;
        // Wall-clock time (ms) after which no more samples are queued.
        private volatile long stopTime = 9223372036854775807L;
        // Running count of samples queued so far; used to derive timestamps.
        private long totalSampleCount;
        private ScheduledFuture future;
        private long sequenceNumber;
        // Most recent RMS levels, normalized to [0,1]; -1 means "unknown".
        private float audioLevelLeft = -1.0F;
        private float audioLevelRight = -1.0F;
        private Mixer mixer;
        /**
         * Opens (and starts) the target data line on the given mixer, or on
         * the system default when mixer is null. Best-effort un-mutes the line
         * and raises its volume to at least 0.2.
         */
        public AudioGrabber(Mixer mixer, Format audioFormat, int audioTrack, long startTime, BlockingQueue<Buffer> queue) throws LineUnavailableException {
            this.mixer = mixer;
            this.audioFormat = audioFormat;
            this.audioTrack = audioTrack;
            this.queue = queue;
            this.startTime = startTime;
            Info lineInfo = new Info(TargetDataLine.class, AudioFormatKeys.toAudioFormat(audioFormat));
            if (mixer != null) {
                this.line = (TargetDataLine) mixer.getLine(lineInfo);
            } else {
                this.line = (TargetDataLine) AudioSystem.getLine(lineInfo);
            }
            try {
                // Un-mute the line if it exposes a MUTE control.
                BooleanControl e = (BooleanControl) this.line.getControl(Type.MUTE);
                e.setValue(false);
            } catch (IllegalArgumentException var10) {
                // Line has no MUTE control; ignore.
                ;
            }
            try {
                // Ensure a minimum capture volume if a VOLUME control exists.
                FloatControl e1 = (FloatControl) this.line.getControl(FloatControl.Type.VOLUME);
                e1.setValue(Math.max(e1.getValue(), 0.2F));
            } catch (IllegalArgumentException var9) {
                // Line has no VOLUME control; ignore.
                ;
            }
            this.line.open();
            this.line.start();
        }
        public void setFuture(ScheduledFuture future) {
            this.future = future;
        }
        /** Closes the audio line, releasing the capture device. */
        public void close() {
            this.line.close();
        }
        public synchronized long getStopTime() {
            return this.stopTime;
        }
        public synchronized void setStopTime(long newValue) {
            this.stopTime = newValue;
        }
        /**
         * Reads up to half a second of audio from the line, truncates it at
         * the stop time, and enqueues it as a Buffer for the writer thread.
         */
        public void run() {
            Buffer buf = new Buffer();
            AudioFormat lineFormat = this.line.getFormat();
            buf.format = AudioFormatKeys.fromAudioFormat(lineFormat).append(new Object[]{AudioFormatKeys.SilenceBugKey, Boolean.valueOf(true)});
            // One second of frames, halved when the sample rate is even.
            int bufferSize = lineFormat.getFrameSize() * (int) lineFormat.getSampleRate();
            if (((int) lineFormat.getSampleRate() & 1) == 0) {
                bufferSize /= 2;
            }
            byte[] bdat = new byte[bufferSize];
            buf.data = bdat;
            Rational sampleRate = Rational.valueOf((double) lineFormat.getSampleRate());
            Rational frameRate = Rational.valueOf((double) lineFormat.getFrameRate());
            int count = this.line.read(bdat, 0, bdat.length);
            if (count > 0) {
                this.computeAudioLevel(bdat, count, lineFormat);
                buf.sampleCount = count / (lineFormat.getSampleSizeInBits() / 8 * lineFormat.getChannels());
                buf.sampleDuration = sampleRate.inverse();
                buf.offset = 0;
                buf.sequenceNumber = (long) (this.sequenceNumber++);
                buf.length = count;
                buf.track = this.audioTrack;
                // Timestamp derived from the running sample count, not wall clock.
                buf.timeStamp = (new Rational(this.totalSampleCount, 1L)).divide(frameRate);
                // Truncate the final buffer at the recording stop time and
                // cancel this task once it has been reached.
                Rational stopTS = new Rational(this.getStopTime() - this.startTime, 1000L);
                if (buf.timeStamp.add(buf.sampleDuration.multiply((long) buf.sampleCount)).compareTo(stopTS) > 0) {
                    buf.sampleCount = Math.max(0, (int) Math.ceil((double) stopTS.subtract(buf.timeStamp).divide(buf.sampleDuration).floatValue()));
                    buf.length = buf.sampleCount * lineFormat.getSampleSizeInBits() / 8 * lineFormat.getChannels();
                    this.future.cancel(false);
                }
                if (buf.sampleCount > 0) {
                    try {
                        this.queue.put(buf);
                    } catch (InterruptedException var10) {
                        // Dropped on interrupt; shutdown is already in progress.
                        ;
                    }
                }
                this.totalSampleCount += (long) buf.sampleCount;
            }
        }
        /**
         * Updates audioLevelLeft/Right from the captured PCM data. Only signed
         * 8-bit and big-endian signed 16-bit PCM are handled; anything else
         * leaves the levels at -1.
         */
        private void computeAudioLevel(byte[] data, int length, AudioFormat format) {
            this.audioLevelLeft = this.audioLevelRight = -1.0F;
            if (format.getEncoding().equals(Encoding.PCM_SIGNED)) {
                switch (format.getSampleSizeInBits()) {
                    case 8:
                        switch (format.getChannels()) {
                            case 1:
                                this.audioLevelLeft = this.computeAudioLevelSigned8(data, 0, length, format.getFrameSize());
                                return;
                            case 2:
                                this.audioLevelLeft = this.computeAudioLevelSigned8(data, 0, length, format.getFrameSize());
                                this.audioLevelRight = this.computeAudioLevelSigned8(data, 1, length, format.getFrameSize());
                                return;
                            default:
                                return;
                        }
                    case 16:
                        if (format.isBigEndian()) {
                            switch (format.getChannels()) {
                                case 1:
                                    this.audioLevelLeft = this.computeAudioLevelSigned16BE(data, 0, length, format.getFrameSize());
                                    break;
                                case 2:
                                    this.audioLevelLeft = this.computeAudioLevelSigned16BE(data, 0, length, format.getFrameSize());
                                    this.audioLevelRight = this.computeAudioLevelSigned16BE(data, 2, length, format.getFrameSize());
                            }
                        } else {
                            // NOTE(review): little-endian 16-bit is a no-op here,
                            // leaving the levels at -1 — apparently unimplemented.
                            switch (format.getChannels()) {
                                case 1:
                                case 2:
                            }
                        }
                }
            }
        }
        /** RMS level of big-endian signed 16-bit samples, normalized to [0,1]. */
        private float computeAudioLevelSigned16BE(byte[] data, int offset, int length, int stride) {
            double sum = 0.0D;
            for (int rms = offset; rms < length; rms += stride) {
                int value = data[rms] << 8 | data[rms + 1] & 255;
                sum += (double) (value * value);
            }
            double rms1 = Math.sqrt(sum / (double) ((length - offset) / stride));
            return (float) (rms1 / 32768.0D);
        }
        /** RMS level of signed 8-bit samples, normalized to [0,1]. */
        private float computeAudioLevelSigned8(byte[] data, int offset, int length, int stride) {
            double sum = 0.0D;
            for (int rms = offset; rms < length; rms += stride) {
                byte value = data[rms];
                // -128 has no positive counterpart; skip to avoid overflow bias.
                if (value != -128) {
                    sum += (double) (value * value);
                }
            }
            double rms1 = Math.sqrt(sum / (double) (length / stride));
            return (float) (rms1 / 128.0D);
        }
        public float getAudioLevelLeft() {
            return this.audioLevelLeft;
        }
        public float getAudioLevelRight() {
            return this.audioLevelRight;
        }
    }
/**
 * Polls the mouse pointer on a scheduled executor and enqueues a capture
 * {@code Buffer} whenever the pointer location or pressed state changed,
 * so the screen grabber can composite the cursor onto recorded frames.
 */
protected static class MouseGrabber implements Runnable {

    /** Sentinel location meaning "pointer outside the capture area or on another device". */
    private Point prevCapturedMouseLocation = new Point(Integer.MAX_VALUE, Integer.MAX_VALUE);
    private ScheduledThreadPoolExecutor timer;
    private ScreenRecorder recorder;
    private GraphicsDevice captureDevice;
    private Rectangle captureArea;
    private BlockingQueue<Buffer> mouseCaptures;
    private volatile long stopTime = Long.MAX_VALUE;
    private long startTime;
    private Format format;
    private ScheduledFuture future;
    /** Current physical button state. */
    private volatile boolean mousePressed;
    /**
     * Latches a press until it has been recorded, so a click shorter than
     * one poll interval is not lost.
     */
    private volatile boolean mouseWasPressed;
    /** The pressed state most recently written into a capture buffer. */
    private volatile boolean mousePressedRecorded;

    public MouseGrabber(ScreenRecorder recorder, long startTime, ScheduledThreadPoolExecutor timer) {
        // FIX: this.recorder was never assigned, so the error path in run()
        // crashed with a NullPointerException on recordingFailed() instead of
        // reporting the failure to the recorder.
        this.recorder = recorder;
        this.timer = timer;
        this.format = recorder.mouseFormat;
        this.captureDevice = recorder.captureDevice;
        this.captureArea = recorder.captureArea;
        this.mouseCaptures = recorder.mouseCaptures;
        this.startTime = startTime;
    }

    public void setFuture(ScheduledFuture future) {
        this.future = future;
    }

    public void setMousePressed(boolean newValue) {
        if (newValue) {
            this.mouseWasPressed = true;
        }
        this.mousePressed = newValue;
    }

    public void run() {
        try {
            this.grabMouse();
        } catch (Throwable t) {
            t.printStackTrace();
            this.timer.shutdown();
            this.recorder.recordingFailed(t.getMessage());
        }
    }

    public synchronized long getStopTime() {
        return this.stopTime;
    }

    public synchronized void setStopTime(long newValue) {
        this.stopTime = newValue;
    }

    /**
     * Samples the pointer once; enqueues a buffer only when the location or
     * the latched pressed state differs from what was last recorded.
     */
    private void grabMouse() throws InterruptedException {
        long now = System.currentTimeMillis();
        if (now > this.getStopTime()) {
            // Past the stop time: stop scheduling further polls.
            this.future.cancel(false);
            return;
        }
        PointerInfo info = MouseInfo.getPointerInfo();
        if (info == null) {
            // MouseInfo.getPointerInfo() may return null (e.g. no pointer
            // device / headless transitions); skip this poll instead of
            // crashing the whole recording.
            return;
        }
        Point p = info.getLocation();
        if (!info.getDevice().equals(this.captureDevice) || !this.captureArea.contains(p)) {
            // Pointer is on another device or outside the capture area:
            // record the sentinel so no cursor is drawn.
            p.setLocation(Integer.MAX_VALUE, Integer.MAX_VALUE);
        }
        if (!p.equals(this.prevCapturedMouseLocation) || this.mouseWasPressed != this.mousePressedRecorded) {
            Buffer buf = new Buffer();
            buf.format = this.format;
            buf.timeStamp = new Rational(now, 1000L);
            buf.data = p;
            buf.header = Boolean.valueOf(this.mouseWasPressed);
            this.mousePressedRecorded = this.mouseWasPressed;
            this.mouseCaptures.put(buf);
            this.prevCapturedMouseLocation.setLocation(p);
        }
        if (!this.mousePressed) {
            // Button released: clear the latch once the press has been handled.
            this.mouseWasPressed = false;
        }
    }

    public void close() {
    }
}
/**
 * Periodically captures the screen with java.awt.Robot, composites the most
 * recently captured mouse cursor onto the frame buffer, and writes the
 * resulting frame to the recorder's video track.
 */
private static class ScreenGrabber implements Runnable {
private final Format mouseFormat;
// Lock object shared with the enclosing recorder (copied from recorder.sync).
private final Object sync;
// Sentinel (Integer.MAX_VALUE, Integer.MAX_VALUE) means "no cursor to draw".
// This Point is created once and mutated in place; it is never null.
private Point prevDrawnMouseLocation = new Point(2147483647, 2147483647);
private boolean prevMousePressed = false;
// Most recent raw screen capture; null until the first successful grab.
private BufferedImage screenCapture;
private ScreenRecorder recorder;
// NOTE(review): screenTimer is never assigned anywhere in this class, so if
// grabScreen() throws, run() will hit a NullPointerException on
// screenTimer.shutdown() before recordingFailed() is reached. Presumably it
// should be copied from the recorder in the constructor — TODO confirm.
private ScheduledThreadPoolExecutor screenTimer;
private Robot robot;
private Rectangle captureArea;
// Off-screen frame buffer the cursor is composited into before writing.
private BufferedImage videoImg;
private Graphics2D videoGraphics;
// Mouse samples produced by MouseGrabber, ordered by timestamp.
private ArrayBlockingQueue<Buffer> mouseCaptures;
// Timestamp (seconds as Rational, millisecond precision) of the last frame written.
private Rational prevScreenCaptureTime;
private BufferedImage cursorImg;
private BufferedImage cursorImgPressed;
// Offset from the pointer hot spot to the cursor image's top-left corner.
private Point cursorOffset;
private int videoTrack;
private long startTime;
// Long.MAX_VALUE until setStopTime() is called.
private volatile long stopTime = 9223372036854775807L;
private ScheduledFuture future;
private long sequenceNumber;
/**
 * Copies capture configuration from the recorder and allocates the frame
 * buffer in a pixel format matching the requested color depth.
 *
 * @throws AWTException if the Robot cannot be created for the capture device
 * @throws IOException  if the configured color depth is unsupported
 */
public ScreenGrabber(ScreenRecorder recorder, long startTime) throws AWTException, IOException {
this.recorder = recorder;
this.captureArea = recorder.captureArea;
this.robot = new Robot(recorder.captureDevice);
this.mouseFormat = recorder.mouseFormat;
this.mouseCaptures = recorder.mouseCaptures;
this.sync = recorder.sync;
this.cursorImg = recorder.cursorImg;
this.cursorImgPressed = recorder.cursorImgPressed;
this.cursorOffset = recorder.cursorOffset;
this.videoTrack = recorder.videoTrack;
this.prevScreenCaptureTime = new Rational(startTime, 1000L);
this.startTime = startTime;
Format screenFormat = recorder.screenFormat;
// Frame-buffer type by depth: 1 = TYPE_INT_RGB (24-bit),
// 9 = TYPE_USHORT_555_RGB (16-bit), 13 = TYPE_BYTE_INDEXED (8-bit palette).
if (((Integer) screenFormat.get(VideoFormatKeys.DepthKey, Integer.valueOf(24))).intValue() == 24) {
this.videoImg = new BufferedImage(this.captureArea.width, this.captureArea.height, 1);
} else if (((Integer) screenFormat.get(VideoFormatKeys.DepthKey)).intValue() == 16) {
this.videoImg = new BufferedImage(this.captureArea.width, this.captureArea.height, 9);
} else {
if (((Integer) screenFormat.get(VideoFormatKeys.DepthKey)).intValue() != 8) {
throw new IOException("Unsupported color depth " + screenFormat.get(VideoFormatKeys.DepthKey));
}
this.videoImg = new BufferedImage(this.captureArea.width, this.captureArea.height, 13, Colors.createMacColors());
}
this.videoGraphics = this.videoImg.createGraphics();
// Favor speed over quality: these frames are produced at the capture rate.
this.videoGraphics.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_DISABLE);
this.videoGraphics.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_SPEED);
this.videoGraphics.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED);
}
public void setFuture(ScheduledFuture future) {
this.future = future;
}
public synchronized long getStopTime() {
return this.stopTime;
}
public synchronized void setStopTime(long newValue) {
this.stopTime = newValue;
}
public void run() {
try {
this.grabScreen();
} catch (Throwable var2) {
var2.printStackTrace();
// NOTE(review): screenTimer is never initialized (see field comment), so
// this line would itself throw NullPointerException — TODO confirm/fix.
this.screenTimer.shutdown();
this.recorder.recordingFailed(var2.getMessage());
}
}
/**
 * Captures one screen frame, merges any pending mouse samples that fall
 * before the capture time (writing one video frame per cursor change), and
 * finally writes a frame for the capture itself if still before stop time.
 */
private void grabScreen() throws IOException, InterruptedException {
BufferedImage previousScreenCapture = this.screenCapture;
long timeBeforeCapture = System.currentTimeMillis();
try {
this.screenCapture = this.robot.createScreenCapture(this.captureArea);
} catch (IllegalMonitorStateException var13) {
// Screen is transiently unavailable; skip this frame entirely.
return;
}
long timeAfterCapture = System.currentTimeMillis();
if (previousScreenCapture == null) {
// First frame: there is no previous capture to show for earlier mouse samples.
previousScreenCapture = this.screenCapture;
}
this.videoGraphics.drawImage(previousScreenCapture, 0, 0, (ImageObserver) null);
Buffer buf = new Buffer();
buf.format = new Format(new Object[]{FormatKeys.MediaTypeKey, MediaType.VIDEO, FormatKeys.EncodingKey, "image"});
boolean hasMouseCapture = false;
// Only composite the cursor when mouse capture is enabled with a positive frame rate.
if (this.mouseFormat != null && ((Rational) this.mouseFormat.get(FormatKeys.FrameRateKey)).intValue() > 0) {
// Drain all mouse samples older than this screen capture, emitting one
// video frame per sample so cursor motion is preserved between captures.
while (!this.mouseCaptures.isEmpty() && ((Buffer) this.mouseCaptures.peek()).timeStamp.compareTo(new Rational(timeAfterCapture, 1000L)) < 0) {
Buffer p = (Buffer) this.mouseCaptures.poll();
if (p.timeStamp.compareTo(this.prevScreenCaptureTime) > 0) {
if (p.timeStamp.compareTo(new Rational(timeBeforeCapture, 1000L)) < 0) {
// Sample predates the new capture's start: switch the backdrop to the new capture.
previousScreenCapture = this.screenCapture;
this.videoGraphics.drawImage(previousScreenCapture, 0, 0, (ImageObserver) null);
}
Point mcp = (Point) p.data;
this.prevMousePressed = ((Boolean) p.header).booleanValue();
// Translate from screen coordinates to capture-area coordinates.
this.prevDrawnMouseLocation.setLocation(mcp.x - this.captureArea.x, mcp.y - this.captureArea.y);
Point p1 = this.prevDrawnMouseLocation;
long localStopTime = this.getStopTime();
if (p.timeStamp.compareTo(new Rational(localStopTime, 1000L)) > 0) {
// Sample is past the stop time; discard it and stop draining.
break;
}
hasMouseCapture = true;
// Draw the appropriate cursor image for the pressed state.
if (this.prevMousePressed) {
this.videoGraphics.drawImage(this.cursorImgPressed, p1.x + this.cursorOffset.x, p1.y + this.cursorOffset.y, (ImageObserver) null);
} else {
this.videoGraphics.drawImage(this.cursorImg, p1.x + this.cursorOffset.x, p1.y + this.cursorOffset.y, (ImageObserver) null);
}
buf.clearFlags();
buf.data = this.videoImg;
// Frame duration spans from the previous frame to this mouse sample.
buf.sampleDuration = p.timeStamp.subtract(this.prevScreenCaptureTime);
// Timestamps are relative to the recording start.
buf.timeStamp = this.prevScreenCaptureTime.subtract(new Rational(this.startTime, 1000L));
buf.track = this.videoTrack;
buf.sequenceNumber = (long) (this.sequenceNumber++);
// Sentinel x == Integer.MAX_VALUE means no cursor; pass null header then.
buf.header = p1.x == 2147483647 ? null : p1;
this.recorder.write(buf);
this.prevScreenCaptureTime = p.timeStamp;
// Restore the screen pixels under the cursor so the next frame starts clean.
this.videoGraphics.drawImage(previousScreenCapture, p1.x + this.cursorOffset.x, p1.y + this.cursorOffset.y, p1.x + this.cursorOffset.x + this.cursorImg.getWidth() - 1, p1.y + this.cursorOffset.y + this.cursorImg.getHeight() - 1, p1.x + this.cursorOffset.x, p1.y + this.cursorOffset.y, p1.x + this.cursorOffset.x + this.cursorImg.getWidth() - 1, p1.y + this.cursorOffset.y + this.cursorImg.getHeight() - 1, (ImageObserver) null);
}
}
// No newer mouse sample: write one frame for this capture with the last
// known cursor position, unless recording has stopped.
if (!hasMouseCapture && this.prevScreenCaptureTime.compareTo(new Rational(this.getStopTime(), 1000L)) < 0) {
Point p2 = this.prevDrawnMouseLocation;
// NOTE(review): prevDrawnMouseLocation is initialized at construction and
// only mutated in place, so p2 is never null; these null checks are dead.
if (p2 != null) {
if (this.prevMousePressed) {
this.videoGraphics.drawImage(this.cursorImgPressed, p2.x + this.cursorOffset.x, p2.y + this.cursorOffset.y, (ImageObserver) null);
} else {
this.videoGraphics.drawImage(this.cursorImg, p2.x + this.cursorOffset.x, p2.y + this.cursorOffset.y, (ImageObserver) null);
}
}
buf.data = this.videoImg;
buf.sampleDuration = (new Rational(timeAfterCapture, 1000L)).subtract(this.prevScreenCaptureTime);
buf.timeStamp = this.prevScreenCaptureTime.subtract(new Rational(this.startTime, 1000L));
buf.track = this.videoTrack;
buf.sequenceNumber = (long) (this.sequenceNumber++);
buf.header = p2.x == 2147483647 ? null : p2;
this.recorder.write(buf);
this.prevScreenCaptureTime = new Rational(timeAfterCapture, 1000L);
if (p2 != null) {
// Restore the pixels under the cursor for the next frame.
this.videoGraphics.drawImage(previousScreenCapture, p2.x + this.cursorOffset.x, p2.y + this.cursorOffset.y, p2.x + this.cursorOffset.x + this.cursorImg.getWidth() - 1, p2.y + this.cursorOffset.y + this.cursorImg.getHeight() - 1, p2.x + this.cursorOffset.x, p2.y + this.cursorOffset.y, p2.x + this.cursorOffset.x + this.cursorImg.getWidth() - 1, p2.y + this.cursorOffset.y + this.cursorImg.getHeight() - 1, (ImageObserver) null);
}
}
} else if (this.prevScreenCaptureTime.compareTo(new Rational(this.getStopTime(), 1000L)) < 0) {
// Mouse capture disabled: write the plain screen frame without a cursor.
buf.data = this.videoImg;
buf.sampleDuration = (new Rational(timeAfterCapture, 1000L)).subtract(this.prevScreenCaptureTime);
buf.timeStamp = this.prevScreenCaptureTime.subtract(new Rational(this.startTime, 1000L));
buf.track = this.videoTrack;
buf.sequenceNumber = (long) (this.sequenceNumber++);
buf.header = null;
this.recorder.write(buf);
this.prevScreenCaptureTime = new Rational(timeAfterCapture, 1000L);
}
if (timeBeforeCapture > this.getStopTime()) {
// Past the stop time: cancel further scheduled grabs.
this.future.cancel(false);
}
}
/** Releases the frame buffer and its graphics context. */
public void close() {
this.videoGraphics.dispose();
this.videoImg.flush();
}
}
}