Removing jcapture lib, already added in plugin/

1.9.x
Julio Montoya 11 years ago
parent c0b184854a
commit 301da005eb
  1. 11
      main/document/jcapture/.settings/org.eclipse.jdt.core.prefs
  2. 23
      main/document/jcapture/JCaptureApplet.html
  3. 48
      main/document/jcapture/action.php
  4. 99
      main/document/jcapture/applet.php
  5. BIN
      main/document/jcapture/camera.png
  6. 21
      main/document/jcapture/jcapture.jnlp
  7. BIN
      main/document/jcapture/lib/apache-mime4j-0.6.jar
  8. BIN
      main/document/jcapture/lib/commons-codec-1.3.jar
  9. BIN
      main/document/jcapture/lib/commons-logging-1.1.1.jar
  10. BIN
      main/document/jcapture/lib/httpclient-4.0.1.jar
  11. BIN
      main/document/jcapture/lib/httpcore-4.0.1.jar
  12. BIN
      main/document/jcapture/lib/httpmime-4.0.1.jar
  13. BIN
      main/document/jcapture/lib/jcapture.jar
  14. BIN
      main/document/jcapture/lib/transform-3.0.2.jar
  15. 1
      main/document/jcapture/license.txt
  16. 19
      main/document/jcapture/script.js
  17. 1
      main/document/jcapture/src/META-INF/services/com.hammurapi.jcapture.VideoEncoder
  18. 17
      main/document/jcapture/src/com/hammurapi/jcapture/AWTUtilitiesTranslucener.java
  19. 262
      main/document/jcapture/src/com/hammurapi/jcapture/AbstractCaptureApplet.java
  20. 85
      main/document/jcapture/src/com/hammurapi/jcapture/ButtonManager.java
  21. 370
      main/document/jcapture/src/com/hammurapi/jcapture/CaptureConfig.java
  22. 405
      main/document/jcapture/src/com/hammurapi/jcapture/CaptureFrame.java
  23. 606
      main/document/jcapture/src/com/hammurapi/jcapture/CaptureOptionsDialog.java
  24. 361
      main/document/jcapture/src/com/hammurapi/jcapture/ComponentMover.java
  25. 446
      main/document/jcapture/src/com/hammurapi/jcapture/ComponentResizer.java
  26. 28
      main/document/jcapture/src/com/hammurapi/jcapture/FragmentImpl.java
  27. 57
      main/document/jcapture/src/com/hammurapi/jcapture/FrameImpl.java
  28. 21
      main/document/jcapture/src/com/hammurapi/jcapture/GraphicsDeviceTranslucener.java
  29. 80
      main/document/jcapture/src/com/hammurapi/jcapture/ImageToolTip.java
  30. 92
      main/document/jcapture/src/com/hammurapi/jcapture/JCaptureApplet.java
  31. 121
      main/document/jcapture/src/com/hammurapi/jcapture/MappedImage.java
  32. 55
      main/document/jcapture/src/com/hammurapi/jcapture/Movie.java
  33. 1124
      main/document/jcapture/src/com/hammurapi/jcapture/MovieEditorDialog.java
  34. 321
      main/document/jcapture/src/com/hammurapi/jcapture/RecordingControlsFrame.java
  35. 221
      main/document/jcapture/src/com/hammurapi/jcapture/Region.java
  36. 312
      main/document/jcapture/src/com/hammurapi/jcapture/ScreenRecorder.java
  37. 251
      main/document/jcapture/src/com/hammurapi/jcapture/ScreenShot.java
  38. 75
      main/document/jcapture/src/com/hammurapi/jcapture/ShapeImpl.java
  39. 353
      main/document/jcapture/src/com/hammurapi/jcapture/SwfEncoder.java
  40. 17
      main/document/jcapture/src/com/hammurapi/jcapture/Translucener.java
  41. 164
      main/document/jcapture/src/com/hammurapi/jcapture/VideoEncoder.java
  42. 787
      main/document/jcapture/src/com/hammurapi/jcapture/WavFile.java
  43. 26
      main/document/jcapture/src/com/hammurapi/jcapture/WavFileException.java
  44. BIN
      main/document/jcapture/src/com/hammurapi/jcapture/camera.png
  45. BIN
      main/document/jcapture/src/com/hammurapi/jcapture/mouse.png
  46. BIN
      main/document/jcapture/src/com/hammurapi/jcapture/toolbar_buttons.swf

@@ -1,11 +0,0 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.6

@@ -1,23 +0,0 @@
<!-- WARNING: Eclipse auto-generated file.
Any modifications will be overwritten. -->
<html>
<body>
<script>
function llamarAplett()
{
document.write("Prueba de sonido");
alert("Prueba de sonido;")
}
</script>
<input type="button" onclick="llamarAplett();"/>
<!--
<applet code='com.hammurapi.jcapture.JCaptureApplet.class' archive='lib\jcapture.jar' width='20' height='20'>
<param name='outputDir' value='C:\_temp'>
</applet>
-->
</body>
</html>

@@ -1,48 +0,0 @@
<?php
/**
* JCapture plugin
*
* @author Pavel Vlasov
*/
if (!defined('DOKU_INC')) die();
if (!defined('DOKU_PLUGIN')) define('DOKU_PLUGIN', DOKU_INC . 'lib/plugins/');
require_once (DOKU_PLUGIN . 'action.php');
class action_plugin_jcapture extends DokuWiki_Action_Plugin {
/**
* return some info
*/
function getInfo(){
return array(
'author' => 'Pavel Vlasov',
'email' => 'Pavel.Vlasov@hammurapi.com',
'name' => 'JCapture',
'desc' => 'Plugin for making screen captures.',
'url' => 'http://www.hammurapi.com/dokuwiki/doku.php/products:jcapture:start',
);
}
/**
* Register the event handlers
*/
function register(&$controller) {
$controller->register_hook('TOOLBAR_DEFINE', 'AFTER', $this, 'insert_button', array ());
}
/**
* Inserts the toolbar button
*/
function insert_button(& $event, $param) {
$event->data[] = array (
'type' => 'JCapture',
'title' => 'Screen capture',
'icon' => '../../plugins/jcapture/camera.png',
'open' => '<abutton>',
'close' => '</abutton>',
);
}
}

@@ -1,99 +0,0 @@
<?php
//fix for Opera XMLHttpRequests
if(!count($_POST) && $HTTP_RAW_POST_DATA){
parse_str($HTTP_RAW_POST_DATA, $_POST);
}
if(!defined('DOKU_INC')) define('DOKU_INC',dirname(__FILE__).'/../../../');
require_once(DOKU_INC.'inc/init.php');
require_once(DOKU_INC.'inc/common.php');
require_once(DOKU_INC.'inc/pageutils.php');
require_once(DOKU_INC.'inc/auth.php');
//close session
session_write_close();
header('Content-Type: text/html; charset=utf-8');
$hostName = "http".($_SERVER['HTTPS'] ? 's' : null).'://'.$_SERVER['HTTP_HOST'];
$imageFormat = "PNG";
$cookies = '';
foreach (array_keys($_COOKIE) as $cookieName) {
$cookies.=bin2hex($cookieName)."=".bin2hex($_COOKIE[$cookieName]).";";
}
$pageName = $_GET["pageName"];
$edid = $_GET["edid"];
?>
<script language="JavaScript" type="text/javascript"><!--
var _info = navigator.userAgent;
var _ns = false;
var _ns6 = false;
var _ie = (_info.indexOf("MSIE") > 0 && _info.indexOf("Win") > 0 && _info.indexOf("Windows 3.1") < 0);
//--></script>
<comment>
<script language="JavaScript" type="text/javascript"><!--
var _ns = (navigator.appName.indexOf("Netscape") >= 0 && ((_info.indexOf("Win") > 0 && _info.indexOf("Win16") < 0 && java.lang.System.getProperty("os.version").indexOf("3.5") < 0) || (_info.indexOf("Sun") > 0) || (_info.indexOf("Linux") > 0) || (_info.indexOf("AIX") > 0) || (_info.indexOf("OS/2") > 0) || (_info.indexOf("IRIX") > 0)));
var _ns6 = ((_ns == true) && (_info.indexOf("Mozilla/5") >= 0));
//--></script>
</comment>
<script language="JavaScript" type="text/javascript"><!--
if (_ie == true) document.writeln('<object classid="clsid:CAFEEFAC-0017-0000-0020-ABCDEFFEDCBA" NAME = "jCapture" WIDTH = "1" HEIGHT = "1" codebase="http://java.sun.com/update/1.7.0/jinstall-1_7_0-windows-i586.cab#Version=7,0,0,0"><xmp>');
else if (_ns == true && _ns6 == false) document.writeln('<embed ' +
'type="application/x-java-applet;jpi-version=1.7.0" \
ID = "jCaptureApplet" \
scriptable = "true" \
mayscript = "true" \
WIDTH = "1" \
JAVA_CODEBASE = "/somenonexistingcodebase" \
HEIGHT = "1" \
CODE = "com.hammurapi.jcapture.JCaptureApplet.class" \
ARCHIVE = "<?php echo DOKU_BASE; ?>lib/plugins/jcapture/lib/jcapture.jar" \
NAME = "jCapture" \
dokuBase ="<?php echo bin2hex(DOKU_BASE); ?>" \
sectok ="<?php echo getSecurityToken(); ?>" \
cookies ="<?php echo $cookies; ?>" \
authtok = "<?php echo auth_createToken(); ?>" \
pageName = "<?php echo $pageName; ?>" \
edid = "<?php echo $edid; ?>" \
host ="<?php echo $hostName; ?>" ' +
'scriptable=false ' +
'pluginspage="http://java.sun.com/products/plugin/index.html#download"><xmp>');
//--></script>
<applet id="jCaptureApplet" CODE = "com.hammurapi.jcapture.JCaptureApplet.class" WIDTH="1" HEIGHT="1" ARCHIVE = "<?php echo DOKU_BASE; ?>lib/plugins/jcapture/lib/jcapture.jar" NAME = "jCapture"></xmp>
<PARAM NAME = CODE VALUE = "com.hammurapi.jcapture.JCaptureApplet.class" >
<PARAM NAME = ARCHIVE VALUE = "<?php echo DOKU_BASE; ?>lib/plugins/jcapture/lib/jcapture.jar" >
<PARAM NAME = NAME VALUE = "jCapture" >
<param name="type" value="application/x-java-applet;jpi-version=1.7.0">
<param name="scriptable" value="true">
<param name="mayscript" value="true">
<PARAM NAME = "dokuBase" VALUE="<?php echo bin2hex(DOKU_BASE); ?>">
<PARAM NAME = "sectok" VALUE="<?php echo getSecurityToken(); ?>">
<PARAM NAME = "cookies" VALUE="<?php echo $cookies; ?>">
<PARAM NAME = "host" VALUE="<?php echo $hostName; ?>">
<PARAM NAME = "pageName" VALUE="<?php echo $pageName; ?>">
<PARAM NAME = "edid" VALUE="<?php echo $edid; ?>">
<PARAM NAME = CODEBASE VALUE = "/somenonexistingcodebase" >
<PARAM NAME = "authtok" VALUE="<?php echo auth_createToken(); ?>">
Java 2 Standard Edition v 1.7 or above is required for this applet.<br/>
Download it from <a href="http://java.sun.com">http://java.sun.com</a>.
</applet>
</embed>
</object>
<!--
<APPLET CODE = "com.hammurapi.jcapture.JCaptureApplet.class" ARCHIVE = "<?php echo DOKU_BASE; ?>lib/plugins/jcapture/lib/jcapture.jar" NAME = "jCapture">
<PARAM NAME = "dokuBase" VALUE="<?php echo bin2hex(DOKU_BASE); ?>">
<PARAM NAME = "sectok" VALUE="<?php echo getSecurityToken(); ?>">
<PARAM NAME = "cookies" VALUE="<?php echo $cookies; ?>">
<PARAM NAME = "host" VALUE="<?php echo $hostName; ?>">
Java 2 Standard Edition v 1.7 or above is required for this applet.<br/>
Download it from <a href="http://java.sun.com">http://java.sun.com</a>.
</APPLET>
-->

Binary file not shown.

@@ -1,21 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<jnlp spec="1.0+" codebase="" href="">
<information>
<title>jCapture</title>
<vendor>Hammurapi Group</vendor>
</information>
<resources>
<!-- Application Resources -->
<j2se version="1.7+" href="http://java.sun.com/products/autodl/j2se" />
<jar href="lib/jcapture.jar" main="true" />
</resources>
<applet-desc
name="jCapture Applet"
main-class="com.hammurapi.jcapture.JCaptureApplet"
width="300"
height="300">
</applet-desc>
<update check="background"/>
</jnlp>

@@ -1,19 +0,0 @@
function addBtnActionJCapture($btn, props, edid) {
$btn.click(function() {
var appletDiv = document.getElementById("jCaptureAppletDiv");
if (appletDiv==null) {
var oNewDiv = document.createElement("div");
oNewDiv.id="jCaptureAppletDiv";
//oNewDiv.style.display='none';
document.body.appendChild(oNewDiv);
jQuery("#jCaptureAppletDiv").load(DOKU_BASE+"lib/plugins/jcapture/applet.php?edid="+edid+"&pageName="+document.forms['dw__editform'].elements['id'].value);
} else {
document.getElementById("jCaptureApplet").showCaptureFrame();
}
return false;
});
return true;
}

@@ -1,17 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Frame;
import com.sun.awt.AWTUtilities;
import com.sun.awt.AWTUtilities.Translucency;
public class AWTUtilitiesTranslucener extends Translucener {
@Override
protected void makeTranslucent(Frame frame) {
if (AWTUtilities.isTranslucencySupported(Translucency.TRANSLUCENT)) {
AWTUtilities.setWindowOpacity(frame, 0.7f);
}
}
}

@@ -1,262 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Component;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.ProxySelector;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import javax.swing.JApplet;
import javax.swing.JOptionPane;
import javax.swing.ProgressMonitorInputStream;
import javax.swing.SwingUtilities;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.net.URLCodec;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.mime.content.InputStreamBody;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.ProxySelectorRoutePlanner;
/**
* Base class for capture applets.
* @author Pavel
*
*/
public abstract class AbstractCaptureApplet extends JApplet {
private static final String OUTPUT_DIR_PARAMETER = "outputDir";
private CaptureFrame captureFrame;
@Override
public void stop() {
if (captureFrame!=null) {
captureFrame.dispose();
captureFrame = null;
}
backgroundProcessor.shutdown();
synchronized (closeables) {
Iterator<Closeable> cit = closeables.iterator();
while (cit.hasNext()) {
try {
cit.next().close();
} catch (Exception e) {
e.printStackTrace();
}
cit.remove();
}
}
super.stop();
}
/**
* Adds closeable to close in applet stop() method.
* @param closeable
*/
public void addCloseable(Closeable closeable) {
synchronized (closeables) {
closeables.add(closeable);
}
}
private Collection<Closeable> closeables = new ArrayList<Closeable>();
public void showCaptureFrame() {
if (captureFrame==null) {
createCaptureFrame();
}
captureFrame.setVisible(true);
}
private ExecutorService backgroundProcessor;
@Override
public void start() {
super.start();
ThreadFactory threadFactory = new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread th=new Thread(r, "Background processor");
th.setPriority(Thread.NORM_PRIORITY);
return th;
}
};
backgroundProcessor = Executors.newSingleThreadExecutor(threadFactory);
SwingUtilities.invokeLater(new Runnable() {
public void run() {
createCaptureFrame();
}
});
try {
// Proxy configuration - requires java.net.NetPermission getProxySelector
proxySelector = ProxySelector.getDefault();
} catch (Exception e) {
System.err.println("Can't obtain proxy information: "+e);
e.printStackTrace();
}
}
public ExecutorService getBackgroundProcessor() {
return backgroundProcessor;
}
protected void createCaptureFrame() {
try {
captureFrame = new CaptureFrame(this);
captureFrame.setVisible(true);
} catch (Exception e) {
JOptionPane.showMessageDialog(
null,
"Error: "+e,
"Cannot create capture window",
JOptionPane.ERROR_MESSAGE);
e.printStackTrace();
}
}
public static String formatByteSize(long bytes) {
if (bytes<1024) {
return bytes + " bytes";
}
if (bytes<1024*1024) {
return MessageFormat.format("{0,number,0.0} Kb", new Object[] {(double) bytes/1024.0});
}
if (bytes<1024*1024*1024) {
return MessageFormat.format("{0,number,0.00} Mb", new Object[] {(double) bytes/(double) (1024.0*1024.0)});
}
return MessageFormat.format("{0,number,0.00} Gb", new Object[] {(double) bytes/(double) (1024.0*1024.0*1024.0)});
}
protected File preferencesFile = new File(System.getProperty("user.home")+File.separator+"."+getClass().getName()+".properties");
public Properties loadConfig() {
try {
if (preferencesFile.isFile()) {
InputStream configStream = new FileInputStream(preferencesFile);
Properties ret = new Properties();
ret.load(configStream);
configStream.close();
return ret;
}
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
public void storeConfig(Properties properties) {
try {
FileOutputStream out = new FileOutputStream(preferencesFile);
properties.store(out, "Config");
out.close();
} catch (Exception e) {
e.printStackTrace();
}
}
protected String getCookies() throws DecoderException {
String cookiesStr = getParameter("cookies");
if (cookiesStr==null) {
return null;
}
StringBuilder ret = new StringBuilder();
StringTokenizer st = new StringTokenizer(cookiesStr, ";");
while (st.hasMoreTokens()) {
String tok = st.nextToken();
int idx = tok.indexOf("=");
ret.append(hex2urlEncoded(tok.substring(0, idx)));
ret.append("=");
ret.append(hex2urlEncoded(tok.substring(idx+1)));
if (st.hasMoreElements()) {
ret.append(";");
}
}
return ret.toString();
}
private String hex2urlEncoded(String hexStr) throws DecoderException {
return new String(URLCodec.encodeUrl(null, Hex.decodeHex(hexStr.toCharArray())));
}
protected ProxySelector proxySelector;
/**
* Posts capture/recording to the web site.
* @param parentComponent Parent component for the progress bar.
* @param content Content - file or byte array.
* @param fileName File name.
* @param mimeType Mime type.
* @return
* @throws Exception
*/
public HttpResponse post(
Component parentComponent,
final InputStream content,
final long contentLength,
String fileName,
String mimeType) throws Exception {
System.out.println("jCapture applet, build @@@time@@@");
/**
* Debugging - save to file.
*/
if (getParameter(OUTPUT_DIR_PARAMETER)!=null) {
OutputStream out = new FileOutputStream(new File(getParameter(OUTPUT_DIR_PARAMETER)+File.separator+fileName));
byte[] buf=new byte[4096];
int l;
while ((l=content.read(buf))!=-1) {
out.write(buf, 0, l);
}
out.close();
content.close();
return null;
}
ProgressMonitorInputStream pmis = new ProgressMonitorInputStream(parentComponent, "Uploading "+ fileName + " ("+formatByteSize(contentLength)+")", content);
InputStreamBody bin = new InputStreamBody(pmis, mimeType, bodyName(fileName)) {
@Override
public long getContentLength() {
return contentLength;
}
};
DefaultHttpClient httpClient = new DefaultHttpClient();
if (proxySelector!=null) {
ProxySelectorRoutePlanner routePlanner = new ProxySelectorRoutePlanner(
httpClient.getConnectionManager().getSchemeRegistry(),
proxySelector);
httpClient.setRoutePlanner(routePlanner);
}
return httpClient.execute(createRequest(fileName, bin));
}
protected abstract HttpUriRequest createRequest(String fileName, InputStreamBody bin) throws Exception;
protected abstract String bodyName(String fileName);
}
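
A note on the cookie hand-off above: applet.php encodes every DokuWiki cookie name and value with PHP's bin2hex() and joins them into "name=value;" pairs, and getCookies() reverses the hex step (and additionally URL-encodes the decoded bytes) before the applet calls back into the wiki. Below is a minimal, JDK-only sketch of just the hex round trip; the class and method names are illustrative and not part of the plugin.

import java.nio.charset.StandardCharsets;

public class CookieParamDemo {

    // Decode one bin2hex()-encoded token back into its original string.
    static String hexDecode(String hex) {
        byte[] out = new byte[hex.length() / 2];
        for (int i = 0; i < out.length; i++) {
            out[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
        }
        return new String(out, StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        // What applet.php would emit for a single cookie DokuWiki=abc123
        String param = "446f6b7557696b69=616263313233;";
        StringBuilder header = new StringBuilder();
        for (String pair : param.split(";")) {
            int eq = pair.indexOf('=');
            header.append(hexDecode(pair.substring(0, eq)))
                  .append('=')
                  .append(hexDecode(pair.substring(eq + 1)))
                  .append(';');
        }
        System.out.println(header); // prints: DokuWiki=abc123;
    }
}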

@@ -1,85 +0,0 @@
package com.hammurapi.jcapture;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.zip.DataFormatException;
import com.flagstone.transform.DefineTag;
import com.flagstone.transform.Movie;
import com.flagstone.transform.MovieTag;
import com.flagstone.transform.Place2;
import com.flagstone.transform.datatype.CoordTransform;
public class ButtonManager {
private final Map<String, Place2> buttons;
private final List<DefineTag> definitions;
public ButtonManager() {
buttons = new HashMap<String, Place2>();
definitions = new ArrayList<DefineTag>();
}
public void loadLibrary(URL libUrl) throws IOException, DataFormatException {
Movie movie = new Movie();
movie.decodeFromUrl(libUrl);
findDefinitions(movie, definitions);
findButtons(movie, buttons);
}
public int maxIdentifier() {
int identifier = 0;
DefineTag object;
for (Iterator<DefineTag>iter = definitions.iterator(); iter.hasNext();) {
object = iter.next();
if (object.getIdentifier() > identifier) {
identifier = object.getIdentifier();
}
}
return identifier;
}
public List<DefineTag> getDefinitions() {
List<DefineTag> list = new ArrayList<DefineTag>(definitions.size());
for (Iterator<DefineTag>iter = definitions.iterator(); iter.hasNext();) {
list.add((DefineTag) iter.next().copy());
}
return list;
}
public Place2 getButton(final String name, final int layer, final int xpos, final int ypos) {
Place2 place = (Place2)buttons.get(name).copy();
place.setLayer(layer);
place.setTransform(new CoordTransform(1, 1, 0, 0, xpos, ypos));
return place;
}
private void findDefinitions(final Movie movie, final List<DefineTag> list) {
MovieTag object;
for (Iterator<MovieTag> iter = movie.getObjects().iterator(); iter.hasNext();) {
object = iter.next();
if (object instanceof DefineTag) {
list.add((DefineTag)object);
}
}
}
private void findButtons(final Movie movie, final Map<String, Place2> list) {
MovieTag object;
Place2 place;
for (Iterator<MovieTag>iter = movie.getObjects().iterator(); iter.hasNext();) {
object = iter.next();
if (object instanceof Place2) {
place = (Place2)object;
if (place.getName() != null) {
list.put(place.getName(), place);
}
}
}
}
}

@@ -1,370 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.AWTException;
import java.awt.Component;
import java.awt.MouseInfo;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.ServiceLoader;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import javax.sound.sampled.AudioFormat;
public class CaptureConfig implements VideoEncoder.Config {
private static final String MP3_COMMAND_PROPERTY = "mp3command";
private static final String TOOL_BAR_PROPERTY = "toolBar";
private static final String SPEED_SCALE_PROPERTY = "speedScale";
private static final String SOUND_PROPERTY = "sound";
private static final String SCREEN_SCALE_PROPERTY = "screenScale";
private static final String REMOVE_INACTIVITY_PROPERTY = "removeInactivity";
private static final String PLAY_PROPERTY = "play";
private static final String MOUSE_PROPERTY = "mouse";
private static final String MIXER_NAME_PROPERTY = "mixerName";
private static final String LOOP_PROPERTY = "loop";
private static final String INACTIVITY_INTERVAL_PROPERTY = "inactivityInterval";
private static final String IMAGE_FORMAT_PROPERTY = "imageFormat";
private static final String FRAMES_PER_SECOND_PROPERTY = "framesPerSecond";
private static final String BORDER_PROPERTY = "border";
private static final String RECORDING_RECTANGLE_PROPERTY = "recordingRectangle";
private static final String ENCODER_NAME_PROPERTY = "encoderName";
private static final String AUDIO_FORMAT_SAMPLE_SIZE_PROPERTY = "audioFormat.sampleSize";
private static final String AUDIO_FORMAT_SAMPLE_RATE_PROPERTY = "audioFormat.sampleRate";
private static final String AUDIO_FORMAT_CHANNELS_PROPERTY = "audioFormat.channels";
private AudioFormat audioFormat = new AudioFormat(22050.0F, 16, 1, true, false);
private String mixerName;
private float framesPerSecond = 10.0f;
private double screenScale = 1.0;
private float speedScale = 1.0f;
private boolean removeInactivity;
private double inactivityInterval = 0.7;
private Component parentComponent;
private Rectangle recordingRectangle;
private boolean border = true;
private boolean toolBar = true;
private Robot robot;
private String imageFormat = "PNG";
private boolean sound = true;
private boolean mouse = true;
private boolean loop = true;
private boolean play = false;
private VideoEncoder encoder;
private int grabRange = 3;
private ExecutorService backgroundProcessor;
private String mp3command;
public String getMp3command() {
return mp3command;
}
public void setMp3command(String mp3command) {
this.mp3command = mp3command;
}
public int getGrabRange() {
return grabRange;
}
public ExecutorService getBackgroundProcessor() {
return backgroundProcessor;
}
public void setBackgroundProcessor(ExecutorService backgroundProcessor) {
this.backgroundProcessor = backgroundProcessor;
}
public void setGrabRange(int grabRange) {
this.grabRange = grabRange;
}
public VideoEncoder getEncoder() {
return encoder;
}
public void setEncoder(VideoEncoder encoder) {
this.encoder = encoder;
}
public boolean isLoop() {
return loop;
}
public void setLoop(boolean loop) {
this.loop = loop;
}
public boolean isPlay() {
return play;
}
public void setPlay(boolean play) {
this.play = play;
}
public boolean isSound() {
return sound;
}
public void setSound(boolean sound) {
this.sound = sound;
}
public boolean isMouse() {
return mouse;
}
public void setMouse(boolean mouse) {
this.mouse = mouse;
}
public String getImageFormat() {
return imageFormat;
}
public void setImageFormat(String imageFormat) {
this.imageFormat = imageFormat;
}
public CaptureConfig() throws AWTException {
robot = new Robot();
ServiceLoader<VideoEncoder> sl = ServiceLoader.load(VideoEncoder.class);
List<VideoEncoder> accumulator = new ArrayList<VideoEncoder>();
Iterator<VideoEncoder> vit = sl.iterator();
while (vit.hasNext()) {
accumulator.add(vit.next());
}
Collections.sort(accumulator, new Comparator<VideoEncoder>() {
@Override
public int compare(VideoEncoder o1, VideoEncoder o2) {
return o1.toString().compareTo(o2.toString());
}
});
encoders = Collections.unmodifiableList(accumulator);
if (encoder==null && !encoders.isEmpty()) {
encoder = encoders.get(0);
}
}
/**
* Submits screenshot for processing in a background thread.
* @param task
* @return
*/
public Future<ScreenShot> submit(ScreenShot task) {
return backgroundProcessor.submit(task);
}
public Robot getRobot() {
return robot;
}
public ScreenShot createScreenShot(ScreenShot prev, FileChannel imageChannel) throws IOException {
BufferedImage image = robot.createScreenCapture(recordingRectangle);
Point mouseLocation = MouseInfo.getPointerInfo().getLocation();
if (mouse && recordingRectangle.contains(mouseLocation)) {
mouseLocation.move(mouseLocation.x-recordingRectangle.x, mouseLocation.y-recordingRectangle.y);
} else {
mouseLocation = null;
}
return new ScreenShot(
image,
mouseLocation,
prev,
System.currentTimeMillis(),
grabRange,
isTransparencySupported(),
border,
getScreenScale(),
imageChannel,
getImageFormat());
}
public boolean isTransparencySupported() {
return !"jpeg".equalsIgnoreCase(getImageFormat())
&& !"jpg".equalsIgnoreCase(getImageFormat());
}
public boolean isToolBar() {
return toolBar;
}
public void setToolBar(boolean toolBar) {
this.toolBar = toolBar;
}
public boolean isBorder() {
return border;
}
public void setBorder(boolean border) {
this.border = border;
}
public Rectangle getRecordingRectangle() {
return recordingRectangle;
}
public Properties setRecordingRectangle(Rectangle recordingRectangle) {
Rectangle oldValue = this.recordingRectangle;
this.recordingRectangle = recordingRectangle;
if (this.recordingRectangle!=null && !this.recordingRectangle.equals(oldValue)) {
return store();
}
return null;
}
public AudioFormat getAudioFormat() {
return audioFormat;
}
public void setAudioFormat(AudioFormat audioFormat) {
this.audioFormat = audioFormat;
}
public String getMixerName() {
return mixerName;
}
public void setMixerName(String mixerName) {
this.mixerName = mixerName;
}
public float getFramesPerSecond() {
return framesPerSecond;
}
public void setFramesPerSecond(float framesPerSecond) {
this.framesPerSecond = framesPerSecond;
}
public double getScreenScale() {
return screenScale;
}
public void setScreenScale(double screenScale) {
this.screenScale = screenScale;
}
public float getSpeedScale() {
return speedScale;
}
public void setSpeedScale(float speedScale) {
this.speedScale = speedScale;
}
public boolean isRemoveInactivity() {
return removeInactivity;
}
public void setRemoveInactivity(boolean removeInactivity) {
this.removeInactivity = removeInactivity;
}
public double getInactivityInterval() {
return inactivityInterval;
}
public void setInactivityInterval(double inactivityInterval) {
this.inactivityInterval = inactivityInterval;
}
public Component getParentComponent() {
return parentComponent;
}
public void setParentComponent(Component parentComponent) {
this.parentComponent = parentComponent;
}
void load(Properties properties) {
if (properties!=null) {
try {
if (properties.containsKey(AUDIO_FORMAT_CHANNELS_PROPERTY)) {
audioFormat = new AudioFormat(
Float.parseFloat(properties.getProperty(AUDIO_FORMAT_SAMPLE_RATE_PROPERTY, String.valueOf(audioFormat.getSampleRate()))),
Integer.parseInt(properties.getProperty(AUDIO_FORMAT_SAMPLE_SIZE_PROPERTY, String.valueOf(audioFormat.getSampleSizeInBits()))),
Integer.parseInt(properties.getProperty(AUDIO_FORMAT_CHANNELS_PROPERTY, String.valueOf(audioFormat.getChannels()))),
true, false);
}
border=Boolean.parseBoolean(properties.getProperty(BORDER_PROPERTY, String.valueOf(border)));
framesPerSecond=Float.parseFloat(properties.getProperty(FRAMES_PER_SECOND_PROPERTY, String.valueOf(framesPerSecond)));
imageFormat=properties.getProperty(IMAGE_FORMAT_PROPERTY, String.valueOf(imageFormat));
inactivityInterval=Double.parseDouble(properties.getProperty(INACTIVITY_INTERVAL_PROPERTY, String.valueOf(inactivityInterval)));
loop=Boolean.parseBoolean(properties.getProperty(LOOP_PROPERTY, String.valueOf(loop)));
mixerName=properties.getProperty(MIXER_NAME_PROPERTY, String.valueOf(mixerName));
mouse=Boolean.parseBoolean(properties.getProperty(MOUSE_PROPERTY, String.valueOf(mouse)));
play=Boolean.parseBoolean(properties.getProperty(PLAY_PROPERTY, String.valueOf(play)));
removeInactivity=Boolean.parseBoolean(properties.getProperty(REMOVE_INACTIVITY_PROPERTY, String.valueOf(removeInactivity)));
screenScale=Double.parseDouble(properties.getProperty(SCREEN_SCALE_PROPERTY, String.valueOf(screenScale)));
sound=Boolean.parseBoolean(properties.getProperty(SOUND_PROPERTY, String.valueOf(sound)));
speedScale=Float.parseFloat(properties.getProperty(SPEED_SCALE_PROPERTY, String.valueOf(speedScale)));
toolBar=Boolean.parseBoolean(properties.getProperty(TOOL_BAR_PROPERTY, String.valueOf(toolBar)));
mp3command=properties.getProperty(MP3_COMMAND_PROPERTY);
encoder = null;
String encoderName = properties.getProperty(ENCODER_NAME_PROPERTY);
if (encoderName!=null) {
for (VideoEncoder candidate: getEncoders()) {
if (encoderName.equals(candidate.toString())) {
encoder = candidate;
break;
}
}
}
if (encoder==null && !getEncoders().isEmpty()) {
encoder = getEncoders().get(0);
}
String rr = properties.getProperty(RECORDING_RECTANGLE_PROPERTY);
if (rr!=null && rr.trim().length()>0) {
String[] dims = rr.split(";");
recordingRectangle = new Rectangle(Integer.parseInt(dims[0]), Integer.parseInt(dims[1]), Integer.parseInt(dims[2]), Integer.parseInt(dims[3]));
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
private List<VideoEncoder> encoders;
/**
* @return list of available encoders.
*/
public List<VideoEncoder> getEncoders() {
return encoders;
}
Properties store() {
Properties properties = new Properties();
if (audioFormat!=null) {
properties.setProperty(AUDIO_FORMAT_CHANNELS_PROPERTY, String.valueOf(audioFormat.getChannels()));
properties.setProperty(AUDIO_FORMAT_SAMPLE_RATE_PROPERTY, String.valueOf(audioFormat.getSampleRate()));
properties.setProperty(AUDIO_FORMAT_SAMPLE_SIZE_PROPERTY, String.valueOf(audioFormat.getSampleSizeInBits()));
}
properties.setProperty(BORDER_PROPERTY, String.valueOf(border));
properties.setProperty(FRAMES_PER_SECOND_PROPERTY, String.valueOf(framesPerSecond));
properties.setProperty(IMAGE_FORMAT_PROPERTY, String.valueOf(imageFormat));
properties.setProperty(INACTIVITY_INTERVAL_PROPERTY, String.valueOf(inactivityInterval));
properties.setProperty(LOOP_PROPERTY, String.valueOf(loop));
properties.setProperty(MIXER_NAME_PROPERTY, String.valueOf(mixerName));
properties.setProperty(MOUSE_PROPERTY, String.valueOf(mouse));
properties.setProperty(PLAY_PROPERTY, String.valueOf(play));
properties.setProperty(REMOVE_INACTIVITY_PROPERTY, String.valueOf(removeInactivity));
properties.setProperty(SCREEN_SCALE_PROPERTY, String.valueOf(screenScale));
properties.setProperty(SOUND_PROPERTY, String.valueOf(sound));
properties.setProperty(SPEED_SCALE_PROPERTY, String.valueOf(speedScale));
properties.setProperty(TOOL_BAR_PROPERTY, String.valueOf(toolBar));
if (recordingRectangle!=null) {
properties.setProperty(RECORDING_RECTANGLE_PROPERTY, recordingRectangle.x+";"+recordingRectangle.y+";"+recordingRectangle.width+";"+recordingRectangle.height);
}
if (mp3command!=null) {
properties.setProperty(MP3_COMMAND_PROPERTY, mp3command);
}
if (encoder!=null) {
properties.setProperty(ENCODER_NAME_PROPERTY, encoder.toString());
}
return properties;
}
}
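
CaptureConfig persists its settings as a flat java.util.Properties file; the only structured value is the recording rectangle, which store() flattens to "x;y;width;height" and load() splits back into four integers. A minimal, JDK-only sketch of that round trip (names here are illustrative, not part of the plugin):

import java.awt.Rectangle;
import java.util.Properties;

public class RecordingRectangleDemo {
    public static void main(String[] args) {
        Rectangle rr = new Rectangle(10, 20, 640, 480);

        // store(): join the four integers with ";" under one property key
        Properties props = new Properties();
        props.setProperty("recordingRectangle",
                rr.x + ";" + rr.y + ";" + rr.width + ";" + rr.height);

        // load(): split and parse them back into a Rectangle
        String[] dims = props.getProperty("recordingRectangle").split(";");
        Rectangle restored = new Rectangle(
                Integer.parseInt(dims[0]), Integer.parseInt(dims[1]),
                Integer.parseInt(dims[2]), Integer.parseInt(dims[3]));

        System.out.println(restored.equals(rr)); // prints: true
    }
}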

@@ -1,405 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Insets;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.imageio.ImageIO;
import javax.swing.AbstractAction;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import javax.swing.border.LineBorder;
import netscape.javascript.JSObject;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
/**
* License: LGPL.
* @author Pavel Vlasov.
*
*/
public class CaptureFrame extends javax.swing.JFrame {
private JPanel capturePanel;
private SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss-SSS");
private int counter;
private CaptureConfig captureConfig;
private AbstractCaptureApplet applet;
private JButton recordButton;
public CaptureConfig getCaptureConfig() {
return captureConfig;
}
public CaptureFrame(final AbstractCaptureApplet applet) throws Exception {
super("Screen capture");
setIconImage(Toolkit.getDefaultToolkit().getImage(getClass().getResource("camera.png")));
setUndecorated(true);
Translucener.makeFrameTranslucent(this);
setAlwaysOnTop(true);
this.applet = applet;
captureConfig = new CaptureConfig();
captureConfig.load(applet.loadConfig());
captureConfig.setBackgroundProcessor(applet.getBackgroundProcessor());
//--- GUI construction ---
capturePanel = new JPanel();
final JLabel dimensionsLabel = new JLabel("");
capturePanel.add(dimensionsLabel, BorderLayout.CENTER);
capturePanel.addComponentListener(new ComponentAdapter() {
@Override
public void componentResized(ComponentEvent e) {
super.componentResized(e);
dimensionsLabel.setText(e.getComponent().getWidth()+" x "+e.getComponent().getHeight());
}
});
JButton captureButton = new JButton(new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
Rectangle bounds = capturePanel.getBounds();
Point loc = bounds.getLocation();
SwingUtilities.convertPointToScreen(loc, capturePanel);
bounds.setLocation(loc);
Properties props = captureConfig.setRecordingRectangle(bounds);
if (props!=null) {
getApplet().storeConfig(props);
}
capturing.set(true);
setVisible(false);
}
});
captureButton.setText("Capture");
captureButton.setToolTipText("Create a snapshot of the screen");
capturePanel.add(captureButton, BorderLayout.CENTER);
recordButton = new JButton(new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
Rectangle bounds = capturePanel.getBounds();
Point loc = bounds.getLocation();
SwingUtilities.convertPointToScreen(loc, capturePanel);
bounds.setLocation(loc);
Properties props = captureConfig.setRecordingRectangle(bounds);
if (props!=null) {
getApplet().storeConfig(props);
}
recording.set(true);
setVisible(false);
}
});
recordButton.setText("Record");
setRecordButtonState();
capturePanel.add(recordButton, BorderLayout.CENTER);
JButton optionsButton = new JButton(new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
new CaptureOptionsDialog(CaptureFrame.this).setVisible(true);
}
});
optionsButton.setText("Options");
capturePanel.add(optionsButton, BorderLayout.CENTER);
JButton cancelButton = new JButton(new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
CaptureFrame.this.setVisible(false);
}
});
cancelButton.setText("Cancel");
capturePanel.add(cancelButton, BorderLayout.CENTER);
getContentPane().add(capturePanel, BorderLayout.CENTER);
capturePanel.setBorder(new LineBorder(new java.awt.Color(0,0,0), 1, false));
if (captureConfig.getRecordingRectangle()==null) {
setSize(400, 300);
setLocationRelativeTo(null);
} else {
setBounds(captureConfig.getRecordingRectangle());
}
Insets dragInsets = new Insets(5, 5, 5, 5);
new ComponentResizer(dragInsets, this);
ComponentMover cm = new ComponentMover();
cm.registerComponent(this);
cm.setDragInsets(dragInsets);
addComponentListener(new ComponentListener() {
@Override
public void componentShown(ComponentEvent e) {
// TODO Auto-generated method stub
}
@Override
public void componentResized(ComponentEvent e) {
// TODO Auto-generated method stub
}
@Override
public void componentMoved(ComponentEvent e) {
// TODO Auto-generated method stub
}
@Override
public void componentHidden(ComponentEvent e) {
if (capturing.get()) {
capturing.set(false);
try {
capture();
} catch (Exception ex) {
ex.printStackTrace();
}
} else if (recording.get()) {
recording.set(false);
record();
}
}
});
}
void setRecordButtonState() {
if (captureConfig.getEncoder()==null) {
recordButton.setEnabled(false);
recordButton.setToolTipText("Video format not selected. Use Options dialog to select video format.");
} else {
recordButton.setEnabled(true);
recordButton.setToolTipText("Record screen activity and audio");
}
}
public AbstractCaptureApplet getApplet() {
return applet;
}
protected void capture() throws Exception {
try {
Thread.sleep(200); // For Ubuntu.
} catch (InterruptedException ie) {
// Ignore
}
BufferedImage screenShot = captureConfig.createScreenShot(null, null).call().getRegions().get(0).getImage().getImage();
String prefix = getDatePrefix();
String defaultImageFormat = applet.getParameter("imageFormat");
if (defaultImageFormat==null || defaultImageFormat.trim().length()==0) {
defaultImageFormat = "PNG";
}
final String defaultFileExtension=defaultImageFormat.toLowerCase();
final String fileName = JOptionPane.showInputDialog(CaptureFrame.this, "Upload as", applet.getParameter("pageName")+"-capture-"+prefix+"-" + nextCounter() +"."+defaultFileExtension);
if (fileName!=null) {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
int idx = fileName.lastIndexOf('.');
String imageFormat = idx==-1 ? defaultImageFormat : fileName.substring(idx+1).toUpperCase();
ImageIO.write(screenShot, imageFormat, baos);
final byte[] imageBytes = baos.toByteArray();
System.out.println("Image size: "+imageBytes.length);
// Uploading
SwingWorker<Boolean, Long> task = new SwingWorker<Boolean, Long>() {
@Override
protected Boolean doInBackground() throws Exception {
System.out.println("Uploading in background");
try {
HttpResponse iResponse = applet.post(
CaptureFrame.this,
new ByteArrayInputStream(imageBytes),
imageBytes.length,
fileName,
"application/octet-stream");
System.out.println("Response status line: "+iResponse.getStatusLine());
if (iResponse.getStatusLine().getStatusCode()!=HttpStatus.SC_OK) {
errorMessage = iResponse.getStatusLine();
errorTitle = "Error saving image";
return false;
}
return true;
} catch (Error e) {
errorMessage=e.toString();
errorTitle = "Upload error";
e.printStackTrace();
return false;
}
}
private Object errorMessage;
private String errorTitle;
protected void done() {
try {
if (get()) {
JSObject window = JSObject.getWindow(applet);
String toEval = "insertAtCarret('"+applet.getParameter("edid")+"','{{:"+fileName+"|}}')";
System.out.println("Evaluating: "+toEval);
window.eval(toEval);
CaptureFrame.this.setVisible(false);
} else {
JOptionPane.showMessageDialog(
CaptureFrame.this,
errorMessage,
errorTitle,
JOptionPane.ERROR_MESSAGE);
}
} catch (Exception e) {
e.printStackTrace();
JOptionPane.showMessageDialog(
CaptureFrame.this,
e.toString(),
"Exception",
JOptionPane.ERROR_MESSAGE);
}
};
};
task.execute();
} catch (IOException ex) {
JOptionPane.showMessageDialog(
applet,
ex.toString(),
"Error saving image",
JOptionPane.ERROR_MESSAGE);
}
}
}
public int nextCounter() {
return counter++;
}
public String getDatePrefix() {
return dateFormat.format(new Date());
}
protected void record() {
try {
Thread.sleep(200); // For Ubuntu.
} catch (InterruptedException ie) {
// Ignore
}
int borderWidth = 1;
JFrame[] borderFrames = new JFrame[4];
Dimension dim = Toolkit.getDefaultToolkit().getScreenSize();
Rectangle rr = captureConfig.getRecordingRectangle();
Color borderColor = Color.RED;
if (rr.x>=borderWidth) {
// West border
borderFrames[0] = new JFrame();
borderFrames[0].setDefaultCloseOperation(DISPOSE_ON_CLOSE);
borderFrames[0].setSize(borderWidth, rr.height+borderWidth*2);
borderFrames[0].setLocation(rr.x-borderWidth, rr.y-borderWidth);
borderFrames[0].setUndecorated(true);
borderFrames[0].setAlwaysOnTop(true);
borderFrames[0].setFocusableWindowState(false);
borderFrames[0].getContentPane().setBackground(borderColor);
}
if (rr.x+rr.width<dim.width-borderWidth) {
// East border
borderFrames[1] = new JFrame();
borderFrames[1].setDefaultCloseOperation(DISPOSE_ON_CLOSE);
borderFrames[1].setSize(borderWidth, rr.height+borderWidth*2);
borderFrames[1].setLocation(rr.x+rr.width, rr.y-borderWidth);
borderFrames[1].setUndecorated(true);
borderFrames[1].setAlwaysOnTop(true);
borderFrames[1].setFocusableWindowState(false);
borderFrames[1].getContentPane().setBackground(borderColor);
}
if (rr.y>=borderWidth) {
// North border
borderFrames[2] = new JFrame();
borderFrames[2].setDefaultCloseOperation(DISPOSE_ON_CLOSE);
borderFrames[2].setSize(rr.width, borderWidth);
borderFrames[2].setLocation(rr.x, rr.y-borderWidth);
borderFrames[2].setUndecorated(true);
borderFrames[2].setAlwaysOnTop(true);
borderFrames[2].setFocusableWindowState(false);
borderFrames[2].getContentPane().setBackground(borderColor);
}
if (rr.y+rr.height<dim.height-borderWidth) {
// South border
borderFrames[3] = new JFrame();
borderFrames[3].setDefaultCloseOperation(DISPOSE_ON_CLOSE);
borderFrames[3].setSize(rr.width, borderWidth);
borderFrames[3].setLocation(rr.x, rr.y+rr.height);
borderFrames[3].setUndecorated(true);
borderFrames[3].setAlwaysOnTop(true);
borderFrames[3].setFocusableWindowState(false);
borderFrames[3].getContentPane().setBackground(borderColor);
}
RecordingControlsFrame inst = new RecordingControlsFrame(this, borderFrames);
int x = getLocation().x + getWidth() - inst.getWidth();
if (x+inst.getWidth()>dim.getWidth()) {
x = dim.width-inst.getWidth();
} else if (x<0) {
x = 0;
}
int y = rr.getLocation().y+getHeight()+1;
if (y+inst.getHeight()>dim.height) {
y = rr.getLocation().y-inst.getHeight();
if (y<0) {
y=dim.height-inst.getHeight();
}
}
inst.setLocation(x, y);
inst.setVisible(true);
}
private AtomicBoolean capturing = new AtomicBoolean(false);
private AtomicBoolean recording = new AtomicBoolean(false);
}
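
For reference, record() draws the red recording border by placing four thin, undecorated, always-on-top frames just outside the recording rectangle (west, east, north and south). A small, JDK-only sketch of that geometry; the class name is illustrative and only the arithmetic mirrors the code above.

import java.awt.Rectangle;

public class BorderGeometryDemo {
    public static void main(String[] args) {
        Rectangle rr = new Rectangle(100, 100, 640, 480); // recording rectangle
        int bw = 1;                                       // border width

        Rectangle west  = new Rectangle(rr.x - bw, rr.y - bw, bw, rr.height + 2 * bw);
        Rectangle east  = new Rectangle(rr.x + rr.width, rr.y - bw, bw, rr.height + 2 * bw);
        Rectangle north = new Rectangle(rr.x, rr.y - bw, rr.width, bw);
        Rectangle south = new Rectangle(rr.x, rr.y + rr.height, rr.width, bw);

        System.out.println(west);
        System.out.println(east);
        System.out.println(north);
        System.out.println(south);
    }
}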

@@ -1,606 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.List;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.TargetDataLine;
import javax.swing.BorderFactory;
import javax.swing.ButtonGroup;
import javax.swing.ComboBoxModel;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JTabbedPane;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
public class CaptureOptionsDialog extends javax.swing.JDialog {
private JRadioButton sampleSize16Button;
private JTextField timeLineScaleTextField;
private JCheckBox toobarCheckBox;
private JCheckBox videoBorderCheckBox;
private JTextField screenScaleTextField;
private JLabel timelineScalingLabel;
private JLabel screenScalingLabel;
private JTextField fpsTextField;
private JLabel fpsLabel;
private ButtonGroup sampleSizeButtonGroup;
private JComboBox<String> sampleRateComboBox;
private JRadioButton sampleSize8Button;
private JTextField inactivityIntervalTextField;
private JLabel inactivityIntervalLabel;
private JCheckBox inactivityCheckBox;
private JPanel inactivityPanel;
private JPanel scalingPanel;
private JCheckBox stereoCheckBox;
private JLabel sampleSizeLabel;
private JTabbedPane recordingSettingsPane;
private JCheckBox recordSoundCheckBox;
private JLabel sampleRateLabel;
private JComboBox<String> soundLineComboBox;
private JLabel soundSourceLabel;
private JPanel audioSettingsPanel;
private JPanel videoSettingsPanel;
private JButton cancelButton;
private JButton okButton;
private JPanel recordPanel;
private JComboBox<VideoEncoder> encodersComboBox;
private JTextField mp3Text;
public CaptureOptionsDialog(final CaptureFrame owner) {
super(owner);
setDefaultCloseOperation(DISPOSE_ON_CLOSE);
BorderLayout thisLayout = new BorderLayout();
this.setLayout(thisLayout);
this.setPreferredSize(new java.awt.Dimension(333, 186));
recordPanel = new JPanel();
this.add(recordPanel);
GridBagLayout recordPanelLayout = new GridBagLayout();
recordPanelLayout.rowWeights = new double[] { 0.1, 0.0, 0.0, 0.0 };
recordPanelLayout.rowHeights = new int[] { 7, 7, 20, 7 };
recordPanelLayout.columnWeights = new double[] { 0.1, 0.0, 0.0, 0.0, 0.0 };
recordPanelLayout.columnWidths = new int[] { 20, 7, 7, 7, 7 };
recordPanel.setLayout(recordPanelLayout);
recordPanel.setPreferredSize(new java.awt.Dimension(335, 297));
okButton = new JButton();
recordPanel.add(okButton, new GridBagConstraints(1, 2, 1, 1, 0.0,
0.0, GridBagConstraints.CENTER, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
okButton.setText("OK");
okButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent evt) {
try {
if (recordSoundCheckBox.isSelected()) {
owner.getCaptureConfig().setAudioFormat(audioFormat);
owner.getCaptureConfig().setMixerName((String) soundLineComboBox.getSelectedItem());
} else {
owner.getCaptureConfig().setRemoveInactivity(inactivityCheckBox.isSelected());
if (owner.getCaptureConfig().isRemoveInactivity()) {
owner.getCaptureConfig().setInactivityInterval(Double.parseDouble(inactivityIntervalTextField.getText()));
}
}
owner.getCaptureConfig().setLoop(getLoopCheckBox().isSelected());
owner.getCaptureConfig().setPlay(getPlayCheckBox().isSelected());
owner.getCaptureConfig().setMouse(getMouseCheckBox().isSelected());
owner.getCaptureConfig().setSound(recordSoundCheckBox.isSelected());
owner.getCaptureConfig().setImageFormat(getImageFormatTextField().getText().trim());
owner.getCaptureConfig().setBorder(videoBorderCheckBox.isSelected());
owner.getCaptureConfig().setFramesPerSecond(Float.parseFloat(fpsTextField.getText()));
owner.getCaptureConfig().setScreenScale(Double.parseDouble(screenScaleTextField.getText()) / 100.0);
owner.getCaptureConfig().setSpeedScale((float) (Float.parseFloat(timeLineScaleTextField.getText()) / 100.0));
owner.getCaptureConfig().setToolBar(toobarCheckBox.isSelected());
owner.getApplet().storeConfig(owner.getCaptureConfig().store());
owner.getCaptureConfig().setMp3command(mp3Text.getText());
owner.getCaptureConfig().setEncoder((VideoEncoder) encodersComboBox.getSelectedItem());
owner.setRecordButtonState();
CaptureOptionsDialog.this.setVisible(false);
} catch (Exception e) {
e.printStackTrace();
JOptionPane.showMessageDialog(CaptureOptionsDialog.this,
e.toString(), "Error in configuration parameters",
JOptionPane.ERROR_MESSAGE);
}
}
});
cancelButton = new JButton();
recordPanel.add(cancelButton, new GridBagConstraints(3, 2, 1, 1, 0.0,
0.0, GridBagConstraints.CENTER, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
cancelButton.setText("Cancel");
cancelButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
CaptureOptionsDialog.this.setVisible(false);
}
});
recordingSettingsPane = new JTabbedPane();
recordPanel.add(recordingSettingsPane, new GridBagConstraints(0, 0, 5,
1, 0.0, 0.0, GridBagConstraints.CENTER,
GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel = new JPanel();
GridBagLayout videoSettingsPanelLayout = new GridBagLayout();
recordingSettingsPane.addTab("Video", null, videoSettingsPanel, null);
videoSettingsPanel.setPreferredSize(new java.awt.Dimension(112, 207));
videoSettingsPanelLayout.rowWeights = new double[] { 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1 };
videoSettingsPanelLayout.rowHeights = new int[] { 7, 7, 7, 7, 7, 7, 7, 20 };
videoSettingsPanelLayout.columnWeights = new double[] { 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.1 };
videoSettingsPanelLayout.columnWidths = new int[] { 115, 7, 40, 7, 20,
7, 20, 7, 20 };
videoSettingsPanel.setLayout(videoSettingsPanelLayout);
scalingPanel = new JPanel();
GridBagLayout scalingPanelLayout = new GridBagLayout();
videoSettingsPanel.add(scalingPanel, new GridBagConstraints(0, 6, 1, 1,
0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH,
new Insets(0, 0, 0, 0), 0, 0));
scalingPanel.setBorder(BorderFactory.createTitledBorder("Scaling (%)"));
scalingPanelLayout.rowWeights = new double[] { 0.1, 0.0, 0.1 };
scalingPanelLayout.rowHeights = new int[] { 7, 7, 7 };
scalingPanelLayout.columnWeights = new double[] { 0.0, 0.0, 0.1 };
scalingPanelLayout.columnWidths = new int[] { 7, 7, 7 };
scalingPanel.setLayout(scalingPanelLayout);
scalingPanel.add(getScreenScalingLabel(), new GridBagConstraints(0, 0,
1, 1, 0.0, 0.0, GridBagConstraints.EAST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
scalingPanel.add(getTimelineScalingLabel(), new GridBagConstraints(0,
2, 1, 1, 0.0, 0.0, GridBagConstraints.EAST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
scalingPanel.add(getScreenScaleTextField(), new GridBagConstraints(2,
0, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER,
GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
scalingPanel.add(getTimeLineScaleTextField(), new GridBagConstraints(2,
2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER,
GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
inactivityPanel = new JPanel();
GridBagLayout inactivityPanelLayout = new GridBagLayout();
videoSettingsPanel.add(inactivityPanel, new GridBagConstraints(2, 6, 8,
1, 0.0, 0.0, GridBagConstraints.WEST,
GridBagConstraints.VERTICAL, new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getFpsLabel(), new GridBagConstraints(0, 0, 1,
1, 0.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getFpsTextField(), new GridBagConstraints(2, 0,
1, 1, 0.0, 0.0, GridBagConstraints.WEST,
GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getImageFormatLabel(), new GridBagConstraints(0, 2, 1,
1, 0.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getImageFormatTextField(), new GridBagConstraints(2, 2,
1, 1, 0.0, 0.0, GridBagConstraints.WEST,
GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getEncoderLabel(), new GridBagConstraints(0, 4, 1,
1, 0.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getEncoderComboBox(), new GridBagConstraints(2, 4,
6, 1, 0.0, 0.0, GridBagConstraints.WEST,
GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getVideoBorderCheckBox(),
new GridBagConstraints(4, 0, 1, 1, 0.0, 0.0,
GridBagConstraints.WEST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getLoopCheckBox(),
new GridBagConstraints(6, 2, 1, 1, 0.0, 0.0,
GridBagConstraints.WEST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getPlayCheckBox(),
new GridBagConstraints(8, 2, 1, 1, 0.0, 0.0,
GridBagConstraints.WEST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getMouseCheckBox(),
new GridBagConstraints(4, 2, 1, 1, 0.0, 0.0,
GridBagConstraints.WEST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
videoSettingsPanel.add(getJToobarCheckBox(), new GridBagConstraints(6,
0, 1, 1, 0.0, 0.0, GridBagConstraints.WEST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
inactivityPanel.setBorder(BorderFactory
.createTitledBorder("Inactivity processing"));
inactivityPanel
.setToolTipText("Inactivity handling, enabled if audio is not being recorded.");
inactivityPanelLayout.rowWeights = new double[] { 0.0, 0.0, 0.0 };
inactivityPanelLayout.rowHeights = new int[] { 7, 7, 7 };
inactivityPanelLayout.columnWeights = new double[] { 0.0, 0.0, 0.0,
0.0, 0.1 };
inactivityPanelLayout.columnWidths = new int[] { 7, 7, 7, 47, 7 };
inactivityPanel.setLayout(inactivityPanelLayout);
inactivityPanel.setEnabled(false);
inactivityCheckBox = new JCheckBox();
inactivityPanel.add(inactivityCheckBox, new GridBagConstraints(1, 0, 4,
1, 0.0, 0.0, GridBagConstraints.WEST,
GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
inactivityCheckBox.setText("Remove inactivity");
inactivityCheckBox.setEnabled(false);
inactivityIntervalLabel = new JLabel();
inactivityPanel.add(inactivityIntervalLabel, new GridBagConstraints(1,
2, 1, 1, 0.0, 0.0, GridBagConstraints.EAST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
inactivityIntervalLabel.setText("Inactivity interval (sec)");
inactivityIntervalLabel.setEnabled(false);
inactivityIntervalTextField = new JTextField();
inactivityPanel.add(inactivityIntervalTextField,
new GridBagConstraints(3, 2, 1, 1, 0.0, 0.0,
GridBagConstraints.CENTER,
GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0),
0, 0));
inactivityIntervalTextField.setText("0.7");
inactivityIntervalTextField.setEnabled(false);
audioSettingsPanel = new JPanel();
GridBagLayout audioSettingsPanelLayout = new GridBagLayout();
recordingSettingsPane.addTab("Audio", null, audioSettingsPanel, null);
audioSettingsPanelLayout.rowWeights = new double[] { 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1 };
audioSettingsPanelLayout.rowHeights = new int[] { 7, 7, 7, 7, 7, 7, 7, 7, 7,
20 };
audioSettingsPanelLayout.columnWeights = new double[] { 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.1 };
audioSettingsPanelLayout.columnWidths = new int[] { 7, 7, 49, 7, 135,
7, 20 };
audioSettingsPanel.setLayout(audioSettingsPanelLayout);
sampleSize16Button = new JRadioButton();
audioSettingsPanel.add(sampleSize16Button, new GridBagConstraints(4, 4,
1, 1, 0.0, 0.0, GridBagConstraints.WEST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
sampleSize16Button.setText("16");
sampleSize16Button.setSelected(true);
getSampleSizeButtonGroup().add(sampleSize16Button);
sampleSize16Button.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent evt) {
selectSoundSource();
}
});
recordSoundCheckBox = new JCheckBox();
audioSettingsPanel.add(recordSoundCheckBox, new GridBagConstraints(0,
0, 4, 1, 0.0, 0.0, GridBagConstraints.WEST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
recordSoundCheckBox.setText("Record sound");
recordSoundCheckBox.setSelected(true);
recordSoundCheckBox.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent evt) {
onSetSound();
}
});
soundSourceLabel = new JLabel();
audioSettingsPanel.add(soundSourceLabel, new GridBagConstraints(0, 6,
1, 1, 0.0, 0.0, GridBagConstraints.EAST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
soundSourceLabel.setText("Source");
soundLineComboBox = new JComboBox();
audioSettingsPanel.add(soundLineComboBox, new GridBagConstraints(2, 6,
3, 1, 0.0, 0.0, GridBagConstraints.CENTER,
GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
audioSettingsPanel.add(new JLabel("WAV2MP3 command"), new GridBagConstraints(0, 8,
1, 1, 0.0, 0.0, GridBagConstraints.EAST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
mp3Text = new JTextField();
audioSettingsPanel.add(mp3Text, new GridBagConstraints(2, 8,
5, 1, 0.0, 0.0, GridBagConstraints.CENTER,
GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
sampleRateLabel = new JLabel();
audioSettingsPanel.add(sampleRateLabel, new GridBagConstraints(0, 2, 1,
1, 0.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
sampleRateLabel.setText("Sample rate (kHz)");
sampleSizeLabel = new JLabel();
audioSettingsPanel.add(sampleSizeLabel, new GridBagConstraints(0, 4, 1,
1, 0.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
sampleSizeLabel.setText("Sample size (bits)");
stereoCheckBox = new JCheckBox();
audioSettingsPanel.add(stereoCheckBox, new GridBagConstraints(4, 2, 1,
1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0));
stereoCheckBox.setText("Stereo");
stereoCheckBox.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent evt) {
selectSoundSource();
}
});
sampleSize8Button = new JRadioButton();
audioSettingsPanel.add(sampleSize8Button, new GridBagConstraints(2, 4,
1, 1, 0.0, 0.0, GridBagConstraints.WEST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
sampleSize8Button.setText("8");
getSampleSizeButtonGroup().add(sampleSize8Button);
sampleSize8Button.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent evt) {
selectSoundSource();
}
});
ComboBoxModel<String> sampleRateComboBoxModel = new DefaultComboBoxModel<String>(
new String[] { "5.5", "11", "22", "44" });
sampleRateComboBox = new JComboBox<String>();
audioSettingsPanel.add(sampleRateComboBox, new GridBagConstraints(2, 2,
1, 1, 0.0, 0.0, GridBagConstraints.CENTER,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
sampleRateComboBox.setModel(sampleRateComboBoxModel);
sampleRateComboBox.setSelectedIndex(2);
sampleRateComboBox.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent evt) {
selectSoundSource();
}
});
getImageFormatTextField().setText(owner.getCaptureConfig().getImageFormat());
recordSoundCheckBox.setSelected(owner.getCaptureConfig().isSound());
stereoCheckBox.setSelected(owner.getCaptureConfig().getAudioFormat().getChannels()>1);
if (owner.getCaptureConfig().getAudioFormat().getSampleSizeInBits()==8) {
sampleSize8Button.setSelected(true);
} else {
sampleSize16Button.setSelected(true);
}
float sampleRate = owner.getCaptureConfig().getAudioFormat().getSampleRate();
float proximity = Math.abs(sampleRate-sampleRates[0]);
sampleRateComboBox.setSelectedIndex(0);
for (int i=1; i<sampleRates.length; ++i) {
float prx = Math.abs(sampleRate-sampleRates[i]);
if (prx<proximity) {
sampleRateComboBox.setSelectedIndex(i);
proximity = prx;
}
}
inactivityCheckBox.setSelected(owner.getCaptureConfig().isRemoveInactivity());
if (owner.getCaptureConfig().isRemoveInactivity()) {
inactivityIntervalTextField.setText(String.valueOf(owner.getCaptureConfig().getInactivityInterval()));
}
onSetSound();
videoBorderCheckBox.setSelected(owner.getCaptureConfig().isBorder());
getMouseCheckBox().setSelected(owner.getCaptureConfig().isMouse());
getLoopCheckBox().setSelected(owner.getCaptureConfig().isLoop());
getPlayCheckBox().setSelected(owner.getCaptureConfig().isPlay());
fpsTextField.setText(Float.toString(owner.getCaptureConfig().getFramesPerSecond()));
screenScaleTextField.setText(Long.toString(Math.round(owner.getCaptureConfig().getScreenScale()*100.0)));
timeLineScaleTextField.setText(Long.toString(Math.round(owner.getCaptureConfig().getSpeedScale()*100.0)));
toobarCheckBox.setSelected(owner.getCaptureConfig().isToolBar());
mp3Text.setText(owner.getCaptureConfig().getMp3command());
selectSoundSource();
soundLineComboBox.setSelectedItem(owner.getCaptureConfig().getMixerName());
timeLineScaleTextField.setEnabled(!recordSoundCheckBox.isSelected());
getEncoderComboBox().setSelectedItem(owner.getCaptureConfig().getEncoder());
JPanel aboutPanel = new JPanel();
aboutPanel.setLayout(new BorderLayout());
recordingSettingsPane.addTab("About", aboutPanel);
aboutPanel.add(new JLabel("jCapture", SwingConstants.CENTER), BorderLayout.NORTH);
aboutPanel.add(new JLabel("by Hammurapi Group (http://www.hammurapi.com)", SwingConstants.CENTER), BorderLayout.CENTER);
aboutPanel.add(new JLabel("Memory (available/max): "+AbstractCaptureApplet.formatByteSize(Runtime.getRuntime().freeMemory())+"/"+AbstractCaptureApplet.formatByteSize(Runtime.getRuntime().maxMemory()), SwingConstants.CENTER), BorderLayout.SOUTH);
setSize(400, 300);
setLocationRelativeTo(owner);
}
private AudioFormat audioFormat;
private float[] sampleRates = { 5512.0F, 11025.0F, 22050.0F, 44100.0F };
private DefaultComboBoxModel<String> soundLineComboBoxModel;
private JLabel imageFormatLabel;
private JTextField imageFormatTextField;
private JCheckBox mouseCheckBox;
private JCheckBox playCheckBox;
private JCheckBox loopCheckBox;
private JLabel encoderLabel;
private void selectSoundSource() {
audioFormat = new AudioFormat(
sampleRates[sampleRateComboBox.getSelectedIndex()],
sampleSize8Button.isSelected() ? 8 : 16,
stereoCheckBox.isSelected() ? 2 : 1, true, false);
String sourceName = (String) soundLineComboBox.getSelectedItem();
if (soundLineComboBoxModel == null) {
soundLineComboBoxModel = new DefaultComboBoxModel<String>();
soundLineComboBox.setModel(soundLineComboBoxModel);
} else {
soundLineComboBoxModel.removeAllElements();
}
DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat);
boolean hasSourceName = false;
for (Mixer.Info mi : AudioSystem.getMixerInfo()) {
Mixer mx = AudioSystem.getMixer(mi);
if (mx.isLineSupported(info)) {
soundLineComboBoxModel.addElement(mi.getName());
if (sourceName!=null && mi.getName().equals(sourceName)) {
hasSourceName = true;
}
}
}
if (hasSourceName) {
soundLineComboBoxModel.setSelectedItem(sourceName);
}
}
private ButtonGroup getSampleSizeButtonGroup() {
if (sampleSizeButtonGroup == null) {
sampleSizeButtonGroup = new ButtonGroup();
}
return sampleSizeButtonGroup;
}
private JLabel getFpsLabel() {
if (fpsLabel == null) {
fpsLabel = new JLabel();
fpsLabel.setText("Frames Per Second");
}
return fpsLabel;
}
private JTextField getFpsTextField() {
if (fpsTextField == null) {
fpsTextField = new JTextField();
fpsTextField.setText("10");
fpsTextField.setSize(30, 23);
}
return fpsTextField;
}
private JLabel getImageFormatLabel() {
if (imageFormatLabel == null) {
imageFormatLabel = new JLabel();
imageFormatLabel.setText("Image format");
}
return imageFormatLabel;
}
private JTextField getImageFormatTextField() {
if (imageFormatTextField == null) {
imageFormatTextField = new JTextField();
imageFormatTextField.setText("png");
imageFormatTextField.setSize(30, 23);
}
return imageFormatTextField;
}
private JLabel getEncoderLabel() {
if (encoderLabel == null) {
encoderLabel = new JLabel();
encoderLabel.setText("Video format");
}
return encoderLabel;
}
private JComboBox<VideoEncoder> getEncoderComboBox() {
if (encodersComboBox == null) {
List<VideoEncoder> el = ((CaptureFrame) getOwner()).getCaptureConfig().getEncoders();
encodersComboBox = new JComboBox<VideoEncoder>(el.toArray(new VideoEncoder[el.size()]));
// encodersComboBox.setSize(30, 23);
}
return encodersComboBox;
}
private JLabel getScreenScalingLabel() {
if (screenScalingLabel == null) {
screenScalingLabel = new JLabel();
screenScalingLabel.setText("Graphics");
}
return screenScalingLabel;
}
private JLabel getTimelineScalingLabel() {
if (timelineScalingLabel == null) {
timelineScalingLabel = new JLabel();
timelineScalingLabel.setText("Speed");
}
return timelineScalingLabel;
}
private JTextField getScreenScaleTextField() {
if (screenScaleTextField == null) {
screenScaleTextField = new JTextField();
screenScaleTextField.setText("100");
}
return screenScaleTextField;
}
private JTextField getTimeLineScaleTextField() {
if (timeLineScaleTextField == null) {
timeLineScaleTextField = new JTextField();
timeLineScaleTextField.setText("100");
}
return timeLineScaleTextField;
}
private JCheckBox getVideoBorderCheckBox() {
if (videoBorderCheckBox == null) {
videoBorderCheckBox = new JCheckBox();
videoBorderCheckBox.setText("Border");
}
return videoBorderCheckBox;
}
private JCheckBox getMouseCheckBox() {
if (mouseCheckBox == null) {
mouseCheckBox = new JCheckBox();
mouseCheckBox.setText("Mouse");
}
return mouseCheckBox;
}
private JCheckBox getLoopCheckBox() {
if (loopCheckBox == null) {
loopCheckBox = new JCheckBox();
loopCheckBox.setText("Loop");
}
return loopCheckBox;
}
private JCheckBox getPlayCheckBox() {
if (playCheckBox == null) {
playCheckBox = new JCheckBox();
playCheckBox.setText("Play");
}
return playCheckBox;
}
private JCheckBox getJToobarCheckBox() {
if (toobarCheckBox == null) {
toobarCheckBox = new JCheckBox();
toobarCheckBox.setText("Toolbar");
toobarCheckBox.setSelected(true);
}
return toobarCheckBox;
}
void onSetSound() {
for (Component child : recordSoundCheckBox.getParent().getComponents()) {
if (child != recordSoundCheckBox) {
child.setEnabled(recordSoundCheckBox.isSelected());
}
}
inactivityPanel.setEnabled(!recordSoundCheckBox.isSelected());
timeLineScaleTextField.setEnabled(!recordSoundCheckBox.isSelected());
for (Component child : inactivityPanel.getComponents()) {
child.setEnabled(!recordSoundCheckBox.isSelected());
}
}
}
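// A minimal standalone sketch of the nearest-match logic used above to pre-select the
// sample rate combo box (hypothetical helper, not part of the original dialog):
class SampleRateHelper {
    static int nearestIndex(float sampleRate, float[] rates) {
        int index = 0;
        float proximity = Math.abs(sampleRate - rates[0]);
        for (int i = 1; i < rates.length; ++i) {
            float prx = Math.abs(sampleRate - rates[i]);
            if (prx < proximity) {
                proximity = prx;
                index = i;
            }
        }
        return index;
    }

    public static void main(String[] args) {
        // 22000 Hz snaps to index 2 (22050 Hz) among the dialog's sample rates.
        System.out.println(nearestIndex(22000f, new float[] { 5512f, 11025f, 22050f, 44100f }));
    }
}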

@@ -1,361 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Component;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.GraphicsEnvironment;
import java.awt.Insets;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Window;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.JComponent;
import javax.swing.SwingUtilities;
/**
* This class allows you to move a Component by using a mouse. The Component
* moved can be a high-level Window (i.e. Window, Frame, Dialog), in which case
* the Window is moved within the desktop. Or the Component can belong to a
* Container in which case the Component is moved within the Container.
*
* When moving a Window, the listener can be added to a child Component of the
* Window. In this case attempting to move the child will result in the Window
* moving. For example, you might create a custom "Title Bar" for an undecorated
* Window, and the Window is then moved by dragging the title bar only.
* Multiple components can be registered as "window movers".
*
* Components can be registered when the class is created. Additional components
* can be added at any time using the registerComponent() method.
*
* Taken from http://tips4java.wordpress.com/2009/06/14/moving-windows/
*/
public class ComponentMover extends MouseAdapter {
private Insets dragInsets = new Insets(0, 0, 0, 0);
private Dimension snapSize = new Dimension(1, 1);
private Insets edgeInsets = new Insets(0, 0, 0, 0);
private boolean changeCursor = true;
private boolean autoLayout = false;
private Class destinationClass;
private Component destinationComponent;
private Component destination;
private Component source;
private Point pressed;
private Point location;
private Cursor originalCursor;
private boolean autoscrolls;
private boolean potentialDrag;
/**
* Constructor for moving individual components. The components must be
* registered using the registerComponent() method.
*/
public ComponentMover() {
}
/**
* Constructor to specify a Class of Component that will be moved when drag
* events are generated on a registered child component. The events will be
* passed to the first ancestor of this specified class.
*
* @param destinationClass
* the Class of the ancestor component
* @param components
* the Components to be registered for forwarding drag events to
* the ancestor Component.
*/
public ComponentMover(Class destinationClass, Component... components) {
this.destinationClass = destinationClass;
registerComponent(components);
}
/**
* Constructor to specify a parent component that will be moved when drag
* events are generated on a registered child component.
*
* @param destinationComponent
* the component drag events should be forwarded to
* @param components
* the Components to be registered for forwarding drag events to
* the parent component to be moved
*/
public ComponentMover(Component destinationComponent,
Component... components) {
this.destinationComponent = destinationComponent;
registerComponent(components);
}
/**
* Get the auto layout property
*
* @return the auto layout property
*/
public boolean isAutoLayout() {
return autoLayout;
}
/**
* Set the auto layout property
*
* @param autoLayout
* when true layout will be invoked on the parent container
*/
public void setAutoLayout(boolean autoLayout) {
this.autoLayout = autoLayout;
}
/**
* Get the change cursor property
*
* @return the change cursor property
*/
public boolean isChangeCursor() {
return changeCursor;
}
/**
* Set the change cursor property
*
* @param changeCursor
* when true the cursor will be changed to the Cursor.MOVE_CURSOR
* while the mouse is pressed
*/
public void setChangeCursor(boolean changeCursor) {
this.changeCursor = changeCursor;
}
/**
* Get the drag insets
*
* @return the drag insets
*/
public Insets getDragInsets() {
return dragInsets;
}
/**
* Set the drag insets. The insets specify an area where mouseDragged events
* should be ignored and therefore the component will not be moved. This
* will prevent these events from being confused with a MouseMotionListener
* that supports component resizing.
*
* @param dragInsets
*/
public void setDragInsets(Insets dragInsets) {
this.dragInsets = dragInsets;
}
/**
* Get the bounds insets
*
* @return the bounds insets
*/
public Insets getEdgeInsets() {
return edgeInsets;
}
/**
* Set the edge insets. The insets specify how close to each edge of the
* parent component the child component can be moved. Positive values
* mean the component must be contained within the parent. Negative values
* mean the component can be moved outside the parent.
*
* @param edgeInsets
*/
public void setEdgeInsets(Insets edgeInsets) {
this.edgeInsets = edgeInsets;
}
/**
* Remove listeners from the specified component
*
* @param components
* the components the listeners are removed from
*/
public void deregisterComponent(Component... components) {
for (Component component : components)
component.removeMouseListener(this);
}
/**
* Add the required listeners to the specified component
*
* @param components
* the components the listeners are added to
*/
public void registerComponent(Component... components) {
for (Component component : components)
component.addMouseListener(this);
}
/**
* Get the snap size
*
* @return the snap size
*/
public Dimension getSnapSize() {
return snapSize;
}
/**
* Set the snap size. Forces the component to be snapped to the closest grid
* position. Snapping will occur when the mouse is dragged halfway.
*/
public void setSnapSize(Dimension snapSize) {
if (snapSize.width < 1 || snapSize.height < 1)
throw new IllegalArgumentException(
"Snap sizes must be greater than 0");
this.snapSize = snapSize;
}
/**
* Setup the variables used to control the moving of the component:
*
* source - the source component of the mouse event destination - the
* component that will ultimately be moved pressed - the Point where the
* mouse was pressed in the destination component coordinates.
*/
@Override
public void mousePressed(MouseEvent e) {
source = e.getComponent();
int width = source.getSize().width - dragInsets.left - dragInsets.right;
int height = source.getSize().height - dragInsets.top
- dragInsets.bottom;
Rectangle r = new Rectangle(dragInsets.left, dragInsets.top, width,
height);
if (r.contains(e.getPoint()))
setupForDragging(e);
}
private void setupForDragging(MouseEvent e) {
source.addMouseMotionListener(this);
potentialDrag = true;
// Determine the component that will ultimately be moved
if (destinationComponent != null) {
destination = destinationComponent;
} else if (destinationClass == null) {
destination = source;
} else // forward events to destination component
{
destination = SwingUtilities.getAncestorOfClass(destinationClass,
source);
}
pressed = e.getLocationOnScreen();
location = destination.getLocation();
if (changeCursor) {
originalCursor = source.getCursor();
source.setCursor(Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR));
}
// Making sure autoscrolls is false will allow for smoother dragging of
// individual components
if (destination instanceof JComponent) {
JComponent jc = (JComponent) destination;
autoscrolls = jc.getAutoscrolls();
jc.setAutoscrolls(false);
}
}
/**
* Move the component to its new location. The dragged Point must be in the
* destination coordinates.
*/
@Override
public void mouseDragged(MouseEvent e) {
Point dragged = e.getLocationOnScreen();
int dragX = getDragDistance(dragged.x, pressed.x, snapSize.width);
int dragY = getDragDistance(dragged.y, pressed.y, snapSize.height);
int locationX = location.x + dragX;
int locationY = location.y + dragY;
// Mouse dragged events are not generated for every pixel the mouse
// is moved. Adjust the location to make sure we are still on a
// snap value.
// while (locationX < edgeInsets.left)
// locationX += snapSize.width;
//
// while (locationY < edgeInsets.top)
// locationY += snapSize.height;
//
// Dimension d = getBoundingSize(destination);
// while (locationX + destination.getSize().width + edgeInsets.right > d.width)
// locationX -= snapSize.width;
//
// while (locationY + destination.getSize().height + edgeInsets.bottom > d.height)
// locationY -= snapSize.height;
// Adjustments are finished, move the component
destination.setLocation(locationX, locationY);
}
/*
* Determine how far the mouse has moved from where dragging started (Assume
* drag direction is down and right for positive drag distance)
*/
private int getDragDistance(int larger, int smaller, int snapSize) {
int halfway = snapSize / 2;
int drag = larger - smaller;
drag += (drag < 0) ? -halfway : halfway;
drag = (drag / snapSize) * snapSize;
return drag;
}
/*
* Get the bounds of the parent of the dragged component.
*/
private Dimension getBoundingSize(Component source) {
if (source instanceof Window) {
GraphicsEnvironment env = GraphicsEnvironment
.getLocalGraphicsEnvironment();
Rectangle bounds = env.getMaximumWindowBounds();
return new Dimension(bounds.width, bounds.height);
} else {
return source.getParent().getSize();
}
}
/**
* Restore the original state of the Component
*/
@Override
public void mouseReleased(MouseEvent e) {
if (!potentialDrag)
return;
source.removeMouseMotionListener(this);
potentialDrag = false;
if (changeCursor)
source.setCursor(originalCursor);
if (destination instanceof JComponent) {
((JComponent) destination).setAutoscrolls(autoscrolls);
}
// Layout the components on the parent container
if (autoLayout) {
if (destination instanceof JComponent) {
((JComponent) destination).revalidate();
} else {
destination.validate();
}
}
}
}
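// Assumed usage sketch (based on the Javadoc above, not taken from jCapture itself):
// drag a custom title bar to move an undecorated window.
import java.awt.BorderLayout;
import java.awt.Window;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingConstants;
import com.hammurapi.jcapture.ComponentMover;

public class ComponentMoverDemo {
    public static void main(String[] args) {
        JFrame frame = new JFrame();
        frame.setUndecorated(true);
        JLabel titleBar = new JLabel("Drag me", SwingConstants.CENTER);
        frame.getContentPane().add(titleBar, BorderLayout.NORTH);
        // Drag events on the title bar are forwarded to the first ancestor Window,
        // so dragging the label moves the whole undecorated frame.
        new ComponentMover(Window.class, titleBar);
        frame.setSize(300, 200);
        frame.setLocationRelativeTo(null);
        frame.setVisible(true);
    }
}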

@@ -1,446 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Component;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.GraphicsEnvironment;
import java.awt.Insets;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Window;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.HashMap;
import java.util.Map;
import javax.swing.JComponent;
import javax.swing.SwingUtilities;
/**
* The ComponentResizer allows you to resize a component by dragging a border of
* the component.
*
* Taken from http://tips4java.wordpress.com/2009/09/13/resizing-components/
*/
public class ComponentResizer extends MouseAdapter {
private final static Dimension MINIMUM_SIZE = new Dimension(10, 10);
private final static Dimension MAXIMUM_SIZE = new Dimension(
Integer.MAX_VALUE, Integer.MAX_VALUE);
private static Map<Integer, Integer> cursors = new HashMap<Integer, Integer>();
{
cursors.put(1, Cursor.N_RESIZE_CURSOR);
cursors.put(2, Cursor.W_RESIZE_CURSOR);
cursors.put(4, Cursor.S_RESIZE_CURSOR);
cursors.put(8, Cursor.E_RESIZE_CURSOR);
cursors.put(3, Cursor.NW_RESIZE_CURSOR);
cursors.put(9, Cursor.NE_RESIZE_CURSOR);
cursors.put(6, Cursor.SW_RESIZE_CURSOR);
cursors.put(12, Cursor.SE_RESIZE_CURSOR);
}
private Insets dragInsets;
private Dimension snapSize;
private int direction;
protected static final int NORTH = 1;
protected static final int WEST = 2;
protected static final int SOUTH = 4;
protected static final int EAST = 8;
private Cursor sourceCursor;
private boolean resizing;
private Rectangle bounds;
private Point pressed;
private boolean autoscrolls;
private Dimension minimumSize = MINIMUM_SIZE;
private Dimension maximumSize = MAXIMUM_SIZE;
/**
* Convenience constructor. All borders are resizable in increments of a
* single pixel. Components must be registered separately.
*/
public ComponentResizer() {
this(new Insets(5, 5, 5, 5), new Dimension(1, 1));
}
/**
* Convenience constructor. All borders are resizable in increments of a
* single pixel. Components can be registered when the class is created or
* they can be registered separately afterwards.
*
* @param components
* components to be automatically registered
*/
public ComponentResizer(Component... components) {
this(new Insets(5, 5, 5, 5), new Dimension(1, 1), components);
}
/**
* Convenience constructor. Eligible borders are resizable in increments of a
* single pixel. Components can be registered when the class is created or
* they can be registered separately afterwards.
*
* @param dragInsets
* Insets specifying which borders are eligible to be resized.
* @param components
* components to be automatically registered
*/
public ComponentResizer(Insets dragInsets, Component... components) {
this(dragInsets, new Dimension(1, 1), components);
}
/**
* Create a ComponentResizer.
*
* @param dragInsets
* Insets specifying which borders are eligible to be resized.
* @param snapSize
* Specify the dimension to which the border will snap when
* being dragged. Snapping occurs at the halfway mark.
* @param components
* components to be automatically registered
*/
public ComponentResizer(Insets dragInsets, Dimension snapSize,
Component... components) {
setDragInsets(dragInsets);
setSnapSize(snapSize);
registerComponent(components);
}
/**
* Get the drag insets
*
* @return the drag insets
*/
public Insets getDragInsets() {
return dragInsets;
}
/**
* Set the drag insets. The insets specify an area where mouseDragged
* events are recognized from the edge of the border inwards. A value of 0
* for any size will imply that the border is not resizable. Otherwise the
* appropriate drag cursor will appear when the mouse is inside the
* resizable border area.
*
* @param dragInsets
* Insets to control which borders are resizable.
*/
public void setDragInsets(Insets dragInsets) {
validateMinimumAndInsets(minimumSize, dragInsets);
this.dragInsets = dragInsets;
}
/**
* Get the component's maximum size.
*
* @return the maximum size
*/
public Dimension getMaximumSize() {
return maximumSize;
}
/**
* Specify the maximum size for the component. The component will still be
* constrained by the size of its parent.
*
* @param maximumSize
* the maximum size for a component.
*/
public void setMaximumSize(Dimension maximumSize) {
this.maximumSize = maximumSize;
}
/**
* Get the component's minimum size.
*
* @return the minimum size
*/
public Dimension getMinimumSize() {
return minimumSize;
}
/**
* Specify the minimum size for the component. The minimum size is
* constrained by the drag insets.
*
* @param minimumSize
* the minimum size for a component.
*/
public void setMinimumSize(Dimension minimumSize) {
validateMinimumAndInsets(minimumSize, dragInsets);
this.minimumSize = minimumSize;
}
/**
* Remove listeners from the specified component
*
* @param components
* the components the listeners are removed from
*/
public void deregisterComponent(Component... components) {
for (Component component : components) {
component.removeMouseListener(this);
component.removeMouseMotionListener(this);
}
}
/**
* Add the required listeners to the specified component
*
* @param components
* the components the listeners are added to
*/
public void registerComponent(Component... components) {
for (Component component : components) {
component.addMouseListener(this);
component.addMouseMotionListener(this);
}
}
/**
* Get the snap size.
*
* @return the snap size.
*/
public Dimension getSnapSize() {
return snapSize;
}
/**
* Control how many pixels a border must be dragged before the size of the
* component is changed. The border will snap to the size once dragging has
* passed the halfway mark.
*
* @param snapSize
* Dimension object allows you to separately specify a horizontal
* and vertical snap size.
*/
public void setSnapSize(Dimension snapSize) {
this.snapSize = snapSize;
}
/**
* When the component's minimum size is less than the drag insets, we
* can't determine which border should be resized, so we need to prevent this
* from happening.
*/
private void validateMinimumAndInsets(Dimension minimum, Insets drag) {
int minimumWidth = drag.left + drag.right;
int minimumHeight = drag.top + drag.bottom;
if (minimum.width < minimumWidth || minimum.height < minimumHeight) {
String message = "Minimum size cannot be less than drag insets";
throw new IllegalArgumentException(message);
}
}
/**
*/
@Override
public void mouseMoved(MouseEvent e) {
Component source = e.getComponent();
Point location = e.getPoint();
direction = 0;
if (location.x < dragInsets.left)
direction += WEST;
if (location.x > source.getWidth() - dragInsets.right - 1)
direction += EAST;
if (location.y < dragInsets.top)
direction += NORTH;
if (location.y > source.getHeight() - dragInsets.bottom - 1)
direction += SOUTH;
// Mouse is no longer over a resizable border
if (direction == 0) {
source.setCursor(sourceCursor);
} else // use the appropriate resizable cursor
{
int cursorType = cursors.get(direction);
Cursor cursor = Cursor.getPredefinedCursor(cursorType);
source.setCursor(cursor);
}
}
@Override
public void mouseEntered(MouseEvent e) {
if (!resizing) {
Component source = e.getComponent();
sourceCursor = source.getCursor();
}
}
@Override
public void mouseExited(MouseEvent e) {
if (!resizing) {
Component source = e.getComponent();
source.setCursor(sourceCursor);
}
}
@Override
public void mousePressed(MouseEvent e) {
// The mouseMoved event continually updates this variable
if (direction == 0)
return;
// Setup for resizing. All future dragging calculations are done based
// on the original bounds of the component and mouse pressed location.
resizing = true;
Component source = e.getComponent();
pressed = e.getPoint();
SwingUtilities.convertPointToScreen(pressed, source);
bounds = source.getBounds();
// Making sure autoscrolls is false will allow for smoother resizing
// of components
if (source instanceof JComponent) {
JComponent jc = (JComponent) source;
autoscrolls = jc.getAutoscrolls();
jc.setAutoscrolls(false);
}
}
/**
* Restore the original state of the Component
*/
@Override
public void mouseReleased(MouseEvent e) {
resizing = false;
Component source = e.getComponent();
source.setCursor(sourceCursor);
if (source instanceof JComponent) {
((JComponent) source).setAutoscrolls(autoscrolls);
}
}
/**
* Resize the component, ensuring the location and size are within the bounds
* of the parent container and that the size is within the minimum and maximum
* constraints.
*
* All calculations are done using the bounds of the component when the
* resizing started.
*/
@Override
public void mouseDragged(MouseEvent e) {
if (!resizing)
return;
Component source = e.getComponent();
Point dragged = e.getPoint();
SwingUtilities.convertPointToScreen(dragged, source);
changeBounds(source, direction, bounds, pressed, dragged);
}
protected void changeBounds(Component source, int direction,
Rectangle bounds, Point pressed, Point current) {
// Start with original location and size
int x = bounds.x;
int y = bounds.y;
int width = bounds.width;
int height = bounds.height;
// Resizing the West or North border affects the size and location
if (WEST == (direction & WEST)) {
int drag = getDragDistance(pressed.x, current.x, snapSize.width);
int maximum = Math.min(width + x, maximumSize.width);
drag = getDragBounded(drag, snapSize.width, width,
minimumSize.width, maximum);
x -= drag;
width += drag;
}
if (NORTH == (direction & NORTH)) {
int drag = getDragDistance(pressed.y, current.y, snapSize.height);
int maximum = Math.min(height + y, maximumSize.height);
drag = getDragBounded(drag, snapSize.height, height,
minimumSize.height, maximum);
y -= drag;
height += drag;
}
// Resizing the East or South border only affects the size
if (EAST == (direction & EAST)) {
int drag = getDragDistance(current.x, pressed.x, snapSize.width);
Dimension boundingSize = getBoundingSize(source);
int maximum = Math.min(boundingSize.width - x, maximumSize.width);
drag = getDragBounded(drag, snapSize.width, width,
minimumSize.width, maximum);
width += drag;
}
if (SOUTH == (direction & SOUTH)) {
int drag = getDragDistance(current.y, pressed.y, snapSize.height);
Dimension boundingSize = getBoundingSize(source);
int maximum = Math.min(boundingSize.height - y, maximumSize.height);
drag = getDragBounded(drag, snapSize.height, height,
minimumSize.height, maximum);
height += drag;
}
source.setBounds(x, y, width, height);
source.validate();
}
/*
* Determine how far the mouse has moved from where dragging started
*/
private int getDragDistance(int larger, int smaller, int snapSize) {
int halfway = snapSize / 2;
int drag = larger - smaller;
drag += (drag < 0) ? -halfway : halfway;
drag = (drag / snapSize) * snapSize;
return drag;
}
/*
* Adjust the drag value to be within the minimum and maximum range.
*/
private int getDragBounded(int drag, int snapSize, int dimension,
int minimum, int maximum) {
while (dimension + drag < minimum)
drag += snapSize;
while (dimension + drag > maximum)
drag -= snapSize;
return drag;
}
/*
* Keep the size of the component within the bounds of its parent.
*/
private Dimension getBoundingSize(Component source) {
if (source instanceof Window) {
GraphicsEnvironment env = GraphicsEnvironment
.getLocalGraphicsEnvironment();
Rectangle bounds = env.getMaximumWindowBounds();
return new Dimension(bounds.width, bounds.height);
} else {
return source.getParent().getSize();
}
}
}
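// Assumed usage sketch (based on the constructors above, not taken from jCapture itself):
// make an undecorated frame resizable by dragging its border.
import java.awt.Dimension;
import javax.swing.JFrame;
import com.hammurapi.jcapture.ComponentResizer;

public class ComponentResizerDemo {
    public static void main(String[] args) {
        JFrame frame = new JFrame();
        frame.setUndecorated(true);
        // Default insets: a 5-pixel area on every edge acts as a resize handle.
        ComponentResizer resizer = new ComponentResizer();
        resizer.registerComponent(frame);
        resizer.setMinimumSize(new Dimension(100, 80));
        frame.setSize(300, 200);
        frame.setLocationRelativeTo(null);
        frame.setVisible(true);
    }
}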

@@ -1,28 +0,0 @@
package com.hammurapi.jcapture;
import java.io.File;
import java.util.List;
import com.hammurapi.jcapture.VideoEncoder.Fragment;
class FragmentImpl implements Fragment {
private File audio;
private List<Frame> frames;
FragmentImpl( List<Frame> frames, File audio) {
this.audio = audio;
this.frames = frames;
}
@Override
public List<Frame> getFrames() {
return frames;
}
@Override
public File getAudio() {
return audio;
}
}

@@ -1,57 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Dimension;
import java.awt.Point;
import java.util.List;
import com.hammurapi.jcapture.VideoEncoder.Fragment.Frame;
class FrameImpl implements Frame {
private List<Shape> shapes;
private Point mousePointer;
private Dimension size;
private boolean isActive;
FrameImpl(List<Shape> shapes, Point mousePointer, Dimension size, boolean isActive) {
super();
this.shapes = shapes;
this.mousePointer = mousePointer;
this.size = size;
this.isActive = isActive;
}
/**
* Merges the preceding frame into this frame by incorporating its shapes.
* This method is used for merging deleted frames.
* @param frame
*/
void merge(Frame frame) {
for (Shape shape: shapes) {
if (shape.getContent().coversEverything()) {
return; // No need for previous shapes.
}
}
shapes.addAll(0, frame.getShapes());
}
@Override
public List<Shape> getShapes() {
return shapes;
}
@Override
public Point getMousePointer() {
return mousePointer;
}
@Override
public Dimension getSize() {
return size;
}
@Override
public boolean isActive() {
return isActive;
}
}

@@ -1,21 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Frame;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
import java.awt.GraphicsDevice.WindowTranslucency;
public class GraphicsDeviceTranslucener extends Translucener {
@Override
protected void makeTranslucent(Frame frame) {
GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
GraphicsDevice gd = ge.getDefaultScreenDevice();
//Only apply the opacity if translucent windows are supported.
if (gd.isWindowTranslucencySupported(WindowTranslucency.TRANSLUCENT)) {
frame.setOpacity(0.7f);
}
}
}

@@ -1,80 +0,0 @@
/*
* This file is an adapted example from javareference.com.
* For more information visit
* http://www.javareference.com
*/
package com.hammurapi.jcapture;
import java.awt.Dimension;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Image;
import javax.swing.JComponent;
import javax.swing.JToolTip;
import javax.swing.SwingUtilities;
import javax.swing.plaf.metal.MetalToolTipUI;
/**
* This class extends JToolTip and sets the UI to ImageToolTipUI.
*
* @author Rahul Sapkal(rahul@javareference.com)
*/
public class ImageToolTip extends JToolTip {
/**
* This class extends MetalToolTipUI and customizes it to draw a
* given image on it.
*
* @author Rahul Sapkal(rahul@javareference.com)
*/
private class ImageToolTipUI extends MetalToolTipUI {
private Image m_image;
public ImageToolTipUI(Image image) {
m_image = image;
}
/**
* This method is overridden from MetalToolTipUI to draw the given
* image and text.
*/
public void paint(Graphics g, JComponent c) {
FontMetrics metrics = c.getFontMetrics(g.getFont());
g.setColor(c.getForeground());
g.drawString(((ImageToolTip) c).text, 3, 15);
g.drawImage(m_image, 3, metrics.getHeight() + 3, c);
}
/**
* This method is overridden from MetalToolTipUI to return the
* appropriate preferred size so the ToolTip shows both the text
* and image.
*/
public Dimension getPreferredSize(JComponent c) {
FontMetrics metrics = c.getFontMetrics(c.getFont());
String tipText = ((JToolTip) c).getTipText();
if (tipText == null) {
tipText = "";
}
int width = SwingUtilities.computeStringWidth(metrics, tipText);
int height = metrics.getHeight() + m_image.getHeight(c) + 6;
if (width < m_image.getWidth(c)) {
width = m_image.getWidth(c);
}
return new Dimension(width, height);
}
}
private String text;
public ImageToolTip(String text, Image image) {
this.text = text;
setUI(new ImageToolTipUI(image));
}
}
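// Assumed usage sketch (not from jCapture): a component returns the custom tooltip
// from createToolTip(); "preview.png" is a hypothetical image file.
import java.awt.Image;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JToolTip;
import com.hammurapi.jcapture.ImageToolTip;

public class ImageToolTipDemo {
    public static void main(String[] args) {
        final Image preview = new ImageIcon("preview.png").getImage();
        JButton button = new JButton("Hover me") {
            @Override
            public JToolTip createToolTip() {
                // Swing asks the component for its tooltip instance; return the image tooltip.
                JToolTip tip = new ImageToolTip("Preview:", preview);
                tip.setComponent(this);
                return tip;
            }
        };
        button.setToolTipText(" "); // non-null tip text is required for the tooltip to show
        JFrame frame = new JFrame();
        frame.add(button);
        frame.pack();
        frame.setVisible(true);
    }
}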

@@ -1,92 +0,0 @@
package com.hammurapi.jcapture;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.entity.mime.content.InputStreamBody;
import org.apache.http.entity.mime.content.StringBody;
public class JCaptureApplet extends AbstractCaptureApplet {
private static final String HTTPS_PREFIX = "https://";
protected HttpUriRequest createRequest(String fileName, InputStreamBody bin) throws Exception {
String uploadUrl = getParameter("uploadUrl");
if (uploadUrl==null || uploadUrl.trim().length()==0) {
String host = getParameter("host");
String dokuHost = host;
if (dokuHost.toLowerCase().startsWith(HTTPS_PREFIX)) {
if (dokuHost.lastIndexOf(":")<HTTPS_PREFIX.length()) { // No port number
dokuHost+=":443";
}
} else if (dokuHost.endsWith(":80")) {
dokuHost = dokuHost.substring(0, dokuHost.length()-3);
}
System.out.println("DokuHost: "+dokuHost);
String dokuBase = getDokuBase();
System.out.println("DokuBase: "+dokuBase);
StringBuilder uploadUrlBuilder = new StringBuilder(dokuHost);
if (dokuBase.startsWith(host)) {
dokuBase = dokuBase.substring(host.length());
}
uploadUrlBuilder.append(dokuBase);
uploadUrlBuilder.append("lib/exe/mediamanager.php");
uploadUrl = uploadUrlBuilder.toString();
}
System.out.println("Uploading to "+uploadUrl);
HttpPost httppost = new HttpPost(uploadUrl);
if (!httppost.containsHeader("Cookie")) {
httppost.setHeader("Cookie", getCookies());
}
httppost.setHeader("Pragma", "No-cache");
MultipartEntity reqEntity = new MultipartEntity();
String sectok = getParameter("sectok");
if (sectok!=null && sectok.trim().length()>0) {
reqEntity.addPart("sectok", new StringBody(sectok));
}
reqEntity.addPart("ow", new StringBody("1"));
String opaque = getParameter("opaque");
if (opaque!=null && opaque.trim().length()>0) {
reqEntity.addPart("opaque", new StringBody(opaque));
}
reqEntity.addPart("Filename", new StringBody(fileName));
int nsIdx = fileName.lastIndexOf(":");
String namespace;
if (nsIdx==-1) {
namespace = ":";
} else {
namespace = ":"+fileName.substring(0, nsIdx);
fileName = fileName.substring(nsIdx+1);
}
if (namespace!=null) {
reqEntity.addPart("ns", new StringBody(namespace));
}
reqEntity.addPart("Filedata", bin);
httppost.setEntity(reqEntity);
return httppost;
}
String getDokuBase() throws DecoderException {
return new String(Hex.decodeHex(getParameter("dokuBase").toCharArray()));
}
@Override
protected String bodyName(String fileName) {
return fileName.substring(fileName.lastIndexOf(":")+1);
}
}

@@ -1,121 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.ref.Reference;
import java.lang.ref.SoftReference;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileChannel.MapMode;
import java.util.zip.Adler32;
import javax.imageio.ImageIO;
/**
* A mapped image is softly kept in memory and is also written to a temporary file.
* If the image reference is cleared by the garbage collector, the image is reloaded from the file on demand.
* @author Pavel
*
*/
public class MappedImage {
private Reference<BufferedImage> imageRef;
private Reference<byte[]> imageBytesRef;
private MappedByteBuffer buffer;
private int height;
private int width;
private String format;
private long checksum;
private int bytesLength;
public MappedImage(final BufferedImage image, String format, FileChannel channel) throws IOException {
if (format==null) {
throw new NullPointerException("Format is null");
}
class HardReference extends SoftReference<BufferedImage> {
HardReference(BufferedImage referent) {
super(referent);
}
@Override
public BufferedImage get() {
return image;
}
}
imageRef = channel==null ? new HardReference(image) : new SoftReference<BufferedImage>(image);
width = image.getWidth();
height = image.getHeight();
this.format = format;
if (channel!=null) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ImageIO.write(imageRef.get(), format, baos);
baos.close();
byte[] imageBytes = baos.toByteArray();
Adler32 adler = new Adler32();
adler.update(imageBytes);
checksum = adler.getValue();
bytesLength = imageBytes.length;
imageBytesRef = new SoftReference<byte[]>(imageBytes);
synchronized (channel) {
long position = channel.position();
channel.write(ByteBuffer.wrap(imageBytes));
buffer = channel.map(MapMode.READ_ONLY, position, imageBytes.length);
}
}
}
public byte[] getImageBytes() throws IOException {
if (imageBytesRef==null) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ImageIO.write(imageRef.get(), format, baos);
return baos.toByteArray();
}
byte[] ret = imageBytesRef.get();
if (ret==null) {
buffer.load();
buffer.rewind();
ret = new byte[buffer.remaining()];
buffer.get(ret);
if (bytesLength != ret.length) {
throw new IllegalStateException("Invalid image bytes length, expected "+bytesLength+", got "+ret.length);
}
Adler32 adler = new Adler32();
adler.update(ret);
if (checksum != adler.getValue()) {
throw new IllegalStateException("Invalid image bytes checksum");
}
imageBytesRef = new SoftReference<byte[]>(ret);
}
return ret;
}
/**
* Reads from reference, if reference was cleared, loads from the mapped buffer.
* @return
* @throws IOException
*/
public BufferedImage getImage() throws IOException {
BufferedImage ret = imageRef.get();
if (ret==null) {
ret = ImageIO.read(new ByteArrayInputStream(getImageBytes()));
imageRef = new SoftReference<BufferedImage>(ret);
}
return ret;
}
public int getHeight() {
return height;
}
public int getWidth() {
return width;
}
}
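// Assumed usage sketch (not from jCapture): back a MappedImage with a temporary file
// channel and read it back; with a null channel the image is simply held in memory.
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import com.hammurapi.jcapture.MappedImage;

public class MappedImageDemo {
    public static void main(String[] args) throws Exception {
        File backing = File.createTempFile("jCaptureImagesDemo", ".tmp");
        backing.deleteOnExit();
        RandomAccessFile raf = new RandomAccessFile(backing, "rw");
        FileChannel channel = raf.getChannel();

        BufferedImage original = new BufferedImage(64, 48, BufferedImage.TYPE_INT_ARGB);
        // PNG bytes are appended to the channel; the decoded image is only softly referenced.
        MappedImage mapped = new MappedImage(original, "png", channel);

        // If the soft reference has been cleared, getImage() reloads the bytes from the
        // mapped buffer and verifies length and checksum before decoding.
        BufferedImage restored = mapped.getImage();
        System.out.println(restored.getWidth() + "x" + restored.getHeight()); // prints 64x48

        channel.close();
        raf.close();
    }
}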

@@ -1,55 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Dimension;
import java.io.Closeable;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.List;
public class Movie implements Closeable {
private float framesPerSecond;
private List<VideoEncoder.Fragment> fragments;
private Dimension frameDimension;
private Closeable imagesFileCloseable;
public Movie(Dimension frameDimension, float framesPerSecond, List<VideoEncoder.Fragment> fragments, Closeable imagesFileCloseable) {
super();
this.frameDimension = frameDimension;
this.framesPerSecond = framesPerSecond;
this.fragments = fragments;
this.imagesFileCloseable = imagesFileCloseable;
}
public List<VideoEncoder.Fragment> getFragments() {
return fragments;
}
public float getFramesPerSecond() {
return framesPerSecond;
}
public Dimension getFrameDimension() {
return frameDimension;
}
@Override
public String toString() {
int frames = 0;
for (VideoEncoder.Fragment f: fragments) {
frames+=f.getFrames().size();
}
long length = (long) (frames/framesPerSecond);
return MessageFormat.format("{0,number,00}:{1,number,00}:{2,number,00}, {3} frames", length/3600, (length/60) % 60, length % 60, frames);
}
@Override
public void close() throws IOException {
if (imagesFileCloseable!=null) {
imagesFileCloseable.close();
}
}
}
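// Worked example of the duration formatting in toString() above: fragments totalling
// 9000 frames at 10 frames per second give length = 900 seconds, i.e.
// 900/3600 = 0 hours, (900/60) % 60 = 15 minutes, 900 % 60 = 0 seconds,
// so the movie prints as "00:15:00" followed by the frame count.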

@@ -1,321 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.SwingWorker;
import javax.swing.WindowConstants;
import javax.swing.border.LineBorder;
import netscape.javascript.JSObject;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
public class RecordingControlsFrame extends javax.swing.JFrame {
private static final String RESUME_TEXT = "Resume";
private static final String PAUSE_TEXT = "Pause";
private JButton pauseButton;
private JButton cancelButton;
private JButton stopButton;
private ScreenRecorder screenRecorder;
private CaptureFrame captureFrame;
public RecordingControlsFrame(final CaptureFrame captureFrame, final JFrame[] borderFrames) {
super("jCapture recording");
setIconImage(captureFrame.getIconImage());
this.captureFrame = captureFrame;
setUndecorated(true);
setAlwaysOnTop(!getBounds().intersects(captureFrame.getBounds()));
addComponentListener(new ComponentListener() {
@Override
public void componentShown(ComponentEvent e) {
for (JFrame bf: borderFrames) {
if (bf!=null) {
bf.setVisible(true);
}
}
}
@Override
public void componentResized(ComponentEvent e) {
// TODO Auto-generated method stub
}
@Override
public void componentMoved(ComponentEvent e) {
// TODO Auto-generated method stub
}
@Override
public void componentHidden(ComponentEvent e) {
for (JFrame bf: borderFrames) {
if (bf!=null) {
bf.setVisible(false);
}
}
}
});
JPanel contentPanel = new JPanel();
contentPanel.setBorder(new LineBorder(new java.awt.Color(0, 0, 0), 1, false));
getContentPane().add(contentPanel, BorderLayout.CENTER);
GridBagLayout thisLayout = new GridBagLayout();
setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
thisLayout.rowWeights = new double[] { 0.0, 0.1, 0.0 };
thisLayout.rowHeights = new int[] { 7, 7, 7 };
thisLayout.columnWeights = new double[] { 0.0, 0.1, 0.0, 0.1, 0.0, 0.1, 0.0 };
thisLayout.columnWidths = new int[] { 7, 20, 7, 20, 7, 7, 7 };
contentPanel.setLayout(thisLayout);
pauseButton = new JButton();
contentPanel.add(pauseButton, new GridBagConstraints(1, 1, 1, 1, 0.0,
0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH,
new Insets(0, 0, 0, 0), 0, 0));
pauseButton.setText(PAUSE_TEXT);
pauseButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent evt) {
try {
if (PAUSE_TEXT.equals(pauseButton.getText())) {
screenRecorder.stop();
pauseButton.setText(RESUME_TEXT);
} else {
screenRecorder.start();
pauseButton.setText(PAUSE_TEXT);
}
} catch (Exception e) {
e.printStackTrace();
JOptionPane.showMessageDialog(RecordingControlsFrame.this,
e.toString(), "Error pausing/resuming recording",
JOptionPane.ERROR_MESSAGE);
}
}
});
stopButton = new JButton();
contentPanel.add(stopButton, new GridBagConstraints(3, 1, 1, 1, 0.0,
0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH,
new Insets(0, 0, 0, 0), 0, 0));
stopButton.setText("Stop");
stopButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent evt) {
pauseButton.setEnabled(false);
stopButton.setEnabled(false);
cancelButton.setEnabled(false);
SwingWorker<Movie, Long> task = new SwingWorker<Movie, Long>() {
@Override
protected Movie doInBackground() throws Exception {
screenRecorder.stop();
return screenRecorder.getMovie();
}
@Override
protected void done() {
try {
final Movie movie = get();
if (movie!=null) {
if (JOptionPane.showConfirmDialog(RecordingControlsFrame.this, "Would you like to edit the movie before uploading?", "Edit movie?", JOptionPane.YES_NO_OPTION)==JOptionPane.YES_OPTION) {
new MovieEditorDialog(
RecordingControlsFrame.this,
movie,
captureFrame.getCaptureConfig().getBackgroundProcessor(),
captureFrame.getCaptureConfig().getInactivityInterval(),
captureFrame.getCaptureConfig().getImageFormat());
} else {
uploadMovie(movie);
}
} else {
JOptionPane.showMessageDialog(
RecordingControlsFrame.this,
"Recording discarded",
"Saving recording",
JOptionPane.INFORMATION_MESSAGE);
RecordingControlsFrame.this.setVisible(false);
}
} catch (Exception e) {
e.printStackTrace();
JOptionPane.showMessageDialog(
RecordingControlsFrame.this, e.toString(),
"Error saving recording",
JOptionPane.ERROR_MESSAGE);
RecordingControlsFrame.this.setVisible(false);
}
}
};
task.execute();
}
});
cancelButton = new JButton();
contentPanel.add(cancelButton, new GridBagConstraints(5, 1, 1, 1, 0.0,
0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH,
new Insets(0, 0, 0, 0), 0, 0));
cancelButton.setText("Cancel");
cancelButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent evt) {
if (JOptionPane.showConfirmDialog(RecordingControlsFrame.this, "Are you sure you want to discard the recording?", "Confirm discarding movie", JOptionPane.YES_NO_OPTION)==JOptionPane.YES_OPTION) {
try {
screenRecorder.stop();
} catch (Exception e) {
e.printStackTrace();
JOptionPane.showMessageDialog(RecordingControlsFrame.this,
e.toString(), "Error cancelling recording",
JOptionPane.ERROR_MESSAGE);
} finally {
RecordingControlsFrame.this.setVisible(false);
captureFrame.setVisible(true);
}
}
}
});
pack();
this.setSize(301, 40);
captureFrame.getCaptureConfig().setParentComponent(this);
try {
screenRecorder = new ScreenRecorder(captureFrame.getCaptureConfig(), captureFrame.getApplet());
} catch (Exception e) {
e.printStackTrace();
JOptionPane.showMessageDialog(this, e.toString(), "Error starting recording", JOptionPane.ERROR_MESSAGE);
setVisible(false);
}
}
/**
* Asks for file name and uploads the movie.
* @param movie
*/
void uploadMovie(final Movie movie) {
try {
if (movie!=null) {
final String fileName = JOptionPane.showInputDialog(
RecordingControlsFrame.this,
"Upload as",
captureFrame.getApplet().getParameter("pageName")+
"-recording-"+
captureFrame.getDatePrefix()+
"-" + captureFrame.nextCounter() +"."+captureFrame.getCaptureConfig().getEncoder().getFileExtension());
if (fileName!=null) {
// Uploading
SwingWorker<Dimension, Long> task = new SwingWorker<Dimension, Long>() {
@Override
protected Dimension doInBackground() throws Exception {
File savedTo = null;
try {
// encode and upload
File tmpFile = File.createTempFile("jCaptureMovie", "."+captureFrame.getCaptureConfig().getEncoder().getFileExtension());
FileOutputStream out = new FileOutputStream(tmpFile);
Dimension dimension = captureFrame.getCaptureConfig().getEncoder().encode(captureFrame.getCaptureConfig(), movie, out);
if (dimension==null) {
return null;
}
out.close();
savedTo = tmpFile;
HttpResponse iResponse = captureFrame.getApplet().post(
RecordingControlsFrame.this,
new FileInputStream(tmpFile),
tmpFile.length(),
fileName,
"application/octet-stream");
if (iResponse!=null) {
System.out.println("Response status line: "+iResponse.getStatusLine());
if (iResponse.getStatusLine().getStatusCode()!=HttpStatus.SC_OK) {
errorMessage = iResponse.getStatusLine();
errorTitle = "Error saving recording";
return null;
}
}
if (!tmpFile.delete()) {
tmpFile.deleteOnExit();
}
return dimension;
} catch (Error e) {
errorMessage=e.toString();
if (savedTo!=null) {
errorMessage=errorMessage + ",\n recording was saved to "+savedTo.getAbsolutePath();
}
errorTitle = "Upload error";
e.printStackTrace();
return null;
}
}
private Object errorMessage;
private String errorTitle;
protected void done() {
try {
Dimension dimension = get();
if (dimension!=null) {
JSObject window = JSObject.getWindow(captureFrame.getApplet());
String toEval = "insertAtCarret('"+captureFrame.getApplet().getParameter("edid")+"','{{:"+fileName+"?"+dimension.width+"x"+dimension.height+"|}}')";
System.out.println("Evaluating: "+toEval);
window.eval(toEval);
} else {
JOptionPane.showMessageDialog(
RecordingControlsFrame.this,
errorMessage,
errorTitle,
JOptionPane.ERROR_MESSAGE);
}
} catch (Exception e) {
e.printStackTrace();
JOptionPane.showMessageDialog(
RecordingControlsFrame.this,
e.toString(),
"Exception",
JOptionPane.ERROR_MESSAGE);
}
};
};
task.execute();
}
} else {
JOptionPane.showMessageDialog(
RecordingControlsFrame.this,
"Recording discarded",
"Saving recording",
JOptionPane.INFORMATION_MESSAGE);
}
} finally {
RecordingControlsFrame.this.setVisible(false);
}
}
}

@@ -1,221 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Color;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.nio.channels.FileChannel;
public class Region extends Rectangle {
private static final int TRANSPARENT_COLOR = new Color(0,0,0,0).getRGB();
private BufferedImage master;
private BufferedImage prev;
private int grabRange;
private MappedImage image;
private boolean transparency;
public Region(BufferedImage master, String format, FileChannel channel, BufferedImage prev, boolean transparency, int x, int y, int grabRange) {
this.master = master;
this.format = format;
this.channel = channel;
this.prev = prev;
this.transparency = transparency;
this.grabRange = grabRange;
setBounds(x-grabRange, y-grabRange, grabRange*2+1, grabRange*2+1);
}
/**
* Special case when region covers the whole image.
* @param master
* @throws IOException
*/
public Region(MappedImage master) throws IOException {
this.image = master;
this.grabRange = 0;
imageLocation = new Point(0,0);
coversEverything = true;
setBounds(0,0,master.getWidth(),master.getHeight());
BufferedImage img = master.getImage();
for (int sx=0, sw=master.getWidth(); sx<sw; ++sx) {
for (int sy=0, sh=master.getHeight(); sy<sh; ++sy) {
imageHash ^= img.getRGB(sx, sy);
imageHash = Long.rotateRight(imageHash, 1);
}
}
}
private Point imageLocation;
private String format;
private FileChannel channel;
public void grabImage() throws IOException {
if (image==null) {
imageLocation = new Point(Math.max(0, x), Math.max(0, y));
int imageWidth = width;
int widthDelta = imageWidth+imageLocation.x - master.getWidth();
if (widthDelta>0) {
imageWidth-=widthDelta;
}
int imageHeight = height;
int heightDelta = imageHeight+imageLocation.y - master.getHeight();
if (heightDelta>0) {
imageHeight-=heightDelta;
}
BufferedImage bImage = new BufferedImage(imageWidth, imageHeight, BufferedImage.TYPE_INT_ARGB);
for (int x=0; x<imageWidth; ++x) {
for (int y=0; y<imageHeight; ++y) {
int xt = x + imageLocation.x;
int yt = y + imageLocation.y;
int newPixel = master.getRGB(xt, yt);
int oldPixel = prev.getRGB(xt, yt);
int pixelRGB = newPixel==oldPixel && transparency ? TRANSPARENT_COLOR : newPixel;
bImage.setRGB(x, y, pixelRGB);
imageHash ^= pixelRGB;
imageHash = Long.rotateRight(imageHash, 1);
}
}
image = new MappedImage(bImage, format, channel);
//For debugging
// Graphics2D ssg = image.createGraphics();
// ssg.setColor(java.awt.Color.GRAY);
// ssg.drawRect(0, 0, image.getWidth()-1, image.getHeight()-1);
// Make eligible for garbage collection.
master = null;
prev = null;
}
}
public MappedImage getImage() {
return image;
}
public Point getImageLocation() {
return imageLocation;
}
/**
* Adds a point and extends the region if the point is within grab range. Returns true if the point was added.
*/
boolean merge(int x, int y) {
if (image!=null) {
throw new IllegalStateException("Image already grabbed");
}
if (contains(x, y)) {
int newMinX = Math.min(x-grabRange, this.x);
int newMinY = Math.min(y-grabRange, this.y);
int newMaxX = Math.max(x+grabRange, this.x+this.width);
int newMaxY = Math.max(y+grabRange, this.y+this.height);
setBounds(newMinX, newMinY, newMaxX-newMinX, newMaxY-newMinY);
return true;
}
return false;
}
/**
*
* @param region
* @return True if region is within grab range and was merged.
*/
boolean merge(Region region) {
if (region==this) {
throw new IllegalArgumentException("Self-merge");
}
if (image!=null) {
throw new IllegalStateException("Image already grabbed");
}
if (intersects(region)) {
int newMinX = Math.min(region.x, this.x);
int newMinY = Math.min(region.y, this.y);
int newMaxX = Math.max(region.x+region.width, this.x+this.width);
int newMaxY = Math.max(region.y+region.height, this.y+this.height);
setBounds(newMinX, newMinY, newMaxX-newMinX, newMaxY-newMinY);
return true;
}
return false;
}
private Region masterImageRegion;
/**
* @return Region with the identical image.
*/
public Region getMasterImageRegion() {
return masterImageRegion;
}
/**
* Sets master region with identical image.
* @param masterImageRegion
*/
public void setMasterImageRegion(Region masterImageRegion) {
this.masterImageRegion = masterImageRegion;
while (this.masterImageRegion.getMasterImageRegion()!=null) {
this.masterImageRegion = this.masterImageRegion.getMasterImageRegion();
}
}
public boolean imageEquals(Region other) throws IOException {
if (image==null) {
throw new IllegalStateException("Image not grabbed");
}
MappedImage otherImage = other.getImage();
if (otherImage==null
|| imageHash!=other.imageHash
|| image.getHeight()!=otherImage.getHeight()
|| image.getWidth()!=otherImage.getWidth()) {
return false;
}
BufferedImage bImage = image.getImage();
BufferedImage oImage = otherImage.getImage();
for (int sx=0, sw=bImage.getWidth(); sx<sw; ++sx) {
for (int sy=0, sh=bImage.getHeight(); sy<sh; ++sy) {
if (bImage.getRGB(sx, sy)!=oImage.getRGB(sx, sy)) {
return false;
}
}
}
return true;
}
private long imageHash;
/**
* Analyzes whether this region's image is a duplicate of the argument region's image and, if so, sets the argument as its master.
* @param sr
* @return true if duplicate.
* @throws IOException
*/
public boolean dedup(Region sr) throws IOException {
if (imageEquals(sr)) {
setMasterImageRegion(sr);
image=null;
return true;
}
return false;
}
public boolean coversEverything() {
return coversEverything;
}
private boolean coversEverything;
}
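// Assumed usage sketch (in jCapture these regions are normally built by ScreenShot,
// which is not shown here): two regions over identical images deduplicate, so only
// one copy of the pixel data is kept.
import java.awt.image.BufferedImage;
import com.hammurapi.jcapture.MappedImage;
import com.hammurapi.jcapture.Region;

public class RegionDedupDemo {
    public static void main(String[] args) throws Exception {
        BufferedImage img = new BufferedImage(16, 16, BufferedImage.TYPE_INT_ARGB);
        // A null channel keeps the images in memory (no temporary file backing).
        Region a = new Region(new MappedImage(img, "png", null));
        Region b = new Region(new MappedImage(img, "png", null));
        // Identical hash, dimensions and pixels: b drops its image and points at a instead.
        System.out.println(b.dedup(a));                      // prints true
        System.out.println(b.getMasterImageRegion() == a);   // prints true
    }
}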

@@ -1,312 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Dimension;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Future;
import java.util.zip.DataFormatException;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.TargetDataLine;
import javax.swing.ProgressMonitor;
import com.hammurapi.jcapture.VideoEncoder.Fragment.Frame.Shape;
import com.hammurapi.jcapture.VideoEncoder.Fragment.Frame.Shape.Image;
import com.hammurapi.jcapture.VideoEncoder.Fragment.Frame.Shape.ShapeContent;
/**
* Records the screen into an SWF movie.
* @author Pavel Vlasov
*
*/
public class ScreenRecorder {
private CaptureConfig config;
private Closeable imagesFileCloseable;
class Fragment {
private ScreenShot first;
float getActualFps() {
return first.getFramesPerSecond();
}
private class AudioRecordingThread extends SafeThread {
public AudioRecordingThread() {
super("Audio recording thread");
}
@Override
protected void runInternal() throws Exception {
AudioSystem.write(new AudioInputStream(targetDataLine), AudioFileFormat.Type.WAVE, audioSink);
}
}
private class ScreenCapturingThread extends SafeThread {
public ScreenCapturingThread() {
super("Screen capturing thread");
}
@Override
protected void runInternal() throws Exception {
long start = System.currentTimeMillis();
ScreenShot screenShot = null;
for (int shot=0; !isDone; ++shot) {
long toSleep = (shot+1)*frameLength - (System.currentTimeMillis()-start);
if (toSleep>0) {
Thread.sleep(toSleep);
}
screenShot = config.createScreenShot(screenShot, imagesChannel);
if (first==null) {
first = screenShot;
}
screenshots.add(config.submit(screenShot));
}
System.out.println("Captured "+screenshots.size()+" screenshots");
}
}
public Fragment() throws Exception {
if (targetDataLine!=null) {
audioSink = File.createTempFile("jCaptureAudioSink", ".wav");
targetDataLine.start();
audioRecordingThread = new AudioRecordingThread();
audioRecordingThread.start();
}
screenCapturingThread = new ScreenCapturingThread();
screenCapturingThread.start();
}
File audioSink;
List<Future<ScreenShot>> screenshots = new ArrayList<Future<ScreenShot>>();
AudioRecordingThread audioRecordingThread;
ScreenCapturingThread screenCapturingThread;
volatile boolean isDone;
void stop() throws Exception {
if (targetDataLine!=null) {
targetDataLine.stop();
}
isDone = true;
if (audioRecordingThread!=null) {
audioRecordingThread.join();
}
screenCapturingThread.join();
if (screenCapturingThread.getException()!=null) {
throw screenCapturingThread.getException();
}
if (audioRecordingThread!=null && audioRecordingThread.getException()!=null) {
throw audioRecordingThread.getException();
}
}
}
LinkedList<Fragment> fragments = new LinkedList<Fragment>();
private FileChannel imagesChannel;
public ScreenRecorder(CaptureConfig config, AbstractCaptureApplet applet) throws Exception {
this.config = config;
final File imagesFile = File.createTempFile("jCaptureImages", ".tmp");
imagesFile.deleteOnExit();
final RandomAccessFile raf = new RandomAccessFile(imagesFile, "rw");
this.imagesChannel = raf.getChannel();
imagesFileCloseable = new Closeable() {
@Override
public void close() throws IOException {
imagesChannel.close();
raf.close();
if (!imagesFile.delete()) {
imagesFile.deleteOnExit();
}
}
};
applet.addCloseable(imagesFileCloseable);
if (config.isSound()) {
DataLine.Info info = new DataLine.Info(TargetDataLine.class, config.getAudioFormat());
Mixer mixer = null;
Mixer firstMixer = null;
for (Mixer.Info mi: AudioSystem.getMixerInfo()) {
Mixer mx = AudioSystem.getMixer(mi);
if (mx.isLineSupported(info)) {
if (firstMixer==null) {
firstMixer = mx;
}
if (config.getMixerName()==null || mi.getName().equals(config.getMixerName())) {
mixer = mx;
break;
}
}
}
if (mixer==null) {
mixer = firstMixer;
}
if (mixer!=null) {
targetDataLine = (TargetDataLine) mixer.getLine(info);
targetDataLine.open(config.getAudioFormat());
}
}
frameLength = (long) (1000.0/config.getFramesPerSecond());
start();
}
public synchronized void start() throws Exception {
fragments.add(new Fragment());
}
public void stop() throws Exception {
fragments.getLast().stop();
}
/**
 * Stops recording and assembles the captured fragments into a movie.
 * @return the assembled Movie, or null if encoding was cancelled from the progress monitor.
 * @throws Exception if stopping a capture thread or retrieving a screenshot fails.
 */
public Movie getMovie() throws Exception {
stop();
if (targetDataLine!=null) {
targetDataLine.close();
}
int totalWork = 3;
for (Fragment f: fragments) {
totalWork+=f.screenshots.size()+1;
}
Map<Region, Image> imageCache = new IdentityHashMap<Region, VideoEncoder.Fragment.Frame.Shape.Image>();
Dimension frameDimension = null;
ProgressMonitor progressMonitor = new ProgressMonitor(config.getParentComponent(), "Encoding video", "Preparing frames", 0, totalWork+4);
try {
int progressCounter = 0;
//In frames
int inactivityInterval = config.isRemoveInactivity() && !config.isSound() ? (int) (1000.0 * config.getInactivityInterval() / frameLength) : -1;
float fps = -1;
final List<VideoEncoder.Fragment> fragmentCollector = new ArrayList<VideoEncoder.Fragment>();
for (Fragment fragment: fragments) {
if (progressMonitor.isCanceled()) {
return null;
}
if (fps<0) {
fps = config.isSound() ? fragment.getActualFps() : config.getSpeedScale()*fragment.getActualFps();
}
progressMonitor.setProgress(++progressCounter);
int lastActivity = -1;
List<VideoEncoder.Fragment.Frame> framesCollector = new ArrayList<VideoEncoder.Fragment.Frame>();
for (Future<ScreenShot> sf: fragment.screenshots) {
if (progressMonitor.isCanceled()) {
return null;
}
ScreenShot screenShot = sf.get();
if (inactivityInterval<0 || screenShot.isActive() || screenShot.getSecNo()-lastActivity<inactivityInterval) {
List<Shape> frameShapes = new ArrayList<VideoEncoder.Fragment.Frame.Shape>();
for (Region region: screenShot.getRegions()) {
ShapeContent content;
if (region.getMasterImageRegion()==null) {
content = new ShapeImpl.ImageImpl(region.getImage(), region.coversEverything());
imageCache.put(region, (Image) content);
if (frameDimension==null && region.coversEverything()) {
frameDimension = region.getSize();
}
} else {
content = new ShapeImpl.ImageReferenceImpl(imageCache.get(region.getMasterImageRegion()));
}
frameShapes.add(new ShapeImpl(region.getImageLocation(), content));
}
framesCollector.add(new FrameImpl(frameShapes, screenShot.getMousePosition(), screenShot.getSize(), screenShot.isActive()));
} else {
progressMonitor.setProgress(++progressCounter); // Skipping frame, report progress here.
}
if (screenShot.isActive()) {
lastActivity = screenShot.getSecNo();
}
progressMonitor.setProgress(++progressCounter);
}
fragmentCollector.add(new FragmentImpl(Collections.unmodifiableList(framesCollector), fragment.audioSink));
}
return new Movie(frameDimension, fps, fragmentCollector, imagesFileCloseable);
} finally {
progressMonitor.close();
}
}
private static abstract class SafeThread extends Thread {
private Exception exception;
public SafeThread(String name) {
super(name);
}
@Override
public void run() {
try {
runInternal();
} catch (Exception e) {
this.exception = e;
e.printStackTrace();
}
}
protected abstract void runInternal() throws Exception;
public Exception getException() {
return exception;
}
}
long frameLength;
private TargetDataLine targetDataLine;
}
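The capture loop in ScreenCapturingThread schedules shot n against the recording start time, (shot+1)*frameLength, rather than against the previous shot, so per-frame processing overhead does not accumulate as timing drift. A minimal standalone sketch of the same scheduling arithmetic (the capture call is a placeholder; the snippet assumes it runs in a method that declares throws InterruptedException):

// Drift-free fixed-rate loop: frame n is due at start + (n+1)*frameLength milliseconds.
long frameLength = (long) (1000.0 / 10.0);   // e.g. 10 frames per second -> 100 ms per frame
long start = System.currentTimeMillis();
for (int shot = 0; shot < 50; ++shot) {
    long toSleep = (shot + 1) * frameLength - (System.currentTimeMillis() - start);
    if (toSleep > 0) {
        Thread.sleep(toSleep);               // sleep only if we are ahead of schedule
    }
    // grab and submit a screenshot here
}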

@@ -1,251 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.AlphaComposite;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import java.util.concurrent.Callable;
import javax.imageio.ImageIO;
public class ScreenShot implements Callable<ScreenShot> {
private final ScreenShot prev;
private final int secNo;
private ScreenShot next;
final private long timeStamp;
private int grabRange;
private boolean transparency;
private MappedImage image;
private Point mousePosition;
private double scale;
private boolean border;
private Dimension size;
private FileChannel imageChannel;
private String imageFormat;
public ScreenShot(
BufferedImage image,
Point mousePosition,
ScreenShot prev,
long timeStamp,
int grabRange,
boolean transparency,
boolean border,
double scale,
FileChannel imageChannel,
String imageFormat) throws IOException {
this.image = new MappedImage(image, imageFormat, imageChannel);
this.mousePosition = mousePosition;
this.prev = prev;
if (prev==null) {
secNo=0;
} else {
prev.next = this;
secNo = prev.secNo+1;
}
this.timeStamp = timeStamp;
this.grabRange = grabRange;
this.transparency = transparency;
this.scale = scale;
this.border = border;
this.imageChannel = imageChannel;
this.imageFormat = imageFormat;
}
public Point getMousePosition() {
return mousePosition;
}
/**
 * Calculates the actual frames per second from the timestamps of this screenshot and its successors.
 * @return measured frames per second, or -1 if there is no following screenshot to measure against.
 */
public float getFramesPerSecond() {
long start = timeStamp;
long end = 0;
int length = 0;
for (ScreenShot sibling = next; sibling!=null; sibling=sibling.next) {
++length;
end = sibling.timeStamp;
}
if (length==0) {
return -1; // No way to tell.
}
return (float) (length * 1000.0)/(end - start);
}
private List<Region> regions;
private long totalPixels;
private long differentPixels;
public double getDiffLevel() {
return (double) differentPixels/(double) totalPixels;
}
/**
 * If the fraction of changed pixels exceeds diffThreshold, the
 * entire screenshot is stored instead of individual changed regions.
 */
private double diffThreshold = 0.7;
/**
* Performs processing and returns self.
* Screenshot is structured as Callable to simplify live processing in a background thread.
*/
@Override
public ScreenShot call() throws Exception {
BufferedImage img = image.getImage();
// Only scale when the factor differs from 1.0 by more than 1%.
if (scale<0.99 || scale > 1.01) {
BufferedImage scaled = new BufferedImage((int) (img.getWidth()*scale), (int) (img.getHeight()*scale), img.getType());
Graphics2D g = scaled.createGraphics();
g.setComposite(AlphaComposite.Src);
g.setRenderingHint(RenderingHints.KEY_INTERPOLATION,RenderingHints.VALUE_INTERPOLATION_BILINEAR);
g.setRenderingHint(RenderingHints.KEY_RENDERING,RenderingHints.VALUE_RENDER_QUALITY);
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING,RenderingHints.VALUE_ANTIALIAS_ON);
g.drawImage(img, 0, 0, scaled.getWidth(), scaled.getHeight(), null);
g.dispose();
img = scaled;
if (mousePosition!=null) {
mousePosition = new Point((int) (mousePosition.x*scale), (int) (mousePosition.y*scale));
}
}
if (border) {
Graphics2D ssg = img.createGraphics();
ssg.setColor(java.awt.Color.GRAY);
ssg.drawRect(0, 0, img.getWidth()-1, img.getHeight()-1);
}
size = new Dimension(image.getWidth(), image.getHeight());
regions = new ArrayList<Region>();
if (prev==null) {
regions.add(new Region(image));
} else {
BufferedImage pimg = prev.image.getImage();
for (int x=0, w=img.getWidth(); x<w; ++x) {
Y: for (int y=0, h=img.getHeight(); y<h; ++y) {
++totalPixels;
int newPixel = img.getRGB(x, y);
int oldPixel = pimg.getRGB(x, y);
if (newPixel!=oldPixel) {
++differentPixels;
for (Region region: regions) {
if (region.merge(x, y)) {
continue Y;
}
}
regions.add(new Region(img, imageFormat, imageChannel, pimg, transparency, x, y, grabRange));
}
}
}
if (getDiffLevel()>diffThreshold) {
regions.clear();
regions.add(new Region(image));
} else {
// Merging adjacent regions
for (int i=0; i<regions.size()-1; ++i) {
ListIterator<Region> lit = regions.listIterator(i+1);
Region master = regions.get(i);
while (lit.hasNext()) {
if (master.merge(lit.next())) {
lit.remove();
}
}
}
for (Region region: regions) {
region.grabImage();
}
}
// Eligible for garbage collection
if (prev!=null) {
prev.image=null;
}
}
// De-dup
ListIterator<Region> oit = regions.listIterator();
R: while (oit.hasNext()) {
Region or = oit.next();
if (oit.hasPrevious()) {
ListIterator<Region> iit = regions.listIterator(oit.previousIndex());
while (iit.hasPrevious()) {
if (or.dedup(iit.previous())) {
continue R;
}
}
}
for (ScreenShot sibling=prev; sibling!=null; sibling=sibling.prev) {
for (Region sr: sibling.regions) {
if (or.dedup(sr)) {
continue R;
}
}
}
}
return this;
}
public void dump(File dir, String imageFormat) throws IOException {
for (int i=0; i<regions.size(); ++i) {
BufferedImage img = regions.get(i).getImage().getImage();
if (img!=null) {
ImageIO.write(img, imageFormat, new File(dir, "s_"+secNo+"_"+i+"."+imageFormat));
}
}
}
public List<Region> getRegions() {
return regions;
}
public int getSecNo() {
return secNo;
}
public boolean isActive() {
if (!regions.isEmpty()) {
return true;
}
if (mousePosition==null) {
if (prev==null) {
return false;
}
if (prev.getMousePosition()!=null) {
return true;
}
return false;
}
if (prev==null) {
return true;
}
if (!mousePosition.equals(prev.getMousePosition())) {
return true;
}
return false;
}
public Dimension getSize() {
return size;
}
}
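The diffThreshold heuristic in call() trades many small changed regions against a single full-frame region: once a large share of pixels has changed, region bookkeeping costs more than re-encoding the whole frame. A quick numeric check of that cut-over, using the same ratio getDiffLevel() computes (the numbers are illustrative only):

// For a 1280x720 capture:
long totalPixels = 1280L * 720L;                   // 921,600 pixels compared
long differentPixels = 700_000L;                   // pixels that changed since the previous shot
double diffLevel = (double) differentPixels / (double) totalPixels;  // ~0.76
boolean storeFullFrame = diffLevel > 0.7;          // true: replace the region list with one full-frame region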

@@ -1,75 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Point;
import com.hammurapi.jcapture.VideoEncoder.Fragment.Frame.Shape;
class ShapeImpl implements Shape {
static class ImageImpl implements Image {
MappedImage image;
private boolean coversEverything;
ImageImpl(MappedImage image, boolean coversEverything) {
super();
this.image = image;
this.coversEverything = coversEverything;
}
@Override
public boolean coversEverything() {
return coversEverything;
}
@Override
public MappedImage getImage() {
return image;
}
}
static class ImageReferenceImpl implements ImageReference {
private Image image;
ImageReferenceImpl(Image image) {
super();
if (image==null) {
throw new NullPointerException();
}
this.image = image;
}
@Override
public boolean coversEverything() {
return image.coversEverything();
}
@Override
public Image getImage() {
return image;
}
}
private Point location;
private ShapeContent content;
ShapeImpl(Point location, ShapeContent content) {
super();
this.location = location;
this.content = content;
}
@Override
public Point getLocation() {
return location;
}
@Override
public ShapeContent getContent() {
return content;
}
}

@@ -1,353 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.AlphaComposite;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.OutputStream;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import javax.imageio.ImageIO;
import javax.swing.JOptionPane;
import javax.swing.ProgressMonitor;
import com.flagstone.transform.Background;
import com.flagstone.transform.DefineTag;
import com.flagstone.transform.DoAction;
import com.flagstone.transform.Movie;
import com.flagstone.transform.MovieHeader;
import com.flagstone.transform.MovieTag;
import com.flagstone.transform.Place2;
import com.flagstone.transform.Remove;
import com.flagstone.transform.Remove2;
import com.flagstone.transform.ShowFrame;
import com.flagstone.transform.action.Action;
import com.flagstone.transform.action.BasicAction;
import com.flagstone.transform.coder.Coder;
import com.flagstone.transform.datatype.Bounds;
import com.flagstone.transform.datatype.CoordTransform;
import com.flagstone.transform.datatype.WebPalette;
import com.flagstone.transform.image.ImageTag;
import com.flagstone.transform.util.image.ImageDecoder;
import com.flagstone.transform.util.image.ImageRegistry;
import com.flagstone.transform.util.image.ImageShape;
import com.flagstone.transform.util.shape.Canvas;
import com.flagstone.transform.util.sound.SoundFactory;
import com.hammurapi.jcapture.VideoEncoder.Fragment.Frame;
import com.hammurapi.jcapture.VideoEncoder.Fragment.Frame.Shape;
import com.hammurapi.jcapture.VideoEncoder.Fragment.Frame.Shape.Image;
import com.hammurapi.jcapture.VideoEncoder.Fragment.Frame.Shape.ImageReference;
import com.hammurapi.jcapture.VideoEncoder.Fragment.Frame.Shape.ShapeContent;
public class SwfEncoder implements VideoEncoder {
@Override
public String getFileExtension() {
return "swf";
}
@Override
public String getMimeType() {
return "application/x-shockwave-flash";
}
@Override
public String toString() {
return "SWF";
}
@Override
public Dimension encode(Config config,
com.hammurapi.jcapture.Movie source,
OutputStream out)
throws Exception {
AtomicInteger uid = new AtomicInteger();
/**
* For reusing shape id's.
*/
int maxId = Coder.USHORT_MAX;
ButtonManager manager = null;
if (config.isToolBar()) {
manager = new ButtonManager();
manager.loadLibrary(getClass().getResource("toolbar_buttons.swf"));
uid.set(manager.maxIdentifier()+1);
}
Canvas path = new Canvas();
path.setPixels(false);
int minImgLayer = 10;
int imgLayer = minImgLayer;
int maxImgLayer = maxId - 1000;
int mouseLayer = maxImgLayer+1;
int mouseUid = -1;
Place2 mousePlace = null;
ImageTag mouseImage = null;
int layer = maxImgLayer+2;
int totalWork = 0;
for (Fragment frg: source.getFragments()) {
totalWork += frg.getFrames().size()+1;
}
ProgressMonitor progressMonitor = new ProgressMonitor(config.getParentComponent(), "Encoding to SWF", "Composing movie", 0, totalWork);
int progressCounter = 0;
progressMonitor.setNote("Composing movie");
boolean firstFrame = true;
Dimension ret = null;
Map<Image, ImageTag> imageCache = new IdentityHashMap<Image, ImageTag>();
Movie movie = new Movie();
Point prevMouseLocation = null;
int frameNo = 0;
for (Fragment fragment: source.getFragments()) {
SoundFactory soundFactory = null;
boolean soundHeaderAdded = false;
File audio = fragment.getAudio();
if (audio!=null) {
progressMonitor.setNote("Loading sound");
soundFactory = new SoundFactory();
// MP3 conversion
if (config.getMp3command()!=null && config.getMp3command().trim().length()>0) {
audio = new File(audio.getAbsolutePath()+".mp3");
Runtime runtime = Runtime.getRuntime();
Process proc = runtime.exec(MessageFormat.format(config.getMp3command(), new Object[] {fragment.getAudio().getAbsolutePath(), audio.getAbsolutePath()}));
proc.waitFor();
if (!fragment.getAudio().delete()) {
fragment.getAudio().deleteOnExit();
}
}
soundFactory.read(audio);
}
progressMonitor.setProgress(++progressCounter);
if (progressMonitor.isCanceled()) {
return null;
}
for (Frame frame: fragment.getFrames()) {
if (progressMonitor.isCanceled()) {
return null;
}
boolean addStop = false;
++frameNo;
if (firstFrame) {
firstFrame = false;
MovieHeader header = new MovieHeader();
header.setCompressed(true);
header.setFrameRate(source.getFramesPerSecond());
int toolbarHeight = 29 * 20;
int toolbarWidth = 495 * 20;
int toolbarX = 0; // - image.getWidth()*20/2;
int toolbarY = frame.getSize().height*20;
int movieWidth = frame.getSize().width*20;
int movieHeight = frame.getSize().height*20;
if (config.isToolBar()) {
movieHeight+=toolbarHeight;
}
ret = new Dimension(movieWidth/20, movieHeight/20);
float toolbarScaleX = (float) movieWidth / (float) toolbarWidth;
float toolbarScaleY = 1.0f;
Bounds movieBounds = new Bounds(0, 0, movieWidth, movieHeight);
header.setFrameSize(movieBounds);
movie.add(header);
movie.add(new Background(WebPalette.WHITE.color()));
if (config.isToolBar()) {
// Add all the shapes etc used for buttons
List<DefineTag> toolbarDefinitions = manager.getDefinitions();
movie.getObjects().addAll(toolbarDefinitions);
Place2 placeBackground = manager.getButton("background", layer++, 0, 0);
placeBackground.setTransform(new CoordTransform(toolbarScaleX, toolbarScaleY, 0, 0, toolbarX, toolbarY));
// Get the button to use and give its position
movie.add(placeBackground);
movie.add(manager.getButton("play_button", layer++, toolbarX + 500, toolbarY + toolbarHeight / 2));
movie.add(manager.getButton("progress_bar", layer++, toolbarX + 1000, toolbarY + toolbarHeight / 2));
movie.add(manager.getButton("volume_control", layer++, toolbarX + 5600, toolbarY + toolbarHeight / 2));
if (!config.isPlay()) {
addStop = true;
}
}
}
if (!soundHeaderAdded && soundFactory!=null) {
movie.add(soundFactory.streamHeader(source.getFramesPerSecond()));
soundHeaderAdded = true;
}
if (soundFactory!=null) {
MovieTag soundBlock = soundFactory.streamSound();
if (soundBlock != null) {
movie.add(soundBlock);
}
}
for (Shape shape: frame.getShapes()) {
if (shape.getContent().coversEverything() || imgLayer==maxImgLayer) {
for (int i=minImgLayer; i<=imgLayer; ++i) {
movie.add(new Remove2(i));
}
imgLayer = minImgLayer;
}
ShapeContent shapeContent = shape.getContent();
Image image;
if (shapeContent instanceof Image) {
image = (Image) shapeContent;
} else if (shape.getContent() instanceof ImageReference) {
image = ((ImageReference) shapeContent).getImage();
} else {
throw new IllegalArgumentException("Unexpected content type: "+shapeContent);
}
ImageTag imageTag = imageCache.get(image);
if (imageTag==null) {
try {
ImageDecoder decoder = ImageRegistry.getImageProvider("image/"+config.getImageFormat().toLowerCase());
decoder.read(new ByteArrayInputStream(image.getImage().getImageBytes()));
imageTag = decoder.defineImage(uid.incrementAndGet());
imageCache.put(image, imageTag);
movie.add(imageTag);
} catch (Exception e) {
// Doing our best to create movie, even with flaws.
System.err.println("Error encoding image at frame "+frameNo+": "+e);
e.printStackTrace();
if (JOptionPane.showConfirmDialog(config.getParentComponent(),
"Error encoding image ("+image.getImage().getWidth()+"*"+image.getImage().getHeight()+") at frame "+frameNo+": "+e+". Continue encoding?",
"Encoding error",
JOptionPane.YES_NO_OPTION,
JOptionPane.ERROR_MESSAGE)==JOptionPane.NO_OPTION) {
throw e;
}
}
}
int shapeId = uid.incrementAndGet();
DefineTag shapeTag = new ImageShape().defineShape(shapeId, imageTag);
Place2 place = Place2.show(shapeTag.getIdentifier(), imgLayer++, shape.getLocation().x*20, shape.getLocation().y*20);
movie.add(shapeTag);
movie.add(place);
}
Point mouseLocation = frame.getMousePointer();
if (mouseLocation!=null) {
if (mouseImage==null) {
BufferedImage mouseBi = ImageIO.read(getClass().getResource("mouse.png"));
if (config.getScreenScale()<0.99 || config.getScreenScale() > 1.01) {
BufferedImage scaled = new BufferedImage((int) (mouseBi.getWidth()*config.getScreenScale()), (int) (mouseBi.getHeight()*config.getScreenScale()), mouseBi.getType());
Graphics2D g = scaled.createGraphics();
g.setComposite(AlphaComposite.Src);
g.setRenderingHint(RenderingHints.KEY_INTERPOLATION,RenderingHints.VALUE_INTERPOLATION_BILINEAR);
g.setRenderingHint(RenderingHints.KEY_RENDERING,RenderingHints.VALUE_RENDER_QUALITY);
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING,RenderingHints.VALUE_ANTIALIAS_ON);
g.drawImage(mouseBi, 0, 0, scaled.getWidth(), scaled.getHeight(), null);
g.dispose();
mouseBi = scaled;
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ImageIO.write(mouseBi, "PNG", baos);
baos.close();
ImageDecoder decoder = ImageRegistry.getImageProvider("image/png");
decoder.read(new ByteArrayInputStream(baos.toByteArray()));
mouseImage = decoder.defineImage(uid.incrementAndGet());
movie.add(mouseImage);
}
if (!mouseLocation.equals(prevMouseLocation)) {
prevMouseLocation = mouseLocation;
mouseUid = uid.incrementAndGet();
DefineTag mShape = new ImageShape().defineShape(uid.incrementAndGet(), mouseImage); //createRect(mouseUid, 100, 100, WebPalette.RED.color());
if (mousePlace==null) {
mousePlace = Place2.show(mShape.getIdentifier(), mouseLayer, mouseLocation.x*20, mouseLocation.y*20);
} else {
mousePlace = Place2.replace(mShape.getIdentifier(), mouseLayer, mouseLocation.x*20, mouseLocation.y*20);
}
movie.add(mShape);
movie.add(mousePlace);
}
} else if (mouseUid!=-1) {
Remove remove = new Remove(mouseUid, mouseLayer);
movie.add(remove);
}
if (addStop) {
DoAction cmd = new DoAction(new ArrayList<Action>());
cmd.add(BasicAction.STOP);
movie.add(cmd);
}
movie.add(ShowFrame.getInstance());
progressMonitor.setProgress(++progressCounter);
}
progressMonitor.setProgress(++progressCounter);
if (soundFactory!=null) {
progressMonitor.setNote("Recording trailing sound");
MovieTag block;
while ((block = soundFactory.streamSound()) != null) {
movie.add(block);
movie.add(ShowFrame.getInstance());
}
}
if (audio!=null) {
if (!audio.delete()) {
audio.deleteOnExit();
}
}
}
if (!config.isLoop()) {
List<Action> actions = new ArrayList<Action>();
actions.add(BasicAction.STOP);
actions.add(BasicAction.END);
DoAction doAction = new DoAction(actions);
movie.add(doAction);
movie.add(ShowFrame.getInstance());
}
progressMonitor.setProgress(++progressCounter);
progressMonitor.setNote("Encoding movie");
movie.encodeToStream(out);
source.close();
return ret;
}
}
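Throughout encode() coordinates and dimensions are multiplied by 20 because SWF tags measure positions and bounds in twips, twentieths of a pixel; the toolbar constants 29 * 20 and 495 * 20 follow the same convention. A hypothetical helper that makes the conversion explicit (not part of the original code):

// SWF coordinates are expressed in twips: 1 pixel = 20 twips.
static int toTwips(int pixels) {
    return pixels * 20;
}

// e.g. Place2.show(shapeTag.getIdentifier(), imgLayer, toTwips(shape.getLocation().x), toTwips(shape.getLocation().y));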

@@ -1,17 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Frame;
abstract class Translucener {
protected abstract void makeTranslucent(Frame frame);
static void makeFrameTranslucent(Frame frame) throws Exception {
String jVersion = System.getProperty("java.version");
if (jVersion==null || "1.6".equals(jVersion) || jVersion.startsWith("1.6.")) {
((Translucener) Class.forName("com.hammurapi.jcapture.AWTUtilitiesTranslucener").newInstance()).makeTranslucent(frame);
} else {
((Translucener) Class.forName("com.hammurapi.jcapture.GraphicsDeviceTranslucener").newInstance()).makeTranslucent(frame);
}
}
}

@@ -1,164 +0,0 @@
package com.hammurapi.jcapture;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Point;
import java.io.File;
import java.io.OutputStream;
import java.util.List;
/**
* This is a service interface to be implemented by video encoders.
* jCapture discovers encoders using java.util.ServiceLoader.
*
* All interfaces used by this interface are defined as nested types for easy reference.
* @author Pavel
*
*/
public interface VideoEncoder {
interface Config {
/**
*
* @return true if encoder shall add a toolbar to the movie.
*/
boolean isToolBar();
/**
*
* @return true if movie shall be played in a loop.
*/
boolean isLoop();
/**
*
* @return true if movie shall start playing after downloading.
*/
boolean isPlay();
/**
* @return For scaling mouse pointer.
*/
double getScreenScale();
/**
* For progress monitor.
* @return
*/
Component getParentComponent();
String getImageFormat();
/**
* @return OS command to convert WAV to MP3 if encoder requires/benefits from it.
*/
String getMp3command();
}
/**
* Movie fragment is a collection of frames with associated audio.
* @author Pavel
*
*/
interface Fragment {
/**
* Frame contains zero or more shapes and mouse location.
* @author Pavel
*
*/
interface Frame {
boolean isActive();
/**
* Image shape to be placed on the screen.
* @author Pavel
*
*/
interface Shape {
/**
* Base interface for shape content.
* @author Pavel
*
*/
interface ShapeContent {
/**
* @return true if this shape covers the entire screen area.
*/
boolean coversEverything();
}
interface Image extends ShapeContent {
MappedImage getImage();
}
/**
* References already defined image.
* @author Pavel
*
*/
interface ImageReference extends ShapeContent {
Image getImage();
}
Point getLocation();
ShapeContent getContent();
}
/**
* Frame's shapes.
* @return
*/
List<Shape> getShapes();
Point getMousePointer();
Dimension getSize();
}
/**
* Fragment frames.
* @return
*/
List<Frame> getFrames();
/**
* Audio file (WAV).
* @return
*/
File getAudio();
}
String getFileExtension();
String getMimeType();
/**
* This method shall return output format name, e.g. SWF.
* @return
*/
String toString();
/**
 * Encodes the movie to the output stream. Implementations typically report progress
 * through a monitor parented on the config's parent component, with work allocated for
 * each frame plus one unit per fragment for sound decoding and one unit for final encoding.
 * @param config Encoder configuration.
 * @param movie Movie to encode.
 * @param out Output stream.
 * @return movie size or null if the operation was cancelled.
 */
Dimension encode(Config config, Movie movie, OutputStream out) throws Exception;
}
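As the class comment notes, encoders are discovered through java.util.ServiceLoader, so an implementation is registered by listing its fully qualified class name in META-INF/services/com.hammurapi.jcapture.VideoEncoder. A minimal sketch of how a caller might enumerate the registered encoders (the surrounding method is hypothetical):

// Enumerates every VideoEncoder registered on the classpath via the ServiceLoader mechanism.
for (VideoEncoder encoder : java.util.ServiceLoader.load(VideoEncoder.class)) {
    System.out.println(encoder + " -> *." + encoder.getFileExtension() + " (" + encoder.getMimeType() + ")");
}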

@@ -1,787 +0,0 @@
package com.hammurapi.jcapture;
// This file was taken from http://www.labbookpages.co.uk/audio/javaWavFiles.html
// Wav file IO class
// A.Greensted
// http://www.labbookpages.co.uk
// File format is based on the information from
// http://www.sonicspot.com/guide/wavefiles.html
// http://www.blitter.com/~russtopia/MIDI/~jglatt/tech/wave.htm
// Version 1.0
import java.io.*;
public class WavFile {
private enum IOState {
READING, WRITING, CLOSED
};
private final static int BUFFER_SIZE = 4096;
private final static int FMT_CHUNK_ID = 0x20746D66;
private final static int DATA_CHUNK_ID = 0x61746164;
private final static int RIFF_CHUNK_ID = 0x46464952;
private final static int RIFF_TYPE_ID = 0x45564157;
private File file; // File that will be read from or written to
private IOState ioState; // Specifies the IO State of the Wav File (used for
// sanity checking)
private int bytesPerSample; // Number of bytes required to store a single
// sample
private long numFrames; // Number of frames within the data section
private FileOutputStream oStream; // Output stream used for writing data
private FileInputStream iStream; // Input stream used for reading data
private double floatScale; // Scaling factor used for int <-> float
// conversion
private double floatOffset; // Offset factor used for int <-> float
// conversion
private boolean wordAlignAdjust; // Specify if an extra byte at the end of
// the data chunk is required for word
// alignment
// Wav Header
private int numChannels; // 2 bytes unsigned, 0x0001 (1) to 0xFFFF (65,535)
private long sampleRate; // 4 bytes unsigned, 0x00000001 (1) to 0xFFFFFFFF
// (4,294,967,295)
// Although a java int is 4 bytes, it is signed,
// so need to use a long
private int blockAlign; // 2 bytes unsigned, 0x0001 (1) to 0xFFFF (65,535)
private int validBits; // 2 bytes unsigned, 0x0002 (2) to 0xFFFF (65,535)
// Buffering
private byte[] buffer; // Local buffer used for IO
private int bufferPointer; // Points to the current position in local buffer
private int bytesRead; // Bytes read after last read into local buffer
private long frameCounter; // Current number of frames read or written
// Cannot instantiate WavFile directly, must either use newWavFile() or
// openWavFile()
private WavFile() {
buffer = new byte[BUFFER_SIZE];
}
public int getNumChannels() {
return numChannels;
}
public long getNumFrames() {
return numFrames;
}
public long getFramesRemaining() {
return numFrames - frameCounter;
}
public long getSampleRate() {
return sampleRate;
}
public int getValidBits() {
return validBits;
}
public static WavFile newWavFile(File file, int numChannels,
long numFrames, int validBits, long sampleRate) throws IOException,
WavFileException {
// Instantiate new Wavfile and initialise
WavFile wavFile = new WavFile();
wavFile.file = file;
wavFile.numChannels = numChannels;
wavFile.numFrames = numFrames;
wavFile.sampleRate = sampleRate;
wavFile.bytesPerSample = (validBits + 7) / 8;
wavFile.blockAlign = wavFile.bytesPerSample * numChannels;
wavFile.validBits = validBits;
// Sanity check arguments
if (numChannels < 1 || numChannels > 65535)
throw new WavFileException(
"Illegal number of channels, valid range 1 to 65536");
if (numFrames < 0)
throw new WavFileException("Number of frames must be positive");
if (validBits < 2 || validBits > 65535)
throw new WavFileException(
"Illegal number of valid bits, valid range 2 to 65536");
if (sampleRate < 0)
throw new WavFileException("Sample rate must be positive");
// Create output stream for writing data
wavFile.oStream = new FileOutputStream(file);
// Calculate the chunk sizes
long dataChunkSize = wavFile.blockAlign * numFrames;
long mainChunkSize = 4 + // Riff Type
8 + // Format ID and size
16 + // Format data
8 + // Data ID and size
dataChunkSize;
// Chunks must be word aligned, so if odd number of audio data bytes
// adjust the main chunk size
if (dataChunkSize % 2 == 1) {
mainChunkSize += 1;
wavFile.wordAlignAdjust = true;
} else {
wavFile.wordAlignAdjust = false;
}
// Set the main chunk size
putLE(RIFF_CHUNK_ID, wavFile.buffer, 0, 4);
putLE(mainChunkSize, wavFile.buffer, 4, 4);
putLE(RIFF_TYPE_ID, wavFile.buffer, 8, 4);
// Write out the header
wavFile.oStream.write(wavFile.buffer, 0, 12);
// Put format data in buffer
long averageBytesPerSecond = sampleRate * wavFile.blockAlign;
putLE(FMT_CHUNK_ID, wavFile.buffer, 0, 4); // Chunk ID
putLE(16, wavFile.buffer, 4, 4); // Chunk Data Size
putLE(1, wavFile.buffer, 8, 2); // Compression Code (Uncompressed)
putLE(numChannels, wavFile.buffer, 10, 2); // Number of channels
putLE(sampleRate, wavFile.buffer, 12, 4); // Sample Rate
putLE(averageBytesPerSecond, wavFile.buffer, 16, 4); // Average Bytes
// Per Second
putLE(wavFile.blockAlign, wavFile.buffer, 20, 2); // Block Align
putLE(validBits, wavFile.buffer, 22, 2); // Valid Bits
// Write Format Chunk
wavFile.oStream.write(wavFile.buffer, 0, 24);
// Start Data Chunk
putLE(DATA_CHUNK_ID, wavFile.buffer, 0, 4); // Chunk ID
putLE(dataChunkSize, wavFile.buffer, 4, 4); // Chunk Data Size
// Write Format Chunk
wavFile.oStream.write(wavFile.buffer, 0, 8);
// Calculate the scaling factor for converting to a normalised double
if (wavFile.validBits > 8) {
// If more than 8 validBits, data is signed
// Conversion required multiplying by magnitude of max positive
// value
wavFile.floatOffset = 0;
wavFile.floatScale = Long.MAX_VALUE >> (64 - wavFile.validBits);
} else {
// Else if 8 or less validBits, data is unsigned
// Conversion required dividing by max positive value
wavFile.floatOffset = 1;
wavFile.floatScale = 0.5 * ((1 << wavFile.validBits) - 1);
}
// Finally, set the IO State
wavFile.bufferPointer = 0;
wavFile.bytesRead = 0;
wavFile.frameCounter = 0;
wavFile.ioState = IOState.WRITING;
return wavFile;
}
public static WavFile openWavFile(File file) throws IOException,
WavFileException {
// Instantiate new Wavfile and store the file reference
WavFile wavFile = new WavFile();
wavFile.file = file;
// Create a new file input stream for reading file data
wavFile.iStream = new FileInputStream(file);
// Read the first 12 bytes of the file
int bytesRead = wavFile.iStream.read(wavFile.buffer, 0, 12);
if (bytesRead != 12)
throw new WavFileException("Not enough wav file bytes for header");
// Extract parts from the header
long riffChunkID = getLE(wavFile.buffer, 0, 4);
long chunkSize = getLE(wavFile.buffer, 4, 4);
long riffTypeID = getLE(wavFile.buffer, 8, 4);
// Check the header bytes contains the correct signature
if (riffChunkID != RIFF_CHUNK_ID)
throw new WavFileException(
"Invalid Wav Header data, incorrect riff chunk ID");
if (riffTypeID != RIFF_TYPE_ID)
throw new WavFileException(
"Invalid Wav Header data, incorrect riff type ID");
// Check that the file size matches the number of bytes listed in header
if (file.length() != chunkSize + 8) {
throw new WavFileException("Header chunk size (" + chunkSize
+ ") does not match file size (" + file.length() + ")");
}
boolean foundFormat = false;
boolean foundData = false;
// Search for the Format and Data Chunks
while (true) {
// Read the first 8 bytes of the chunk (ID and chunk size)
bytesRead = wavFile.iStream.read(wavFile.buffer, 0, 8);
if (bytesRead == -1)
throw new WavFileException(
"Reached end of file without finding format chunk");
if (bytesRead != 8)
throw new WavFileException("Could not read chunk header");
// Extract the chunk ID and Size
long chunkID = getLE(wavFile.buffer, 0, 4);
chunkSize = getLE(wavFile.buffer, 4, 4);
// Word align the chunk size
// chunkSize specifies the number of bytes holding data. However,
// the data should be word aligned (2 bytes) so we need to calculate
// the actual number of bytes in the chunk
long numChunkBytes = (chunkSize % 2 == 1) ? chunkSize + 1
: chunkSize;
if (chunkID == FMT_CHUNK_ID) {
// Flag that the format chunk has been found
foundFormat = true;
// Read in the header info
bytesRead = wavFile.iStream.read(wavFile.buffer, 0, 16);
// Check this is uncompressed data
int compressionCode = (int) getLE(wavFile.buffer, 0, 2);
if (compressionCode != 1)
throw new WavFileException("Compression Code "
+ compressionCode + " not supported");
// Extract the format information
wavFile.numChannels = (int) getLE(wavFile.buffer, 2, 2);
wavFile.sampleRate = getLE(wavFile.buffer, 4, 4);
wavFile.blockAlign = (int) getLE(wavFile.buffer, 12, 2);
wavFile.validBits = (int) getLE(wavFile.buffer, 14, 2);
if (wavFile.numChannels == 0)
throw new WavFileException(
"Number of channels specified in header is equal to zero");
if (wavFile.blockAlign == 0)
throw new WavFileException(
"Block Align specified in header is equal to zero");
if (wavFile.validBits < 2)
throw new WavFileException(
"Valid Bits specified in header is less than 2");
if (wavFile.validBits > 64)
throw new WavFileException(
"Valid Bits specified in header is greater than 64, this is greater than a long can hold");
// Calculate the number of bytes required to hold 1 sample
wavFile.bytesPerSample = (wavFile.validBits + 7) / 8;
if (wavFile.bytesPerSample * wavFile.numChannels != wavFile.blockAlign)
throw new WavFileException(
"Block Align does not agree with bytes required for validBits and number of channels");
// Account for number of format bytes and then skip over
// any extra format bytes
numChunkBytes -= 16;
if (numChunkBytes > 0)
wavFile.iStream.skip(numChunkBytes);
} else if (chunkID == DATA_CHUNK_ID) {
// Check if we've found the format chunk,
// If not, throw an exception as we need the format information
// before we can read the data chunk
if (foundFormat == false)
throw new WavFileException(
"Data chunk found before Format chunk");
// Check that the chunkSize (wav data length) is a multiple of
// the
// block align (bytes per frame)
if (chunkSize % wavFile.blockAlign != 0)
throw new WavFileException(
"Data Chunk size is not multiple of Block Align");
// Calculate the number of frames
wavFile.numFrames = chunkSize / wavFile.blockAlign;
// Flag that we've found the wave data chunk
foundData = true;
break;
} else {
// If an unknown chunk ID is found, just skip over the chunk
// data
wavFile.iStream.skip(numChunkBytes);
}
}
// Throw an exception if no data chunk has been found
if (foundData == false)
throw new WavFileException("Did not find a data chunk");
// Calculate the scaling factor for converting to a normalised double
if (wavFile.validBits > 8) {
// If more than 8 validBits, data is signed
// Conversion required dividing by magnitude of max negative value
wavFile.floatOffset = 0;
wavFile.floatScale = 1 << (wavFile.validBits - 1);
} else {
// Else if 8 or less validBits, data is unsigned
// Conversion required dividing by max positive value
wavFile.floatOffset = -1;
wavFile.floatScale = 0.5 * ((1 << wavFile.validBits) - 1);
}
wavFile.bufferPointer = 0;
wavFile.bytesRead = 0;
wavFile.frameCounter = 0;
wavFile.ioState = IOState.READING;
return wavFile;
}
// Get and Put little endian data from local buffer
// ------------------------------------------------
private static long getLE(byte[] buffer, int pos, int numBytes) {
numBytes--;
pos += numBytes;
long val = buffer[pos] & 0xFF;
for (int b = 0; b < numBytes; b++)
val = (val << 8) + (buffer[--pos] & 0xFF);
return val;
}
private static void putLE(long val, byte[] buffer, int pos, int numBytes) {
for (int b = 0; b < numBytes; b++) {
buffer[pos] = (byte) (val & 0xFF);
val >>= 8;
pos++;
}
}
// Sample Writing and Reading
// --------------------------
private void writeSample(long val) throws IOException {
for (int b = 0; b < bytesPerSample; b++) {
if (bufferPointer == BUFFER_SIZE) {
oStream.write(buffer, 0, BUFFER_SIZE);
bufferPointer = 0;
}
buffer[bufferPointer] = (byte) (val & 0xFF);
val >>= 8;
bufferPointer++;
}
}
private long readSample() throws IOException, WavFileException {
long val = 0;
for (int b = 0; b < bytesPerSample; b++) {
if (bufferPointer == bytesRead) {
int read = iStream.read(buffer, 0, BUFFER_SIZE);
if (read == -1)
throw new WavFileException("Not enough data available");
bytesRead = read;
bufferPointer = 0;
}
int v = buffer[bufferPointer];
if (b < bytesPerSample - 1 || bytesPerSample == 1)
v &= 0xFF;
val += v << (b * 8);
bufferPointer++;
}
return val;
}
// Integer
// -------
public int readFrames(int[] sampleBuffer, int numFramesToRead)
throws IOException, WavFileException {
return readFrames(sampleBuffer, 0, numFramesToRead);
}
public int readFrames(int[] sampleBuffer, int offset, int numFramesToRead)
throws IOException, WavFileException {
if (ioState != IOState.READING)
throw new IOException("Cannot read from WavFile instance");
for (int f = 0; f < numFramesToRead; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++) {
sampleBuffer[offset] = (int) readSample();
offset++;
}
frameCounter++;
}
return numFramesToRead;
}
public int readFrames(int[][] sampleBuffer, int numFramesToRead)
throws IOException, WavFileException {
return readFrames(sampleBuffer, 0, numFramesToRead);
}
public int readFrames(int[][] sampleBuffer, int offset, int numFramesToRead)
throws IOException, WavFileException {
if (ioState != IOState.READING)
throw new IOException("Cannot read from WavFile instance");
for (int f = 0; f < numFramesToRead; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++)
sampleBuffer[c][offset] = (int) readSample();
offset++;
frameCounter++;
}
return numFramesToRead;
}
public int writeFrames(int[] sampleBuffer, int numFramesToWrite)
throws IOException, WavFileException {
return writeFrames(sampleBuffer, 0, numFramesToWrite);
}
public int writeFrames(int[] sampleBuffer, int offset, int numFramesToWrite)
throws IOException, WavFileException {
if (ioState != IOState.WRITING)
throw new IOException("Cannot write to WavFile instance");
for (int f = 0; f < numFramesToWrite; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++) {
writeSample(sampleBuffer[offset]);
offset++;
}
frameCounter++;
}
return numFramesToWrite;
}
public int writeFrames(int[][] sampleBuffer, int numFramesToWrite)
throws IOException, WavFileException {
return writeFrames(sampleBuffer, 0, numFramesToWrite);
}
public int writeFrames(int[][] sampleBuffer, int offset,
int numFramesToWrite) throws IOException, WavFileException {
if (ioState != IOState.WRITING)
throw new IOException("Cannot write to WavFile instance");
for (int f = 0; f < numFramesToWrite; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++)
writeSample(sampleBuffer[c][offset]);
offset++;
frameCounter++;
}
return numFramesToWrite;
}
// Long
// ----
public int readFrames(long[] sampleBuffer, int numFramesToRead)
throws IOException, WavFileException {
return readFrames(sampleBuffer, 0, numFramesToRead);
}
public int readFrames(long[] sampleBuffer, int offset, int numFramesToRead)
throws IOException, WavFileException {
if (ioState != IOState.READING)
throw new IOException("Cannot read from WavFile instance");
for (int f = 0; f < numFramesToRead; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++) {
sampleBuffer[offset] = readSample();
offset++;
}
frameCounter++;
}
return numFramesToRead;
}
public int readFrames(long[][] sampleBuffer, int numFramesToRead)
throws IOException, WavFileException {
return readFrames(sampleBuffer, 0, numFramesToRead);
}
public int readFrames(long[][] sampleBuffer, int offset, int numFramesToRead)
throws IOException, WavFileException {
if (ioState != IOState.READING)
throw new IOException("Cannot read from WavFile instance");
for (int f = 0; f < numFramesToRead; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++)
sampleBuffer[c][offset] = readSample();
offset++;
frameCounter++;
}
return numFramesToRead;
}
public int writeFrames(long[] sampleBuffer, int numFramesToWrite)
throws IOException, WavFileException {
return writeFrames(sampleBuffer, 0, numFramesToWrite);
}
public int writeFrames(long[] sampleBuffer, int offset, int numFramesToWrite)
throws IOException, WavFileException {
if (ioState != IOState.WRITING)
throw new IOException("Cannot write to WavFile instance");
for (int f = 0; f < numFramesToWrite; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++) {
writeSample(sampleBuffer[offset]);
offset++;
}
frameCounter++;
}
return numFramesToWrite;
}
public int writeFrames(long[][] sampleBuffer, int numFramesToWrite)
throws IOException, WavFileException {
return writeFrames(sampleBuffer, 0, numFramesToWrite);
}
public int writeFrames(long[][] sampleBuffer, int offset,
int numFramesToWrite) throws IOException, WavFileException {
if (ioState != IOState.WRITING)
throw new IOException("Cannot write to WavFile instance");
for (int f = 0; f < numFramesToWrite; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++)
writeSample(sampleBuffer[c][offset]);
offset++;
frameCounter++;
}
return numFramesToWrite;
}
// Double
// ------
public int readFrames(double[] sampleBuffer, int numFramesToRead)
throws IOException, WavFileException {
return readFrames(sampleBuffer, 0, numFramesToRead);
}
public int readFrames(double[] sampleBuffer, int offset, int numFramesToRead)
throws IOException, WavFileException {
if (ioState != IOState.READING)
throw new IOException("Cannot read from WavFile instance");
for (int f = 0; f < numFramesToRead; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++) {
sampleBuffer[offset] = floatOffset + (double) readSample()
/ floatScale;
offset++;
}
frameCounter++;
}
return numFramesToRead;
}
public int readFrames(double[][] sampleBuffer, int numFramesToRead)
throws IOException, WavFileException {
return readFrames(sampleBuffer, 0, numFramesToRead);
}
public int readFrames(double[][] sampleBuffer, int offset,
int numFramesToRead) throws IOException, WavFileException {
if (ioState != IOState.READING)
throw new IOException("Cannot read from WavFile instance");
for (int f = 0; f < numFramesToRead; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++)
sampleBuffer[c][offset] = floatOffset + (double) readSample()
/ floatScale;
offset++;
frameCounter++;
}
return numFramesToRead;
}
public int writeFrames(double[] sampleBuffer, int numFramesToWrite)
throws IOException, WavFileException {
return writeFrames(sampleBuffer, 0, numFramesToWrite);
}
public int writeFrames(double[] sampleBuffer, int offset,
int numFramesToWrite) throws IOException, WavFileException {
if (ioState != IOState.WRITING)
throw new IOException("Cannot write to WavFile instance");
for (int f = 0; f < numFramesToWrite; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++) {
writeSample((long) (floatScale * (floatOffset + sampleBuffer[offset])));
offset++;
}
frameCounter++;
}
return numFramesToWrite;
}
public int writeFrames(double[][] sampleBuffer, int numFramesToWrite)
throws IOException, WavFileException {
return writeFrames(sampleBuffer, 0, numFramesToWrite);
}
public int writeFrames(double[][] sampleBuffer, int offset,
int numFramesToWrite) throws IOException, WavFileException {
if (ioState != IOState.WRITING)
throw new IOException("Cannot write to WavFile instance");
for (int f = 0; f < numFramesToWrite; f++) {
if (frameCounter == numFrames)
return f;
for (int c = 0; c < numChannels; c++)
writeSample((long) (floatScale * (floatOffset + sampleBuffer[c][offset])));
offset++;
frameCounter++;
}
return numFramesToWrite;
}
public void close() throws IOException {
// Close the input stream and set to null
if (iStream != null) {
iStream.close();
iStream = null;
}
if (oStream != null) {
// Write out anything still in the local buffer
if (bufferPointer > 0)
oStream.write(buffer, 0, bufferPointer);
// If an extra byte is required for word alignment, add it to the
// end
if (wordAlignAdjust)
oStream.write(0);
// Close the stream and set to null
oStream.close();
oStream = null;
}
// Flag that the stream is closed
ioState = IOState.CLOSED;
}
public void display() {
display(System.out);
}
public void display(PrintStream out) {
out.printf("File: %s\n", file);
out.printf("Channels: %d, Frames: %d\n", numChannels, numFrames);
out.printf("IO State: %s\n", ioState);
out.printf("Sample Rate: %d, Block Align: %d\n", sampleRate, blockAlign);
out.printf("Valid Bits: %d, Bytes per sample: %d\n", validBits,
bytesPerSample);
}
public static void main(String[] args) {
if (args.length < 1) {
System.err.println("Must supply filename");
System.exit(1);
}
try {
for (String filename : args) {
WavFile readWavFile = openWavFile(new File(filename));
readWavFile.display();
long numFrames = readWavFile.getNumFrames();
int numChannels = readWavFile.getNumChannels();
int validBits = readWavFile.getValidBits();
long sampleRate = readWavFile.getSampleRate();
WavFile writeWavFile = newWavFile(new File("out.wav"),
numChannels, numFrames, validBits, sampleRate);
final int BUF_SIZE = 5001;
// int[] buffer = new int[BUF_SIZE * numChannels];
// long[] buffer = new long[BUF_SIZE * numChannels];
double[] buffer = new double[BUF_SIZE * numChannels];
int framesRead = 0;
int framesWritten = 0;
do {
framesRead = readWavFile.readFrames(buffer, BUF_SIZE);
framesWritten = writeWavFile.writeFrames(buffer, BUF_SIZE);
System.out.printf("%d %d\n", framesRead, framesWritten);
} while (framesRead != 0);
readWavFile.close();
writeWavFile.close();
}
WavFile writeWavFile = newWavFile(new File("out2.wav"), 1, 10, 23,
44100);
double[] buffer = new double[10];
writeWavFile.writeFrames(buffer, 10);
writeWavFile.close();
} catch (Exception e) {
System.err.println(e);
e.printStackTrace();
}
}
}
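The floatScale/floatOffset pair set in openWavFile() maps raw integer samples onto normalised doubles when reading: with more than 8 valid bits the signed sample is divided by 2^(validBits-1), while 8-bit unsigned data is offset and scaled into the same range (newWavFile() sets up the mirror-image conversion for writing). A quick numeric check for the common 16-bit case (values are illustrative):

// 16-bit signed PCM as set up by openWavFile(): floatOffset = 0, floatScale = 1 << 15 = 32768.
int validBits = 16;
double floatOffset = 0;
double floatScale = 1 << (validBits - 1);                   // 32768.0
long rawSample = 16384;                                     // half of full positive scale
double normalised = floatOffset + rawSample / floatScale;   // 0.5, as returned by readFrames(double[], ...)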

@@ -1,26 +0,0 @@
package com.hammurapi.jcapture;
//This file was taken from http://www.labbookpages.co.uk/audio/javaWavFiles.html and package declaration was added.
public class WavFileException extends Exception
{
public WavFileException()
{
super();
}
public WavFileException(String message)
{
super(message);
}
public WavFileException(String message, Throwable cause)
{
super(message, cause);
}
public WavFileException(Throwable cause)
{
super(cause);
}
}

Binary file not shown.

Binary file not shown.
