Tutorial: Using Xuggler in Processing

It took me a while to figure out, but Xuggler is a powerful audio and video Java wrapper of ffmpeg. For a recent art project of mine titled "Lo and Behold, I am become as a God," I had to use Xuggler to sync audio and video in real time. In the Processing forums, there are a lot of questions about how to use Processing to sync audio and video in real time, but the GSVideo library doesn't capture audio, it only captures video. Another option is GSPipeline, which can sync audio and video, but it seems to only work well in Linux - I haven't tried Mac. To write a program that can capture audio and video in Java within Windows 7, I turned to Xuggler.

The following steps are what I've done to make Xuggler operate with Processing 2.0b5 or 2.0b6 on Windows 7 64-bit:

Step 1: Download and Install the Xuggler 64 bit Build for Windows. Available here.

Step 2: Download the necessary Xuggler jars; I've zipped them together, and they can be downloaded from here. I've placed the jar files from the zipped folder within my Processing sketch, in a folder called 'code.'

Step 3: You can either download and run my example code from here (jars included), or look at the example code below the video.

Here's an example of some work I did with Xuggler in processing:



My next tutorial will be about how to set up the Youtube data API with processing, to be able to automatically upload these videos to your youtube account.




import java.awt.image.BufferedImage;
import java.awt.image.ImageObserver;
import java.util.concurrent.TimeUnit;
import java.awt.*;
import processing.video.*;
import javax.sound.sampled.*;

// Xuggler writer that muxes the encoded video and audio into one file.
IMediaWriter imw;
// Stream coder of the video stream (stream 0) — kept for inspection.
IStreamCoder isc;
// Staging image the camera frame is drawn into before encoding
// (TYPE_3BYTE_BGR, the layout the encoder expects).
BufferedImage bgr;
// Target video frame rate (frames per second).
int vidRate = 30;
// Recording start time (System.nanoTime), used for encode timestamps.
long sTime;
// Time of the last encoded video frame (System.nanoTime).
long fTime;

Capture cam;

// Index/id of the audio stream added to the writer.
final int audioStreamIndex = 1;
final int audioStreamId = 1;
// Stereo capture.
final int channelCount = 2;
// Derived from audioFormat in avSetup() (44100 Hz).
int sampleRate;

AudioFormat audioFormat;
// Wraps the capture line; created in avSetup() but not read elsewhere
// in this sketch.
AudioInputStream audioInputStream;
// Microphone / line-in capture line.
TargetDataLine aline;
// Format reported by the opened line; not read elsewhere in this sketch.
AudioFormat targetType;

// Raw PCM bytes drained from the capture line each frame.
byte[] audioBuf;
// Writer index returned by addAudioStream(), used by encodeAudio().
int audionumber;

int widthCapture=640;
int heightCapture=480;

// Toggled by keyPressed(): 'r' starts, 's' stops and saves.
boolean recording;

void setup() {
  // Draw loop paced to the target video rate.
  frameRate(30);
  size(widthCapture, heightCapture, JAVA2D);
  cam = new Capture(this, widthCapture, heightCapture);
  cam.start();
  // Opens the audio capture line and allocates the audio buffer.
  avSetup();
}

void draw() {
  if (cam.available()) {
    cam.read();
    image(cam.get(), 0, 0);

    if (recording) {
      if (imw.isOpen()) {
        //video recording stuff
        long cTime = System.nanoTime()-fTime;
        if (cTime >= (double)1000/vidRate) {
          bgr.getGraphics().drawImage(cam.getImage(), 0, 0,
          new ImageObserver() {
            public boolean imageUpdate(Image i, int a, int b, int c, int d, int e) {
              return true;
            }
          }
          );
          imw.encodeVideo(0, bgr, System.nanoTime()-sTime, TimeUnit.NANOSECONDS);
          //audio recording stuff
          if (aline.available() == 88200) {
            int nBytesRead = aline.read(audioBuf, 0, aline.available());//audioBuf.length);//aline.available());
            if (nBytesRead>0) {
              IBuffer iBuf = IBuffer.make(null, audioBuf, 0, nBytesRead);
              IAudioSamples smp = IAudioSamples.make(iBuf, channelCount, IAudioSamples.Format.FMT_S16);

              if (smp!=null) {
                long numSample = nBytesRead/smp.getSampleSize();
                smp.setComplete(true, numSample, (int) audioFormat.getSampleRate(), audioFormat.getChannels(), IAudioSamples.Format.FMT_S16, (System.nanoTime()-sTime) / 1000);
                smp.put(audioBuf, 1, 0, aline.available());
                try {
                  imw.encodeAudio(audionumber, smp);
                }
                catch(Exception e) {
                  println("EXCEPTION: " + e);
                }
              }
            }
          }
          fTime = System.nanoTime();
        }
      }
    }
  }
}

public void keyPressed() {
  if (key == 'r') {
    if (!recording) {
      println("recording");
      avRecorderSetup();
      recording = true;
    }
  }
  if (key == 's') {
    if (recording) {
      println("saving");
      imw.flush();
      imw.close();
      recording = false;
    }
  }
}

void avSetup() {
  audioFormat = new AudioFormat(44100.0F, 16, channelCount, true, false);
  sampleRate = (int) audioFormat.getSampleRate();
  DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat);
  try {
    aline = (TargetDataLine) AudioSystem.getLine(info);
    aline.open(audioFormat);
    aline.start();
    println("audio line");
  }
  catch (LineUnavailableException e)
  {
    println("unable to get a recording line");
    e.printStackTrace();
    exit();
  }
  int bufferSize = (int) audioFormat.getSampleRate() * audioFormat.getFrameSize();
  audioBuf = new byte[bufferSize];
  targetType = aline.getFormat();
  audioInputStream = new AudioInputStream(aline);
}

// Creates and configures the Xuggler writer for one recording session:
// output file, interleaving, video stream 0 and the audio stream.
// Container format is inferred by Xuggler from the file extension.
void avRecorderSetup() {
  imw = ToolFactory.makeWriter(sketchPath("D:/myVideo.mp4"));//or "output.avi" or "output.mov"
  imw.open();
  // Interleave audio and video packets so players can stream the file.
  imw.setForceInterleave(true);
  // Stream 0: video at vidRate fps; codec chosen from the file name.
  imw.addVideoStream(0, 0, IRational.make((double)vidRate), widthCapture, heightCapture);
  // Returns the writer-side index used later by encodeAudio().
  audionumber = imw.addAudioStream(audioStreamIndex, audioStreamId, channelCount, sampleRate);
  isc = imw.getContainer().getStream(0).getStreamCoder();
  bgr = new BufferedImage(widthCapture, heightCapture, BufferedImage.TYPE_3BYTE_BGR);
  // Reset both the recording-start and last-frame clocks.
  sTime = fTime = System.nanoTime();
}

-->