author    Phil Burk <[email protected]>    2014-12-30 16:53:03 -0800
committer Phil Burk <[email protected]>    2014-12-30 16:53:03 -0800
commit    534969d42ca5168d645678345cd21242fe41f389 (patch)
tree      e8f5d1cba1ec57685e76ceb923d8da25a7846cfb /src/com/jsyn/engine
parent    a4d8ca95178d2e3acfc3299a4b73e84c2646d24e (diff)
Initial commit of code.
Diffstat (limited to 'src/com/jsyn/engine')
-rw-r--r--  src/com/jsyn/engine/LoadAnalyzer.java      61
-rw-r--r--  src/com/jsyn/engine/MultiTable.java        230
-rw-r--r--  src/com/jsyn/engine/SynthesisEngine.java   683
3 files changed, 974 insertions(+), 0 deletions(-)
diff --git a/src/com/jsyn/engine/LoadAnalyzer.java b/src/com/jsyn/engine/LoadAnalyzer.java
new file mode 100644
index 0000000..cbf7ed5
--- /dev/null
+++ b/src/com/jsyn/engine/LoadAnalyzer.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009 Phil Burk, Mobileer Inc
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.jsyn.engine;
+
+/** Measure CPU load. */
+public class LoadAnalyzer {
+ private long stopTime;
+ private long previousStopTime;
+ private long startTime;
+ private double averageTotalTime;
+ private double averageOnTime;
+
+ protected LoadAnalyzer() {
+ stopTime = System.nanoTime();
+ }
+
+ /**
+ * Call this when you stop doing something. Ideally, all of the time since start() was spent
+ * doing that work without interruption.
+ */
+ public void stop() {
+ previousStopTime = stopTime;
+ stopTime = System.nanoTime();
+ long onTime = stopTime - startTime;
+ long totalTime = stopTime - previousStopTime;
+ if (totalTime > 0) {
+ // Recursive averaging filter.
+ double rate = 0.01;
+ averageOnTime = (averageOnTime * (1.0 - rate)) + (onTime * rate);
+ averageTotalTime = (averageTotalTime * (1.0 - rate)) + (totalTime * rate);
+ }
+ }
+
+ /** Call this when you start doing something. */
+ public void start() {
+ startTime = System.nanoTime();
+ }
+
+ /** Calculate, on average, how much of the time was spent doing something. */
+ public double getAverageLoad() {
+ if (averageTotalTime > 0.0) {
+ return averageOnTime / averageTotalTime;
+ } else {
+ return 0.0;
+ }
+ }
+}
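Aside (not part of the commit): LoadAnalyzer computes a duty cycle with a recursive averaging filter. Each stop() folds the latest busy interval and total interval into running averages at a rate of 0.01, and getAverageLoad() returns their ratio. The sketch below shows one way it could be exercised; the class and file name are made up, and it is placed in the com.jsyn.engine package because the LoadAnalyzer constructor is protected.

package com.jsyn.engine;

// Hypothetical sketch: exercise LoadAnalyzer with an artificial 2 ms busy / 8 ms idle cycle.
// Lives in com.jsyn.engine because the LoadAnalyzer constructor is protected.
public class LoadAnalyzerSketch {
    public static void main(String[] args) throws InterruptedException {
        LoadAnalyzer analyzer = new LoadAnalyzer();
        for (int i = 0; i < 200; i++) {
            analyzer.start();
            Thread.sleep(2);   // pretend to spend ~2 ms synthesizing
            analyzer.stop();
            Thread.sleep(8);   // pretend to be idle for ~8 ms
        }
        // With roughly 2 ms busy out of every 10 ms, the averaged load converges toward ~0.2.
        System.out.println("average load = " + analyzer.getAverageLoad());
    }
}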
diff --git a/src/com/jsyn/engine/MultiTable.java b/src/com/jsyn/engine/MultiTable.java
new file mode 100644
index 0000000..48b03cd
--- /dev/null
+++ b/src/com/jsyn/engine/MultiTable.java
@@ -0,0 +1,230 @@
+/*
+ * Copyright 2009 Phil Burk, Mobileer Inc
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.jsyn.engine;
+
+/**
+ * Multiple tables of sawtooth data,
+ * organized by octaves below the Nyquist rate,
+ * used to generate band-limited Sawtooth, Impulse, Pulse, Square and Triangle waveforms.
+ *
+ * <pre>
+ * Analysis of octave requirements for tables.
+ *
+ * Octave   Index    Frequency   Partials
+ *   0      N/2      11025       1
+ *   1      N/4      5512        2
+ *   2      N/8      2756        4
+ *   3      N/16     1378        8
+ *   4      N/32     689         16
+ *   5      N/64     344         32
+ *   6      N/128    172         64
+ *   7      N/256    86          128
+ * </pre>
+ *
+ * @author Phil Burk (C) 2009 Mobileer Inc
+ */
+public class MultiTable {
+
+ public final static int NUM_TABLES = 8;
+ public final static int CYCLE_SIZE = (1 << 10);
+
+ private static MultiTable instance = new MultiTable(NUM_TABLES, CYCLE_SIZE);
+ private double phaseScalar;
+ private float[][] tables; // one wavetable per octave, each with a guard point
+
+ /**************************************************************************
+ * Initialize sawtooth wavetables. Table[0] should contain a pure sine wave. Succeeding tables
+ * should have increasing numbers of partials.
+ */
+ public MultiTable(int numTables, int cycleSize) {
+ int tableSize = cycleSize + 1;
+
+ // Allocate array of arrays.
+ tables = new float[numTables][tableSize];
+
+ float[] sineTable = tables[0];
+
+ phaseScalar = (float) (cycleSize * 0.5);
+
+ /* Fill initial sine table with values for -PI to PI. */
+ for (int j = 0; j < tableSize; j++) {
+ sineTable[j] = (float) Math.sin(((((double) j) / (double) cycleSize) * Math.PI * 2.0)
+ - Math.PI);
+ }
+
+ /*
+ * Build each table from scratch and scale the partials by a raised cosine to reduce the
+ * Gibbs effect.
+ */
+ for (int i = 1; i < numTables; i++) {
+ int numPartials;
+ double kGibbs;
+ float[] table = tables[i];
+
+ /* Add together partials for this table. */
+ numPartials = 1 << i;
+ kGibbs = Math.PI / (2 * numPartials);
+ for (int k = 0; k < numPartials; k++) {
+ double ampl, cGibbs;
+ int sineIndex = 0;
+ int partial = k + 1;
+ cGibbs = Math.cos(k * kGibbs);
+ /* Calculate amplitude for Nth partial */
+ ampl = cGibbs * cGibbs / partial;
+
+ for (int j = 0; j < tableSize; j++) {
+ table[j] += (float) ampl * sineTable[sineIndex];
+ sineIndex += partial;
+ /* Wrap index at end of table. */
+ if (sineIndex >= cycleSize) {
+ sineIndex -= cycleSize;
+ }
+ }
+ }
+ }
+
+ /* Normalize after */
+ for (int i = 1; i < numTables; i++) {
+ normalizeArray(tables[i]);
+ }
+ }
+
+ /**************************************************************************/
+ public static float normalizeArray(float[] fdata) {
+ float max, val, gain;
+ int i;
+
+ // determine maximum value.
+ max = 0.0f;
+ for (i = 0; i < fdata.length; i++) {
+ val = Math.abs(fdata[i]);
+ if (val > max)
+ max = val;
+ }
+ if (max < 0.0000001f)
+ max = 0.0000001f;
+ // scale array
+ gain = 1.0f / max;
+ for (i = 0; i < fdata.length; i++)
+ fdata[i] *= gain;
+ return gain;
+ }
+
+ /*****************************************************************************
+ * When the phaseInc maps to the highest level table, we start interpolating between the
+ * highest table and the raw sawtooth value (phase). When phaseInc points to the highest table:
+ *     flevel = NUM_TABLES - 1 = -1 - log2(pInc)
+ *     log2(pInc) = -NUM_TABLES
+ *     pInc = 2^(-NUM_TABLES)
+ * so LOWEST_PHASE_INC_INV = 1 / pInc = 2^NUM_TABLES.
+ */
+ private final static double LOWEST_PHASE_INC_INV = (1 << NUM_TABLES);
+
+ /**************************************************************************/
+ /* Phase ranges from -1.0 to +1.0 */
+ public double calculateSawtooth(double currentPhase, double positivePhaseIncrement,
+ double flevel) {
+ float[] tableBase;
+ double val;
+ double hiSam; /* Use when verticalFraction is 1.0 */
+ double loSam; /* Use when verticalFraction is 0.0 */
+ double sam1, sam2;
+
+ /* Use Phase to determine sampleIndex into table. */
+ double findex = ((phaseScalar * currentPhase) + phaseScalar);
+ // findex is > 0 so we do not need to call floor().
+ int sampleIndex = (int) findex;
+ double horizontalFraction = findex - sampleIndex;
+ int tableIndex = (int) flevel;
+
+ if (tableIndex > (NUM_TABLES - 2)) {
+ /*
+ * Just use top table and mix with arithmetic sawtooth if below lowest frequency.
+ * Generate new fraction for interpolating between 0.0 and lowest table frequency.
+ */
+ double fraction = positivePhaseIncrement * LOWEST_PHASE_INC_INV;
+ tableBase = tables[(NUM_TABLES - 1)];
+
+ /* Get adjacent samples. Assume guard point present. */
+ sam1 = tableBase[sampleIndex];
+ sam2 = tableBase[sampleIndex + 1];
+ /* Interpolate between adjacent samples. */
+ loSam = sam1 + (horizontalFraction * (sam2 - sam1));
+
+ /* Use arithmetic version for low frequencies. */
+ /* fraction is 0.0 at 0 Hz */
+ val = currentPhase + (fraction * (loSam - currentPhase));
+ } else {
+
+ double verticalFraction = flevel - tableIndex;
+
+ if (tableIndex < 0) {
+ if (tableIndex < -1) // above Nyquist!
+ {
+ val = 0.0;
+ } else {
+ /*
+ * At top of supported range, interpolate between 0.0 and first partial.
+ */
+ tableBase = tables[0]; /* Sine wave table. */
+
+ /* Get adjacent samples. Assume guard point present. */
+ sam1 = tableBase[sampleIndex];
+ sam2 = tableBase[sampleIndex + 1];
+
+ /* Interpolate between adjacent samples. */
+ hiSam = sam1 + (horizontalFraction * (sam2 - sam1));
+ /* loSam = 0.0 */
+ // verticalFraction is 0.0 at Nyquist
+ val = verticalFraction * hiSam;
+ }
+ } else {
+ /*
+ * Interpolate between adjacent levels to prevent harmonics from popping.
+ */
+ tableBase = tables[tableIndex + 1];
+
+ /* Get adjacent samples. Assume guard point present. */
+ sam1 = tableBase[sampleIndex];
+ sam2 = tableBase[sampleIndex + 1];
+
+ /* Interpolate between adjacent samples. */
+ hiSam = sam1 + (horizontalFraction * (sam2 - sam1));
+
+ /* Get adjacent samples. Assume guard point present. */
+ tableBase = tables[tableIndex];
+ sam1 = tableBase[sampleIndex];
+ sam2 = tableBase[sampleIndex + 1];
+
+ /* Interpolate between adjacent samples. */
+ loSam = sam1 + (horizontalFraction * (sam2 - sam1));
+
+ val = loSam + (verticalFraction * (hiSam - loSam));
+ }
+ }
+ return val;
+ }
+
+ public double convertPhaseIncrementToLevel(double positivePhaseIncrement) {
+ if (positivePhaseIncrement < 1.0e-30) {
+ positivePhaseIncrement = 1.0e-30;
+ }
+ return -1.0 - (Math.log(positivePhaseIncrement) / Math.log(2.0));
+ }
+
+ public static MultiTable getInstance() {
+ return instance;
+ }
+
+}
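Aside (not part of the commit): MultiTable picks a wavetable level from the phase increment, and calculateSawtooth() interpolates both horizontally (between adjacent samples) and vertically (between adjacent tables) to keep harmonics below Nyquist. Since the phase runs from -1.0 to +1.0, the per-sample phase increment for a given frequency is 2 * frequency / sampleRate. The sketch below combines the two public methods; the class name, frequency and block size are arbitrary.

import com.jsyn.engine.MultiTable;

// Hypothetical sketch: fill one block with a band-limited sawtooth at 440 Hz.
public class MultiTableSketch {
    public static void main(String[] args) {
        MultiTable multiTable = MultiTable.getInstance();
        double sampleRate = 44100.0;
        double frequency = 440.0;
        // One cycle spans 2.0 phase units (-1.0 to +1.0).
        double phaseIncrement = 2.0 * frequency / sampleRate;
        double level = multiTable.convertPhaseIncrementToLevel(phaseIncrement);
        double phase = 0.0;
        double[] block = new double[256];
        for (int i = 0; i < block.length; i++) {
            block[i] = multiTable.calculateSawtooth(phase, phaseIncrement, level);
            phase += phaseIncrement;
            if (phase >= 1.0) {
                phase -= 2.0; // wrap from the +1.0 end of the cycle back to -1.0
            }
        }
        System.out.println("first sample = " + block[0]);
    }
}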
diff --git a/src/com/jsyn/engine/SynthesisEngine.java b/src/com/jsyn/engine/SynthesisEngine.java
new file mode 100644
index 0000000..119435f
--- /dev/null
+++ b/src/com/jsyn/engine/SynthesisEngine.java
@@ -0,0 +1,683 @@
+/*
+ * Copyright 2009 Phil Burk, Mobileer Inc
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.jsyn.engine;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.logging.Logger;
+
+import com.jsyn.JSyn;
+import com.jsyn.Synthesizer;
+import com.jsyn.devices.AudioDeviceFactory;
+import com.jsyn.devices.AudioDeviceInputStream;
+import com.jsyn.devices.AudioDeviceManager;
+import com.jsyn.devices.AudioDeviceOutputStream;
+import com.jsyn.unitgen.UnitGenerator;
+import com.softsynth.shared.time.ScheduledCommand;
+import com.softsynth.shared.time.ScheduledQueue;
+import com.softsynth.shared.time.TimeStamp;
+
+//TODO Resolve problem with HearDAHDSR where "Rate" port.set is not reflected in knob. Engine not running.
+//TODO new tutorial and docs on website
+//TODO AutoStop on DAHDSR
+//TODO Test/example SequentialData queueOn and queueOff
+
+//TODO Abstract device interface. File device!
+//TODO Measure thread switching sync, performance for multi-core synthesis. Use 4 core pro.
+//TODO Optimize SineOscillatorPhaseModulated
+//TODO More circuits.
+//TODO DC blocker
+//TODO Swing scope probe UIs, auto ranging
+
+/**
+ * Internal implementation of the JSyn Synthesizer. The public API is in the Synthesizer
+ * interface; this class may be used directly by internal code.
+ *
+ * @author Phil Burk (C) 2009 Mobileer Inc
+ * @see Synthesizer
+ */
+public class SynthesisEngine implements Runnable, Synthesizer {
+ private final static int BLOCKS_PER_BUFFER = 8;
+ private final static int FRAMES_PER_BUFFER = Synthesizer.FRAMES_PER_BLOCK * BLOCKS_PER_BUFFER;
+ public static final int DEFAULT_FRAME_RATE = 44100;
+
+ private AudioDeviceManager audioDeviceManager;
+ private AudioDeviceOutputStream audioOutputStream;
+ private AudioDeviceInputStream audioInputStream;
+ private Thread audioThread;
+ private ScheduledQueue<ScheduledCommand> commandQueue = new ScheduledQueue<ScheduledCommand>();
+ private volatile boolean go;
+
+ private InterleavingBuffer inputBuffer;
+ private InterleavingBuffer outputBuffer;
+ private double inverseNyquist;
+ private long frameCount;
+ private boolean pullDataEnabled = true;
+ private boolean useRealTime = true;
+ private boolean started;
+ private int frameRate = DEFAULT_FRAME_RATE;
+ private double framePeriod = 1.0 / frameRate;
+
+ // List of all units added to the synth.
+ private ArrayList<UnitGenerator> allUnitList = new ArrayList<UnitGenerator>();
+ // List of running units.
+ private ArrayList<UnitGenerator> runningUnitList = new ArrayList<UnitGenerator>();
+ // List of units stopping because of autoStop.
+ private ArrayList<UnitGenerator> stoppingUnitList = new ArrayList<UnitGenerator>();
+
+ private LoadAnalyzer loadAnalyzer;
+ // private int numOutputChannels;
+ // private int numInputChannels;
+ private CopyOnWriteArrayList<Runnable> audioTasks = new CopyOnWriteArrayList<Runnable>();
+ /** A fraction corresponding to exactly -96 dB. */
+ public static final double DB96 = (1.0 / 63095.73444801943);
+ /** A fraction that is approximately -90.3 dB. Defined as 1 bit of an S16. */
+ public static final double DB90 = (1.0 / (1 << 15));
+
+ static Logger logger = Logger.getLogger(SynthesisEngine.class.getName());
+
+ public SynthesisEngine(AudioDeviceManager audioDeviceManager) {
+ this.audioDeviceManager = audioDeviceManager;
+ }
+
+ public SynthesisEngine() {
+ this(AudioDeviceFactory.createAudioDeviceManager());
+ }
+
+ @Override
+ public String getVersion() {
+ return JSyn.VERSION;
+ }
+
+ @Override
+ public int getVersionCode() {
+ return JSyn.VERSION_CODE;
+ }
+
+ @Override
+ public String toString() {
+ return "JSyn " + JSyn.VERSION_TEXT;
+ }
+
+ public boolean isPullDataEnabled() {
+ return pullDataEnabled;
+ }
+
+ /**
+ * If set to true, then audio data will be pulled from the output ports of connected unit
+ * generators. The final unit in a tree of units needs to be start()ed.
+ *
+ * @param pullDataEnabled
+ */
+ public void setPullDataEnabled(boolean pullDataEnabled) {
+ this.pullDataEnabled = pullDataEnabled;
+ }
+
+ private void setupAudioBuffers(int numInputChannels, int numOutputChannels) {
+ inputBuffer = new InterleavingBuffer(FRAMES_PER_BUFFER, Synthesizer.FRAMES_PER_BLOCK,
+ numInputChannels);
+ outputBuffer = new InterleavingBuffer(FRAMES_PER_BUFFER, Synthesizer.FRAMES_PER_BLOCK,
+ numOutputChannels);
+ }
+
+ public void terminate() {
+ }
+
+ class InterleavingBuffer {
+ private double[] interleavedBuffer;
+ ChannelBlockBuffer[] blockBuffers;
+
+ InterleavingBuffer(int framesPerBuffer, int framesPerBlock, int samplesPerFrame) {
+ interleavedBuffer = new double[framesPerBuffer * samplesPerFrame];
+ // Allocate buffers for each channel of synthesis output.
+ blockBuffers = new ChannelBlockBuffer[samplesPerFrame];
+ for (int i = 0; i < blockBuffers.length; i++) {
+ blockBuffers[i] = new ChannelBlockBuffer(framesPerBlock);
+ }
+ }
+
+ int deinterleave(int inIndex) {
+ for (int jf = 0; jf < Synthesizer.FRAMES_PER_BLOCK; jf++) {
+ for (int iob = 0; iob < blockBuffers.length; iob++) {
+ ChannelBlockBuffer buffer = blockBuffers[iob];
+ buffer.values[jf] = interleavedBuffer[inIndex++];
+ }
+ }
+ return inIndex;
+ }
+
+ int interleave(int outIndex) {
+ for (int jf = 0; jf < Synthesizer.FRAMES_PER_BLOCK; jf++) {
+ for (int iob = 0; iob < blockBuffers.length; iob++) {
+ ChannelBlockBuffer buffer = blockBuffers[iob];
+ interleavedBuffer[outIndex++] = buffer.values[jf];
+ }
+ }
+ return outIndex;
+ }
+
+ public double[] getChannelBuffer(int i) {
+ return blockBuffers[i].values;
+ }
+
+ public void clear() {
+ for (int i = 0; i < blockBuffers.length; i++) {
+ blockBuffers[i].clear();
+ }
+ }
+ }
+
+ class ChannelBlockBuffer {
+ private double[] values;
+
+ ChannelBlockBuffer(int framesPerBlock) {
+ values = new double[framesPerBlock];
+ }
+
+ void clear() {
+ for (int i = 0; i < values.length; i++) {
+ values[i] = 0.0f;
+ }
+ }
+ }
+
+ @Override
+ public void start() {
+ // TODO Use constants.
+ start(DEFAULT_FRAME_RATE, -1, 0, -1, 2);
+ }
+
+ @Override
+ public void start(int frameRate) {
+ // TODO Use constants.
+ start(frameRate, -1, 0, -1, 2);
+ }
+
+ @Override
+ public synchronized void start(int frameRate, int inputDeviceID, int numInputChannels,
+ int outputDeviceID, int numOutputChannels) {
+ if (started) {
+ logger.info("JSyn already started.");
+ return;
+ }
+
+ this.frameRate = frameRate;
+ this.framePeriod = 1.0 / frameRate;
+
+ // Set rate for any units that have already been added.
+ for (UnitGenerator ugen : allUnitList) {
+ ugen.setFrameRate(frameRate);
+ }
+
+ // this.numInputChannels = numInputChannels;
+ // this.numOutputChannels = numOutputChannels;
+ setupAudioBuffers(numInputChannels, numOutputChannels);
+
+ logger.info("Pure Java JSyn from www.softsynth.com, rate = " + frameRate + ", "
+ + (useRealTime ? "RT" : "NON-RealTime") + ", " + JSyn.VERSION_TEXT);
+
+ inverseNyquist = 2.0 / frameRate;
+
+ if (useRealTime) {
+ if (numInputChannels > 0) {
+ audioInputStream = audioDeviceManager.createInputStream(inputDeviceID, frameRate,
+ numInputChannels);
+ }
+ if (numOutputChannels > 0) {
+ audioOutputStream = audioDeviceManager.createOutputStream(outputDeviceID,
+ frameRate, numOutputChannels);
+ }
+ audioThread = new Thread(this);
+ logger.fine("Synth thread old priority = " + audioThread.getPriority());
+ audioThread.setPriority(audioThread.getPriority() + 2);
+ logger.fine("Synth thread new priority = " + audioThread.getPriority());
+ go = true;
+ audioThread.start();
+ }
+
+ started = true;
+ }
+
+ @Override
+ public boolean isRunning() {
+ return go;
+ }
+
+ @Override
+ public synchronized void stop() {
+ if (!started) {
+ logger.info("JSyn already stopped.");
+ return;
+ }
+
+ if (useRealTime) {
+ // Stop audio synthesis and all units.
+ go = false;
+ if (audioThread != null) {
+ try {
+ // Interrupt now, otherwise audio thread will wait for audio I/O.
+ audioThread.interrupt();
+
+ audioThread.join(1000);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ synchronized (runningUnitList) {
+ runningUnitList.clear();
+ }
+ started = false;
+ }
+
+ @Override
+ public void run() {
+ logger.fine("JSyn synthesis thread starting.");
+ try {
+ if (audioInputStream != null) {
+ logger.finer("JSyn synthesis thread trying to start audio INPUT!");
+ audioInputStream.start();
+ String msg = String.format("Input Latency = %5.1f msec",
+ 1000 * audioInputStream.getLatency());
+ logger.fine(msg);
+ }
+ if (audioOutputStream != null) {
+ logger.finer("JSyn synthesis thread trying to start audio OUTPUT!");
+ audioOutputStream.start();
+ String msg = String.format("Output Latency = %5.1f msec",
+ 1000 * audioOutputStream.getLatency());
+ logger.fine(msg);
+ // Buy some time while we fill the buffer.
+ audioOutputStream.write(outputBuffer.interleavedBuffer);
+ }
+ loadAnalyzer = new LoadAnalyzer();
+ while (go) {
+ if (audioInputStream != null) {
+ audioInputStream.read(inputBuffer.interleavedBuffer);
+ }
+
+ loadAnalyzer.start();
+ runAudioTasks();
+ generateNextBuffer();
+ loadAnalyzer.stop();
+
+ if (audioOutputStream != null) {
+ // This call will block when the output is full.
+ audioOutputStream.write(outputBuffer.interleavedBuffer);
+ }
+ }
+
+ } catch (Throwable e) {
+ e.printStackTrace();
+ go = false;
+
+ } finally {
+ logger.info("JSyn synthesis thread in finally code.");
+ // Stop audio system.
+ if (audioInputStream != null) {
+ audioInputStream.stop();
+ }
+ if (audioOutputStream != null) {
+ audioOutputStream.stop();
+ }
+ }
+ logger.fine("JSyn synthesis thread exiting.");
+ }
+
+ private void runAudioTasks() {
+ for (Runnable task : audioTasks) {
+ task.run();
+ }
+ }
+
+ // TODO We need to implement a sharedSleeper like we use in JSyn1.
+ public void generateNextBuffer() {
+ int outIndex = 0;
+ int inIndex = 0;
+ for (int i = 0; i < BLOCKS_PER_BUFFER; i++) {
+ if (inputBuffer != null) {
+ inIndex = inputBuffer.deinterleave(inIndex);
+ }
+
+ TimeStamp timeStamp = createTimeStamp();
+ // Try putting this up here so incoming time-stamped events will get
+ // scheduled later.
+ processScheduledCommands(timeStamp);
+ clearBlockBuffers();
+ synthesizeBuffer();
+
+ if (outputBuffer != null) {
+ outIndex = outputBuffer.interleave(outIndex);
+ }
+ frameCount += Synthesizer.FRAMES_PER_BLOCK;
+ }
+ }
+
+ @Override
+ public double getCurrentTime() {
+ return frameCount * framePeriod;
+ }
+
+ @Override
+ public TimeStamp createTimeStamp() {
+ return new TimeStamp(getCurrentTime());
+ }
+
+ private void processScheduledCommands(TimeStamp timeStamp) {
+ List<ScheduledCommand> timeList = commandQueue.removeNextList(timeStamp);
+
+ while (timeList != null) {
+ while (!timeList.isEmpty()) {
+ ScheduledCommand command = timeList.remove(0);
+ logger.fine("processing " + command + ", at time " + timeStamp.getTime());
+ command.run();
+ }
+ // Get next list of commands at the given time.
+ timeList = commandQueue.removeNextList(timeStamp);
+ }
+ }
+
+ @Override
+ public void scheduleCommand(TimeStamp timeStamp, ScheduledCommand command) {
+ if ((Thread.currentThread() == audioThread) && (timeStamp.getTime() <= getCurrentTime())) {
+ command.run();
+ } else {
+ logger.fine("scheduling " + command + ", at time " + timeStamp.getTime());
+ commandQueue.add(timeStamp, command);
+ }
+ }
+
+ @Override
+ public void scheduleCommand(double time, ScheduledCommand command) {
+ TimeStamp timeStamp = new TimeStamp(time);
+ scheduleCommand(timeStamp, command);
+ }
+
+ @Override
+ public void queueCommand(ScheduledCommand command) {
+ TimeStamp timeStamp = createTimeStamp();
+ scheduleCommand(timeStamp, command);
+ }
+
+ private void clearBlockBuffers() {
+ outputBuffer.clear();
+ }
+
+ private void synthesizeBuffer() {
+ synchronized (runningUnitList) {
+ ListIterator<UnitGenerator> iterator = runningUnitList.listIterator();
+ while (iterator.hasNext()) {
+ UnitGenerator unit = iterator.next();
+ if (pullDataEnabled) {
+ unit.pullData(getFrameCount(), 0, Synthesizer.FRAMES_PER_BLOCK);
+ } else {
+ unit.generate(0, Synthesizer.FRAMES_PER_BLOCK);
+ }
+ }
+ // Remove any units that got auto stopped.
+ for (UnitGenerator ugen : stoppingUnitList) {
+ runningUnitList.remove(ugen);
+ ugen.flattenOutputs();
+ }
+ }
+ stoppingUnitList.clear();
+ }
+
+ public double[] getInputBuffer(int i) {
+ try {
+ return inputBuffer.getChannelBuffer(i);
+ } catch (ArrayIndexOutOfBoundsException e) {
+ throw new RuntimeException("Audio Input not configured in start() method.");
+ }
+ }
+
+ public double[] getOutputBuffer(int i) {
+ try {
+ return outputBuffer.getChannelBuffer(i);
+ } catch (ArrayIndexOutOfBoundsException e) {
+ throw new RuntimeException("Audio Output not configured in start() method.");
+ }
+ }
+
+ private void internalStopUnit(UnitGenerator unit) {
+ synchronized (runningUnitList) {
+ runningUnitList.remove(unit);
+ }
+ unit.flattenOutputs();
+ }
+
+ public void autoStopUnit(UnitGenerator unitGenerator) {
+ synchronized (stoppingUnitList) {
+ stoppingUnitList.add(unitGenerator);
+ }
+ }
+
+ @Override
+ public void startUnit(UnitGenerator unit, double time) {
+ startUnit(unit, new TimeStamp(time));
+ }
+
+ @Override
+ public void stopUnit(UnitGenerator unit, double time) {
+ stopUnit(unit, new TimeStamp(time));
+ }
+
+ @Override
+ public void startUnit(final UnitGenerator unit, TimeStamp timeStamp) {
+ // Don't start if it is a component in a circuit because it will be
+ // executed by the circuit.
+ if (unit.getCircuit() == null) {
+ scheduleCommand(timeStamp, new ScheduledCommand() {
+ @Override
+ public void run() {
+ internalStartUnit(unit);
+ }
+ });
+ }
+ }
+
+ @Override
+ public void stopUnit(final UnitGenerator unit, TimeStamp timeStamp) {
+ scheduleCommand(timeStamp, new ScheduledCommand() {
+ @Override
+ public void run() {
+ internalStopUnit(unit);
+ }
+ });
+ }
+
+ @Override
+ public void startUnit(UnitGenerator unit) {
+ startUnit(unit, createTimeStamp());
+ }
+
+ @Override
+ public void stopUnit(UnitGenerator unit) {
+ stopUnit(unit, createTimeStamp());
+ }
+
+ private void internalStartUnit(UnitGenerator unit) {
+ // logger.info( "internalStartUnit " + unit + " with circuit " +
+ // unit.getCircuit() );
+ if (unit.getCircuit() == null) {
+ synchronized (runningUnitList) {
+ if (!runningUnitList.contains(unit)) {
+ runningUnitList.add(unit);
+ }
+ }
+ }
+ // else
+ // {
+ // logger.info(
+ // "internalStartUnit detected race condition !!!! from old JSyn" + unit
+ // + " with circuit " + unit.getCircuit() );
+ // }
+ }
+
+ public double getInverseNyquist() {
+ return inverseNyquist;
+ }
+
+ public double convertTimeToExponentialScaler(double duration) {
+ // Calculate scaler so that scaler^frames = target/source
+ double numFrames = duration * getFrameRate();
+ return Math.pow(DB90, (1.0 / numFrames));
+ }
+
+ @Override
+ public long getFrameCount() {
+ return frameCount;
+ }
+
+ /**
+ * @return the frameRate
+ */
+ @Override
+ public int getFrameRate() {
+ return frameRate;
+ }
+
+ /**
+ * @return the inverse of the frameRate for efficiency
+ */
+ @Override
+ public double getFramePeriod() {
+ return framePeriod;
+ }
+
+ /** Convert a short value to a double in the range -1.0 to almost 1.0. */
+ public static double convertShortToDouble(short sdata) {
+ return (sdata * (1.0 / Short.MAX_VALUE));
+ }
+
+ /**
+ * Convert a double value in the range -1.0 to almost 1.0 to a short. Double value is clipped
+ * before converting.
+ */
+ public static short convertDoubleToShort(double d) {
+ final double maxValue = ((double) (Short.MAX_VALUE - 1)) / Short.MAX_VALUE;
+ if (d > maxValue) {
+ d = maxValue;
+ } else if (d < -1.0) {
+ d = -1.0;
+ }
+ return (short) (d * Short.MAX_VALUE);
+ }
+
+ @Override
+ public void addAudioTask(Runnable blockTask) {
+ audioTasks.add(blockTask);
+ }
+
+ @Override
+ public void removeAudioTask(Runnable blockTask) {
+ audioTasks.remove(blockTask);
+ }
+
+ @Override
+ public double getUsage() {
+ // use temp so we don't have to synchronize
+ LoadAnalyzer temp = loadAnalyzer;
+ if (temp != null) {
+ return temp.getAverageLoad();
+ } else {
+ return 0.0;
+ }
+ }
+
+ @Override
+ public AudioDeviceManager getAudioDeviceManager() {
+ return audioDeviceManager;
+ }
+
+ @Override
+ public void setRealTime(boolean realTime) {
+ useRealTime = realTime;
+ }
+
+ @Override
+ public boolean isRealTime() {
+ return useRealTime;
+ }
+
+ public double getOutputLatency() {
+ if (audioOutputStream != null) {
+ return audioOutputStream.getLatency();
+ } else {
+ return 0;
+ }
+ }
+
+ public double getInputLatency() {
+ if (audioInputStream != null) {
+ return audioInputStream.getLatency();
+ } else {
+ return 0;
+ }
+ }
+
+ @Override
+ public void add(UnitGenerator ugen) {
+ ugen.setSynthesisEngine(this);
+ allUnitList.add(ugen);
+ if (frameRate > 0) {
+ ugen.setFrameRate(frameRate);
+ }
+ }
+
+ @Override
+ public void remove(UnitGenerator ugen) {
+ allUnitList.remove(ugen);
+ }
+
+ @Override
+ public void sleepUntil(double time) throws InterruptedException {
+ double timeToSleep = time - getCurrentTime();
+ while (timeToSleep > 0.0) {
+ if (useRealTime) {
+ long msecToSleep = (long) (1000 * timeToSleep);
+ if (msecToSleep <= 0) {
+ msecToSleep = 1;
+ }
+ Thread.sleep(msecToSleep);
+ } else {
+
+ generateNextBuffer();
+ }
+ timeToSleep = time - getCurrentTime();
+ }
+ }
+
+ @Override
+ public void sleepFor(double duration) throws InterruptedException {
+ sleepUntil(getCurrentTime() + duration);
+ }
+
+ public void printConnections() {
+ if (pullDataEnabled) {
+ ListIterator<UnitGenerator> iterator = runningUnitList.listIterator();
+ while (iterator.hasNext()) {
+ UnitGenerator unit = iterator.next();
+ unit.printConnections();
+ }
+ }
+
+ }
+
+}
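Aside (not part of the commit): the typical lifecycle of the engine is to construct it, optionally disable real-time mode, start() it at a frame rate, let synthesis time advance, then stop(). In non-real-time mode, sleepFor() advances time by rendering buffers directly instead of sleeping. The sketch below assumes the rest of the JSyn tree (com.jsyn.devices and the Synthesizer interface) is on the classpath; the class name is made up.

import com.jsyn.engine.SynthesisEngine;

// Hypothetical sketch: minimal engine lifecycle, rendered off-line (no audio device opened).
public class EngineLifecycleSketch {
    public static void main(String[] args) throws InterruptedException {
        SynthesisEngine engine = new SynthesisEngine();
        engine.setRealTime(false); // render buffers directly instead of opening an audio device
        engine.start(44100);
        engine.sleepFor(0.5);      // advances frameCount by half a second of audio
        System.out.println("time = " + engine.getCurrentTime() + " sec");
        engine.stop();
    }
}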