/*
* Copyright 1997-2008 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*
*/
package javax.media.j3d;
import java.awt.AWTEvent;
import java.awt.event.WindowEvent;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import javax.vecmath.Point2f;
import javax.vecmath.Point3d;
import javax.vecmath.Point3f;
import javax.vecmath.Vector3d;
import javax.vecmath.Vector3f;
/**
* This structure parallels the RenderBin structure and
* is used for sounds
*/
class SoundScheduler extends J3dStructure {
/**
* The View that owns this SoundScheduler
*/
View view = null;
/**
* This boolean tells the thread to suspend itself.
* This is true ONLY when everything is ready to render using the run loop
*/
boolean ready = false;
/**
* The ViewPlatform that is associated with this SoundScheduler
*/
ViewPlatformRetained viewPlatform = null;
/**
* The GraphicsContext3D that we are currently running in.
*/
GraphicsContext3D graphicsCtx = null;
/**
* Maintain a reference to the last AuralAttributes found active so
* that these parameters need to be reset in the AudioDevice3D only
* when there has been a change.
*/
AuralAttributesRetained lastAA = null;
/**
* Since the AuralAttributes gain scale factor is multiplied with a sound's
* initialGain scale factor, any change in the AuralAttributes gain scale
* factor should force an update of all active sounds' gains.
* Also, change in AuralAttributes should force a sound update
* even if no other sound field changes occurred.
*/
boolean resetAA = true;
/**
* Audio Device
*/
AudioDevice audioDevice = null;
AudioDevice3D audioDevice3D = null;
AudioDevice3DL2 audioDevice3DL2 = null;
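// Total number of channels available on the audio device for playing sounds.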
int totalChannels = 0;
/**
* Array of SoundscapeRetained nodes that intersect the viewPlatform.
* This list is a subset of the scene's soundscapes, and is used when
* selecting the closest Soundscape.
* Maintained as an expandable array.
*/
SoundscapeRetained[] intersectedSoundscapes = new SoundscapeRetained[32];
/**
* Array of Bounds nodes for the corresponding intersectedSoundscapes
* array. This array is used when selecting the closest Soundscape.
* Maintained as an expandable array.
*/
Bounds[] intersectedRegions = new Bounds[32];
/**
* Reference to last processed region within run().
* Maintained to avoid re-transforming this bounds.
*/
Bounds region = null;
/**
* A list of sound atoms for the currently "live" sounds, ordered by priority.
* This prioritized sound list is NO longer re-created; instead sounds
* are inserted, shuffled or removed as messages are processed.
*/
// XXXX: (Enhancement) should have a separate list for
// background sound and a list for positional sounds
ArrayList<SoundSchedulerAtom> prioritizedSounds = new ArrayList<SoundSchedulerAtom>();
/**
* Current number of scene graph sound nodes in the universe
*/
int nRetainedSounds = -1; // none calculated yet
/**
* Current number of immediate mode sound nodes in the universe
*/
int nImmedSounds = -1; // none calculated yet
/**
* Current active (selected) attribute node in the sceneGraph
*/
AuralAttributesRetained aaRetained = null;
// variables for processing transform messages
boolean transformMsg = false;
UpdateTargets targets = null;
/**
* Current active (selected) aural attributes for immediate-mode nodes
*/
AuralAttributesRetained aaImmed = null;
// Dirty flags for fields and parameters that are unique to the
// Sound Scheduler or the Audio Device
// Any listener (body) and/or view transform changes processed in
// CanvasViewCache force one of these flags to be set.
static final int EAR_POSITIONS_CHANGED = 0x0001;
static final int EYE_POSITIONS_CHANGED = 0x0002;
static final int IMAGE_PLATE_TO_VWORLD_CHANGED = 0x0004;
static final int HEAD_TO_VWORLD_CHANGED = 0x0008;
static final int LISTENER_CHANGED = 0x000F;// all of the above
private int listenerUpdated = LISTENER_CHANGED;
/**
* Temporary flag that denotes that a positional sound was processed
* in the current pass of renderChanges().
*/
private boolean positionalSoundUpdated = false;
/**
* Temporary flag that denotes that some AuralAttributes field was changed
*/
private boolean auralAttribsChanged = true; // force processing 1st x
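// Flags for stalling the scheduler; lastEventReceived tracks the most
// recent window event received for the view (assumed use).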
private boolean stallThread = false;
int lastEventReceived = WindowEvent.WINDOW_CLOSED;
/**
* Constructs a new SoundScheduler
*/
SoundScheduler(VirtualUniverse u, View v) {
super(u, J3dThread.SOUND_SCHEDULER);
// Assertion check view & universe
if (v == null) {
System.err.println("WARNING: SoundScheduler constructed with null view");
}
if (u == null) {
System.err.println("WARNING: SoundScheduler constructed with null universe");
}
universe = u;
view = v;
reset();
}
// NOTE: processMessages is only called with updateThread.active true
@Override
void processMessages(long referenceTime) {
J3dMessage[] messages = getMessages(referenceTime);
int nMsg = getNumMessage();
J3dMessage m;
int nSounds;
if (nMsg > 0) {
for (int i=0; i < nMsg; i++) {
m = messages[i];
switch (m.type) {
case J3dMessage.INSERT_NODES:
insertNodes(m);
break;
case J3dMessage.REMOVE_NODES:
removeNodes(m);
break;
case J3dMessage.SOUND_ATTRIB_CHANGED:
changeNodeAttrib(m);
break;
case J3dMessage.SOUND_STATE_CHANGED:
changeNodeState(m);
break;
case J3dMessage.BOUNDINGLEAF_CHANGED:
processBoundingLeafChanged(m);
break;
case J3dMessage.SOUNDSCAPE_CHANGED:
SoundscapeRetained ss = (SoundscapeRetained)m.args[0];
if (universe.soundStructure.isSoundscapeScopedToView(ss, view)) {
auralAttribsChanged = true;
changeNodeAttrib(m);
}
break;
case J3dMessage.AURALATTRIBUTES_CHANGED:
auralAttribsChanged = true;
changeNodeAttrib(m);
break;
case J3dMessage.MEDIA_CONTAINER_CHANGED:
changeNodeAttrib(m);
break;
case J3dMessage.TRANSFORM_CHANGED:
transformMsg = true;
auralAttribsChanged = true;
break;
case J3dMessage.RENDER_IMMEDIATE:
processImmediateNodes(m.args, referenceTime);
break;
case J3dMessage.VIEWSPECIFICGROUP_CHANGED:
processViewSpecificGroupChanged(m);
break;
case J3dMessage.UPDATE_VIEW:
if (debugFlag)
debugPrint(".processMessage() UPDATE_VIEW");
// NOTE: can only rely on seeing UPDATE_VIEW when canvas [re]Created
// AND when view deactivated...
// NOTE:
// temp work-around
// calling prioritizeSounds() wipes out old atom fields
// QUESTION: prioritizedSound is NEVER empty - why if size is 0 can
// .isEmpty return anything but TRUE???
//
if (prioritizedSounds.isEmpty()) {
nSounds = prioritizeSounds();
}
break;
case J3dMessage.SWITCH_CHANGED:
if (debugFlag)
debugPrint(".processMessage() " +
"SWITCH_CHANGED ignored");
break;
} // switch
m.decRefcount();
} // for
if (transformMsg) {
targets = universe.transformStructure.getTargetList();
updateTransformChange(targets, referenceTime);
transformMsg = false;
targets = null;
}
Arrays.fill(messages, 0, nMsg, null);
}
// Call renderChanges within try/catch so errors won't kill
// the SoundScheduler.
try {
renderChanges();
}
catch (RuntimeException e) {
System.err.println("Exception occurred " +
"during Sound rendering:");
e.printStackTrace();
}
catch (Error e) {
// Issue 264 - catch Error
System.err.println("Error occurred " +
"during Sound rendering:");
e.printStackTrace();
}
// what if the user/app makes no change to scenegraph?
// must still re-render after retest for sound complete
// calculate which sound will finish first and set a
// wait time to this shortest time so that scheduler is
// re-entered to process sound complete.
long waitTime = shortestTimeToFinish();
if (waitTime == 0L) {
// come right back
if (debugFlag)
debugPrint(".processMessage calls sendRunMessage " +
"for immediate processing");
VirtualUniverse.mc.sendRunMessage(universe,
J3dThread.SOUND_SCHEDULER);
}
else if (waitTime > 0L) {
// Use TimerThread to send message with sounds complete.
// This uses waitForElapse time to sleep for at least the duration
// returned by shortestTimeToFinish method.
if (debugFlag)
debugPrint(".processMessage calls sendRunMessage " +
"with wait time = " + waitTime );
// QUESTION (ISSUE): even when this is set to a large time
// processMessage is reentered immediately.
// Why is timer thread not waiting??
VirtualUniverse.mc.sendRunMessage(waitTime, view,
J3dThread.SOUND_SCHEDULER);
}
}
void insertNodes(J3dMessage m) {
Object[] nodes = (Object[])m.args[0];
ArrayList viewScopedNodes = (ArrayList)m.args[3];
ArrayList<ArrayList<View>> scopedNodesViewList = (ArrayList<ArrayList<View>>)m.args[4];
// non-scoped nodes (assumed to mirror the view-scoped handling below)
for (int i=0; i<nodes.length; i++) {
Object node = nodes[i];
if (node instanceof SoundRetained) {
nRetainedSounds++;
addSound((SoundRetained) node);
}
else if (node instanceof SoundscapeRetained) {
auralAttribsChanged = true;
}
}
// Handle nodes that are scoped to specific views
if (viewScopedNodes != null) {
int size = viewScopedNodes.size();
for (int i=0; i<size; i++) {
Object node = viewScopedNodes.get(i);
ArrayList<View> vl = scopedNodesViewList.get(i);
// If the node object is scoped to this view, then ..
if (vl.contains(view)) {
if (node instanceof SoundRetained) {
nRetainedSounds++;
// insert sound node into sound scheduler's prioritized list
addSound((SoundRetained) node);
}
else if (node instanceof SoundscapeRetained) {
auralAttribsChanged = true;
}
}
}
}
}
/**
* Add sound to sounds list.
*/
void addSound(SoundRetained sound) {
if (sound == null)
return;
if (debugFlag)
debugPrint(".addSound()");
synchronized (prioritizedSounds) {
addPrioritizedSound(sound);
}
} // end addSound
/**
* Node removed from tree
*/
@Override
void removeNodes(J3dMessage m) {
Object[] nodes = (Object[])m.args[0];
ArrayList viewScopedNodes = (ArrayList)m.args[3];
ArrayList<ArrayList<View>> scopedNodesViewList = (ArrayList<ArrayList<View>>)m.args[4];
// non-scoped nodes (assumed to mirror the view-scoped handling below)
for (int i=0; i<nodes.length; i++) {
Object node = nodes[i];
if (node instanceof SoundRetained) {
SoundSchedulerAtom soundAtom = null;
for (int arrIndx=1; ;arrIndx++) {
soundAtom = findSoundAtom((SoundRetained)node, arrIndx);
if (soundAtom == null)
break;
stopSound(soundAtom, false);
}
}
else if (node instanceof SoundscapeRetained) {
auralAttribsChanged = true;
}
}
// Handle nodes that are scoped to specific views
if (viewScopedNodes != null) {
int size = viewScopedNodes.size();
for (int i=0; i<size; i++) {
Object node = viewScopedNodes.get(i);
ArrayList<View> vl = scopedNodesViewList.get(i);
// If the node object is scoped to this view, then ..
if (vl.contains(view)) {
if (node instanceof SoundRetained) {
SoundSchedulerAtom soundAtom = null;
for (int arrIndx=1; ;arrIndx++) {
soundAtom = findSoundAtom((SoundRetained)node,
arrIndx);
if (soundAtom == null)
break;
stopSound(soundAtom, false);
}
}
else if (node instanceof SoundscapeRetained) {
auralAttribsChanged = true;
}
}
}
}
}
// deletes all instances of the sound nodes from the priority list
void deleteSound(SoundRetained sound) {
if (sound == null)
return;
if (debugFlag)
debugPrint(".deleteSound()");
synchronized (prioritizedSounds) {
if (!prioritizedSounds.isEmpty()) {
// find sound in list and remove it
int arrSize = prioritizedSounds.size();
for (int index=0; index<arrSize; index++) {
SoundSchedulerAtom soundAtom = prioritizedSounds.get(index);
// Assumed match test: the atom references the sound node being
// deleted, either directly or through its scene graph sound.
if (soundAtom.sound == sound || soundAtom.sound.sgSound == sound) {
stopSound(soundAtom, false);
prioritizedSounds.remove(index);
index--;
arrSize--;
}
}
}
}
}
/**
 * A sound node's attributes changed; update the atom(s) that
 * reference it.
 */
void changeNodeAttrib(J3dMessage m) {
Object node = m.args[0];
Object value = m.args[1];
if (debugFlag)
debugPrint(".changeNodeAttrib:");
if (node instanceof SoundRetained &&
universe.soundStructure.isSoundScopedToView(node, view)) {
int attribDirty = ((Integer)value).intValue();
setAttribsDirtyFlag((SoundRetained)node, attribDirty);
// Assumed: the remaining attribute dirty bits (sound data, gain,
// loop count, priority, bounds) are handled here as well in the
// full version.
if ((attribDirty & SoundRetained.MUTE_DIRTY_BIT) > 0) {
if (debugFlag)
debugPrint(" MuteDirtyBit is on");
muteSound((SoundRetained) node);
}
if ((attribDirty & SoundRetained.PAUSE_DIRTY_BIT) > 0) {
if (debugFlag)
debugPrint(" PauseDirtyBit is on");
pauseSound((SoundRetained) node);
}
}
else if (node instanceof SoundscapeRetained &&
universe.soundStructure.isSoundscapeScopedToView(node, view)) {
auralAttribsChanged = true;
}
else if (node instanceof AuralAttributesRetained) {
auralAttribsChanged = true;
}
else if (node instanceof MediaContainerRetained) {
int listSize = ((Integer)m.args[2]).intValue();
ArrayList userList = (ArrayList)m.args[3];
for (int i = 0; i < listSize; i++) {
SoundRetained sound = (SoundRetained)userList.get(i);
if (sound != null) {
loadSound(sound, true);
if (debugFlag)
debugPrint(".changeNodeAttrib " +
"MEDIA_CONTAINER_CHANGE calls loadSound");
}
}
}
}
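/**
 * A sound node's state (live, enable) changed; update the atom(s)
 * that reference it.
 */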
void changeNodeState(J3dMessage m) {
Object node = m.args[0];
Object value = m.args[1];
if (debugFlag)
debugPrint(".changeNodeState:");
if (node instanceof SoundRetained && universe.soundStructure.isSoundScopedToView(node, view)) {
int stateDirty = ((Integer)value).intValue();
setStateDirtyFlag((SoundRetained)node, stateDirty);
if (debugFlag)
debugPrint(" Sound node dirty bit = "+stateDirty);
if ((stateDirty & SoundRetained.LIVE_DIRTY_BIT) > 0) {
if (debugFlag)
debugPrint(".changeNodeState LIVE_DIRTY_BIT " +
"calls loadSound");
loadSound((SoundRetained) node, false);
}
if ((stateDirty & SoundRetained.ENABLE_DIRTY_BIT) > 0) {
if (debugFlag)
debugPrint(" EnableDirtyBit is on");
if (((Boolean) m.args[4]).booleanValue()) {
enableSound((SoundRetained) node);
} else {
SoundSchedulerAtom soundAtom;
SoundRetained soundRetained = (SoundRetained) node;
for (int i=prioritizedSounds.size()-1; i >=0; i--) {
soundAtom = prioritizedSounds.get(i);
if (soundAtom.sound.sgSound == soundRetained) {
// ignore soundRetained.release
// flag which is not implement
turnOff(soundAtom);
// Fix to Issue 431.
soundAtom.enable(soundRetained.enable);
}
}
}
}
}
}
void shuffleSound(SoundRetained sound) {
// Find sound atom that references this sound node and
// reinsert it into prioritized sound list by removing atom for
// this sound from priority list, then re-add it.
// Assumes priority has really changed since a message is not sent
// to the scheduler if the 'new' priority value isn't different.
deleteSound(sound); // remove atom for this sound
addSound(sound); // then re-insert it back into list in new position
}
void loadSound(SoundRetained sound, boolean forceReload) {
// find sound atom that references this sound node
// QUESTION: "node" probably not mirror node?
SoundSchedulerAtom soundAtom = null;
for (int i=1; ;i++) {
soundAtom = findSoundAtom(sound, i);
if (soundAtom == null)
break;
MediaContainer mediaContainer = sound.getSoundData();
if (forceReload ||
soundAtom.loadStatus != SoundRetained.LOAD_COMPLETE) {
if (debugFlag)
debugPrint(": not LOAD_COMPLETE - try attaching");
attachSoundData(soundAtom, mediaContainer, forceReload);
}
}
}
void enableSound(SoundRetained sound) {
if (debugFlag)
debugPrint(".enableSound " + sound );
// find sound atom that references this sound node
SoundSchedulerAtom soundAtom = null;
for (int i=1; ;i++) {
soundAtom = findSoundAtom(sound, i);
if (soundAtom == null)
break;
// Set atom enabled field based on current Sound node
// enable boolean flag
soundAtom.enable(sound.enable);
}
}
void muteSound(SoundRetained sound) {
// make mute pending
// mute -> MAKE-SILENT
// unmute -> MAKE-AUDIBLE
if (debugFlag)
debugPrint(".muteSound " + sound );
// find sound atom that references this sound node
SoundSchedulerAtom soundAtom = null;
for (int i=1; ;i++) {
soundAtom = findSoundAtom(sound, i);
if (soundAtom == null)
break;
// Set atom mute field based on node current
// mute boolean flag
soundAtom.mute(sound.mute);
}
}
void pauseSound(SoundRetained sound) {
// make pause pending
// Pause is a separate action
// When resumed it has to reset its state
// PAUSE_AUDIBLE
// PAUSE_SILENT
// RESUME_AUDIBLE
// RESUME_SILENT
// to whatever it was before
if (debugFlag)
debugPrint(".pauseSound " + sound );
// find sound atom that references this sound node
SoundSchedulerAtom soundAtom = null;
for (int i=1; ;i++) {
soundAtom = findSoundAtom(sound, i);
if (soundAtom == null)
break;
// Set atom pause field based on node's current
// pause boolean flag
soundAtom.pause(sound.pause);
}
}
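/**
 * Add, remove or replace an immediate-mode sound in response to a
 * GraphicsContext3D RENDER_IMMEDIATE message.
 */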
void processImmediateNodes(Object[] args, long referenceTime) {
Object command = args[0];
Object newNode = args[1];
Object oldNode = args[2];
Sound oldSound = (Sound)oldNode;
Sound newSound = (Sound)newNode;
int action = ((Integer)command).intValue();
if (debugFlag)
debugPrint(".processImmediateNodes() - action = " +
action);
switch (action) {
case GraphicsContext3D.ADD_SOUND :
case GraphicsContext3D.INSERT_SOUND :
addSound((SoundRetained)newSound.retained);
nImmedSounds++;
break;
case GraphicsContext3D.REMOVE_SOUND :
deleteSound((SoundRetained)oldSound.retained);
nImmedSounds--;
break;
case GraphicsContext3D.SET_SOUND :
deleteSound((SoundRetained)oldSound.retained);
addSound((SoundRetained)newSound.retained);
break;
}
}
void updateTransformChange(UpdateTargets targets, long referenceTime) {
// node.updateTransformChange() called immediately rather than
// waiting for updateObject to be called and process xformChangeList
// which appears to only happen when sound started...
UnorderList arrList = targets.targetList[Targets.SND_TARGETS];
if (arrList != null) {
int j,i;
Object nodes[], nodesArr[];
int size = arrList.size();
nodesArr = arrList.toArray(false);
for (j = 0; j < size; j++) {
nodes = (Object[])nodesArr[j];
for (i = 0; i < nodes.length; i++) {
SoundRetained sound = (SoundRetained)nodes[i];
// Assumed handling: recompute the transformed fields and mark
// the atom(s) so the new transform is sent to the audio device.
updateTransformedFields(sound);
setStateDirtyFlag(sound, SoundRetained.XFORM_DIRTY_BIT);
}
}
}
}
/**
 * Render sound changes to the audio device.
 * Assumed reduced form: soundscape selection and aural attribute
 * updates are performed before the scheduling and muting steps below.
 */
void renderChanges() {
if (!checkAudioDevice3D())
return;
if (auralAttribsChanged) {
int nSelectedSScapes = findActiveSoundscapes();
if (nSelectedSScapes > 0) {
aaRetained = findClosestAAttribs(nSelectedSScapes);
}
updateAuralAttribs(aaRetained);
}
int numActiveSounds = 0;
int nSounds;
synchronized (prioritizedSounds) {
nSounds = prioritizedSounds.size();
}
if (nSounds > 0) {
calcSchedulingAction();
muteSilentSounds();
// short term flag set within performActions->update()
positionalSoundUpdated = false;
// if listener parameters changed re-set View parameters
if (testListenerFlag()) {
if (debugFlag)
debugPrint(" audioDevice3D.setView");
audioDevice3D.setView(view);
}
numActiveSounds = performActions();
if (positionalSoundUpdated) {
// if performActions updated at least one positional sound
// was processed so the listener/view changes were processed,
// thus we can clear the SoundScheduler dirtyFlag, otherwise
// leave the flag dirty until a positional sound is updated
clearListenerFlag(); // clears listenerUpdated flag
}
}
/*
}
*/
}
/**
* Prioritize all sounds associated with SoundScheduler (view)
* This only need be done once when scheduler is initialized since
* the priority list is updated when:
* a) PRIORITY_DIRTY_BIT in soundDirty field set; or
* b) sound added or removed from live array list
*/
int prioritizeSounds() {
int size;
synchronized (prioritizedSounds) {
if (!prioritizedSounds.isEmpty()) {
prioritizedSounds.clear();
}
// XXXX: sync soundStructure sound list
UnorderList retainedSounds = universe.soundStructure.getSoundList(view);
// QUESTION: what is in this sound list??
// mirror node or actual node???
nRetainedSounds = 0;
nImmedSounds = 0;
if (debugFlag)
debugPrint(" prioritizeSound , num retained sounds" +
retainedSounds.size());
Object[] soundsArr = retainedSounds.toArray(false);
for (int i=0; i<retainedSounds.size(); i++) {
addPrioritizedSound((SoundRetained)soundsArr[i]);
nRetainedSounds++;
}
// Add immediate-mode sounds attached to each canvas' graphics context
Enumeration<Canvas3D> canvases = view.getAllCanvas3Ds();
while (canvases.hasMoreElements()) {
Canvas3D canvas = canvases.nextElement();
GraphicsContext3D graphicsContext = canvas.getGraphicsContext3D();
Enumeration nonretainedSounds = graphicsContext.getAllSounds();
while (nonretainedSounds.hasMoreElements()) {
if (debugFlag)
debugPrint(" prioritizeSound , get non-retained sound");
Sound sound = (Sound)nonretainedSounds.nextElement();
if (sound == null) {
if (debugFlag)
debugPrint(" prioritizeSound , sound element is null");
// QUESTION: why should I have to do this?
continue;
}
addPrioritizedSound((SoundRetained)sound.retained);
nImmedSounds++;
}
}
if (debugFlag)
debugPrint(" prioritizeSound , num of processed retained sounds" +
nRetainedSounds);
debugPrint(" prioritizeSound , num of processed non-retained sounds" +
nImmedSounds);
size = prioritizedSounds.size();
} // sync
return size;
}
// methods that call this should synchronize prioritizedSounds
void addPrioritizedSound(SoundRetained mirSound) {
SoundRetained sound = mirSound.sgSound;
if (sound == null) { // this mirSound is a nonretained sound
// pad the "child" sg sound pointer with itself
mirSound.sgSound = mirSound;
sound = mirSound;
if (debugFlag)
debugPrint(":addPritorizedSound() sound NULL");
}
boolean addAtom = false;
// see if this atom is in the list already
// covers the case where the node was detached or unswitched but NOT
// deleted (so the sample is already loaded)
// QUESTION: is above logic correct???
SoundSchedulerAtom atom = null;
atom = findSoundAtom(mirSound, 1); // look thru list for 1st instance
if (atom == null) {
atom = new SoundSchedulerAtom();
atom.soundScheduler = this; // save scheduler atom is associated with
addAtom = true;
}
// update fields in atom based on sound nodes state
atom.sound = mirSound; // new mirror sound
updateTransformedFields(mirSound);
if ( !addAtom ) {
return;
}
// if this atom being added then set the enable state
atom.enable(sound.enable);
if (prioritizedSounds.isEmpty()) {
// List is currently empty, so just add it
// insert into empty list of prioritizedSounds
prioritizedSounds.add(atom);
if (debugFlag)
debugPrint(":addPritorizedSound() inset sound " +
mirSound + " into empty priority list");
}
else {
// something's in the priority list already
// Since list is not empty insert sound into list.
//
// Order is highest to lowest priority values, and
// for sounds with equal priority values, sounds
// inserted first are given higher priority.
SoundRetained jSound;
SoundSchedulerAtom jAtom;
int j;
int jsounds = (prioritizedSounds.size() - 1);
float soundPriority = sound.priority;
for (j=jsounds; j>=0; j--) {
jAtom = prioritizedSounds.get(j);
jSound = jAtom.sound;
if (debugFlag)
debugPrint(": priority of sound " + jSound.sgSound +
" element " + (j+1) + " of prioritized list");
if (soundPriority <= jSound.sgSound.priority) {
if (j==jsounds) {
// last element's priority is larger than
// current sound's priority, so add this
// sound to the end of the list
prioritizedSounds.add(atom);
if (debugFlag)
debugPrint(": insert sound at list bottom");
break;
}
else {
if (debugFlag)
debugPrint(
": insert sound as list element " +
(j+1));
prioritizedSounds.add(j+1, atom);
break;
}
}
} // for loop
if (j < 0) { // insert at the top of the list
if (debugFlag)
debugPrint(": insert sound at top of priority list");
prioritizedSounds.add(0, atom);
}
} // else list not empty
}
/**
* Process active Soundscapes (if there are any) and intersect these
* soundscapes with the viewPlatform.
*
* Returns the number of soundscapes that intersect with the
* view volume.
*/
int findActiveSoundscapes() {
int nSscapes = 0;
int nSelectedSScapes = 0;
SoundscapeRetained ss = null;
SoundscapeRetained lss = null;
boolean intersected = false;
int nUnivSscapes = 0;
UnorderList soundScapes = null;
// Make a copy of references to the soundscapes in the universe
// that are both switch on and have non-null (transformed) regions,
// don't bother testing for intersection with view.
if (universe == null) {
if (debugFlag)
debugPrint(".findActiveSoundscapes() univ=null");
return 0;
}
soundScapes = universe.soundStructure.getSoundscapeList(view);
if (soundScapes == null) {
if (debugFlag)
debugPrint(".findActiveSoundscapes() soundScapes null");
return 0;
}
synchronized (soundScapes) {
nUnivSscapes = soundScapes.size();
if (nUnivSscapes == 0) {
if (debugFlag)
debugPrint(
".findActiveSoundscapes() soundScapes size=0");
return 0;
}
// increase arrays lengths by increments of 32 elements
if (intersectedRegions.length < nUnivSscapes) {
intersectedRegions = new Bounds[nUnivSscapes + 32];
}
if (intersectedSoundscapes.length < nUnivSscapes) {
intersectedSoundscapes = new SoundscapeRetained[nUnivSscapes + 32];
}
// nSelectedSScapes is incremented for every intersecting Soundscape found
if (debugFlag)
debugPrint(".findActiveSoundscapes() nUnivSscapes="+
nUnivSscapes);
nSelectedSScapes = 0;
Object[] scapeArr = soundScapes.toArray(false);
for (int k=0; k<nUnivSscapes; k++) {
lss = (SoundscapeRetained)scapeArr[k];
// Assumed test: keep soundscapes whose transformed regions
// intersect the view platform's scheduling bounds.
if (viewPlatform != null &&
lss.transformedRegion != null &&
lss.transformedRegion.intersect(viewPlatform.schedSphere)) {
intersectedSoundscapes[nSelectedSScapes] = lss;
intersectedRegions[nSelectedSScapes] = lss.transformedRegion;
nSelectedSScapes++;
}
}
} // end synchronized
return nSelectedSScapes;
}
/**
 * Select the aural attributes of the soundscape closest to the view
 * platform among the soundscapes found by findActiveSoundscapes().
 * (Assumed helper; the original method name was not preserved.)
 */
AuralAttributesRetained findClosestAAttribs(int nSelectedSScapes) {
AuralAttributes aa = null;
SoundscapeRetained ss = null;
if (nSelectedSScapes == 1) {
ss = intersectedSoundscapes[0];
}
else if (nSelectedSScapes > 1) {
Bounds closestRegions;
closestRegions = viewPlatform.schedSphere.closestIntersection(
intersectedRegions);
for (int j=0; j < intersectedRegions.length; j++) {
if (debugFlag)
debugPrint(" element " + j +
" in intersectedSoundsscapes is " + intersectedRegions[j]);
if (intersectedRegions[j] == closestRegions) {
ss = intersectedSoundscapes[j];
if (debugFlag)
debugPrint(" element " + j + " is closest");
break;
}
}
}
if (ss != null) {
if (debugFlag)
debugPrint(" closest SoundScape found is " + ss);
aa = ss.getAuralAttributes();
if (aa != null) {
if (debugFlag)
debugPrint(": AuralAttribute for " +
"soundscape is NOT null");
} else {
if (debugFlag)
debugPrint(": AuralAttribute for " +
"soundscape " + ss + " is NULL");
}
}
else {
if (debugFlag)
debugPrint(": AuralAttribute is null " +
"since soundscape is NULL");
}
if (debugFlag)
debugPrint(
" auralAttrib for closest SoundScape found is " + aa);
return ((AuralAttributesRetained)aa.retained);
}
/**
* Send current aural attributes to audio device
*
* Note that an AA's dirtyFlag is cleared only after parameters are sent to the
* audio device.
*/
void updateAuralAttribs(AuralAttributesRetained attribs) {
if (auralAttribsChanged) {
if (attribs != null) {
synchronized (attribs) {
/*
// XXXX: remove use of aaDirty from AuralAttrib node
if ((attribs != lastAA) || attribs.aaDirty)
*/
if (debugFlag) {
debugPrint(" set real updateAuralAttribs because");
}
// Send current aural attributes to audio device
// Assumes that aural attribute parameter is NOT null.
audioDevice3D.setRolloff(attribs.rolloff);
if (debugFlag)
debugPrint(" rolloff " + attribs.rolloff);
// Distance filter parameters
int arraySize = attribs.getDistanceFilterLength();
if ((attribs.filterType ==
AuralAttributesRetained.NO_FILTERING) ||
arraySize == 0 ) {
audioDevice3D.setDistanceFilter(
attribs.NO_FILTERING, null, null);
if (debugFlag)
debugPrint(" no filtering");
}
else {
Point2f[] attenuation = new Point2f[arraySize];
for (int i=0; i< arraySize; i++)
attenuation[i] = new Point2f();
attribs.getDistanceFilter(attenuation);
double[] distance = new double[arraySize];
float[] cutoff = new float[arraySize];
for (int i=0; i< arraySize; i++) {
distance[i] = attenuation[i].x;
cutoff[i] = attenuation[i].y;
}
audioDevice3D.setDistanceFilter(attribs.filterType,
distance, cutoff);
if (debugFlag) {
debugPrint(" filtering parameters: " +
" distance, cutoff arrays");
for (int jj=0; jj<arraySize; jj++)
debugPrint(" " + distance[jj] + ", " + cutoff[jj]);
}
}
// Assumed: frequency and velocity scale factors and reverberation
// parameters are also sent to the audio device at this point.
lastAA = attribs;
}
}
auralAttribsChanged = false;
}
}
/**
 * Determine the final scheduling action for a single sound atom.
 * Assumed reduced form: the atom's schedulingAction is expected to have
 * been set from its enable/mute/pause pending state before this
 * completion test refines it.
 */
void processSoundAtom(SoundSchedulerAtom soundAtom) {
long currentTime = J3dClock.currentTimeMillis();
switch (soundAtom.schedulingAction) {
case SoundSchedulerAtom.LEAVE_AUDIBLE:
case SoundSchedulerAtom.LEAVE_SILENT:
// test if the sample has finished playing
if (soundAtom.endTime > 0 && soundAtom.endTime <= currentTime) {
// sound's completed playing, force action
soundAtom.schedulingAction = SoundSchedulerAtom.COMPLETE;
if (debugFlag)
debugPrint(": sample complete;"+
" endTime = " + soundAtom.endTime +
", currentTime = " + currentTime +
" so turned off");
soundAtom.status = SoundSchedulerAtom.SOUND_COMPLETE;
turnOff(soundAtom); // Stop sound in device that are complete
if (debugFlag)
debugPrint(": sound "+soundAtom.sampleId+
" action COMPLETE results in call to stop");
}
break;
case SoundSchedulerAtom.RESTART_AUDIBLE:
case SoundSchedulerAtom.START_AUDIBLE:
case SoundSchedulerAtom.RESTART_SILENT:
case SoundSchedulerAtom.START_SILENT:
break;
default: // includes COMPLETE, DO_NOTHING
soundAtom.schedulingAction = SoundSchedulerAtom.DO_NOTHING;
break;
} // switch
if (debugFlag)
debugPrint(": final scheduling action " +
"set to " + soundAtom.schedulingAction);
}
/**
* Determine scheduling action for each live sound
*/
int calcSchedulingAction() {
// Temp variables
SoundRetained sound;
SoundRetained mirSound;
SoundSchedulerAtom soundAtom;
SoundRetained jSound;
int nSounds = 0;
boolean processSound;
// number of sounds to process including scene graph and immediate nodes
int numSoundsToProcess = 0;
if (universe == null) {
if (debugFlag)
debugPrint(
": calcSchedulingAction: univ NULL");
return 0;
}
if (universe.soundStructure == null) {
if (debugFlag)
debugPrint(
": calcSchedulingAction: soundStructure NULL");
return 0;
}
// List of prioritized "live" sounds taken from universe list of sounds.
// Maintained as an expandable array - start out with a small number of
// elements for this array then grow the list larger if necessary...
synchronized (prioritizedSounds) {
nSounds = prioritizedSounds.size();
if (debugFlag)
debugPrint(
": calcSchedulingAction: soundsList size = " +
nSounds);
// (Large) Loop over all switched on sounds and conditionally put
// these into a order prioritized list of sound.
// Try throw out as many sounds as we can:
// Sounds finished playing (reached end before stopped)
// Sounds still yet to be loaded
// Positional sounds whose regions don't intersect view
// Sound to be stopped
// Those sounds remaining are inserted into a prioritized list
for (int i=0; i<nSounds; i++) {
soundAtom = prioritizedSounds.get(i);
mirSound = soundAtom.sound;
sound = mirSound.sgSound;
// Assumed filter (see the comment above): only schedule atoms whose
// sample data has been loaded; all others get no action this pass.
processSound = (soundAtom.loadStatus == SoundRetained.LOAD_COMPLETE);
if (processSound) {
numSoundsToProcess++;
if (debugFlag) {
debugPrint(" >>>>>sound using sgSound at " + sound);
printAtomState(soundAtom);
}
processSoundAtom(soundAtom);
} // end of process sound
else {
soundAtom.schedulingAction = SoundSchedulerAtom.DO_NOTHING;
} // end of not process sound
} // end loop over all sound in soundList
} // sync
if (debugFlag) {
if (numSoundsToProcess > 0)
debugPrint(": number of liveSounds = " + numSoundsToProcess);
else
debugPrint(": number of liveSounds <= 0");
}
return numSoundsToProcess;
}
/**
* Mute sounds that are to be played silently.
*
* Not all the sounds in the prioritized enabled sound list
* may be able to be played. Due to low priority, some sounds
* must be muted/silenced (if such an action frees up channel
* resources) to make way for sounds with higher priority.
* For each sound in priority list:
* For sounds whose actions are X_SILENT:
* Mute sounds to be silenced
* Add the number of channels used by this muted sound to
* current total number of channels used
* For all remaining sounds (with actions other than above)
* The number of channels that 'would be used' to play
* potentially audible sounds is compared with
* the number left on the device:
* If this sound would use more channels than available
* Change its X_AUDIBLE action to X_SILENT
* Mute sounds to be silenced
* Add the number of channels used by this sound, muted
* or not, to current total number of channels used
*
* NOTE: requests for sounds to play beyond channel capability of
* the audio device do NOT throw an exception when more sounds are
* started than can be played. Rather the unplayable sounds are
* muted. It is up to the AudioDevice3D implementation to determine
* how muted/silent sounds are implemented (playing with gain zero
* and thus using up channel resources, or stopped and restarted with
* the correct offset when inactivated and then re-activated).
*/
void muteSilentSounds() {
// Temp variables
SoundRetained sound;
SoundRetained mirSound;
int totalChannelsUsed = 0;
SoundSchedulerAtom soundAtom;
int nAtoms;
synchronized (prioritizedSounds) {
nAtoms = prioritizedSounds.size();
if (debugFlag)
debugPrint(".muteSilentSounds(): Loop over prioritizedSounds list, " +
"size = " + nAtoms);
for (int i=0; i<nAtoms; i++) {
soundAtom = prioritizedSounds.get(i);
mirSound = soundAtom.sound;
sound = mirSound.sgSound;
int sampleId = soundAtom.sampleId;
int status = soundAtom.status;
if (sampleId == SoundRetained.NULL_SOUND)
continue;
int numberChannels = audioDevice3D.getNumberOfChannelsUsed(sampleId);
// Sounds whose action is X_SILENT are muted outright; their channels
// still count against the device total (assumed accounting).
if ((soundAtom.schedulingAction == SoundSchedulerAtom.MAKE_SILENT) ||
(soundAtom.schedulingAction == SoundSchedulerAtom.RESTART_SILENT) ||
(soundAtom.schedulingAction == SoundSchedulerAtom.LEAVE_SILENT) ||
(soundAtom.schedulingAction == SoundSchedulerAtom.START_SILENT)) {
muteSilentSound(soundAtom);
soundAtom.numberChannels = numberChannels;
totalChannelsUsed += numberChannels;
}
else {
// potentially audible sound: silence it if not enough channels are free
if ((totalChannelsUsed + numberChannels) > totalChannels) {
if ((soundAtom.schedulingAction == SoundSchedulerAtom.MAKE_AUDIBLE) ||
(soundAtom.schedulingAction == SoundSchedulerAtom.LEAVE_AUDIBLE)) {
soundAtom.schedulingAction = SoundSchedulerAtom.MAKE_SILENT;
}
else if (soundAtom.schedulingAction == SoundSchedulerAtom.RESTART_AUDIBLE)
soundAtom.schedulingAction = SoundSchedulerAtom.RESTART_SILENT;
else if (soundAtom.schedulingAction == SoundSchedulerAtom.START_AUDIBLE)
soundAtom.schedulingAction = SoundSchedulerAtom.START_SILENT;
else if (soundAtom.schedulingAction == SoundSchedulerAtom.PAUSE_AUDIBLE)
soundAtom.schedulingAction = SoundSchedulerAtom.PAUSE_SILENT;
else if (soundAtom.schedulingAction == SoundSchedulerAtom.RESUME_AUDIBLE)
soundAtom.schedulingAction = SoundSchedulerAtom.RESUME_SILENT;
audioDevice3D.muteSample(sampleId);
if (debugFlag) {
debugPrint(": sound " + sampleId +
"number of channels needed is " +
numberChannels);
debugPrint(": sound " + sampleId +
" action is x_AUDIBLE but " +
"not enough channels free (" +
(totalChannels - totalChannelsUsed) +
") so, sound muted");
}
}
// sound has enough channels to play
else if (status != SoundSchedulerAtom.SOUND_AUDIBLE) {
// old status is not already unmuted/audible
audioDevice3D.unmuteSample(sampleId);
if (debugFlag)
debugPrint(": sound " + sampleId +
" action is x_AUDIBLE and channels free so, " +
"sound unmuted");
}
// now that the exact muting state is known (re-)get actual
// number of channels used by this sound and add to total
numberChannels =
audioDevice3D.getNumberOfChannelsUsed(sampleId);
soundAtom.numberChannels = numberChannels; // used in audio device
totalChannelsUsed += numberChannels;
} // otherwise, scheduling is for a potentially audible sound
// No sound in list should have action TURN_ or LEAVE_OFF
} // of for loop over sounds in list
}
}
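/**
 * Mute a single sound atom whose scheduling action is one of the
 * X_SILENT actions.
 */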
void muteSilentSound(SoundSchedulerAtom soundAtom) {
// Temp variables
SoundRetained sound;
SoundRetained mirSound;
mirSound = (SoundRetained)soundAtom.sound;
sound = mirSound.sgSound;
int sampleId = soundAtom.sampleId;
int status = soundAtom.status;
if (status == SoundSchedulerAtom.SOUND_COMPLETE) {
return;
}
if (sampleId == SoundRetained.NULL_SOUND) {
return;
}
if (debugFlag) {
debugPrint(": contents of current sound " +
soundAtom.sampleId + " before switch on sAction" );
printAtomState(soundAtom);
}
if ( (soundAtom.schedulingAction == SoundSchedulerAtom.MAKE_SILENT) ||
(soundAtom.schedulingAction == SoundSchedulerAtom.RESTART_SILENT) ||
(soundAtom.schedulingAction == SoundSchedulerAtom.LEAVE_SILENT) ||
(soundAtom.schedulingAction == SoundSchedulerAtom.START_SILENT) ) {
// Mute sounds that are not already silent
if (status != SoundSchedulerAtom.SOUND_SILENT) {
// old status is not already muted/silent
audioDevice3D.muteSample(sampleId);
if (debugFlag)
debugPrint(": sound " + sampleId +
" action is x_SILENT, sound muted");
}
} // scheduling is for silent sound
}
/**
* Determine the amount of time before the next playing sound
* is complete.
*
* Find the atom that has the least amount of time before it is
* finished playing and return this time.
* @return length of time in millisecond until the next active sound
* will be complete. Returns -1 if no sounds are playing (or all are
* complete).
*/
long shortestTimeToFinish() {
long currentTime = J3dClock.currentTimeMillis();
long shortestTime = -1L;
SoundSchedulerAtom soundAtom;
synchronized (prioritizedSounds) {
int nAtoms = prioritizedSounds.size();
for (int i=0; i<nAtoms; i++) {
soundAtom = prioritizedSounds.get(i);
// only playing sounds have a meaningful end time
if (soundAtom.playing && soundAtom.endTime > 0) {
long timeLeft = soundAtom.endTime - currentTime;
if (timeLeft < 0)
timeLeft = 0;
if (shortestTime < 0 || timeLeft < shortestTime)
shortestTime = timeLeft;
}
}
}
return shortestTime;
}
/**
 * Perform the scheduling action for every atom in the prioritized
 * sound list (start, update or turn off the corresponding sample).
 * Assumed reduced form: the full version also handles pause, resume
 * and silent starts.
 * @return the number of active sounds
 */
int performActions() {
int numActive = 0;
synchronized (prioritizedSounds) {
int nAtoms = prioritizedSounds.size();
for (int i=0; i<nAtoms; i++) {
SoundSchedulerAtom soundAtom = prioritizedSounds.get(i);
switch (soundAtom.schedulingAction) {
case SoundSchedulerAtom.START_AUDIBLE:
case SoundSchedulerAtom.RESTART_AUDIBLE:
start(soundAtom);
numActive++;
break;
case SoundSchedulerAtom.MAKE_AUDIBLE:
case SoundSchedulerAtom.LEAVE_AUDIBLE:
update(soundAtom);
numActive++;
break;
case SoundSchedulerAtom.COMPLETE:
turnOff(soundAtom);
break;
default:
break;
}
}
}
return numActive;
}
/**
 * Stop the sample associated with this atom on the audio device.
 * (Assumed minimal form.)
 */
void turnOff(SoundSchedulerAtom soundAtom) {
int index = soundAtom.sampleId;
if (index == SoundRetained.NULL_SOUND)
return;
audioDevice3D.stopSample(index);
soundAtom.playing = false;
}
/**
 * Start a sound sample playing on the audio device.
 */
void start(SoundSchedulerAtom soundAtom) {
int index = soundAtom.sampleId;
if (index == SoundRetained.NULL_SOUND)
return;
int startStatus = audioDevice3D.startSample(index);
if (startStatus >= 0) {
if (debugFlag)
debugPrint(".start: " + index );
soundAtom.playing = true;
soundAtom.startTime = audioDevice3D.getStartTime(index);
soundAtom.calculateEndTime();
if (debugFlag)
debugPrint(".start: begintime = " +
soundAtom.startTime + ", endtime " + soundAtom.endTime);
}
else { // error returned by audio device when trying to start
soundAtom.startTime = 0;
soundAtom.endTime = 0;
soundAtom.playing = false;
if (debugFlag) {
debugPrint(".start: error " + startStatus +
" returned by audioDevice3D.startSample(" + index
+ ")" );
debugPrint(
" start/endTime set to zero");
}
}
}
/**
* Explicitly update the sound parameters associated with a sample
*/
void update(SoundSchedulerAtom soundAtom) {
int index = soundAtom.sampleId;
if (index == SoundRetained.NULL_SOUND) {
return;
}
SoundRetained sound = soundAtom.sound;
audioDevice3D.updateSample(index);
if (debugFlag) {
debugPrint(".update: " + index );
}
soundAtom.calculateEndTime();
if (sound instanceof PointSoundRetained ||
sound instanceof ConeSoundRetained) {
positionalSoundUpdated = true;
}
}
/**
* stop playing one specific sound node
*
* If the setPending flag is true, the sound is stopped but its enable state
* is set to pending-on so that it can be restarted later.
*/
void stopSound(SoundSchedulerAtom soundAtom, boolean setPending) {
if (audioDevice3D == null)
return;
if (debugFlag)
debugPrint(":stopSound(" + soundAtom +
"), enabled = " + soundAtom.enabled);
switch (soundAtom.enabled) {
case SoundSchedulerAtom.ON:
if (setPending)
soundAtom.setEnableState(SoundSchedulerAtom.PENDING_ON);
else
soundAtom.setEnableState(SoundSchedulerAtom.SOUND_OFF);
break;
case SoundSchedulerAtom.PENDING_OFF:
soundAtom.setEnableState(SoundSchedulerAtom.SOUND_OFF);
break;
case SoundSchedulerAtom.PENDING_ON:
if (!setPending)
// a sound pending to be started will no longer be started
soundAtom.setEnableState(SoundSchedulerAtom.SOUND_OFF);
break;
default:
break;
}
soundAtom.status = SoundSchedulerAtom.SOUND_OFF;
turnOff(soundAtom);
}
/**
* Deactivate all playing sounds.
* If a sound is continuous then silence it but leave it playing,
* otherwise stop the sound.
*/
synchronized void deactivateAllSounds() {
SoundRetained sound;
SoundRetained mirSound;
SoundSchedulerAtom soundAtom;
if (audioDevice3D == null)
return;
if (debugFlag)
debugPrint(".deactivateAllSounds");
// sync this method from interrupting run() while loop
synchronized (prioritizedSounds) {
if (prioritizedSounds != null) {
int nAtoms = prioritizedSounds.size();
if (debugFlag)
debugPrint("silenceAll " + nAtoms + " Sounds");
for (int i=0; i<nAtoms; i++) {
soundAtom = prioritizedSounds.get(i);
mirSound = soundAtom.sound;
sound = mirSound.sgSound;
if (soundAtom.sampleId == SoundRetained.NULL_SOUND)
continue;
// Assumed handling per the comment above: continuous sounds are
// silenced but left playing, all other sounds are stopped with their
// enable state left pending so they can restart when reactivated.
if (sound.continuous) {
audioDevice3D.muteSample(soundAtom.sampleId);
soundAtom.status = SoundSchedulerAtom.SOUND_SILENT;
}
else {
stopSound(soundAtom, true);
}
}
}
}
}
/**
 * Update the audio device with the transformed (virtual world) position
 * and direction of a positional sound.
 * (Assumed signature and locals; only called for positional sounds.)
 */
void updateXformedParams(boolean updateAll, SoundSchedulerAtom soundAtom) {
PointSoundRetained mirrorPtSound = (PointSoundRetained)soundAtom.sound;
int index = soundAtom.sampleId;
if (index == SoundRetained.NULL_SOUND)
return;
/*
// > ~/Current/MoveAppBoundingLeaf.outted,
// instead transformed position and direction
// points/vectors will be passed to AudioDevice directly.
// vvvvvvvvvvvvvvvvvvvvvvvvvvv
if (updateAll || soundAtom.testDirtyFlag(SoundRetained.XFORM_DIRTY_BIT){
Transform3D xform = new Transform3D();
ps.trans.getWithLock(xform);
if (debugFlag) {
debugPrint(".updateXformedParams " +
"setVworldXfrm for ps @ " + ps + ":");
debugPrint(" xformPosition " +
ps.xformPosition.x + ", " +
ps.xformPosition.y + ", " +
ps.xformPosition.z );
debugPrint(" column-major transform ");
debugPrint(" " +
xform.mat[0]+", " + xform.mat[1]+", "+
xform.mat[2]+", " + xform.mat[3]);
debugPrint(" " +
xform.mat[4]+", " + xform.mat[5]+", "+
xform.mat[6]+", " + xform.mat[7]);
debugPrint(" " +
xform.mat[8]+", " + xform.mat[9]+", "+
xform.mat[10]+", " + xform.mat[11]);
debugPrint(" " +
xform.mat[12]+", " + xform.mat[13]+", "+
xform.mat[14]+", " + xform.mat[15]);
}
audioDevice3D.setVworldXfrm(index, xform);
soundAtom.clearStateDirtyFlag( SoundRetained.XFORM_DIRTY_BIT);
// XXXX: make sure position and direction are already transformed and stored
// into xformXxxxxxx fields.
}
// ^^^^^^^^^^^^^^^^^^^^^
*/
// Set Position
if (updateAll || testListenerFlag() ||
soundAtom.testDirtyFlag(soundAtom.attribsDirty,
SoundRetained.POSITION_DIRTY_BIT) ||
soundAtom.testDirtyFlag(soundAtom.stateDirty,
SoundRetained.XFORM_DIRTY_BIT) )
{
Point3f xformLocation = new Point3f();
mirrorPtSound.getXformPosition(xformLocation);
Point3d positionD = new Point3d(xformLocation);
if (debugFlag)
debugPrint("xform'd Position: ("+positionD.x+", "+
positionD.y+", "+ positionD.z+")" );
audioDevice3D.setPosition(index, positionD);
}
// Set Direction
if (mirrorPtSound instanceof ConeSoundRetained) {
ConeSoundRetained cn = (ConeSoundRetained)mirrorPtSound;
ConeSoundRetained cnSound = (ConeSoundRetained)mirrorPtSound.sgSound;
if (updateAll ||
// XXXX: test for XFORM_DIRTY only in for 1.2
soundAtom.testDirtyFlag(soundAtom.attribsDirty,
(SoundRetained.DIRECTION_DIRTY_BIT |
SoundRetained.XFORM_DIRTY_BIT) ) ) {
Vector3f xformDirection = new Vector3f();
cn.getXformDirection(xformDirection);
Vector3d directionD = new Vector3d(xformDirection);
audioDevice3D.setDirection(index, directionD);
}
}
}
void updateSoundParams(boolean updateAll, SoundSchedulerAtom soundAtom,
AuralAttributesRetained attribs) {
SoundRetained mirrorSound = soundAtom.sound;
SoundRetained sound = mirrorSound.sgSound;
int index = soundAtom.sampleId;
int arraySize;
if (index == SoundRetained.NULL_SOUND)
return;
if (debugFlag)
debugPrint(".updateSoundParams(dirytFlags=" +
soundAtom.attribsDirty + ", " + soundAtom.stateDirty + ")");
// since the sound is audible, make sure that the parameters for
// this sound are up-to-date.
if (updateAll || soundAtom.testDirtyFlag(
soundAtom.attribsDirty, SoundRetained.INITIAL_GAIN_DIRTY_BIT)) {
if (attribs != null) {
audioDevice3D.setSampleGain(index,
(sound.initialGain * attribs.attributeGain));
}
else {
audioDevice3D.setSampleGain(index, sound.initialGain);
}
}
if (updateAll || soundAtom.testDirtyFlag(
soundAtom.attribsDirty, SoundRetained.LOOP_COUNT_DIRTY_BIT)) {
if (debugFlag)
debugPrint(" audioDevice.setLoop(" + sound.loopCount +
") called");
audioDevice3D.setLoop(index, sound.loopCount);
}
if (updateAll || soundAtom.testDirtyFlag(
soundAtom.attribsDirty, SoundRetained.RATE_DIRTY_BIT)) {
if (audioDevice3DL2 != null) {
if (debugFlag)
debugPrint(" audioDevice.setRateScaleFactor(" +
sound.rate + ") called");
audioDevice3DL2.setRateScaleFactor(index, sound.rate);
}
}
if (updateAll || soundAtom.testDirtyFlag(
soundAtom.attribsDirty, SoundRetained.DISTANCE_GAIN_DIRTY_BIT)){
if (sound instanceof ConeSoundRetained) {
ConeSoundRetained cnSound = (ConeSoundRetained)sound;
// set distance attenuation
arraySize = cnSound.getDistanceGainLength();
if (arraySize == 0) {
// send default
audioDevice3D.setDistanceGain(index, null, null, null, null);
}
else {
Point2f[] attenuation = new Point2f[arraySize];
Point2f[] backAttenuation = new Point2f[arraySize];
for (int i=0; i< arraySize; i++) {
attenuation[i] = new Point2f();
backAttenuation[i] = new Point2f();
}
cnSound.getDistanceGain(attenuation, backAttenuation);
double[] frontDistance = new double[arraySize];
float[] frontGain = new float[arraySize];
double[] backDistance = new double[arraySize];
float[] backGain = new float[arraySize];
for (int i=0; i< arraySize; i++) {
frontDistance[i] = attenuation[i].x;
frontGain[i] = attenuation[i].y;
backDistance[i] = backAttenuation[i].x;
backGain[i] = backAttenuation[i].y;
}
audioDevice3D.setDistanceGain(index,
frontDistance, frontGain, backDistance, backGain);
}
} // ConeSound distanceGain
else if (sound instanceof PointSoundRetained) {
PointSoundRetained ptSound = (PointSoundRetained)sound;
// set distance attenuation
arraySize = ptSound.getDistanceGainLength();
if (arraySize == 0) {
// send default
audioDevice3D.setDistanceGain(index, null, null, null, null);
}
else {
Point2f[] attenuation = new Point2f[arraySize];
for (int i=0; i< arraySize; i++)
attenuation[i] = new Point2f();
ptSound.getDistanceGain(attenuation);
double[] frontDistance = new double[arraySize];
float[] frontGain = new float[arraySize];
for (int i=0; i< arraySize; i++) {
frontDistance[i] = attenuation[i].x;
frontGain[i] = attenuation[i].y;
}
audioDevice3D.setDistanceGain(index, frontDistance,
frontGain, null, null);
}
} // PointSound distanceGain
}
if ((sound instanceof ConeSoundRetained) &&
(updateAll || soundAtom.testDirtyFlag(soundAtom.attribsDirty,
SoundRetained.ANGULAR_ATTENUATION_DIRTY_BIT)) ) {
// set angular attenuation
ConeSoundRetained cnSound = (ConeSoundRetained)sound;
arraySize = cnSound.getAngularAttenuationLength();
if (arraySize == 0) {
// send default
double[] angle = new double[2];
float[] scaleFactor = new float[2];
angle[0] = 0.0;
angle[1] = (Math.PI)/2.0;
scaleFactor[0] = 1.0f;
scaleFactor[1] = 0.0f;
audioDevice3D.setAngularAttenuation(index,
cnSound.NO_FILTERING,
angle, scaleFactor, null);
}
else {
Point3f[] attenuation = new Point3f[arraySize];
for (int i=0; i< arraySize; i++) {
attenuation[i] = new Point3f();
}
cnSound.getAngularAttenuation(attenuation);
double[] angle = new double[arraySize];
float[] scaleFactor = new float[arraySize];
float[] cutoff = new float[arraySize];
for (int i=0; i< arraySize; i++) {
angle[i] = attenuation[i].x;
scaleFactor[i] = attenuation[i].y;
cutoff[i] = attenuation[i].z;
}
audioDevice3D.setAngularAttenuation(index,
cnSound.filterType,
angle, scaleFactor, cutoff);
}
}
}
/**
* Check (and set if necessary) AudioDevice3D field
*/
boolean checkAudioDevice3D() {
if (universe != null) {
if (universe.currentView != null)
if (universe.currentView.physicalEnvironment != null) {
audioDevice = universe.currentView.physicalEnvironment.audioDevice;
if (audioDevice != null) {
if (audioDevice instanceof AudioDevice3DL2) {
audioDevice3DL2 = (AudioDevice3DL2)audioDevice;
}
if (audioDevice instanceof AudioDevice3D) {
audioDevice3D = (AudioDevice3D)audioDevice;
}
else { // audioDevice is only an instance of AudioDevice
if (internalErrors)
debugPrint("AudioDevice implementation not supported");
// audioDevice3D should already be null
}
}
else {
// if audioDevice is null, clear extended class fields
audioDevice3DL2 = null;
audioDevice3D = null;
}
}
}
if (audioDevice3D == null)
return false;
if (audioDevice3D.getTotalChannels() == 0)
return false; // can not render sounds on AudioEngine that has no channels
return true;
}
/**
* Clears the fields associated with sample data for this sound.
* Assumes soundAtom is non-null, and that non-null atom
* would have non-null sound field.
*/
void clearSoundData(SoundSchedulerAtom soundAtom) {
if (checkAudioDevice3D() &&
soundAtom.sampleId != SoundRetained.NULL_SOUND) {
stopSound(soundAtom, false); // force stop of playing sound
// Unload sound data from AudioDevice
audioDevice3D.clearSound(soundAtom.sampleId);
}
soundAtom.sampleId = SoundRetained.NULL_SOUND;
// set load state into atom
soundAtom.loadStatus = SoundRetained.LOAD_NULL;
// NOTE: setting node load status not 1-to-1 w/actual load;
// this is incorrect
SoundRetained sound = soundAtom.sound;
soundAtom.loadStatus = SoundRetained.LOAD_NULL;
soundAtom.soundData = null;
sound.changeAtomList(soundAtom, SoundRetained.LOAD_NULL);
}
/**
* Attempts to load sound data for a particular sound source onto
* the chosen/initialized audio device
* If this is called, it is assumed that SoundRetained.audioDevice is
* NOT null.
* If an error in loading occurs (an exception is caught,...)
* an error is printed out to stderr - an exception is not thrown.
* @param soundData description of sound source data
*/
// QUESTION: should this method be synchronized?
void attachSoundData(SoundSchedulerAtom soundAtom,
MediaContainer soundData, boolean forceReload) {
if (!forceReload && (soundAtom.soundData == soundData)) {
return;
}
SoundRetained sound = soundAtom.sound.sgSound;
if (!checkAudioDevice3D()) {
if (debugFlag)
debugPrint(".attachSoundData audioDevice3D null");
soundAtom.loadStatus = SoundRetained.LOAD_PENDING;
sound.changeAtomList(soundAtom, SoundRetained.LOAD_PENDING);
return;
}
if (soundAtom.soundData != null) {
// clear sound data field for view specific atom NOT sound node
clearSoundData(soundAtom);
if (soundData == null) {
if (debugFlag)
debugPrint(".attachSoundData with null soundData");
return;
}
}
URL url = ((MediaContainerRetained)sound.soundData.retained).url;
String path = ((MediaContainerRetained)sound.soundData.retained).urlString;
InputStream stream = ((MediaContainerRetained)sound.soundData.retained).inputStream;
if (url == null && path == null && stream == null) {
if (debugFlag)
debugPrint(".attachSoundData with null soundData");
// clear non-null sample associated with this soundData
if (soundAtom.sampleId != SoundRetained.NULL_SOUND) {
clearSoundData(soundAtom);
}
return;
}
int id;
if (sound instanceof ConeSoundRetained)
sound.soundType = AudioDevice3D.CONE_SOUND;
else if (sound instanceof PointSoundRetained)
sound.soundType = AudioDevice3D.POINT_SOUND;
else
sound.soundType = AudioDevice3D.BACKGROUND_SOUND;
if (debugFlag) {
debugPrint(".attachSoundData soundType = " + sound.soundType);
debugPrint(".attachSoundData this is = " + sound);
}
// Clone the MediaContainer associated with this node and
// set the capability bits for this clone to allow access to
// all fields; this copy is passed to the audioDevice.
// As the fields of MediaContainer expand, this code must
// be extended.
MediaContainer cloneMediaContainer = new MediaContainer();
cloneMediaContainer.duplicateAttributes(soundData, true);
cloneMediaContainer.setCapability(MediaContainer.ALLOW_CACHE_READ);
cloneMediaContainer.setCapability(MediaContainer.ALLOW_URL_READ);
id = audioDevice3D.prepareSound(sound.soundType, cloneMediaContainer);
if (debugFlag)
debugPrint(".attachSoundData prepareSound returned " + id);
if (id == SoundRetained.NULL_SOUND) {
soundAtom.loadStatus = SoundRetained.LOAD_FAILED;
// NOTE: setting node load status not 1-to-1 with actual load;
// this is incorrect
sound.changeAtomList(soundAtom, SoundRetained.LOAD_FAILED);
//System.err.println(path + ": "+ J3dI18N.getString("SoundRetained1"));
}
else {
if (debugFlag)
debugPrint(".attachSoundData - sampleId set");
soundAtom.sampleId = id;
// For now loopLength=sampleLength, loop points not supported
long duration = audioDevice3D.getSampleDuration(id);
soundAtom.sampleLength = duration;
soundAtom.loopLength = soundAtom.sampleLength;
// XXXX: for most this will be 0 but not all
soundAtom.loopStartOffset = 0;
soundAtom.attackLength = 0; // portion of sample before loop section
soundAtom.releaseLength = 0; // portion of sample after loop section
soundAtom.loadStatus = SoundRetained.LOAD_COMPLETE;
soundAtom.soundData = soundData;
sound.changeAtomList(soundAtom, SoundRetained.LOAD_COMPLETE);
if (debugFlag)
debugPrint(" attachSoundData; index = "+soundAtom.sampleId);
}
}
SoundSchedulerAtom findSoundAtom(SoundRetained node, int nthInstance) {
// find nth sound atom in the list of prioritized sounds that
// references this sound node
// nthInstance=1 would look for first instance
if (node == null)
return null;
SoundSchedulerAtom returnAtom = null;
synchronized (prioritizedSounds) {
if (!prioritizedSounds.isEmpty()) {
SoundSchedulerAtom soundAtom = null;
int atomFound = 0;
// find the nth atom in the list that references this sound node
int arrSize = prioritizedSounds.size();
for (int index=0; index<arrSize; index++) {
soundAtom = prioritizedSounds.get(index);
// Assumed match test: the atom's mirror sound or its scene graph
// sound references the given node.
if (soundAtom.sound == node || soundAtom.sound.sgSound == node) {
atomFound++;
if (atomFound == nthInstance) {
returnAtom = soundAtom;
break;
}
}
}
}
}
return returnAtom;
}
/**
 * Clear the listener/view dirty flags (set when the head or view
 * transforms change).
 */
void clearListenerFlag() {
listenerUpdated = 0;
}
/**
 * Test whether any listener/view parameters changed since the flags
 * were last cleared.
 */
boolean testListenerFlag() {
// Assumed test against the combined LISTENER_CHANGED mask
if ((listenerUpdated & LISTENER_CHANGED) > 0)
return true;
else
return false;
}
/**
* set dirty flags associated with SoundSchedulerAtom
*/
void setAttribsDirtyFlag(SoundRetained node, int dirtyFlag) {
if (debugFlag)
debugPrint(".setAttribsDirtyFlag " + node );
// find sound atom that references this sound node
SoundSchedulerAtom soundAtom = null;
for (int i=1; ;i++) {
soundAtom = findSoundAtom(node, i);
if (soundAtom == null)
break;
soundAtom.setAttribsDirtyFlag(dirtyFlag);
}
}
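// set state dirty flags on all atoms that reference this sound node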
void setStateDirtyFlag(SoundRetained node, int dirtyFlag) {
if (debugFlag)
debugPrint(".setStateDirtyFlag " + node );
// find sound atom that references this sound node
SoundSchedulerAtom soundAtom = null;
for (int i=1; ;i++) {
soundAtom = findSoundAtom(node, i);
if (soundAtom == null)
break;
soundAtom.setStateDirtyFlag(dirtyFlag);
}
}
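// Print the state of a sound atom (debug aid).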
void printAtomState(SoundSchedulerAtom atom) {
SoundRetained sound = atom.sound.sgSound;
debugPrint(" this atom = " + atom + " ");
debugPrint(" references sound = " + sound + " ");
debugPrint(" enabled " + atom.enabled);
debugPrint(" status " + atom.status);
debugPrint(" activated " + atom.activated);
debugPrint(" released " + sound.release);
debugPrint(" continuous " + sound.continuous);
debugPrint(" scheduling " + atom.schedulingAction);
}
// Debug print mechanism for Sound nodes
static final boolean debugFlag = false;
static final boolean internalErrors = false;
void debugPrint(String message) {
if (debugFlag)
System.err.println("SS."+message);
}
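// Handle ViewSpecificGroup changes: add or remove scoped sound and
// soundscape nodes when this view is added to or removed from the group.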
void processViewSpecificGroupChanged(J3dMessage m) {
int component = ((Integer)m.args[0]).intValue();
Object[] objAry = (Object[])m.args[1];
if (((component & ViewSpecificGroupRetained.ADD_VIEW) != 0) ||
((component & ViewSpecificGroupRetained.SET_VIEW) != 0)) {
int i;
Object obj;
View v = (View)objAry[0];
ArrayList leafList = (ArrayList)objAry[2];
// View being added is this view
if (v == view) {
int size = leafList.size();
for (i = 0; i < size; i++) {
obj = leafList.get(i);
if (obj instanceof SoundRetained) {
nRetainedSounds++;
addSound((SoundRetained) obj);
}
else if (obj instanceof SoundscapeRetained) {
auralAttribsChanged = true;
}
}
}
}
if (((component & ViewSpecificGroupRetained.REMOVE_VIEW) != 0)||
((component & ViewSpecificGroupRetained.SET_VIEW) != 0)) {
int i;
Object obj;
ArrayList leafList;
View v;
if ((component & ViewSpecificGroupRetained.REMOVE_VIEW) != 0) {
v = (View)objAry[0];
leafList = (ArrayList)objAry[2];
}
else {
v = (View)objAry[4];
leafList = (ArrayList)objAry[6];
}
if (v == view) {
int size = leafList.size();
for (i = 0; i < size; i++) {
obj = leafList.get(i);
if (obj instanceof SoundRetained) {
SoundSchedulerAtom soundAtom = null;
for (int arrIndx=1; ;arrIndx++) {
soundAtom = findSoundAtom((SoundRetained)obj,
arrIndx);
if (soundAtom == null)
break;
stopSound(soundAtom, false);
}
}
else if (obj instanceof SoundscapeRetained) {
auralAttribsChanged = true;
}
}
}
}
}
void processBoundingLeafChanged(J3dMessage m) {
// Notify all users of this bounding leaf, it may
// result in the re-evaluation of the lights/fogs/backgrounds
Object[] users = (Object[])(m.args[3]);
int i;
for (i = 0; i < users.length; i++) {
LeafRetained leaf = (LeafRetained)users[i];
if (leaf instanceof SoundRetained && universe.soundStructure.isSoundScopedToView(leaf, view)) {
auralAttribsChanged = true;
}
else if (leaf instanceof SoundscapeRetained && universe.soundStructure.isSoundscapeScopedToView(leaf, view)){
auralAttribsChanged = true;
}
}
}
@Override
void cleanup() {
// clean up any messages that are queued up, since they are
// irrelevant
// clearMessages();
}
}