io.scalajs.dom.html.audio.AudioContext.scala Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of dom-html_sjs0.6_2.11 Show documentation
DOM/HTML bindings for Scala.js
package io.scalajs.dom.html.audio
import scala.scalajs.js
import scala.scalajs.js.annotation.JSGlobal
/**
* The AudioContext interface represents an audio-processing graph built from audio modules linked together,
* each represented by an AudioNode. An audio context controls both the creation of the nodes it contains and
* the execution of the audio processing, or decoding. You need to create an AudioContext before you do anything
* else, as everything happens inside a context.
*
* An AudioContext can be a target of events, therefore it implements the EventTarget interface.
* @author [email protected]
*/
@js.native
@JSGlobal
class AudioContext extends js.Object {
/////////////////////////////////////////////////////////////////////////////////
// Properties
/////////////////////////////////////////////////////////////////////////////////
/**
 * Returns a double representing an ever-increasing hardware time in seconds used for scheduling.
 * It starts at 0 when the context is created and increases monotonically; it is read-only.
 */
def currentTime: Double = js.native
/**
 * Returns an AudioDestinationNode representing the final destination of all audio in the context.
 * It can be thought of as the audio-rendering device (typically the speakers); every audible
 * node graph ultimately connects to this node.
 */
def destination: AudioDestinationNode = js.native
/**
 * Returns the AudioListener object, used for 3D spatialization.
 * NOTE(review): the Web Audio spec declares BaseAudioContext.listener as a read-only
 * AudioListener attribute, but this binding exposes it as a mutable `var` typed as a bare
 * js.Function — confirm whether an AudioListener facade exists in this library before
 * tightening the type, as changing it would break existing callers.
 */
var listener: js.Function = js.native
/**
 * Returns a float representing the sample rate (in samples per second) used by all nodes
 * in this context. The sample-rate of an AudioContext cannot be changed after creation.
 */
def sampleRate: Double = js.native
/**
 * Returns the current state of the AudioContext.
 * NOTE(review): per the Web Audio spec, BaseAudioContext.state is a DOMString
 * ("suspended" | "running" | "closed"), not a number — reading it as Int will not match
 * the runtime value. Verify against MDN before changing the declared type, since altering
 * the return type would break existing callers of this facade.
 */
def state: Int = js.native
/**
 * Used to return the audio channel that the sound playing in an AudioContext will play in,
 * on a Firefox OS device.
 * NOTE(review): this is a non-standard, Firefox-OS-only extension; MDN documents its value
 * as a string (e.g. "normal", "content"), not a number — confirm before relying on the
 * Int type declared here.
 */
def mozAudioChannelType: Int = js.native
/////////////////////////////////////////////////////////////////////////////////
// Event Listener
/////////////////////////////////////////////////////////////////////////////////
/**
 * An event handler that runs when an event of type statechange has fired. This occurs when
 * the AudioContext's state changes, due to the calling of one of the state change methods
 * (AudioContext.suspend, AudioContext.resume, or AudioContext.close).
 * NOTE(review): the spec models onstatechange as a writable event-handler attribute; this
 * facade exposes it as a read-only `def`, so handlers cannot be assigned through this
 * binding — confirm whether it should be a `var` (an interface change for callers).
 * @return the currently installed statechange handler, as an untyped js.Function
 */
def onstatechange: js.Function = js.native
/////////////////////////////////////////////////////////////////////////////////
// Methods
/////////////////////////////////////////////////////////////////////////////////
/**
 * Closes the audio context, releasing any system audio resources that it uses.
 * NOTE(review): per the Web Audio spec, close() returns a Promise that resolves when the
 * context has been closed; this binding declares Unit, so completion cannot be observed
 * through this facade — confirm before changing the return type (a caller-visible change).
 */
def close(): Unit = js.native
/**
 * Creates a new, empty AudioBuffer object, which can then be populated by data and played
 * via an AudioBufferSourceNode.
 * NOTE(review): the Web Audio spec defines createBuffer(numberOfChannels, length, sampleRate)
 * with all three arguments mandatory; calling the zero-argument form declared here is
 * expected to throw at runtime — verify against MDN and consider an overload with the
 * required parameters rather than altering this signature.
 */
def createBuffer(): AudioBuffer = js.native
/**
 * Creates a ConstantSourceNode object, which is an audio source that continuously outputs a
 * monaural (one-channel) sound signal whose samples all have the same value.
 */
def createConstantSource(): ConstantSourceNode = js.native
/**
 * Creates an AudioBufferSourceNode, which can be used to play and manipulate audio data
 * contained within an AudioBuffer object. AudioBuffers are created using
 * AudioContext.createBuffer or returned by AudioContext.decodeAudioData when it
 * successfully decodes an audio track.
 */
def createBufferSource(): AudioBufferSourceNode = js.native
/**
* Creates a MediaElementAudioSourceNode associated with an HTMLMediaElement. This can be used to play and
* manipulate audio from