Add createMediaStreamDestination

The method and interface are defined in
http://webaudio.github.io/web-audio-api/ and implemented in Chrome.
This commit is contained in:
Lionel Besson
2014-12-24 11:47:58 +01:00
parent dd4398796d
commit c33634aa6c

19
webaudioapi/waa.d.ts vendored
View File

@@ -86,6 +86,11 @@ interface AudioContext {
*/
createMediaStreamSource(mediaStream: any): MediaStreamAudioSourceNode;
/**
* Creates a MediaStreamAudioDestinationNode.
*/
createMediaStreamDestination(): MediaStreamAudioDestinationNode;
/**
* Creates a ScriptProcessorNode for direct audio processing using JavaScript. An exception will be thrown if bufferSize or numberOfInputChannels or numberOfOutputChannels are outside the valid range.
* It is invalid for both numberOfInputChannels and numberOfOutputChannels to be zero.
@@ -1142,3 +1147,17 @@ interface WaveTable {
*/
interface MediaStreamAudioSourceNode extends AudioSourceNode {
    // No members of its own — the audio is exposed through the node's output
    // inherited from AudioSourceNode. Instances are obtained via
    // AudioContext.createMediaStreamSource().
}
/**
 * This interface is an audio destination representing a MediaStream with a single AudioMediaStreamTrack. This MediaStream is created when the node is created and is accessible via the stream attribute. This stream can be used in a similar way as a MediaStream obtained via getUserMedia(), and can, for example, be sent to a remote peer using the RTCPeerConnection addStream() method.
 *
 * numberOfInputs  : 1
 * numberOfOutputs : 0
 *
 * channelCount = 2;
 * channelCountMode = "explicit";
 * channelInterpretation = "speakers";
 */
interface MediaStreamAudioDestinationNode extends AudioNode {
    /**
     * The MediaStream containing the node's single AudioMediaStreamTrack.
     * Created together with the node; can be used like a stream obtained via
     * getUserMedia(), e.g. sent to a remote peer with
     * RTCPeerConnection.addStream().
     */
    stream: MediaStream;
}