summaryrefslogtreecommitdiffstats
path: root/usecaseui-portal/src/app/shared/utils/recorder.ts
blob: 65cf9136a661fbb2e613a0b4952a50a32912f526 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
export class Recorder {
  // Raw microphone stream obtained from getUserMedia.
  mediaStreams: MediaStream | null = null;
  // Source node feeding microphone audio into the script processor.
  audioInput: MediaStreamAudioSourceNode | null = null;
  // ScriptProcessorNode whose onaudioprocess callback captures raw PCM frames.
  recorder: any;
  // Captured Float32 chunks, one entry per audio callback, per channel.
  leftDataList: Float32Array[] = [];
  rightDataList: Float32Array[] = [];

  /**
   * Alert the user, then abort by throwing.
   * @throws Error always, carrying `message`.
   */
  throwError(message: string): never {
      alert(message);
      // Throw a real Error (the original threw an ad-hoc object, which has
      // no stack trace and fails `instanceof Error` checks upstream).
      throw new Error(message);
  }

  // start sound recording: request microphone access, then begin capturing.
  beforeStartRecord() {
      // Guard `mediaDevices` itself as well as `getUserMedia`: on insecure
      // origins / legacy browsers `mediaDevices` may be undefined, and the
      // original property access threw before the feature check could run.
      const mediaDevices = window.navigator.mediaDevices;
      if (!mediaDevices || !mediaDevices.getUserMedia) {
          this.throwError('The current browser does not support recording.');
          return;
      }
      mediaDevices.getUserMedia({
          audio: true
      }).then(mediaStream => {
          this.mediaStreams = mediaStream;
          this.startRecord();
      }).catch(err => {
          // Don't swallow the rejection silently (permission denied,
          // no input device, ...).
          console.error('getUserMedia failed:', err);
      });
  }

  // Build the audio graph and start collecting PCM chunks.
  startRecord() {
      if (!this.mediaStreams) {
          // Only meaningful after getUserMedia has resolved.
          return;
      }
      // Clear data before recording again (the original comment promised
      // this but never did it, so a second recording appended to the first).
      this.leftDataList = [];
      this.rightDataList = [];
      const audioContext = new (window["AudioContext"] || window["webkitAudioContext"])();
      this.recorder = this.createJSNode(audioContext);
      this.recorder.connect(audioContext.destination);
      this.recorder.onaudioprocess = (event) => {
          const audioBuffer = event.inputBuffer;
          // Clone the channel data: the underlying buffers are reused by
          // the audio pipeline between callbacks.
          this.leftDataList.push(audioBuffer.getChannelData(0).slice(0));
          this.rightDataList.push(audioBuffer.getChannelData(1).slice(0));
      };
      this.audioInput = audioContext.createMediaStreamSource(this.mediaStreams);
      // BUG FIX: the source node was never connected to the processor, so
      // onaudioprocess only ever saw silence.
      this.audioInput.connect(this.recorder);
  }

  /**
   * Create a 2-in/2-out ScriptProcessorNode, falling back to the legacy
   * `createJavaScriptNode` name on very old WebKit builds.
   */
  createJSNode(audioContext) {
      const BUFFER_SIZE = 4096;
      const INPUT_CHANNEL_COUNT = 2;
      const OUTPUT_CHANNEL_COUNT = 2;
      const creator = (audioContext.createScriptProcessor ||
          audioContext.createJavaScriptNode).bind(audioContext);
      return creator(BUFFER_SIZE, INPUT_CHANNEL_COUNT, OUTPUT_CHANNEL_COUNT);
  }

  // stop sound recording: release the mic and detach the audio graph nodes.
  stopRecord() {
      if (this.mediaStreams) {
          // Stop every audio track (the original only stopped the first).
          this.mediaStreams.getAudioTracks().forEach(track => track.stop());
      }
      if (this.recorder) {
          this.recorder.disconnect();
      }
      if (this.audioInput) {
          this.audioInput.disconnect();
      }
  }

  /**
   * Concatenate a list of Float32Array chunks into one contiguous array.
   * Handles an empty list and chunks of unequal length (the original
   * assumed every chunk had the length of the first one and crashed on []).
   */
  mergeArray(list: Float32Array[]): Float32Array {
      const total = list.reduce((sum, chunk) => sum + chunk.length, 0);
      const data = new Float32Array(total);
      let offset = 0;
      for (const chunk of list) {
          data.set(chunk, offset);
          offset += chunk.length;
      }
      return data;
  }

  /**
   * Assemble the captured channels into a WAV blob and return a playable
   * object URL. The caller should revoke it with URL.revokeObjectURL.
   */
  playRecord(): string {
    const leftData = this.mergeArray(this.leftDataList);
    const rightData = this.mergeArray(this.rightDataList);
    const allData = this.interleaveLeftAndRight(leftData, rightData);
    const blob = this.createWavFile(allData);
    const _URL = window["URL"] || window["webkitURL"];
    return _URL.createObjectURL(blob);
  }

  // Cross merge left and right channel data into [L0, R0, L1, R1, ...].
  interleaveLeftAndRight(left: Float32Array, right: Float32Array): Float32Array {
      const totalLength = left.length + right.length;
      const data = new Float32Array(totalLength);
      for (let i = 0; i < left.length; i++) {
          const k = i * 2;
          data[k] = left[i];
          data[k + 1] = right[i];
      }
      return data;
  }

  /**
   * Wrap interleaved float samples into a 16-bit PCM WAV Blob
   * (canonical 44-byte header, stereo, 44.1 kHz).
   */
  createWavFile(audioData: Float32Array): Blob {
      const WAV_HEAD_SIZE = 44;
      const SAMPLE_RATE = 44100;
      const CHANNEL_COUNT = 2;
      const BYTES_PER_SAMPLE = 2; // 16-bit PCM
      const dataByteLength = audioData.length * BYTES_PER_SAMPLE;
      const buffer = new ArrayBuffer(dataByteLength + WAV_HEAD_SIZE);
      // need to use a view to manipulate the buffer
      const view = new DataView(buffer);
      // ---- RIFF container header ----
      this.writeUTFBytes(view, 0, 'RIFF');
      // RIFF chunk size = total file size - 8 = 36 + data length.
      // BUG FIX: the original wrote 44 + data length, overstating by 8 bytes.
      view.setUint32(4, 36 + dataByteLength, true);
      this.writeUTFBytes(view, 8, 'WAVE');
      // ---- fmt sub-chunk ----
      this.writeUTFBytes(view, 12, 'fmt ');
      // fmt chunk size, always 16 for PCM
      view.setUint32(16, 16, true);
      // sample format: 1 = uncompressed PCM
      view.setUint16(20, 1, true);
      // channel count (stereo)
      view.setUint16(22, CHANNEL_COUNT, true);
      // sample rate
      view.setUint32(24, SAMPLE_RATE, true);
      // byte rate = sample rate * block align.
      // BUG FIX: the original wrote 44100 * 2; for 16-bit stereo it is 44100 * 4.
      view.setUint32(28, SAMPLE_RATE * CHANNEL_COUNT * BYTES_PER_SAMPLE, true);
      // block align = channel count * bytes per sample
      view.setUint16(32, CHANNEL_COUNT * BYTES_PER_SAMPLE, true);
      // bits per sample
      view.setUint16(34, 16, true);
      // ---- data sub-chunk ----
      this.writeUTFBytes(view, 36, 'data');
      // data chunk length in bytes
      view.setUint32(40, dataByteLength, true);

      // Write PCM data: convert [-1, 1] floats to signed 16-bit, clamping
      // out-of-range samples so clipping saturates instead of wrapping.
      let index = WAV_HEAD_SIZE;
      for (let i = 0; i < audioData.length; i++) {
          const sample = Math.max(-1, Math.min(1, audioData[i]));
          view.setInt16(index, sample * 0x7FFF, true);
          index += 2;
      }
      return new Blob([new Uint8Array(buffer)], { type: 'audio/wav' });
  }

  // Write an ASCII string byte-by-byte at `offset` (used for WAV chunk tags).
  writeUTFBytes(view: DataView, offset: number, text: string) {
      for (let i = 0; i < text.length; i++) {
          view.setUint8(offset + i, text.charCodeAt(i));
      }
  }
}