Phone Chat with Web Sockets





An attempt to create a phone chat application using WebSockets
Introduction
This application is my attempt to create the smallest possible voice chat application using WebSockets and MediaRecorder. WebSockets is the technology that lets the web server push data to the web browser. The MediaRecorder JavaScript object provides functionality to easily record media.
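In a nutshell, the browser opens a WebSocket, records short clips with MediaRecorder, and pushes each clip to the server as binary data. The stripped-down sketch below (not part of the download; the user name and the two-second chunk interval are only illustrative) shows the two APIs working together:
// Minimal sketch: stream microphone clips over a WebSocket
var socket = new WebSocket("ws://localhost/Phone/Handler1.ashx?user=Jack");
socket.binaryType = "arraybuffer";
navigator.mediaDevices.getUserMedia({ audio: true }).then(function (stream) {
    var recorder = new MediaRecorder(stream);
    recorder.addEventListener("dataavailable", function (e) {
        // Each recorded chunk is a Blob; forward its raw bytes through the socket
        if (socket.readyState === WebSocket.OPEN && e.data.size > 0) {
            e.data.arrayBuffer().then(function (buffer) { socket.send(buffer); });
        }
    });
    recorder.start(2000); // emit a chunk every two seconds
});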
You can send a voice message with the Push-to-talk button, or turn on the "Auto detect" feature to send voice data as you speak using the WebAD library. The Send button sends a text message. The Ring button rings all of the logged-in users.
Background
To use WebSockets, your web server needs ASP.NET 4.5 or higher and the WebSocket Protocol feature, installed under Add Roles and Features > Server Roles > Web Server > Web Server > Application Development > WebSocket Protocol.
To set up the application, unzip Phone.zip to C:\inetpub\wwwroot\Phone on your web server. Open the IIS console and create a virtual directory called Phone.
Using the Code
Next, point your browser to http://localhost/Phone/Chat.aspx.
The web application has two files: Handler1.ashx and Chat.aspx. The Handler1.ashx handler processes WebSocket requests with the HandleSocketRequest function. This Async function loops while the socket connection is open: it uses Await to wait for the next message and then broadcasts that message to all registered users.
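Viewed from the browser, the wire protocol is simple: text frames carry either control messages ({{RefreshUsers}}, {{Ring}}) or chat lines prefixed with the sender's name, and binary frames carry recorded audio. The condensed JavaScript sketch below summarizes how the client in Chat.js reacts to each kind of frame; the full handler source follows it.
// Condensed client-side view of the messages broadcast by Handler1.ashx
websocket.onmessage = function (event) {
    if (event.data instanceof ArrayBuffer) {
        HandleReceiveAudio(event);           // binary frame: audio clip from another user
    } else if (event.data == "{{RefreshUsers}}") {
        RefreshUsers();                      // someone logged in or out
    } else if (event.data.indexOf("{{Ring}}") != -1) {
        PlayFile("mp3/Ring.mp3", 10000);     // another user pressed the ring button
    } else {
        LogOutput(event.data);               // "user: message" text broadcast
    }
};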
<%@ WebHandler Language="VB" Class="Handler1" %>
Imports System.Web
Imports System.Web.Services
Imports System.Net.WebSockets
Imports System.Web.WebSockets
Imports System.Threading.Tasks
Imports System.Text
Imports System.Collections
Public Class Handler1
Implements System.Web.IHttpHandler
Private oSockets As New Hashtable
Sub ProcessRequest(ByVal context As HttpContext) _
Implements IHttpHandler.ProcessRequest
If context.IsWebSocketRequest Then
If context.Application("Sockets") IsNot Nothing Then
oSockets = context.Application("Sockets")
Dim sUserId As String = context.Request.QueryString("user")
If oSockets.ContainsKey(sUserId) Then
context.Response.StatusCode = 500
context.Response.StatusDescription = "User " & _
sUserId & " already logged in"
context.Response.End()
End If
End If
Try
context.AcceptWebSocketRequest(AddressOf HandleSocketRequest)
Catch ex As Exception
context.Response.StatusCode = 500
context.Response.StatusDescription = ex.Message
context.Response.End()
End Try
End If
End Sub
Async Function HandleSocketRequest _
(context As System.Web.WebSockets.AspNetWebSocketContext) As Task
Dim sUserId As String = context.QueryString("user")
Dim oSocket As System.Net.WebSockets.WebSocket = context.WebSocket
'Register user
oSockets(sUserId) = oSocket
context.Application("Sockets") = oSockets
'Send RefreshUsers Msg to all users
Dim oRefreshMsgBuffer As New ArraySegment(Of Byte) _
(Encoding.UTF8.GetBytes("{{RefreshUsers}}"))
For Each oKey As DictionaryEntry In oSockets
Dim oSocket2 As System.Net.WebSockets.WebSocket = oKey.Value
Await oSocket2.SendAsync(oRefreshMsgBuffer, _
WebSocketMessageType.Text, True, Threading.CancellationToken.None)
Next
Const iMaxBufferSize As Integer = 64 * 1024
Dim buffer = New Byte(iMaxBufferSize - 1) {}
While oSocket.State = WebSocketState.Open 'Loop if Socket is open
'Get Msg
Dim result = Await oSocket.ReceiveAsync _
(New ArraySegment(Of Byte)(buffer), Threading.CancellationToken.None)
Dim oBytes As Byte() = New Byte(result.Count - 1) {}
Array.Copy(buffer, oBytes, result.Count)
Dim oFinalBuffer As List(Of Byte) = New List(Of Byte)()
oFinalBuffer.AddRange(oBytes)
'Get Remaining Msg
While result.EndOfMessage = False
result = Await oSocket.ReceiveAsync _
(New ArraySegment(Of Byte)(buffer), Threading.CancellationToken.None)
oBytes = New Byte(result.Count - 1) {}
Array.Copy(buffer, oBytes, result.Count)
oFinalBuffer.AddRange(oBytes)
End While
If result.MessageType = WebSocketMessageType.Text Then
Dim sMsg As String = Encoding.UTF8.GetString(oFinalBuffer.ToArray())
Dim bSendMsgToAllUsers As Boolean = _
sMsg <> "{{Ring}}" 'Send Msg to all users (including self)
sMsg = sUserId & ": " & sMsg
Dim oMsgBuffer As _
New ArraySegment(Of Byte)(Encoding.UTF8.GetBytes(sMsg))
For Each oKey As DictionaryEntry In oSockets
If bSendMsgToAllUsers OrElse oKey.Key <> sUserId Then
Dim oDestSocket As System.Net.WebSockets.WebSocket = oKey.Value
Await oDestSocket.SendAsync _
(oMsgBuffer, WebSocketMessageType.Text, True, _
Threading.CancellationToken.None)
End If
Next
ElseIf result.MessageType = WebSocketMessageType.Binary Then
Dim oArray As Byte() = oFinalBuffer.ToArray()
'Send Binary Msg to all users (excluding self)
For Each oKey As DictionaryEntry In oSockets
If oKey.Key <> sUserId Then
Dim oDestSocket As System.Net.WebSockets.WebSocket = oKey.Value
Dim oMsgBuffer As New ArraySegment(Of Byte)(oArray)
Await oDestSocket.SendAsync _
(oMsgBuffer, WebSocketMessageType.Binary, _
True, Threading.CancellationToken.None)
End If
Next
End If
End While
'Send RefreshUsers Msg to all users
For Each oKey As DictionaryEntry In oSockets
Dim oSocket2 As System.Net.WebSockets.WebSocket = oKey.Value
Await oSocket2.SendAsync(oRefreshMsgBuffer, _
WebSocketMessageType.Text, True, Threading.CancellationToken.None)
Next
'Close Socket
Await oSocket.CloseAsync(WebSocketCloseStatus.Empty, "", _
Threading.CancellationToken.None)
'Remove Socket from the List
If oSockets.ContainsKey(sUserId) Then
oSockets.Remove(sUserId)
context.Application("Sockets") = oSockets
End If
End Function
ReadOnly Property IsReusable() As Boolean Implements IHttpHandler.IsReusable
Get
Return False
End Get
End Property
End Class
The Chat.aspx file lets you log in and broadcast messages to all connected users. It is an ASP.NET web form that gets the list of active users and uses JavaScript to send messages to Handler1.ashx.
<%@ Page Language="VB"%>
<%
Dim sUserList As String = ""
If Application("Sockets") IsNot Nothing Then
For Each oSocket As DictionaryEntry In Application("Sockets")
Dim sUser As String = oSocket.Key
sUserList += "<div class='User' data-value=""" & sUser & """>" _
& sUser & "</div>" & vbCrLf
Next
End If
If Request.QueryString("getUsers") = "1" Then
Response.Write(sUserList)
Response.End()
ElseIf Request.QueryString("resetUsers") = "1" Then
If Application("Sockets") IsNot Nothing Then
For Each oEntry As DictionaryEntry In Application("Sockets")
Dim oSocket As Object = oEntry.Value
Try
oSocket.CloseOutputAsync _
(System.Net.WebSockets.WebSocketCloseStatus.NormalClosure, "", _
System.Threading.CancellationToken.None)
oSocket.CloseAsync _
(System.Net.WebSockets.WebSocketCloseStatus.NormalClosure, "", _
System.Threading.CancellationToken.None)
Catch ex As Exception
'System.Threading.Thread.Sleep(1000)
End Try
Next
Application("Sockets") = Nothing
End If
Response.Write("Users reset")
Response.End()
End If
%>
<!DOCTYPE html>
<html>
<head>
<title>Chat App</title>
<script src="Chat.js?v=13"></script>
</head>
<body onload="OnLoad()">
<div id="idContainer">
<table style="width: 100%;">
<tr>
<td>
<label for="txtUser">User Name</label>
<input id="txtUser"
value="<%=IIf(Request.QueryString("user") <> "",
Request.QueryString("user"), "Jack")%>" />
<button type="button" onclick="OpenSocket()" id="btnOpen">
Login</button>
<button type="button" onclick="CloseSocket()" id="btnClose"
disabled>Log off</button>
<span id="spStatus" style="color:red">⬤</span>
</td>
<td align="right">
<table border="0">
<tr>
<td>
<label>Users</label></td>
<td>
<button type="button" onclick="RefreshUsers()"
title="Refresh Users">↻</button>
</td>
<td>
<button type="button" onclick="ResetUsers()"
title="Reset Users">☀</button>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td style="width: 78%; height: 100%; padding-right: 10px;
padding-bottom: 5px; height: 300px">
<textarea id="txtOutput" rows="1" style="margin-top: 10px;
width: 100%; height: 100%" placeholder="Output"></textarea>
</td>
<td id="tdOtherUsers" style="width: 20%; height: 100%;
padding-left: 10px; border: 1px solid gray; vertical-align: top">
<%=sUserList%>
</td>
</tr>
</table>
<textarea id="txtMsg" rows="5" wrap="soft"
style="width: 98%; margin-left: 3px; margin-top: 6px"
placeholder="Input Text"></textarea>
<table border="0">
<tr>
<td>
<button type="button" onclick="Send()"
id="btnSend" disabled>Send</button>
</td>
<td>
<button type="button" onclick="Ring()"
id="btnRing" title="Ring" disabled>☎</button>
</td>
<td>
<button type="button" onmousedown="RecordStart()"
onmouseup="RecordEnd()" onmouseout="RecordEnd()"
id="btnPushToTalk" disabled>Push-to-talk</button>
</td>
<td>
<label>
<input type="checkbox" id="chkSendAudio"
onclick="SetupAutoSound()" disabled/> Auto detect
</label>
</td>
<td id="tdSound" style="display:none">
<table>
<tr>
<td>
<label>
<input type="checkbox"
id="chkSoundSettings"
onclick="SoundSettings()"/> Settings
</label>
</td>
<td style="width: 50px" title="Volume">
<div id="idVolume"></div>
</td>
<td style="width: 100px" title="Volume State">
<div id="idVolumeState"></div>
</td>
<td style="width: 90px" title="Recorder State">
<div id="idMediaRecorderState"></div>
</td>
<td>
<div id="idProgress" style="width: 200px;
height:10px; border: 1px solid green;
background-color: lightblue; border-radius: 4px;">
</div>
</td>
</tr>
</table>
</td>
</tr>
</table>
<%
Dim oList As New ArrayList()
oList.Add("averageSignalValue")
oList.Add("speakingMinVolume")
oList.Add("muteVolume")
oList.Add("signalDuration")
oList.Add("maxSignalDuration")
oList.Add("silence")
oList.Add("prespeechstartMsecs")
%>
<table id="tblSoundSettings" style="display:none;
border: 1px solid gray; border-radius: 3px;">
<%For each sItem As String In oList %>
<tr>
<td><%=sItem%></td>
<td>
<input id="txt_<%=sItem%>" type="range"
<%If Right(sItem, 5) = "Value" Or
Right(sItem, 6) = "Volume" Then%>
min="0" max="0.09" step="0.001"
<%Else %>
min="1" max="<%=IIf(sItem = "maxSignalDuration",
10000, 3000) %>" step="1"
<%End If %>
onchange="config.<%=sItem%>=parseFloat(this.value);
id_<%=sItem%>.innerHTML=this.value" />
</td>
<td id="id_<%=sItem%>"></td>
</tr>
<%Next %>
</table>
</div>
<audio id="idAudio" controls></audio>
</body>
</html>
The Chat.js file contains the client-side JavaScript for Chat.aspx.
var websocket = null;
var oMediaRecorder = null;
var bMediaRecorderReady = true
var audioContext = null;
var meter = null;
function OnLoad() {
DisplayConfig();
if (navigator.userAgent.indexOf("(iPhone;") != -1) {
_("idContainer").style.padding = "50px"
}
}
function DisplayConfig() {
for (var id in config) {
if (_("id_" + id)) {
_("id_" + id).innerHTML = config[id];
_("txt_" + id).value = config[id];
}
}
}
function SoundSettings() {
var b = _("chkSoundSettings").checked;
if (b) DisplayConfig();
_("tblSoundSettings").style.display = b ? "" : "none";
}
function _(id) {
return document.getElementById(id)
}
function ShowAudioControls() {
_("tdSound").style.display = chkSendAudio.checked ? "" : "none";
}
function RecordStart() {
_("btnPushToTalk").style.backgroundColor = "red";
if (oMediaRecorder) {
oMediaRecorder.start();
} else {
SetupAudio(true);
}
}
function RecordEnd() {
if (_("btnPushToTalk").style.backgroundColor != "red") return;
_("btnPushToTalk").style.backgroundColor = "";
if (oMediaRecorder && oMediaRecorder.state == 'recording') {
bMediaRecorderReady = true;
oMediaRecorder.stop()
}
}
function SetupAutoSound() {
if (chkSendAudio.checked == false)
{ chkSoundSettings.checked = false; SoundSettings()};
SetupAudio(false);
ShowAudioControls();
}
function SetupAudio(bStartMediaRecorder) {
if (audioContext) {
if (bStartMediaRecorder == false) AudioDetection();
return null; //setup once
} else {
audioContext = new AudioContext();
}
var oUserMediaConfig = {'audio': {'mandatory': {
'googEchoCancellation': 'false',
'googAutoGainControl': 'false',
'googNoiseSuppression': 'false',
'googHighpassFilter': 'false'
}, 'optional': []
}
}
try {
navigator.mediaDevices.getUserMedia(oUserMediaConfig).then(function (stream) {
var mediaStreamSource = audioContext.createMediaStreamSource(stream);
meter = CreateAudioMeter(audioContext);
mediaStreamSource.connect(meter);
dBegin = Date.now();
AudioDetection();
oMediaRecorder = new MediaRecorder(stream);
oMediaRecorder.addEventListener('dataavailable', OnMediaRecorderReady);
if (bStartMediaRecorder) oMediaRecorder.start();
})
} catch (e) {
alert('getUserMedia threw exception :' + e);
}
}
document.addEventListener('prespeechstart', event => {
if (oMediaRecorder.state != 'inactive') {
oMediaRecorder.stop()
}
bMediaRecorderReady = false
oMediaRecorder.start()
})
document.addEventListener('signal', event => {
if (oMediaRecorder.state == 'inactive') {
oMediaRecorder.start()
}
})
document.addEventListener('speechstop', event => {
if (oMediaRecorder.state == 'inactive') {
LogOutput('Cannot stop MediaRecorder because it is inactive');
} else {
oMediaRecorder.stop()
}
bMediaRecorderReady = true
})
document.addEventListener('speechabort', event => {
if (oMediaRecorder.state != 'inactive') {
oMediaRecorder.stop()
}
bMediaRecorderReady = false
})
function OnMediaRecorderReady(e) {
if (bMediaRecorderReady && websocket.readyState == 1) {
if (e.data && e.data.size > 0) {
e.data.arrayBuffer().then(buffer => {
LogOutput("Audio sent [" +
e.data.size.toLocaleString('en-US') + "] "); // + e.data.type
websocket.send(buffer);
})
}
}
}
function LogOutput(s) {
if (txtOutput.value != "") txtOutput.value += "\n";
var d = new Date();
txtOutput.value += d.toLocaleString() + "\n" + s;
}
function HandleReceiveAudio(e) {
const buffer = e.data
var sBlobType = "audio/webm;codecs=opus";
if (MediaRecorder.isTypeSupported(sBlobType) == false) {
if (navigator.userAgent.indexOf("(iPhone;") != -1 ||
navigator.userAgent.indexOf("(iPad;") != -1) {
const data = new Uint8Array(buffer);
if (data[0] === 26 && data[1] === 69 && data[2] === 223) {
//iPhone and iPad do not support webm
//use https://github.com/Kagami/vmsg
//or fnExtractSoundToMP3 (npm install ffmpeg)
//https://stackoverflow.com/questions/16413063/html5-record-audio-to-file
}
}
sBlobType = "audio/mp4"
}
LogOutput("Audio received [" + buffer.byteLength.toLocaleString('en-US') + "]");
var oBlob = new Blob([buffer], { "type": sBlobType });
var audioURL = window.URL.createObjectURL(oBlob);
var audio = _("idAudio");
//var audio = new Audio();
audio.src = audioURL;
audio.play();
}
function OpenSocket() {
var sProtocol = window.location.protocol == "https:" ? "wss" : "ws";
var uri = sProtocol + '://' + window.location.hostname +
"/Phone/Handler1.ashx?user=" + escape(txtUser.value);
websocket = new WebSocket(uri);
websocket.binaryType = "arraybuffer";
websocket.onopen = function () {
//Connected
chkSendAudio.disabled = false;
btnSend.disabled = false;
btnRing.disabled = false;
btnPushToTalk.disabled = false;
btnClose.disabled = false;
btnOpen.disabled = true;
spStatus.style.color = "green";
RefreshUsers();
};
websocket.onclose = function () {
if (document.readyState == "complete") {
//Connection lost
chkSendAudio.disabled = false;
btnSend.disabled = true;
btnRing.disabled = true;
btnPushToTalk.disabled = true;
btnClose.disabled = true;
btnOpen.disabled = false;
spStatus.style.color = "red";
tdOtherUsers.innerHTML = "";
RefreshUsers();
chkSendAudio.checked = false;
chkSoundSettings.checked = false;
ShowAudioControls();
SoundSettings();
}
};
websocket.onmessage = function (event) {
if (typeof event.data == "object" &&
event.data.toString() == "[object ArrayBuffer]") {
setTimeout(function () {
HandleReceiveAudio(event);
}, 1000);
} else {
var sData = event.data;
if (sData == "{{RefreshUsers}}") {
RefreshUsers();
return;
} else if (sData.indexOf("{{Ring}}") != -1) {
PlayFile("mp3/Ring.mp3", 10000);
//return;
}
LogOutput(sData);
}
};
websocket.onerror = function (event) {
alert('Could not connect. Please try another name.');
};
setTimeout(function () { RefreshUsers() }, 1000);
}
function Send() {
if (txtMsg.value == "") return;
websocket.send(txtMsg.value);
txtMsg.value = "";
}
function CloseSocket() {
websocket.close();
}
function RefreshUsers() {
var oHttp = new XMLHttpRequest();
oHttp.open("POST", "?getUsers=1", false);
oHttp.setRequestHeader("Content-Type", "application/x-www-form-urlencoded");
oHttp.onreadystatechange = function () { // Call a function when the state changes.
if (this.readyState === XMLHttpRequest.DONE && this.status === 200) {
tdOtherUsers.innerHTML = oHttp.responseText;
}
}
oHttp.send();
}
function ResetUsers() {
var oHttp = new XMLHttpRequest();
oHttp.open("POST", "?resetUsers=1", false);
oHttp.setRequestHeader("Content-Type", "application/x-www-form-urlencoded");
oHttp.onreadystatechange = function () { // Call a function when the state changes.
if (this.readyState === XMLHttpRequest.DONE && this.status === 200) {
tdOtherUsers.innerHTML = "";
}
}
oHttp.send();
}
function Ring() {
if (websocket) websocket.send("{{Ring}}");
PlayFile("mp3/PhoneLong.mp3", 10000);
}
function PlayFile(sFile, iStopMsec) {
var oAudio = _("idAudio");
//var oAudio = new Audio(sFile);
oAudio.src = sFile;
oAudio.play();
setTimeout(function () {
oAudio.pause()
}, iStopMsec)
}
//===============
//Code based on: https://github.com/solyarisoftware/webad
let volumeState = 'mute'
let speechStarted = false
let silenceItems = 0
let signalItems = 0
let speechstartTime
let prerecordingItems = 0
let speechVolumesList = []
const dispatchEvent = (eventName, eventData) =>
document.dispatchEvent(new CustomEvent(eventName, eventData))
var oVolumeList = [];
var dBegin = Date.now();
var iMinAvg = null;
var config = {
timeoutMsecs: 50, //SAMPLE_POLLING_MSECS
prespeechstartMsecs: 300, //PRERECORDSTART_MSECS 600
silence: 300, //MAX_INTERSPEECH_SILENCE_MSECS 600
signalDuration: 200, //MIN_SIGNAL_DURATION 400
maxSignalDuration: 8000, //stop recording and send signal
//if it is more than 8 seconds
averageSignalValue: 0.02, //MIN_AVERAGE_SIGNAL_VOLUME 0.04 -- to calculate
//if a signal block contains speech or just noise
speakingMinVolume: 0.01, //VOLUME_SIGNAL 0.02
muteVolume: 0.0001 //VOLUME_MUTE
};
function AudioDetection() {
if (chkSendAudio.checked == false) {
return
}
setTimeout(function() {
prerecording();
sampleThresholdsDecision();
if (meter) {
if ((Date.now() - dBegin) / 1000 <= 60) {
//within the first minute, set up speakingMinVolume and
//averageSignalValue based on min average of 100 volume points
oVolumeList.push(meter.volume);
var iAvg = GetListAvg(oVolumeList);
if (oVolumeList.length > 100) {
oVolumeList.shift(); // removes the first item of an array
var iAvg = GetListAvg(oVolumeList);
if (iMinAvg == null) {
iMinAvg = iAvg;
} else {
iMinAvg = Math.min(iMinAvg, iAvg);
}
if (config.speakingMinVolume < iMinAvg) {
config.speakingMinVolume = parseFloat(iMinAvg.toFixed(4));
config.averageSignalValue = config.speakingMinVolume * 2;
}
}
}
_("idVolume").innerHTML = meter.volume.toFixed(4);
_("idProgress").style.width = (meter.volume * 1000) + "px";
_("idMediaRecorderState").innerHTML = oMediaRecorder.state
}
AudioDetection();
}, config.timeoutMsecs
)
function prerecording() {
++prerecordingItems
const eventData = {
detail: {
volume: meter.volume,
timestamp: Date.now(),
items: prerecordingItems
}
}
if ((prerecordingItems * config.timeoutMsecs) >= config.prespeechstartMsecs) {
if (!speechStarted)
dispatchEvent('prespeechstart', eventData)
prerecordingItems = 0
}
}
function sampleThresholdsDecision() {
const timestamp = Date.now()
const duration = timestamp - speechstartTime
var div = document.getElementById("idVolumeState");
if (meter.volume < config.muteVolume) {
mute(timestamp, duration);
div.innerHTML = "mute"
div.style.backgroundColor = "";
} else if (meter.volume > config.speakingMinVolume) {
signal(timestamp, duration);
div.innerHTML = "signal " + duration.toLocaleString();
div.style.backgroundColor = "lightgreen";
} else {
silence(timestamp, duration);
div.innerHTML = "silence"
div.style.backgroundColor = "";
}
}
function mute(timestamp, duration) {
const eventData = {
detail: {
event: 'mute',
volume: meter.volume,
timestamp,
duration
}
}
dispatchEvent('mute', eventData)
if (volumeState !== 'mute') {
dispatchEvent('mutedmic', eventData)
volumeState = 'mute'
}
}
function signal(timestamp, duration) {
silenceItems = 0
const eventData = {
detail: {
event: 'signal',
volume: meter.volume,
timestamp,
duration,
items: ++signalItems
}
}
if (duration > config.maxSignalDuration) {
dispatchEvent('speechstop', eventData);
speechstartTime = Date.now();
speechStarted = false;
return;
}
if (!speechStarted) {
dispatchEvent('speechstart', eventData)
speechstartTime = timestamp
speechStarted = true
speechVolumesList = []
}
speechVolumesList.push(meter.volume)
dispatchEvent('signal', eventData)
if (volumeState === 'mute') {
dispatchEvent('unmutedmic', eventData)
volumeState = 'signal'
}
}
function silence(timestamp, duration) {
signalItems = 0
const eventData = {
detail: {
event: 'silence',
volume: meter.volume,
timestamp,
duration,
items: ++silenceItems
}
}
dispatchEvent('silence', eventData)
if (volumeState === 'mute') {
dispatchEvent('unmutedmic', eventData)
volumeState = 'silence'
}
var maxSilenceItems = Math.round(config.silence / config.timeoutMsecs);
if (speechStarted && (silenceItems === maxSilenceItems)) {
const signalDuration = duration - config.silence
const averageSignalValue = GetListAvg(speechVolumesList).toFixed(4)
if (signalDuration < config.signalDuration) {
eventData.detail.abort = `signal duration (${signalDuration}) <
config.signalDuration (${config.signalDuration})`
dispatchEvent('speechabort', eventData)
}
else if (averageSignalValue < config.averageSignalValue) {
eventData.detail.abort = `signal average volume
(${averageSignalValue}) < config.averageSignalValue
(${config.averageSignalValue})`
dispatchEvent('speechabort', eventData)
}
else {
dispatchEvent('speechstop', eventData)
}
speechStarted = false
}
}
function GetListAvg(oList) {
if (oList.length == 0) return 0;
return (oList.reduce((a, b) => a + b) / oList.length);
}
}
function CreateAudioMeter(audioContext, clipLevel, averaging, clipLag) {
var processor = audioContext.createScriptProcessor(512);
//The ScriptProcessorNode is deprecated. Use AudioWorkletNode instead.
//https://developer.chrome.com/blog/audio-worklet/
processor.onaudioprocess = volumeAudioProcess;
processor.clipping = false;
processor.lastClip = 0;
processor.volume = 0;
processor.clipLevel = clipLevel || 0.98;
processor.averaging = averaging || 0.95;
processor.clipLag = clipLag || 750;
processor.connect(audioContext.destination);
processor.checkClipping =
function () {
if (!this.clipping)
return false;
if ((this.lastClip + this.clipLag) < window.performance.now())
this.clipping = false;
return this.clipping;
};
processor.shutdown =
function () {
this.disconnect();
this.onaudioprocess = null;
};
return processor;
function volumeAudioProcess(event) {
var buf = event.inputBuffer.getChannelData(0);
var bufLength = buf.length;
var sum = 0;
var x;
for (var i = 0; i < bufLength; i++) {
x = buf[i];
if (Math.abs(x) >= this.clipLevel) {
this.clipping = true;
this.lastClip = window.performance.now();
}
sum += x * x;
}
var rms = Math.sqrt(sum / bufLength);
this.volume = Math.max(rms, this.volume * this.averaging);
}
}
Points of Interest
The application does not let you send voice to an iPhone/iPad from Android or Windows, because Apple does not support the WebM format; iPhone/iPad play WAV, MP3, or MP4. So the next step is to try using a wave recorder instead of MediaRecorder.
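A minimal sketch of the WAV half of that idea, assuming the raw PCM samples come from the Web Audio graph (for example, from the same ScriptProcessorNode used in CreateAudioMeter): wrap the samples in a standard 44-byte WAV header, which Safari on iPhone/iPad can play natively.
// Sketch only: encode mono 16-bit PCM as a WAV blob that any browser can play
function EncodeWav(samples, sampleRate) {        // samples: Float32Array in [-1, 1]
    var buffer = new ArrayBuffer(44 + samples.length * 2);
    var view = new DataView(buffer);
    function writeString(offset, s) {
        for (var i = 0; i < s.length; i++) view.setUint8(offset + i, s.charCodeAt(i));
    }
    writeString(0, "RIFF");
    view.setUint32(4, 36 + samples.length * 2, true); // RIFF chunk size
    writeString(8, "WAVE");
    writeString(12, "fmt ");
    view.setUint32(16, 16, true);                     // fmt chunk size
    view.setUint16(20, 1, true);                      // format = PCM
    view.setUint16(22, 1, true);                      // channels = mono
    view.setUint32(24, sampleRate, true);             // sample rate
    view.setUint32(28, sampleRate * 2, true);         // byte rate
    view.setUint16(32, 2, true);                      // block align
    view.setUint16(34, 16, true);                     // bits per sample
    writeString(36, "data");
    view.setUint32(40, samples.length * 2, true);     // data chunk size
    for (var i = 0; i < samples.length; i++) {
        var s = Math.max(-1, Math.min(1, samples[i]));
        view.setInt16(44 + i * 2, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
    }
    return new Blob([view], { type: "audio/wav" });
}
The resulting blob could then be sent over the same WebSocket and played through the existing idAudio element.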
History
- 16th December, 2022: Version 1 created