
Build with Symbl
Easily build and deploy conversation intelligence in your speech-, text-, or video-driven applications with our AI, a comprehensive suite of APIs, and developer tools.

Beyond Speech-to-Text
Go beyond standard speech-to-text and get real time speaker diarization (separation), generate action items, identify topics, and more with streaming or asynchronous sources.

Topics of Discussion
Easily extract summary topics, topic hierarchies and scope from unstructured conversation data.

Conversation Analytics
Measure talk-to-listen ratios, sentence level and topic based sentiments, entity tracking and more to generate actionable insights.

Entity and Intent Recognition
Enable real time identification of named entities such as speakers, location, company, dates and more and also build with custom entities.

Real Time Insights
Extract actionable insights such as follow-ups, key topics, questions, action items and intent in real-time, to make your applications smarter, personalized, and robust.

Customizable UI Components
Leverage our pre-built and customizable UI elements to quickly build native AI powered experiences for human conversations.
Conversation Intelligence built for developers
Easily connect with Symbl on any channel with just a few lines of code
// Upload an MP4 file to Symbl's asynchronous video processing API and
// log the raw response body (or the error) to the console.
const videoHeaders = new Headers();
videoHeaders.append("x-api-key", "your_auth_token");
videoHeaders.append("Content-Type", "video/mp4");

// Placeholder for the binary video payload.
const videoFile = "";

fetch("https://api.symbl.ai/v1/process/video", {
  method: 'POST',
  headers: videoHeaders,
  body: videoFile,
  redirect: 'follow',
})
  .then((response) => response.text())
  .then((result) => console.log(result))
  .catch((error) => console.log('error', error));
// Upload a WAV file to Symbl's asynchronous audio processing API and
// log the raw response body (or the error) to the console.
const audioHeaders = new Headers();
audioHeaders.append("x-api-key", "your_auth_token");
audioHeaders.append("Content-Type", "audio/wav");

// Placeholder for the binary audio payload.
const audioFile = "";

fetch("https://api.symbl.ai/v1/process/audio", {
  method: 'POST',
  headers: audioHeaders,
  body: audioFile,
  redirect: 'follow',
})
  .then((response) => response.text())
  .then((result) => console.log(result))
  .catch((error) => console.log('error', error));
// Submit a plain-text conversation transcript to Symbl's asynchronous
// text processing API and log the raw response body (or the error).
const textHeaders = new Headers();
textHeaders.append("x-api-key", "your_auth_token");
textHeaders.append("Content-Type", "application/json");

// One message with its content and the speaker it came from.
const textPayload = JSON.stringify({
  messages: [{
    payload: {
      content: "Hello, World",
      contentType: "text/plain",
    },
    from: {
      name: "Developer",
      userId: "developer@developer.com",
    },
  }],
});

fetch("https://api.symbl.ai/v1/process/text", {
  method: 'POST',
  headers: textHeaders,
  body: textPayload,
  redirect: 'follow',
})
  .then((response) => response.text())
  .then((result) => console.log(result))
  .catch((error) => console.log('error', error));
// Dial Symbl into a PSTN call via the telephony endpoint:connect API.
// On call stop, a summary email is sent to the configured addresses.
const callHeaders = new Headers();
callHeaders.append("x-api-key", "your_auth_token");
callHeaders.append("Content-Type", "application/json");

const callPayload = JSON.stringify({
  operation: "start",
  // Phone number plus DTMF tones used to join the conference.
  endpoint: {
    type: "pstn",
    phoneNumber: "+12532158782",
    dtmf: "6671425093",
  },
  // Post-call action: email the conversation summary.
  actions: [{
    invokeOn: "stop",
    name: "sendSummaryEmail",
    parameters: {
      emails: ["__email_address__"],
    },
  }],
  data: {
    session: {
      name: "__name_of_this_call__",
    },
  },
});

fetch("https://api.symbl.ai/v1/endpoint:connect", {
  method: 'POST',
  headers: callHeaders,
  body: callPayload,
  redirect: 'follow',
})
  .then((response) => response.text())
  .then((result) => console.log(result))
  .catch((error) => console.log('error', error));
// Open a realtime WebSocket connection to Symbl's insights API.
// The meeting id just needs to be unique per conversation; a base64
// encoding of an email address works for demo purposes.
const meetingId = btoa('email@address.com');
const token = '';
const endpoint = `wss://api.symbl.ai/v1/realtime/insights/${meetingId}?access_token=${token}`;

// `ws` is used below to stream audio once the connection is up.
const ws = new WebSocket(endpoint);

// Log every message pushed by the server.
ws.onmessage = (event) => {
  console.log(event);
};

// Fired when the WebSocket closes unexpectedly due to an error or lost connection.
ws.onerror = (err) => {
  console.error(err);
};

// Fired when the WebSocket connection has been closed.
ws.onclose = (event) => {
  console.info('Connection to websocket closed');
};

// Once connected, describe the session: conversation name, which insight
// types to generate, audio encoding, and the speaker's identity.
ws.onopen = (event) => {
  const startRequest = {
    type: 'start_request',
    meetingTitle: 'Websockets How-to', // Conversation name
    insightTypes: ['question', 'action_item'], // Will enable insight generation
    config: {
      confidenceThreshold: 0.5,
      languageCode: 'en-US',
      speechRecognition: {
        encoding: 'LINEAR16',
        sampleRateHertz: 44100,
      },
    },
    speaker: {
      userId: 'example@symbl.ai',
      name: 'Example Sample',
    },
  };
  ws.send(JSON.stringify(startRequest));
};
// Capture microphone audio, convert each buffer to 16-bit PCM, and
// stream it over the realtime WebSocket (`ws`, created above).
// NOTE: top-level await requires this snippet to run as an ES module.
const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
const handleSuccess = (stream) => {
  const AudioContext = window.AudioContext;
  const context = new AudioContext();
  const source = context.createMediaStreamSource(stream);
  const processor = context.createScriptProcessor(1024, 1, 1);
  const gainNode = context.createGain();
  source.connect(gainNode);
  gainNode.connect(processor);
  processor.connect(context.destination);
  processor.onaudioprocess = (e) => {
    // Convert the float samples to a 16-bit signed payload.
    // (getChannelData always returns a Float32Array; the original
    // `|| new Float32Array(this.bufferSize)` fallback was dead code and
    // `this.bufferSize` is undefined inside an arrow function anyway.)
    const inputData = e.inputBuffer.getChannelData(0);
    const targetBuffer = new Int16Array(inputData.length);
    // Bug fix: the original loop ran from `length` down to 1, which
    // skipped sample 0 and wrote one slot past the end of the buffer.
    // Also clamp both sides to [-1, 1] so out-of-range negative samples
    // don't overflow on the Int16 conversion.
    for (let index = 0; index < inputData.length; index++) {
      targetBuffer[index] = 32767 * Math.max(-1, Math.min(1, inputData[index]));
    }
    // Send to websocket only while the connection is open.
    if (ws.readyState === WebSocket.OPEN) {
      ws.send(targetBuffer.buffer);
    }
  };
};
handleSuccess(stream);
Symbl complements the products your team uses every day. We are agnostic, flexible, and play well with others.

Why Symbl?
Symbl’s patent-pending contextual understanding AI platform is built for analyzing open-domain human-to-human conversations. Symbl APIs enable developers and application owners to easily integrate and activate intelligence across speech, text, and video driven features, without the need for upfront training.