An Angular directive that provides an easy-to-use wrapper for the Web Speech API, enabling voice input functionality with minimal setup.
Add voice recognition to any button with:
- 🎤 Click to listen - Toggle speech recognition on/off
- 📝 Real-time transcripts - Get live updates as the user speaks
- ✅ Final results - Receive completed transcripts when speech ends
- ⚙️ Configurable - Customize language, continuous mode, and more
- 🔧 Advanced access - Direct access to the underlying SpeechRecognition instance
- Works on any clickable element (buttons, icons, etc.)
- Real-time transcript streaming
- Configurable speech recognition settings
- Exposes underlying SpeechRecognition API for advanced use cases
- SSR-safe with platform detection
- Standalone directive (no module required)
- Fully typed with TypeScript
- Lightweight with no dependencies
npm i -S ngx-speech-button && npm i -D @types/dom-speech-recognition

Note: The @types/dom-speech-recognition package provides TypeScript types for the Web Speech API and is required as a dev dependency.
| Angular Version | Package Version |
|---|---|
| 21.x | 0.0.x |
The Web Speech API is supported in modern browsers. Check Can I Use for current browser support.
Add the ngxSpeechButton directive to any clickable element to enable voice input.
import { Component } from '@angular/core';
import { SpeechButton } from 'ngx-speech-button';
@Component({
selector: 'app-voice-input',
imports: [SpeechButton],
template: `
<button ngxSpeechButton #speech="ngxSpeechButton" (transcriptCompleted)="onTranscript($event)">
{{ speech.listening() ? '🔴 Listening...' : '🎤' }}
</button>
<p>Transcript: {{ transcript }}</p>
`,
})
export class VoiceInputComponent {
transcript = '';
onTranscript(text: string) {
this.transcript = text;
}
}

Use transcriptChanged to receive updates as the user speaks:
@Component({
selector: 'app-live-transcription',
imports: [SpeechButton],
template: `
<button
ngxSpeechButton
(transcriptChanged)="liveText = $event"
(transcriptCompleted)="finalText = $event"
>
🎤 Speak
</button>
<p>Live: {{ liveText }}</p>
<p>Final: {{ finalText }}</p>
`,
})
export class LiveTranscriptionComponent {
liveText = '';
finalText = '';
}

Configure the SpeechRecognition API with the config input:
@Component({
selector: 'app-custom-speech',
imports: [SpeechButton],
template: `
<button
ngxSpeechButton
[config]="{ lang: 'en-US', continuous: false, interimResults: true }"
(transcriptCompleted)="onComplete($event)"
>
🎤 English Only
</button>
`,
})
export class CustomSpeechComponent {
onComplete(text: string) {
console.log('Final:', text);
}
}

Handle speech recognition errors with the error output:
@Component({
selector: 'app-error-handling',
imports: [SpeechButton],
template: `
<button ngxSpeechButton (transcriptCompleted)="onComplete($event)" (error)="onError($event)">
🎤 Speak
</button>
@if (errorMessage) {
<p class="error">{{ errorMessage }}</p>
}
`,
})
export class ErrorHandlingComponent {
errorMessage = '';
onComplete(text: string) {
console.log(text);
}
onError(event: SpeechRecognitionErrorEvent) {
this.errorMessage = `Error: ${event.error}`;
}
}

For advanced use cases, access the underlying SpeechRecognition instance:
@Component({
selector: 'app-advanced',
imports: [SpeechButton],
template: ` <button ngxSpeechButton #speech="ngxSpeechButton">🎤</button> `,
})
export class AdvancedComponent implements AfterViewInit {
@ViewChild('speech') speechButton!: SpeechButton;
ngAfterViewInit() {
// Attach custom event handlers
this.speechButton.recognition?.addEventListener('soundstart', () => {
console.log('Sound detected');
});
this.speechButton.recognition?.addEventListener('speechstart', () => {
console.log('Speech started');
});
}
}

Hide the button when the Web Speech API is not available:
<button
ngxSpeechButton
#speech="ngxSpeechButton"
[hidden]="!speech.available()"
(transcriptCompleted)="onComplete($event)"
>
🎤 Voice Input
</button>
@if (!speech.available()) {
<p>Voice input not supported in this browser</p>
}

| Selector | [ngxSpeechButton] |
|---|
| Export As | ngxSpeechButton |
|---|
| Name | Type | Default | Description |
|---|---|---|---|
| config | SpeechRecognitionConfig |
{} |
Configuration options for SpeechRecognition API. |
| Name | Type | Description |
|---|---|---|
| transcriptChanged | EventEmitter<string> |
Emits the current transcript as it updates. |
| transcriptCompleted | EventEmitter<string> |
Emits the final transcript when recognition ends. |
| error | EventEmitter<SpeechRecognitionErrorEvent> |
Emits when a speech recognition error occurs. |
| Name | Type | Description |
|---|---|---|
| available | Signal<boolean> |
Whether the Web Speech API is available. |
| listening | Signal<boolean> |
Whether speech recognition is currently active. |
| recognition | SpeechRecognition | null |
The underlying SpeechRecognition instance for advanced use. |
type SpeechRecognitionConfig = Partial<
Pick<SpeechRecognition, 'lang' | 'continuous' | 'interimResults' | 'maxAlternatives' | 'grammars'>
>;

| Property | Type | Default | Description |
|---|---|---|---|
| lang | string |
navigator.language |
Language for recognition (e.g., 'en-US'). |
| continuous | boolean |
true |
Whether to return continuous results. |
| interimResults | boolean |
false |
Whether to return interim results. |
| maxAlternatives | number |
1 |
Maximum number of alternative transcriptions. |
| grammars | SpeechGrammarList |
- | Grammar list to constrain recognition. |
- Angular 21+
- Browser with Web Speech API support
To clone this repo and run it locally:
git clone https://github.com/JayChase/ngx-speech-button.git
cd ngx-speech-button
npm install
npm run build

To run the demo application:

ng serve demo

To run the tests:

npm test

MIT