How do I develop optimized cross-platform apps with or without platform- and API-specific code?
Last reviewed: 3/25/2023
HOW Article ID: H032303
The information in this article applies to:
- SpeechKit 12
Summary
Explore the power of common classes that provide platform-independent access to platform APIs.
More Information
Cross-platform development frameworks enable application code to be written once and compiled for multiple deployment targets.
Chant classes continue to enable apps to access API-specific services on various platforms. In cross-platform apps, however, this approach requires platform checks when allocating objects and handling events.
The updated Chant classes are factored so that common classes can be used cross-platform and default to a platform-specific API. This eliminates the need for platform checks when allocating objects and handling events.
The ChantRecognizer class handles speech recognition and the ChantSynthesizer class handles speech synthesis for apps on the Android, iOS, macOS, and Windows platforms. The default platform API for each platform is as follows:
| Speech API | Platforms |
| --- | --- |
| Apple Speech | iOS, macOS |
| Google android.speech | Android |
| Microsoft WindowsMedia | Windows |
Apps are not locked into using the default platform API. API-specific classes may be allocated and used at any time.
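For example, a Windows app could allocate a SAPI 5 recognizer explicitly instead of relying on the platform default. The following C# sketch is illustrative only: the API-specific class and factory method names (NSAPI5Recognizer, CreateSAPI5Recognizer) are assumptions, so consult the SpeechKit class library reference for the exact identifiers.
// Illustrative sketch only: NSAPI5Recognizer and CreateSAPI5Recognizer are
// assumed names for an API-specific class and its factory method.
NSpeechKit _SpeechKit = new NSpeechKit();
if (_SpeechKit != null)
{
    // Set credentials
    _SpeechKit.SetCredentials("Credentials");
    // Allocate a SAPI 5 recognizer explicitly instead of the platform default
    NSAPI5Recognizer _SAPI5Recognizer = _SpeechKit.CreateSAPI5Recognizer();
    if (_SAPI5Recognizer != null)
    {
        // Register the same event handler used with the common recognizer
        _SAPI5Recognizer.RecognitionCommand += this.Recognizer_RecognitionCommand;
    }
}
The examples that follow allocate the common ChantRecognizer and ChantSynthesizer classes for each supported development tool and programming language.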
JChantRecognizer _Recognizer = null;
JChantSynthesizer _Synthesizer = null;
JSpeechKit _SpeechKit = null;
_SpeechKit = new JSpeechKit();
_SpeechKit.setCredentials("Credentials");
// Create Recognizer
_Recognizer = _SpeechKit.createChantRecognizer();
if (_Recognizer != null) {
// Set the callback
_Recognizer.setChantSpeechKitEvents(this);
// Register Callbacks
_Recognizer.registerCallback(ChantSpeechKitCallback.CCSRInitComplete);
_Recognizer.registerCallback(ChantSpeechKitCallback.CCSRRecognitionCommand);
// Set app context
_Recognizer.setContext(getApplicationContext());
}
// Create Synthesizer
_Synthesizer = _SpeechKit.createChantSynthesizer();
if (_Synthesizer != null)
{
// Set the callback
_Synthesizer.setChantSpeechKitEvents(this);
// Register Callbacks for engine init
_Synthesizer.registerCallback(ChantSpeechKitCallback.CCTTSInitComplete);
_Synthesizer.registerCallback(ChantSpeechKitCallback.CCTTSRangeStart);
// Set app context
_Synthesizer.setContext(getApplicationContext());
}
NSpeechKit _SpeechKit = null;
NChantRecognizer _Recognizer = null;
NChantSynthesizer _Synthesizer = null;
// Instantiate SpeechKit
_SpeechKit = new NSpeechKit();
if (_SpeechKit != null)
{
// Set credentials
_SpeechKit.SetCredentials("Credentials");
_Recognizer = _SpeechKit.CreateChantRecognizer();
if (_Recognizer != null)
{
_Recognizer.RecognitionCommand += this.Recognizer_RecognitionCommand;
}
_Synthesizer = _SpeechKit.CreateChantSynthesizer();
if (_Synthesizer != null)
{
_Synthesizer.WordPosition += Synthesizer_WordPosition;
}
}
CSpeechKit* _SpeechKit;
CChantRecognizer* _Recognizer;
CChantSynthesizer* _Synthesizer;
_SpeechKit = new CSpeechKit();
if (_SpeechKit != NULL)
{
// Set credentials
_SpeechKit->SetCredentials(L"Credentials");
// Create recognizer
_Recognizer = _SpeechKit->CreateChantRecognizer();
if (_Recognizer != NULL)
{
// Register Event Handlers
_Recognizer->SetRecognitionCommand(RecognitionCommand);
}
// Create synthesizer
_Synthesizer = _SpeechKit->CreateChantSynthesizer();
if (_Synthesizer != NULL)
{
// Register Event Handlers
_Synthesizer->SetWordPosition(WordPosition);
}
}
CSpeechKit* _SpeechKit;
CChantRecognizer* _Recognizer;
CChantSynthesizer* _Synthesizer;
_SpeechKit = new CSpeechKit();
if (_SpeechKit != NULL)
{
// Set credentials
_SpeechKit->SetCredentials("Credentials");
// Create recognizer
_Recognizer = _SpeechKit->CreateChantRecognizer();
if (_Recognizer != NULL)
{
// Register Event Handlers
_Recognizer->SetRecognitionCommand(RecognitionCommand);
}
// Create synthesizer
_Synthesizer = _SpeechKit->CreateChantSynthesizer();
if (_Synthesizer != NULL)
{
// Register Event Handlers
_Synthesizer->SetWordPosition(WordPosition);
}
}
var
_SpeechKit: TSpeechKit;
_Recognizer: TChantRecognizer;
_Synthesizer: TChantSynthesizer;
begin
// Instantiate SpeechKit object
_SpeechKit := TSpeechKit.Create();
if (_SpeechKit <> nil) then
begin
// Set credentials
_SpeechKit.SetCredentials('Credentials');
// Create recognizer
_Recognizer := _SpeechKit.CreateChantRecognizer();
if (_Recognizer <> nil) then
begin
// Register Event Handlers
_Recognizer.RecognitionCommand := RecognitionCommand;
end;
// Create synthesizer
_Synthesizer := _SpeechKit.CreateChantSynthesizer();
if (_Synthesizer <> nil) then
begin
// Register Event Handlers
_Synthesizer.WordPosition := WordPosition;
end;
end;
end;
JSpeechKit _SpeechKit = null;
JChantRecognizer _Recognizer = null;
JChantSynthesizer _Synthesizer = null;
// Create SpeechKit object
_SpeechKit = new JSpeechKit();
// Set credentials
_SpeechKit.setCredentials("Credentials");
_Recognizer = _SpeechKit.createChantRecognizer();
if (_Recognizer != null)
{
// Set the callback object
_Recognizer.setChantSpeechKitEvents(this);
// Register for callbacks
_Recognizer.registerCallback(ChantSpeechKitCallback.CCSRRecognitionCommand);
}
_Synthesizer = _SpeechKit.createChantSynthesizer();
if (_Synthesizer != null)
{
// Set the callback object
_Synthesizer.setChantSpeechKitEvents(this);
// Register for callbacks
_Synthesizer.registerCallback(ChantSpeechKitCallback.CCTTSWordPosition);
}
@property (strong, nonatomic) SPSpeechKit* speechKit;
@property (strong, nonatomic) SPChantRecognizer* recognizer;
@property (strong, nonatomic) SPChantSynthesizer* synthesizer;
_speechKit = [[SPSpeechKit alloc] init];
if (_speechKit != nil)
{
// Set credentials
[_speechKit setCredentials:@"Credentials"];
_recognizer = [_speechKit createChantRecognizer];
if (_recognizer != nil)
{
[_recognizer setDelegate:(id<SPChantRecognizerDelegate>)self];
}
_synthesizer = [_speechKit createChantSynthesizer];
if (_synthesizer != nil)
{
[_synthesizer setDelegate:(id<SPChantSynthesizerDelegate>)self];
}
}
var _SpeechKit: SPSpeechKit? = nil
var _Recognizer: SPChantRecognizer? = nil
var _Synthesizer: SPChantSynthesizer? = nil
_SpeechKit = SPSpeechKit()
if (_SpeechKit != nil)
{
// Set credentials
_ = _SpeechKit!.setCredentials(credentials: "Credentials")
_Recognizer = _SpeechKit!.createChantRecognizer()
if (_Recognizer != nil)
{
_Recognizer!.delegate = self
}
_Synthesizer = _SpeechKit!.createChantSynthesizer()
if (_Synthesizer != nil)
{
_Synthesizer!.delegate = self
}
}
Dim _SpeechKit As NSpeechKit
Dim WithEvents _Recognizer As NChantRecognizer
Dim WithEvents _Synthesizer As NChantSynthesizer
Private Sub Window_Loaded(ByVal sender As System.Object, ByVal e As System.Windows.RoutedEventArgs) Handles MyBase.Loaded
' Instantiate SpeechKit
_SpeechKit = New NSpeechKit()
If (_SpeechKit IsNot Nothing) Then
' Set credentials
_SpeechKit.SetCredentials("Credentials")
_Recognizer = _SpeechKit.CreateChantRecognizer()
_Synthesizer = _SpeechKit.CreateChantSynthesizer()
End If
End Sub
Review the details of cross-platform development discussed in the Integrating SpeechKit section for your favorite development tool and programming language.
Speech recognition and synthesis event result arguments are designed, and their classes factored, so that common properties are shared across platforms and results can be downcast to access unique API-specific property values.
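For example, a C# handler might read a shared property and then downcast the result arguments to reach a value that only one speech API exposes. The event argument type and property names below (RecognitionCommandEventArgs, WindowsMediaRecognitionCommandEventArgs, Command, Confidence) are illustrative assumptions; the article referenced below documents the actual types.
// Illustrative sketch only: event argument type and property names are assumptions.
private void Recognizer_RecognitionCommand(object sender, RecognitionCommandEventArgs e)
{
    // Shared common property available regardless of the underlying speech API
    string command = e.Command;

    // Downcast to the API-specific result type for properties unique to that API
    if (e is WindowsMediaRecognitionCommandEventArgs windowsMediaArgs)
    {
        double confidence = windowsMediaArgs.Confidence;
    }
}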
Review the details in CDW 2023: Downcasting event results to API-specific objects.