🌊

Flutter / riverpod で Speech to Text

2022/10/18に公開

こちらを使ってみると非常に簡単に日本語をテキストにできました。

https://pub.dev/packages/speech_to_text

ただ、動いてくれてはいますが、もっときれいな方法を教えていただきたく思っています。

問題点

  • 画面と関数がイマイチ分割できていない
  • Providerが多くなりすぎている気がしている。

モデルクラスを作ってStateNotifierでラップしてあげるのが一番キレイなのかもしれません。

speech_to_text.dart
/// Exposes a single [STTNotifier] (and its [SpeechToText] engine) to the
/// widget tree so widgets can `ref.watch` recognition state.
final sTTProvider = ChangeNotifierProvider((ref) {
  return STTNotifier(speechToText: SpeechToText());
});

/// Wraps [SpeechToText] in a [ChangeNotifier] so Riverpod's
/// [ChangeNotifierProvider] can rebuild widgets on recognition updates.
class STTNotifier extends ChangeNotifier {
  STTNotifier({required this.speechToText}) {
    _initialize();
  }

  /// The underlying recognizer; `final` because it is never replaced
  /// after construction.
  final SpeechToText speechToText;

  /// Initializes the engine once, then notifies so the UI can react to
  /// the engine becoming available.
  Future<void> _initialize() async {
    await speechToText.initialize();
    notifyListeners();
  }

  /// Starts a listening session.
  ///
  /// Every partial/final result triggers a rebuild so
  /// `speechToText.lastRecognizedWords` stays current on screen.
  Future<void> startListening() async {
    // Await the call so errors surface here instead of being dropped
    // as an unawaited future (the original also left a debug print here).
    await speechToText.listen(onResult: (result) {
      notifyListeners();
    });
    notifyListeners();
  }

  /// Stops listening, keeping the words recognized so far.
  Future<void> stopListening() async {
    await speechToText.stop();
    notifyListeners();
  }

  /// Cancels listening, discarding the current recognition result.
  Future<void> quitListening() async {
    await speechToText.cancel();
    notifyListeners();
  }
}
main.dart

/// Demo screen for the ChangeNotifier variant: shows the latest
/// recognized words and toggles listening with the FAB.
class MyHomePage extends ConsumerWidget {
  const MyHomePage({Key? key}) : super(key: key);

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    // watch() rebuilds this widget every time the notifier fires.
    final stt = ref.watch(sTTProvider);
    return Scaffold(
      appBar: AppBar(
        title: const Text('Speech Demo'),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: <Widget>[
            Container(
              padding: const EdgeInsets.all(16),
              child: const Text(
                'Recognized words:',
                style: TextStyle(fontSize: 20.0),
              ),
            ),
            Expanded(
              child: Container(
                padding: const EdgeInsets.all(16),
                // Text recognized so far in the current/last session.
                child: Text(stt.speechToText.lastRecognizedWords),
              ),
            ),
          ],
        ),
      ),
      floatingActionButton: FloatingActionButton(
        onPressed: () async {
          // Toggle: start when idle, stop while listening.
          final notifier = ref.read(sTTProvider.notifier);
          if (stt.speechToText.isListening) {
            await notifier.stopListening();
          } else {
            await notifier.startListening();
          }
        },
        tooltip: 'Listen',
        child: const Icon(Icons.mic),
      ),
    );
  }
}

speech_to_text_provider.dart

import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:speech_to_text/speech_to_text.dart';

/// The most recently recognized phrase; null until something is heard.
final speechResultProvider = StateProvider<String?>((ref) => null);

/// Whether the recognizer initialized successfully on this device.
final canListenProvider = StateProvider<bool>((ref) => false);

/// Whether a listening session is currently active.
final isListeningProvider = StateProvider<bool>((ref) => false);

/// Creates and initializes the [SpeechToText] engine.
///
/// Side effects:
/// * publishes the initialization result through [canListenProvider], and
/// * mirrors the plugin's status callbacks into [isListeningProvider].
///
/// The engine is cancelled automatically when the provider is disposed.
final sttProvider = FutureProvider.autoDispose<SpeechToText>((ref) async {
  final stt = SpeechToText();

  final bool available = await stt.initialize(onStatus: (status) {
    // The plugin reports plain strings: "listening", "notListening", "done".
    if (status == "listening") {
      ref.read(isListeningProvider.notifier).update((state) => true);
    } else if (status == "notListening" || status == "done") {
      // Both mean the session is no longer capturing audio.
      ref.read(isListeningProvider.notifier).update((state) => false);
    }
  });

  ref.read(canListenProvider.notifier).update((state) => available);

  // Stop any in-flight session when the last listener goes away.
  ref.onDispose(() async {
    await stt.cancel();
  });

  return stt;
});

main.dart
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
// Must match the provider file shown above (speech_to_text_provider.dart);
// the original imported a non-existent spoken_text_provider.dart.
import 'package:stt_example/speech_to_text_provider.dart';

/// App entry point.
void main() => runApp(MyApp());

/// Root widget: installs the Riverpod [ProviderScope] above [MaterialApp].
class MyApp extends StatelessWidget {
  const MyApp({Key? key}) : super(key: key);

  @override
  Widget build(BuildContext context) {
    return ProviderScope(
      child: MaterialApp(
        title: 'Flutter Demo',
        home: MyHomePage(),
      ),
    );
  }
}

/// Demo screen for the provider-based variant: displays the recognized
/// phrase and toggles listening with the FAB.
class MyHomePage extends ConsumerWidget {
  const MyHomePage({Key? key}) : super(key: key);

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final speechResult = ref.watch(speechResultProvider);
    final canListen = ref.watch(canListenProvider);
    final isListening = ref.watch(isListeningProvider);
    // AsyncValue<SpeechToText>: resolves once the engine has initialized.
    final sttAsync = ref.watch(sttProvider);
    return Scaffold(
      appBar: AppBar(
        title: const Text('Speech Demo'),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: <Widget>[
            Container(
              padding: const EdgeInsets.all(16),
              child: const Text(
                'Recognized words:',
                style: TextStyle(fontSize: 20.0),
              ),
            ),
            Expanded(
              child: Container(
                padding: const EdgeInsets.all(16),
                child: Text(speechResult ?? ""),
              ),
            ),
          ],
        ),
      ),
      floatingActionButton: FloatingActionButton(
        // Disable the button until initialization reports that this
        // device can listen (the original watched canListenProvider but
        // never used it).
        onPressed: !canListen
            ? null
            : () {
                // Plain if/else for side effects instead of a ternary
                // expression statement.
                if (isListening) {
                  sttAsync.whenData((stt) async {
                    await stt.stop();
                  });
                } else {
                  sttAsync.whenData((stt) => stt.listen(
                      onResult: (result) => ref
                          .read(speechResultProvider.notifier)
                          .update((state) => result.recognizedWords)));
                }
              },
        tooltip: 'Listen',
        child: Icon(isListening ? Icons.mic_off : Icons.mic),
      ),
    );
  }
}

Discussion