Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

* Download VSCode if you do not already have it installed. This is the preferred IDE for development with Pangea Chat.
* Download flutter on your device using this guide: https://docs.flutter.dev/get-started/install
* Test to make sure that flutter is properly installed by running “flutter version”
* Test to make sure that flutter is properly installed by running `flutter --version`
* You may need to add flutter to your path manually. Instructions can be found here: https://docs.flutter.dev/get-started/install/macos/mobile-ios?tab=download#add-flutter-to-your-path
* Ensure that Google Chrome is installed
* Install the latest version of XCode
Expand All @@ -25,9 +25,9 @@
* To run on Android:
* Download Android File Transfer here: https://www.android.com/filetransfer/
* To run the app from VSCode terminal:
* On web, run `flutter run -d chrome hot`
* On web, run `flutter run -d chrome --hot`
* Or as a web server (Usage with WSL or remote connect) `flutter run --release -d web-server --web-port=3000`
* On mobile device or simulator, run `flutter run hot -d <DEVICE_NAME>`
* On mobile device or simulator, run `flutter run --hot -d <DEVICE_NAME>`

# Special thanks

Expand Down
5 changes: 5 additions & 0 deletions lib/pages/chat/chat.dart
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import 'dart:async';
import 'dart:developer';
import 'dart:io';

import 'package:fluffychat/pangea/common/constants/model_keys.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
Expand Down Expand Up @@ -938,6 +939,10 @@ class ChatController extends State<ChatPageWithRoom>
'duration': result.duration,
'waveform': result.waveform,
},
// #Pangea
//TODO: add results of transcription
// ModelKey.botTranscription: result.sttModel?.toJson(),
// Pangea#
},
// #Pangea
// ).catchError((e) {
Expand Down
151 changes: 102 additions & 49 deletions lib/pages/chat/recording_dialog.dart
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,17 @@ import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';

import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'package:path/path.dart' as path_lib;
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';
import 'package:wakelock_plus/wakelock_plus.dart';
import 'package:web_socket_channel/web_socket_channel.dart';

import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/pangea/toolbar/utils/update_version_dialog.dart';
import 'package:fluffychat/utils/platform_infos.dart';
import 'package:fluffychat/pangea/transcription/transcription_repo.dart';
import 'events/audio_player.dart';

class RecordingDialog extends StatefulWidget {
Expand All @@ -33,12 +36,16 @@ class RecordingDialogState extends State<RecordingDialog> {
final _audioRecorder = AudioRecorder();
final List<double> amplitudeTimeline = [];

FlutterSoundRecorder? _audioRecorderStream;
WebSocketChannel? realtimeTranscriptionChannel;

String? fileName;

static const int bitRate = 64000;
// #Pangea
// static const int samplingRate = 44100;
static const int samplingRate = 22050;
static const int samplingRateTranscription = 16000;
// Pangea#

Future<void> startRecording() async {
Expand Down Expand Up @@ -70,22 +77,33 @@ class RecordingDialogState extends State<RecordingDialog> {
await WakelockPlus.enable();

// #Pangea
const audioConfig = RecordConfig(
bitRate: bitRate,
sampleRate: samplingRate,
numChannels: 1,
autoGain: true,
echoCancel: true,
noiseSuppress: true,
encoder: codec,
);
final streamBytes = StreamController<Uint8List>();
_audioRecorderStream = await FlutterSoundRecorder().openRecorder();
final audioRecorderStartFuture = _audioRecorderStream!.startRecorder(
toStream: streamBytes.sink,
codec: Codec.pcm16,
numChannels: 1,
sampleRate: samplingRateTranscription,
bitRate: samplingRateTranscription,
);
final isNotError = await showUpdateVersionDialog(
future: () =>
// Pangea#

_audioRecorder.start(
const RecordConfig(
bitRate: bitRate,
sampleRate: samplingRate,
numChannels: 1,
autoGain: true,
echoCancel: true,
noiseSuppress: true,
encoder: codec,
// Pangea#
future: () => Future.wait([
audioRecorderStartFuture,
_audioRecorder.start(
audioConfig,
path: path ?? '',
),
path: path ?? '',
),
]),
// #Pangea
context: context,
);
Expand All @@ -107,6 +125,14 @@ class RecordingDialogState extends State<RecordingDialog> {
_duration += const Duration(milliseconds: 100);
});
});

// init websocket with transcription API
realtimeTranscriptionChannel =
await TranscriptionRepo.connectTranscriptionChannel();
realtimeTranscriptionChannel!.sink
.addStream(streamBytes.stream)
.then((_) {})
.catchError((error) {});
} catch (_) {
setState(() => error = true);
rethrow;
Expand All @@ -124,12 +150,20 @@ class RecordingDialogState extends State<RecordingDialog> {
WakelockPlus.disable();
_recorderSubscription?.cancel();
_audioRecorder.stop();
if (_audioRecorderStream != null) {
_audioRecorderStream!.closeRecorder();
_audioRecorderStream = null;
}
super.dispose();
}

void _stopAndSend() async {
_recorderSubscription?.cancel();
final path = await _audioRecorder.stop();
if (_audioRecorderStream != null) {
await _audioRecorderStream!.stopRecorder();
}
await realtimeTranscriptionChannel!.sink.close();

if (path == null) throw ('Recording failed!');
const waveCount = AudioPlayerWidget.wavesCount;
Expand Down Expand Up @@ -159,44 +193,63 @@ class RecordingDialogState extends State<RecordingDialog> {
'${_duration.inMinutes.toString().padLeft(2, '0')}:${(_duration.inSeconds % 60).toString().padLeft(2, '0')}';
final content = error
? Text(L10n.of(context).oopsSomethingWentWrong)
: Row(
: Column(
mainAxisSize: MainAxisSize.min,
children: [
Container(
width: 16,
height: 16,
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(32),
color: Colors.red,
),
Row(
children: [
Container(
width: 16,
height: 16,
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(32),
color: Colors.red,
),
),
Expanded(
child: Row(
mainAxisSize: MainAxisSize.min,
mainAxisAlignment: MainAxisAlignment.end,
children: amplitudeTimeline.reversed
.take(26)
.toList()
.reversed
.map(
(amplitude) => Container(
margin: const EdgeInsets.only(left: 2),
width: 4,
decoration: BoxDecoration(
color: theme.colorScheme.primary,
borderRadius: BorderRadius.circular(
AppConfig.borderRadius),
),
height: maxDecibalWidth * (amplitude / 100),
),
)
.toList(),
),
),
const SizedBox(width: 8),
SizedBox(
width: 48,
child: Text(time),
),
],
),
Expanded(
child: Row(
mainAxisSize: MainAxisSize.min,
mainAxisAlignment: MainAxisAlignment.end,
children: amplitudeTimeline.reversed
.take(26)
.toList()
.reversed
.map(
(amplitude) => Container(
margin: const EdgeInsets.only(left: 2),
width: 4,
decoration: BoxDecoration(
color: theme.colorScheme.primary,
borderRadius:
BorderRadius.circular(AppConfig.borderRadius),
),
height: maxDecibalWidth * (amplitude / 100),
realtimeTranscriptionChannel != null
? Row(
children: [
StreamBuilder(
stream: realtimeTranscriptionChannel!.stream,
builder: (context, snapshot) {
return Text(
snapshot.hasData ? '${snapshot.data}' : '',
);
},
),
)
.toList(),
),
),
const SizedBox(width: 8),
SizedBox(
width: 48,
child: Text(time),
),
],
)
: const Row(),
],
);
if (PlatformInfos.isCupertinoStyle) {
Expand Down
3 changes: 3 additions & 0 deletions lib/pangea/common/network/urls.dart
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,9 @@ class PApiUrls {

static String morphFeaturesAndTags = "${PApiUrls.choreoEndpoint}/morphs";

static String realtimeTranscriptionSession =
"${PApiUrls.choreoEndpoint}/realtime_transcription_session";

///-------------------------------- revenue cat --------------------------
static String rcAppsChoreo = "${PApiUrls.subscriptionEndpoint}/app_ids";
static String rcProductsChoreo =
Expand Down
Empty file.
45 changes: 45 additions & 0 deletions lib/pangea/transcription/transcription_repo.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import 'dart:convert';

import 'package:fluffychat/pages/chat/recording_dialog.dart';
import 'package:fluffychat/pangea/common/config/environment.dart';
import 'package:fluffychat/pangea/common/network/requests.dart';
import 'package:fluffychat/pangea/common/network/urls.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:http/http.dart';
import 'package:web_socket_channel/io.dart';

class TranscriptionRepo {
  /// Opens a realtime-transcription WebSocket connection to Deepgram.
  ///
  /// First fetches a short-lived session key from the Choreo backend
  /// ([PApiUrls.realtimeTranscriptionSession]), then connects to the
  /// Deepgram `v1/listen` streaming endpoint authenticated with that key.
  ///
  /// Throws an [Exception] if the session response does not contain a
  /// usable `key`.
  static Future<IOWebSocketChannel> connectTranscriptionChannel() async {
    final Requests reqToken = Requests(
      choreoApiKey: Environment.choreoApiKey,
      accessToken: MatrixState.pangeaController.userController.accessToken,
    );

    final Response resToken = await reqToken.get(
      url: PApiUrls.realtimeTranscriptionSession,
    );

    // Fail fast with a descriptive error instead of an opaque TypeError
    // when the backend returns an error body or an unexpected shape.
    final dynamic json = jsonDecode(resToken.body);
    final Object? key = json is Map<String, dynamic> ? json['key'] : null;
    if (key is! String || key.isEmpty) {
      throw Exception(
        'Realtime transcription session response missing "key" '
        '(status ${resToken.statusCode})',
      );
    }

    final uri = Uri(
      scheme: 'wss',
      host: 'api.deepgram.com',
      path: 'v1/listen',
      queryParameters: {
        // Raw 16-bit linear PCM, matching the Codec.pcm16 stream produced
        // by the recorder in RecordingDialogState.
        'encoding': 'linear16',
        'sample_rate': '${RecordingDialogState.samplingRateTranscription}',
        'endpointing': 'false',
        // TODO: accept language code as a parameter and set params based on it
        // 'language': languageCode,
        // 'model': languageCode == 'en' ? 'nova-3' : 'nova-2',
      },
    );

    // NOTE(review): IOWebSocketChannel comes from web_socket_channel/io and
    // requires dart:io, so this will not work on web builds — confirm target
    // platforms, or switch to WebSocketChannel.connect if web is needed.
    return IOWebSocketChannel.connect(
      uri,
      headers: {"Authorization": "Token $key"},
    );
  }
}
Loading