Add audio file caching

This commit is contained in:
Nutcake 2023-05-20 21:09:22 +02:00
parent a8063a74d7
commit c12748de6c
7 changed files with 317 additions and 159 deletions

View file

@ -163,7 +163,7 @@ class RecordApi {
final digests = [voiceDigest]; final digests = [voiceDigest];
final record = Record.fromRequiredData( final record = Record.fromRequiredData(
recordType: RecordType.texture, recordType: RecordType.audio,
userId: client.userId, userId: client.userId,
machineId: machineId, machineId: machineId,
assetUri: voiceDigest.dbUri, assetUri: voiceDigest.dbUri,

View file

@ -130,6 +130,9 @@ class ApiClient {
// TODO: Show the login screen again if cached login was unsuccessful. // TODO: Show the login screen again if cached login was unsuccessful.
throw "You are not authorized to do that. $error"; throw "You are not authorized to do that. $error";
} }
if (response.statusCode == 404) {
throw "Resource not found. $error";
}
if (response.statusCode == 500) { if (response.statusCode == 500) {
throw "Internal server error. $error"; throw "Internal server error. $error";
} }

View file

@ -0,0 +1,24 @@
import 'dart:io';
import 'package:contacts_plus_plus/auxiliary.dart';
import 'package:contacts_plus_plus/clients/api_client.dart';
import 'package:http/http.dart' as http;
import 'package:contacts_plus_plus/models/message.dart';
import 'package:path/path.dart';
import 'package:path_provider/path_provider.dart';
class AudioCacheClient {
  // Cache root: the platform temporary directory, resolved once and reused
  // for every lookup.
  final Future<Directory> _directoryFuture = getTemporaryDirectory();

  /// Returns a locally cached audio file for [clip], downloading it on first use.
  ///
  /// The cache key is the basename of the clip's asset URI inside the
  /// temporary directory. On a cache miss the asset is fetched over HTTP
  /// (via [Aux.neosDbToHttp]) and validated with [ApiClient.checkResponse]
  /// *before* the cache file is created — so a failed download leaves no
  /// empty file behind, and a later call will retry instead of forever
  /// serving a corrupt zero-byte "cached" clip.
  ///
  /// Throws whatever [ApiClient.checkResponse] throws on a non-success
  /// status, or an [IOException] on filesystem errors.
  Future<File> cachedNetworkAudioFile(AudioClipContent clip) async {
    final directory = await _directoryFuture;
    final file = File("${directory.path}/${basename(clip.assetUri)}");
    if (!await file.exists()) {
      // Download and validate first; only create the file once we know we
      // have valid bytes to put in it.
      final response = await http.get(Uri.parse(Aux.neosDbToHttp(clip.assetUri)));
      ApiClient.checkResponse(response);
      await file.create(recursive: true);
      // flush: true ensures the bytes are on disk before we hand the file out.
      await file.writeAsBytes(response.bodyBytes, flush: true);
    }
    return file;
  }
}

View file

@ -2,10 +2,12 @@ import 'dart:convert';
import 'dart:io' show Platform; import 'dart:io' show Platform;
import 'package:contacts_plus_plus/auxiliary.dart'; import 'package:contacts_plus_plus/auxiliary.dart';
import 'package:contacts_plus_plus/clients/audio_cache_client.dart';
import 'package:contacts_plus_plus/models/message.dart'; import 'package:contacts_plus_plus/models/message.dart';
import 'package:contacts_plus_plus/widgets/messages/message_state_indicator.dart'; import 'package:contacts_plus_plus/widgets/messages/message_state_indicator.dart';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:just_audio/just_audio.dart'; import 'package:just_audio/just_audio.dart';
import 'package:provider/provider.dart';
class MessageAudioPlayer extends StatefulWidget { class MessageAudioPlayer extends StatefulWidget {
const MessageAudioPlayer({required this.message, this.foregroundColor, super.key}); const MessageAudioPlayer({required this.message, this.foregroundColor, super.key});
@ -19,19 +21,13 @@ class MessageAudioPlayer extends StatefulWidget {
class _MessageAudioPlayerState extends State<MessageAudioPlayer> with WidgetsBindingObserver { class _MessageAudioPlayerState extends State<MessageAudioPlayer> with WidgetsBindingObserver {
final AudioPlayer _audioPlayer = AudioPlayer(); final AudioPlayer _audioPlayer = AudioPlayer();
Future? _audioFileFuture;
double _sliderValue = 0; double _sliderValue = 0;
@override @override
void initState() { void initState() {
super.initState(); super.initState();
WidgetsBinding.instance.addObserver(this); WidgetsBinding.instance.addObserver(this);
if (Platform.isAndroid) {
//TODO: Add caching of audio-files
_audioPlayer.setUrl(
Aux.neosDbToHttp(AudioClipContent
.fromMap(jsonDecode(widget.message.content)).assetUri),
preload: true).whenComplete(() => _audioPlayer.setLoopMode(LoopMode.off));
}
} }
@override @override
@ -44,10 +40,9 @@ class _MessageAudioPlayerState extends State<MessageAudioPlayer> with WidgetsBin
@override @override
void didChangeDependencies() { void didChangeDependencies() {
super.didChangeDependencies(); super.didChangeDependencies();
_audioPlayer.setUrl( final audioCache = Provider.of<AudioCacheClient>(context);
Aux.neosDbToHttp(AudioClipContent _audioFileFuture = audioCache.cachedNetworkAudioFile(AudioClipContent.fromMap(jsonDecode(widget.message.content)))
.fromMap(jsonDecode(widget.message.content)).assetUri), .then((value) => _audioPlayer.setFilePath(value.path)).whenComplete(() => _audioPlayer.setLoopMode(LoopMode.off));
preload: true).whenComplete(() => _audioPlayer.setLoopMode(LoopMode.off));
} }
@override @override
@ -90,116 +85,152 @@ class _MessageAudioPlayerState extends State<MessageAudioPlayer> with WidgetsBin
if (!Platform.isAndroid) { if (!Platform.isAndroid) {
return _createErrorWidget("Sorry, audio-messages are not\n supported on this platform."); return _createErrorWidget("Sorry, audio-messages are not\n supported on this platform.");
} }
return IntrinsicWidth( return FutureBuilder(
child: StreamBuilder<PlayerState>( future: _audioFileFuture,
stream: _audioPlayer.playerStateStream, builder: (context, snapshot) {
builder: (context, snapshot) { if (snapshot.hasData) {
if (snapshot.hasData) { return IntrinsicWidth(
final playerState = snapshot.data as PlayerState; child: StreamBuilder<PlayerState>(
return Column( stream: _audioPlayer.playerStateStream,
crossAxisAlignment: CrossAxisAlignment.center, builder: (context, snapshot) {
mainAxisAlignment: MainAxisAlignment.center, if (snapshot.hasData) {
children: [ final playerState = snapshot.data as PlayerState;
Row( return Column(
mainAxisSize: MainAxisSize.max, crossAxisAlignment: CrossAxisAlignment.center,
mainAxisAlignment: MainAxisAlignment.spaceBetween, mainAxisAlignment: MainAxisAlignment.center,
children: [ children: [
IconButton( Row(
onPressed: () { mainAxisSize: MainAxisSize.max,
switch (playerState.processingState) { mainAxisAlignment: MainAxisAlignment.spaceBetween,
case ProcessingState.idle: children: [
case ProcessingState.loading: IconButton(
case ProcessingState.buffering: onPressed: () {
break; switch (playerState.processingState) {
case ProcessingState.ready: case ProcessingState.idle:
if (playerState.playing) { case ProcessingState.loading:
_audioPlayer.pause(); case ProcessingState.buffering:
} else { break;
_audioPlayer.play(); case ProcessingState.ready:
if (playerState.playing) {
_audioPlayer.pause();
} else {
_audioPlayer.play();
}
break;
case ProcessingState.completed:
_audioPlayer.seek(Duration.zero);
_audioPlayer.play();
break;
} }
break; },
case ProcessingState.completed: color: widget.foregroundColor,
_audioPlayer.seek(Duration.zero); icon: SizedBox(
_audioPlayer.play(); width: 24,
break; height: 24,
} child: playerState.processingState == ProcessingState.loading
}, ? const Center(child: CircularProgressIndicator(),)
color: widget.foregroundColor, : Icon(((_audioPlayer.duration ?? Duration.zero) - _audioPlayer.position).inMilliseconds <
icon: SizedBox( 10 ? Icons.replay
width: 24, : (playerState.playing ? Icons.pause : Icons.play_arrow)),
height: 24, ),
child: playerState.processingState == ProcessingState.loading ),
? const Center(child: CircularProgressIndicator(),) StreamBuilder(
: Icon(((_audioPlayer.duration ?? Duration.zero) - _audioPlayer.position).inMilliseconds < stream: _audioPlayer.positionStream,
10 ? Icons.replay builder: (context, snapshot) {
: (playerState.playing ? Icons.pause : Icons.play_arrow)), _sliderValue = _audioPlayer.duration == null ? 0 : (_audioPlayer.position.inMilliseconds /
), (_audioPlayer.duration!.inMilliseconds)).clamp(0, 1);
return StatefulBuilder( // Not sure if this makes sense here...
builder: (context, setState) {
return SliderTheme(
data: SliderThemeData(
inactiveTrackColor: widget.foregroundColor?.withAlpha(100),
),
child: Slider(
thumbColor: widget.foregroundColor,
value: _sliderValue,
min: 0.0,
max: 1.0,
onChanged: (value) async {
_audioPlayer.pause();
setState(() {
_sliderValue = value;
});
_audioPlayer.seek(Duration(
milliseconds: (value * (_audioPlayer.duration?.inMilliseconds ?? 0)).round(),
));
},
),
);
}
);
}
)
],
), ),
StreamBuilder( Row(
stream: _audioPlayer.positionStream, mainAxisSize: MainAxisSize.max,
builder: (context, snapshot) { mainAxisAlignment: MainAxisAlignment.spaceEvenly,
_sliderValue = _audioPlayer.duration == null ? 0 : (_audioPlayer.position.inMilliseconds / children: [
(_audioPlayer.duration!.inMilliseconds)).clamp(0, 1); const SizedBox(width: 4,),
return StatefulBuilder( // Not sure if this makes sense here... StreamBuilder(
builder: (context, setState) { stream: _audioPlayer.positionStream,
return SliderTheme( builder: (context, snapshot) {
data: SliderThemeData( return Text("${snapshot.data?.format() ?? "??"}/${_audioPlayer.duration?.format() ??
inactiveTrackColor: widget.foregroundColor?.withAlpha(100), "??"}",
), style: Theme
child: Slider( .of(context)
thumbColor: widget.foregroundColor, .textTheme
value: _sliderValue, .bodySmall
min: 0.0, ?.copyWith(color: widget.foregroundColor?.withAlpha(150)),
max: 1.0, );
onChanged: (value) async { }
_audioPlayer.pause(); ),
setState(() { const Spacer(),
_sliderValue = value; MessageStateIndicator(message: widget.message, foregroundColor: widget.foregroundColor,),
}); ],
_audioPlayer.seek(Duration(
milliseconds: (value * (_audioPlayer.duration?.inMilliseconds ?? 0)).round(),
));
},
),
);
}
);
}
) )
], ],
);
} else if (snapshot.hasError) {
FlutterError.reportError(FlutterErrorDetails(exception: snapshot.error!, stack: snapshot.stackTrace));
return _createErrorWidget("Failed to load audio-message.");
} else {
return const Center(child: CircularProgressIndicator(),);
}
}
),
);
} else if (snapshot.hasError) {
return SizedBox(
width: 300,
child: Row(
children: [
const Icon(Icons.volume_off),
const SizedBox(width: 8,),
Expanded(
child: Text(
"Failed to load voice message: ${snapshot.error}",
maxLines: 4,
overflow: TextOverflow.ellipsis,
softWrap: true,
), ),
Row( ),
mainAxisSize: MainAxisSize.max, ],
mainAxisAlignment: MainAxisAlignment.spaceEvenly, ),
children: [ );
const SizedBox(width: 4,), } else {
StreamBuilder( return const Padding(
stream: _audioPlayer.positionStream, padding: EdgeInsets.all(8.0),
builder: (context, snapshot) { child: Row(
return Text("${snapshot.data?.format() ?? "??"}/${_audioPlayer.duration?.format() ?? children: [
"??"}", Icon(Icons.volume_up),
style: Theme SizedBox(width: 8,),
.of(context) Center(child: CircularProgressIndicator()),
.textTheme ],
.bodySmall ),
?.copyWith(color: widget.foregroundColor?.withAlpha(150)), );
); }
} }
),
const Spacer(),
MessageStateIndicator(message: widget.message, foregroundColor: widget.foregroundColor,),
],
)
],
);
} else if (snapshot.hasError) {
FlutterError.reportError(FlutterErrorDetails(exception: snapshot.error!, stack: snapshot.stackTrace));
return _createErrorWidget("Failed to load audio-message.");
} else {
return const Center(child: CircularProgressIndicator(),);
}
}
),
); );
} }
} }

View file

@ -15,6 +15,8 @@ class MessageCameraView extends StatefulWidget {
class _MessageCameraViewState extends State<MessageCameraView> { class _MessageCameraViewState extends State<MessageCameraView> {
final List<CameraDescription> _cameras = []; final List<CameraDescription> _cameras = [];
late final CameraController _cameraController; late final CameraController _cameraController;
int _cameraIndex = 0;
FlashMode _flashMode = FlashMode.off;
Future? _initializeControllerFuture; Future? _initializeControllerFuture;
@override @override
@ -23,16 +25,20 @@ class _MessageCameraViewState extends State<MessageCameraView> {
availableCameras().then((List<CameraDescription> cameras) { availableCameras().then((List<CameraDescription> cameras) {
_cameras.clear(); _cameras.clear();
_cameras.addAll(cameras); _cameras.addAll(cameras);
_cameraController = CameraController(cameras.first, ResolutionPreset.high); if (cameras.isEmpty) {
setState(() { _initializeControllerFuture = Future.error("Failed to initialize camera");
_initializeControllerFuture = _cameraController.initialize(); } else {
}); _cameraController = CameraController(cameras.first, ResolutionPreset.high);
_cameraIndex = 0;
_initializeControllerFuture = _cameraController.initialize().whenComplete(() => _cameraController.setFlashMode(_flashMode));
}
setState(() {});
}); });
} }
@override @override
void dispose() { void dispose() {
_cameraController.dispose(); _cameraController.setFlashMode(FlashMode.off).whenComplete(() => _cameraController.dispose());
super.dispose(); super.dispose();
} }
@ -47,25 +53,121 @@ class _MessageCameraViewState extends State<MessageCameraView> {
builder: (context, snapshot) { builder: (context, snapshot) {
// Can't use hasData since the future returns void. // Can't use hasData since the future returns void.
if (snapshot.connectionState == ConnectionState.done) { if (snapshot.connectionState == ConnectionState.done) {
return Column( return Stack(
children: [ children: [
Expanded(child: CameraPreview(_cameraController)), Column(
Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [ children: [
IconButton(onPressed: () async { Expanded(child: CameraPreview(_cameraController)),
final sMsgr = ScaffoldMessenger.of(context); Row(
final nav = Navigator.of(context); crossAxisAlignment: CrossAxisAlignment.center,
try { mainAxisAlignment: MainAxisAlignment.spaceEvenly,
await _initializeControllerFuture; children: [
final image = await _cameraController.takePicture(); IconButton(
nav.pop(File(image.path)); onPressed: _cameras.isEmpty ? null : () async {
} catch (e) { setState(() {
sMsgr.showSnackBar(SnackBar(content: Text("Failed to capture image: $e"))); _cameraIndex = (_cameraIndex+1) % _cameras.length;
} });
}, icon: const Icon(Icons.circle_outlined)) _cameraController.setDescription(_cameras[_cameraIndex]);
},
iconSize: 32,
icon: const Icon(Icons.switch_camera),
),
const SizedBox(width: 64, height: 72,),
AnimatedSwitcher(
duration: const Duration(milliseconds: 200),
transitionBuilder: (Widget child, Animation<double> animation) =>
FadeTransition(
opacity: animation,
child: RotationTransition(
turns: Tween<double>(begin: 0.6, end: 1).animate(animation),
child: child,
),
),
child: switch (_flashMode) {
FlashMode.off =>
IconButton(
key: const ValueKey("button-flash-off"),
iconSize: 32,
onPressed: () async {
_flashMode = FlashMode.auto;
await _cameraController.setFlashMode(_flashMode);
setState(() {});
},
icon: const Icon(Icons.flash_off),
),
FlashMode.auto =>
IconButton(
key: const ValueKey("button-flash-auto"),
iconSize: 32,
onPressed: () async {
_flashMode = FlashMode.always;
await _cameraController.setFlashMode(_flashMode);
setState(() {});
},
icon: const Icon(Icons.flash_auto),
),
FlashMode.always =>
IconButton(
key: const ValueKey("button-flash-always"),
iconSize: 32,
onPressed: () async {
_flashMode = FlashMode.torch;
await _cameraController.setFlashMode(_flashMode);
setState(() {});
},
icon: const Icon(Icons.flash_on),
),
FlashMode.torch =>
IconButton(
key: const ValueKey("button-flash-torch"),
iconSize: 32,
onPressed: () async {
_flashMode = FlashMode.off;
await _cameraController.setFlashMode(_flashMode);
setState(() {});
},
icon: const Icon(Icons.flashlight_on),
),
},
),
],
)
], ],
) ),
Align(
alignment: Alignment.bottomCenter,
child: Container(
decoration: BoxDecoration(
color: Theme
.of(context)
.colorScheme
.surface,
borderRadius: BorderRadius.circular(64),
),
margin: const EdgeInsets.all(16),
child: IconButton(
onPressed: () async {
final sMsgr = ScaffoldMessenger.of(context);
final nav = Navigator.of(context);
try {
await _initializeControllerFuture;
final image = await _cameraController.takePicture();
nav.pop(File(image.path));
} catch (e) {
sMsgr.showSnackBar(SnackBar(content: Text("Failed to capture image: $e")));
}
},
style: IconButton.styleFrom(
foregroundColor: Theme
.of(context)
.colorScheme
.primary,
),
icon: const Icon(Icons.camera),
iconSize: 64,
),
),
),
], ],
); );
} else if (snapshot.hasError) { } else if (snapshot.hasError) {
@ -79,5 +181,4 @@ class _MessageCameraViewState extends State<MessageCameraView> {
), ),
); );
} }
} }

View file

@ -33,19 +33,16 @@ class _MessageRecordButtonState extends State<MessageRecordButton> {
return Material( return Material(
child: GestureDetector( child: GestureDetector(
onTapDown: widget.disabled ? null : (_) async { onTapDown: widget.disabled ? null : (_) async {
// TODO: Implement voice message recording
debugPrint("Down");
HapticFeedback.vibrate(); HapticFeedback.vibrate();
widget.onRecordStart?.call(); widget.onRecordStart?.call();
final dir = await getTemporaryDirectory(); final dir = await getTemporaryDirectory();
await _recorder.start( await _recorder.start(
path: "${dir.path}/A-${const Uuid().v4()}.wav", path: "${dir.path}/A-${const Uuid().v4()}.ogg",
encoder: AudioEncoder.wav, encoder: AudioEncoder.opus,
samplingRate: 44100, samplingRate: 44100,
); );
}, },
onTapUp: (_) async { onTapUp: (_) async {
debugPrint("Up");
if (await _recorder.isRecording()) { if (await _recorder.isRecording()) {
final recording = await _recorder.stop(); final recording = await _recorder.stop();
widget.onRecordEnd?.call(recording == null ? null : File(recording)); widget.onRecordEnd?.call(recording == null ? null : File(recording));

View file

@ -1,11 +1,11 @@
import 'dart:convert'; import 'dart:convert';
import 'dart:io'; import 'dart:io';
import 'dart:math';
import 'package:collection/collection.dart'; import 'package:collection/collection.dart';
import 'package:contacts_plus_plus/apis/record_api.dart'; import 'package:contacts_plus_plus/apis/record_api.dart';
import 'package:contacts_plus_plus/client_holder.dart'; import 'package:contacts_plus_plus/client_holder.dart';
import 'package:contacts_plus_plus/clients/api_client.dart'; import 'package:contacts_plus_plus/clients/api_client.dart';
import 'package:contacts_plus_plus/clients/audio_cache_client.dart';
import 'package:contacts_plus_plus/clients/messaging_client.dart'; import 'package:contacts_plus_plus/clients/messaging_client.dart';
import 'package:contacts_plus_plus/models/friend.dart'; import 'package:contacts_plus_plus/models/friend.dart';
import 'package:contacts_plus_plus/models/message.dart'; import 'package:contacts_plus_plus/models/message.dart';
@ -125,7 +125,6 @@ class _MessagesListState extends State<MessagesList> with SingleTickerProviderSt
_hasText = false; _hasText = false;
} }
Future<void> sendVoiceMessage(ApiClient client, MessagingClient mClient, File file, String machineId, Future<void> sendVoiceMessage(ApiClient client, MessagingClient mClient, File file, String machineId,
void Function(double progress) progressCallback) async { void Function(double progress) progressCallback) async {
final record = await RecordApi.uploadVoiceClip( final record = await RecordApi.uploadVoiceClip(
@ -291,20 +290,23 @@ class _MessagesListState extends State<MessagesList> with SingleTickerProviderSt
), ),
); );
} }
return ListView.builder( return Provider(
controller: _messageScrollController, create: (BuildContext context) => AudioCacheClient(),
reverse: true, child: ListView.builder(
itemCount: cache.messages.length, controller: _messageScrollController,
itemBuilder: (context, index) { reverse: true,
final entry = cache.messages[index]; itemCount: cache.messages.length,
if (index == cache.messages.length - 1) { itemBuilder: (context, index) {
return Padding( final entry = cache.messages[index];
padding: const EdgeInsets.only(top: 12), if (index == cache.messages.length - 1) {
child: MessageBubble(message: entry,), return Padding(
); padding: const EdgeInsets.only(top: 12),
} child: MessageBubble(message: entry,),
return MessageBubble(message: entry,); );
}, }
return MessageBubble(message: entry,);
},
),
); );
}, },
), ),