Add audio file caching

parent a8063a74d7
commit c12748de6c

7 changed files with 317 additions and 159 deletions
@@ -163,7 +163,7 @@ class RecordApi {
final digests = [voiceDigest];

final record = Record.fromRequiredData(
- recordType: RecordType.texture,
+ recordType: RecordType.audio,
userId: client.userId,
machineId: machineId,
assetUri: voiceDigest.dbUri,

@@ -130,6 +130,9 @@ class ApiClient {
// TODO: Show the login screen again if cached login was unsuccessful.
throw "You are not authorized to do that. $error";
}
+ if (response.statusCode == 404) {
+ throw "Resource not found. $error";
+ }
if (response.statusCode == 500) {
throw "Internal server error. $error";
}

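A sketch of where these checks plausibly live: the static ApiClient.checkResponse helper that the new AudioCacheClient calls below. The method signature, the void return, and the source of $error are assumptions here; only the status-code checks themselves come from the hunk above.

    // Hypothetical placement: ApiClient.checkResponse(response), as called by AudioCacheClient.
    static void checkResponse(http.Response response) {
      final error = response.body; // assumed source of the $error text above
      // ...authorization handling shown above elided...
      if (response.statusCode == 404) {
        throw "Resource not found. $error"; // the check this commit adds
      }
      if (response.statusCode == 500) {
        throw "Internal server error. $error";
      }
    }
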
lib/clients/audio_cache_client.dart (new file, 24 lines)
@@ -0,0 +1,24 @@
import 'dart:io';

import 'package:contacts_plus_plus/auxiliary.dart';
import 'package:contacts_plus_plus/clients/api_client.dart';
import 'package:http/http.dart' as http;
import 'package:contacts_plus_plus/models/message.dart';
import 'package:path/path.dart';
import 'package:path_provider/path_provider.dart';

class AudioCacheClient {
  final Future<Directory> _directoryFuture = getTemporaryDirectory();

  Future<File> cachedNetworkAudioFile(AudioClipContent clip) async {
    final directory = await _directoryFuture;
    final file = File("${directory.path}/${basename(clip.assetUri)}");
    if (!await file.exists()) {
      await file.create(recursive: true);
      final response = await http.get(Uri.parse(Aux.neosDbToHttp(clip.assetUri)));
      ApiClient.checkResponse(response);
      await file.writeAsBytes(response.bodyBytes);
    }
    return file;
  }
}

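The new client is intentionally simple: one temporary directory, one file per asset name, downloaded only on a cache miss. A minimal usage sketch, assembled from how the rest of this diff uses it (`clip` stands for an AudioClipContent parsed from a message, `player` for the just_audio AudioPlayer):

    // Resolve the clip to a local file (fetching it once if needed), then play from disk.
    final audioCache = AudioCacheClient();
    final file = await audioCache.cachedNetworkAudioFile(clip);
    await player.setFilePath(file.path);
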
@@ -2,10 +2,12 @@ import 'dart:convert';
import 'dart:io' show Platform;

import 'package:contacts_plus_plus/auxiliary.dart';
import 'package:contacts_plus_plus/clients/audio_cache_client.dart';
import 'package:contacts_plus_plus/models/message.dart';
import 'package:contacts_plus_plus/widgets/messages/message_state_indicator.dart';
import 'package:flutter/material.dart';
import 'package:just_audio/just_audio.dart';
import 'package:provider/provider.dart';

class MessageAudioPlayer extends StatefulWidget {
const MessageAudioPlayer({required this.message, this.foregroundColor, super.key});

@@ -19,19 +21,13 @@ class MessageAudioPlayer extends StatefulWidget {

class _MessageAudioPlayerState extends State<MessageAudioPlayer> with WidgetsBindingObserver {
final AudioPlayer _audioPlayer = AudioPlayer();
Future? _audioFileFuture;
double _sliderValue = 0;

@override
void initState() {
super.initState();
WidgetsBinding.instance.addObserver(this);
if (Platform.isAndroid) {
//TODO: Add caching of audio-files
_audioPlayer.setUrl(
Aux.neosDbToHttp(AudioClipContent
.fromMap(jsonDecode(widget.message.content)).assetUri),
preload: true).whenComplete(() => _audioPlayer.setLoopMode(LoopMode.off));
}
}

@override

@@ -44,10 +40,9 @@ class _MessageAudioPlayerState extends State<MessageAudioPlayer> with WidgetsBindingObserver {
@override
void didChangeDependencies() {
super.didChangeDependencies();
- _audioPlayer.setUrl(
- Aux.neosDbToHttp(AudioClipContent
- .fromMap(jsonDecode(widget.message.content)).assetUri),
- preload: true).whenComplete(() => _audioPlayer.setLoopMode(LoopMode.off));
+ final audioCache = Provider.of<AudioCacheClient>(context);
+ _audioFileFuture = audioCache.cachedNetworkAudioFile(AudioClipContent.fromMap(jsonDecode(widget.message.content)))
+ .then((value) => _audioPlayer.setFilePath(value.path)).whenComplete(() => _audioPlayer.setLoopMode(LoopMode.off));
}

@override

@@ -90,116 +85,152 @@ class _MessageAudioPlayerState extends State<MessageAudioPlayer> with WidgetsBindingObserver {
if (!Platform.isAndroid) {
return _createErrorWidget("Sorry, audio-messages are not\n supported on this platform.");
}
return IntrinsicWidth(
child: StreamBuilder<PlayerState>(
stream: _audioPlayer.playerStateStream,
builder: (context, snapshot) {
if (snapshot.hasData) {
final playerState = snapshot.data as PlayerState;
return Column(
crossAxisAlignment: CrossAxisAlignment.center,
mainAxisAlignment: MainAxisAlignment.center,
children: [
Row(
mainAxisSize: MainAxisSize.max,
mainAxisAlignment: MainAxisAlignment.spaceBetween,
return FutureBuilder(
future: _audioFileFuture,
builder: (context, snapshot) {
if (snapshot.hasData) {
return IntrinsicWidth(
child: StreamBuilder<PlayerState>(
stream: _audioPlayer.playerStateStream,
builder: (context, snapshot) {
if (snapshot.hasData) {
final playerState = snapshot.data as PlayerState;
return Column(
crossAxisAlignment: CrossAxisAlignment.center,
mainAxisAlignment: MainAxisAlignment.center,
children: [
IconButton(
onPressed: () {
switch (playerState.processingState) {
case ProcessingState.idle:
case ProcessingState.loading:
case ProcessingState.buffering:
break;
case ProcessingState.ready:
if (playerState.playing) {
_audioPlayer.pause();
} else {
_audioPlayer.play();
Row(
mainAxisSize: MainAxisSize.max,
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
IconButton(
onPressed: () {
switch (playerState.processingState) {
case ProcessingState.idle:
case ProcessingState.loading:
case ProcessingState.buffering:
break;
case ProcessingState.ready:
if (playerState.playing) {
_audioPlayer.pause();
} else {
_audioPlayer.play();
}
break;
case ProcessingState.completed:
_audioPlayer.seek(Duration.zero);
_audioPlayer.play();
break;
}
break;
case ProcessingState.completed:
_audioPlayer.seek(Duration.zero);
_audioPlayer.play();
break;
}
},
color: widget.foregroundColor,
icon: SizedBox(
width: 24,
height: 24,
child: playerState.processingState == ProcessingState.loading
? const Center(child: CircularProgressIndicator(),)
: Icon(((_audioPlayer.duration ?? Duration.zero) - _audioPlayer.position).inMilliseconds <
10 ? Icons.replay
: (playerState.playing ? Icons.pause : Icons.play_arrow)),
),
},
color: widget.foregroundColor,
icon: SizedBox(
width: 24,
height: 24,
child: playerState.processingState == ProcessingState.loading
? const Center(child: CircularProgressIndicator(),)
: Icon(((_audioPlayer.duration ?? Duration.zero) - _audioPlayer.position).inMilliseconds <
10 ? Icons.replay
: (playerState.playing ? Icons.pause : Icons.play_arrow)),
),
),
StreamBuilder(
stream: _audioPlayer.positionStream,
builder: (context, snapshot) {
_sliderValue = _audioPlayer.duration == null ? 0 : (_audioPlayer.position.inMilliseconds /
(_audioPlayer.duration!.inMilliseconds)).clamp(0, 1);
return StatefulBuilder( // Not sure if this makes sense here...
builder: (context, setState) {
return SliderTheme(
data: SliderThemeData(
inactiveTrackColor: widget.foregroundColor?.withAlpha(100),
),
child: Slider(
thumbColor: widget.foregroundColor,
value: _sliderValue,
min: 0.0,
max: 1.0,
onChanged: (value) async {
_audioPlayer.pause();
setState(() {
_sliderValue = value;
});
_audioPlayer.seek(Duration(
milliseconds: (value * (_audioPlayer.duration?.inMilliseconds ?? 0)).round(),
));
},
),
);
}
);
}
)
],
),
StreamBuilder(
stream: _audioPlayer.positionStream,
builder: (context, snapshot) {
_sliderValue = _audioPlayer.duration == null ? 0 : (_audioPlayer.position.inMilliseconds /
(_audioPlayer.duration!.inMilliseconds)).clamp(0, 1);
return StatefulBuilder( // Not sure if this makes sense here...
builder: (context, setState) {
return SliderTheme(
data: SliderThemeData(
inactiveTrackColor: widget.foregroundColor?.withAlpha(100),
),
child: Slider(
thumbColor: widget.foregroundColor,
value: _sliderValue,
min: 0.0,
max: 1.0,
onChanged: (value) async {
_audioPlayer.pause();
setState(() {
_sliderValue = value;
});
_audioPlayer.seek(Duration(
milliseconds: (value * (_audioPlayer.duration?.inMilliseconds ?? 0)).round(),
));
},
),
);
}
);
}
Row(
mainAxisSize: MainAxisSize.max,
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: [
const SizedBox(width: 4,),
StreamBuilder(
stream: _audioPlayer.positionStream,
builder: (context, snapshot) {
return Text("${snapshot.data?.format() ?? "??"}/${_audioPlayer.duration?.format() ??
"??"}",
style: Theme
.of(context)
.textTheme
.bodySmall
?.copyWith(color: widget.foregroundColor?.withAlpha(150)),
);
}
),
const Spacer(),
MessageStateIndicator(message: widget.message, foregroundColor: widget.foregroundColor,),
],
)
],
);
} else if (snapshot.hasError) {
FlutterError.reportError(FlutterErrorDetails(exception: snapshot.error!, stack: snapshot.stackTrace));
return _createErrorWidget("Failed to load audio-message.");
} else {
return const Center(child: CircularProgressIndicator(),);
}
}
),
);
} else if (snapshot.hasError) {
return SizedBox(
width: 300,
child: Row(
children: [
const Icon(Icons.volume_off),
const SizedBox(width: 8,),
Expanded(
child: Text(
"Failed to load voice message: ${snapshot.error}",
maxLines: 4,
overflow: TextOverflow.ellipsis,
softWrap: true,
),
Row(
mainAxisSize: MainAxisSize.max,
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: [
const SizedBox(width: 4,),
StreamBuilder(
stream: _audioPlayer.positionStream,
builder: (context, snapshot) {
return Text("${snapshot.data?.format() ?? "??"}/${_audioPlayer.duration?.format() ??
"??"}",
style: Theme
.of(context)
.textTheme
.bodySmall
?.copyWith(color: widget.foregroundColor?.withAlpha(150)),
);
}
),
const Spacer(),
MessageStateIndicator(message: widget.message, foregroundColor: widget.foregroundColor,),
],
)
],
);
} else if (snapshot.hasError) {
FlutterError.reportError(FlutterErrorDetails(exception: snapshot.error!, stack: snapshot.stackTrace));
return _createErrorWidget("Failed to load audio-message.");
} else {
return const Center(child: CircularProgressIndicator(),);
}
}
),
),
],
),
);
} else {
return const Padding(
padding: EdgeInsets.all(8.0),
child: Row(
children: [
Icon(Icons.volume_up),
SizedBox(width: 8,),
Center(child: CircularProgressIndicator()),
],
),
);
}
}
);
}
}

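Put together, the player's new loading path (from the didChangeDependencies hunk above) boils down to: look up the shared AudioCacheClient from Provider, resolve the clip to a local file, then point just_audio at that file. Condensed from the diff, with the diff's own names:

    // New flow in _MessageAudioPlayerState.didChangeDependencies():
    final audioCache = Provider.of<AudioCacheClient>(context);
    _audioFileFuture = audioCache
        .cachedNetworkAudioFile(AudioClipContent.fromMap(jsonDecode(widget.message.content)))
        .then((value) => _audioPlayer.setFilePath(value.path))
        .whenComplete(() => _audioPlayer.setLoopMode(LoopMode.off));

The build method then wraps the player UI in a FutureBuilder on _audioFileFuture, so the controls only appear once the file is available locally.
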
@@ -15,6 +15,8 @@ class MessageCameraView extends StatefulWidget {
class _MessageCameraViewState extends State<MessageCameraView> {
final List<CameraDescription> _cameras = [];
late final CameraController _cameraController;
int _cameraIndex = 0;
FlashMode _flashMode = FlashMode.off;
Future? _initializeControllerFuture;

@override

@@ -23,16 +25,20 @@ class _MessageCameraViewState extends State<MessageCameraView> {
availableCameras().then((List<CameraDescription> cameras) {
_cameras.clear();
_cameras.addAll(cameras);
_cameraController = CameraController(cameras.first, ResolutionPreset.high);
setState(() {
_initializeControllerFuture = _cameraController.initialize();
});
if (cameras.isEmpty) {
_initializeControllerFuture = Future.error("Failed to initialize camera");
} else {
_cameraController = CameraController(cameras.first, ResolutionPreset.high);
_cameraIndex = 0;
_initializeControllerFuture = _cameraController.initialize().whenComplete(() => _cameraController.setFlashMode(_flashMode));
}
setState(() {});
});
}

@override
void dispose() {
_cameraController.dispose();
_cameraController.setFlashMode(FlashMode.off).whenComplete(() => _cameraController.dispose());
super.dispose();
}

@@ -47,25 +53,121 @@ class _MessageCameraViewState extends State<MessageCameraView> {
builder: (context, snapshot) {
// Can't use hasData since the future returns void.
if (snapshot.connectionState == ConnectionState.done) {
return Column(
return Stack(
children: [
Expanded(child: CameraPreview(_cameraController)),
Row(
mainAxisAlignment: MainAxisAlignment.center,
Column(
children: [
IconButton(onPressed: () async {
final sMsgr = ScaffoldMessenger.of(context);
final nav = Navigator.of(context);
try {
await _initializeControllerFuture;
final image = await _cameraController.takePicture();
nav.pop(File(image.path));
} catch (e) {
sMsgr.showSnackBar(SnackBar(content: Text("Failed to capture image: $e")));
}
}, icon: const Icon(Icons.circle_outlined))
Expanded(child: CameraPreview(_cameraController)),
Row(
crossAxisAlignment: CrossAxisAlignment.center,
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: [
IconButton(
onPressed: _cameras.isEmpty ? null : () async {
setState(() {
_cameraIndex = (_cameraIndex+1) % _cameras.length;
});
_cameraController.setDescription(_cameras[_cameraIndex]);
},
iconSize: 32,
icon: const Icon(Icons.switch_camera),
),
const SizedBox(width: 64, height: 72,),
AnimatedSwitcher(
duration: const Duration(milliseconds: 200),
transitionBuilder: (Widget child, Animation<double> animation) =>
FadeTransition(
opacity: animation,
child: RotationTransition(
turns: Tween<double>(begin: 0.6, end: 1).animate(animation),
child: child,
),
),
child: switch (_flashMode) {
FlashMode.off =>
IconButton(
key: const ValueKey("button-flash-off"),
iconSize: 32,
onPressed: () async {
_flashMode = FlashMode.auto;
await _cameraController.setFlashMode(_flashMode);
setState(() {});
},
icon: const Icon(Icons.flash_off),
),
FlashMode.auto =>
IconButton(
key: const ValueKey("button-flash-auto"),
iconSize: 32,
onPressed: () async {
_flashMode = FlashMode.always;
await _cameraController.setFlashMode(_flashMode);
setState(() {});
},
icon: const Icon(Icons.flash_auto),
),
FlashMode.always =>
IconButton(
key: const ValueKey("button-flash-always"),
iconSize: 32,
onPressed: () async {
_flashMode = FlashMode.torch;
await _cameraController.setFlashMode(_flashMode);
setState(() {});
},
icon: const Icon(Icons.flash_on),
),
FlashMode.torch =>
IconButton(
key: const ValueKey("button-flash-torch"),
iconSize: 32,
onPressed: () async {
_flashMode = FlashMode.off;
await _cameraController.setFlashMode(_flashMode);
setState(() {});
},
icon: const Icon(Icons.flashlight_on),
),
},
),
],
)
],
)
),
Align(
alignment: Alignment.bottomCenter,
child: Container(
decoration: BoxDecoration(
color: Theme
.of(context)
.colorScheme
.surface,
borderRadius: BorderRadius.circular(64),
),
margin: const EdgeInsets.all(16),
child: IconButton(
onPressed: () async {
final sMsgr = ScaffoldMessenger.of(context);
final nav = Navigator.of(context);
try {
await _initializeControllerFuture;
final image = await _cameraController.takePicture();
nav.pop(File(image.path));
} catch (e) {
sMsgr.showSnackBar(SnackBar(content: Text("Failed to capture image: $e")));
}
},
style: IconButton.styleFrom(
foregroundColor: Theme
.of(context)
.colorScheme
.primary,
),
icon: const Icon(Icons.camera),
iconSize: 64,
),
),
),
],
);
} else if (snapshot.hasError) {

@@ -79,5 +181,4 @@ class _MessageCameraViewState extends State<MessageCameraView> {
),
);
}

}

@@ -33,19 +33,16 @@ class _MessageRecordButtonState extends State<MessageRecordButton> {
return Material(
child: GestureDetector(
onTapDown: widget.disabled ? null : (_) async {
// TODO: Implement voice message recording
debugPrint("Down");
HapticFeedback.vibrate();
widget.onRecordStart?.call();
final dir = await getTemporaryDirectory();
await _recorder.start(
- path: "${dir.path}/A-${const Uuid().v4()}.wav",
- encoder: AudioEncoder.wav,
+ path: "${dir.path}/A-${const Uuid().v4()}.ogg",
+ encoder: AudioEncoder.opus,
samplingRate: 44100,
);
},
onTapUp: (_) async {
debugPrint("Up");
if (await _recorder.isRecording()) {
final recording = await _recorder.stop();
widget.onRecordEnd?.call(recording == null ? null : File(recording));

@@ -1,11 +1,11 @@
import 'dart:convert';
import 'dart:io';
import 'dart:math';

import 'package:collection/collection.dart';
import 'package:contacts_plus_plus/apis/record_api.dart';
import 'package:contacts_plus_plus/client_holder.dart';
import 'package:contacts_plus_plus/clients/api_client.dart';
import 'package:contacts_plus_plus/clients/audio_cache_client.dart';
import 'package:contacts_plus_plus/clients/messaging_client.dart';
import 'package:contacts_plus_plus/models/friend.dart';
import 'package:contacts_plus_plus/models/message.dart';

@@ -125,7 +125,6 @@ class _MessagesListState extends State<MessagesList> with SingleTickerProviderStateMixin {
_hasText = false;
}


Future<void> sendVoiceMessage(ApiClient client, MessagingClient mClient, File file, String machineId,
void Function(double progress) progressCallback) async {
final record = await RecordApi.uploadVoiceClip(

@@ -291,20 +290,23 @@ class _MessagesListState extends State<MessagesList> with SingleTickerProviderStateMixin {
),
);
}
return ListView.builder(
controller: _messageScrollController,
reverse: true,
itemCount: cache.messages.length,
itemBuilder: (context, index) {
final entry = cache.messages[index];
if (index == cache.messages.length - 1) {
return Padding(
padding: const EdgeInsets.only(top: 12),
child: MessageBubble(message: entry,),
);
}
return MessageBubble(message: entry,);
},
return Provider(
create: (BuildContext context) => AudioCacheClient(),
child: ListView.builder(
controller: _messageScrollController,
reverse: true,
itemCount: cache.messages.length,
itemBuilder: (context, index) {
final entry = cache.messages[index];
if (index == cache.messages.length - 1) {
return Padding(
padding: const EdgeInsets.only(top: 12),
child: MessageBubble(message: entry,),
);
}
return MessageBubble(message: entry,);
},
),
);
},
),

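End to end, MessagesList creates one AudioCacheClient per conversation and exposes it through Provider, and every MessageAudioPlayer below it looks up that same instance, so a voice clip is only downloaded when it is not already in the temporary cache directory. Roughly (`listView` stands in for the ListView.builder of MessageBubble widgets shown above):

    // Producer side, in MessagesList.build():
    Provider(
      create: (BuildContext context) => AudioCacheClient(),
      child: listView,
    );

    // Consumer side, in _MessageAudioPlayerState.didChangeDependencies():
    final audioCache = Provider.of<AudioCacheClient>(context);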