Multiple audio clips (WhatsApp-style voice messages) are not working correctly inside a FlatList.
+++++++++++++++++++++++ UserChat.js file+++++++++++++++++++++++
import * as AudioManager from '../Main/AudioManager'; import * as gc from '../../common/GConstants';
import { FontFamily, Fonts } from '../../common/GConstants' import { Image, KeyboardAvoidingView, SafeAreaView, StyleSheet, Text, TextInput, TouchableOpacity, View } from 'react-native' import { RNFFmpegConfig, RNFFprobe } from 'react-native-ffmpeg'; import React, { Component } from 'react' import { asyncStorageKeys, getAsyncData } from '../../api/AsyncstorageFunction'; import { getRendomString, secondsToTime } from '../../common/GFunction'; import { heightPercentageToDP as hp, widthPercentageToDP as wp } from 'react-native-responsive-screen'; import realm, { addMessage, deleteAllMessages, getAllMessages } from "../../LocalDatabase/Database";
import { Alert } from 'react-native'; import { ApiEndPoints } from '../../api/APIManger'; import AudioRecorderPlayer from 'react-native-audio-recorder-player'; import { AutoScrollFlatList } from "react-native-autoscroll-flatlist"; import Colors from '../../common/GColors'; import { EventRegister } from 'react-native-event-listeners'; import GColors from '../../common/GColors'; import GVariable from '../../common/GVariable'; import Images from '../../assets/images/index' import Modals from 'react-native-modal'; import { Platform } from 'react-native'; import Slider from 'react-native-slider'; import { SpotsayAlert } from '../../common/GComponents'; import { UploadAudioManager } from '../../common/UploadAudioManager'; import XamppConnection from '../../ChatConfig/XamppConnection'; import { duration } from 'moment'; import images from '../../assets/images/index'; import { uploadFiles } from 'react-native-fs';
export default class UserChat extends Component { constructor() { super(); this.state = { unMatchModal: false, chatMessage: "", userName: '', chatList: [ { id: 1, voice: true, duration: 20, msg: 'http://localhost:8081/assets/src/assets/audioClip/In_my_free_time.m4a?platform=android&hash=77617fa2192e343ec73f1c4ff138a665' isPlaying: false, }, { id: 2, voice: true, duration: 30, msg: 'http://localhost:8081/assets/src/assets/audioClip/In_my_free_time.m4a?platform=android&hash=77617fa2192e343ec73f1c4ff138a665' isPlaying: false, }, { id: 3, voice: true, duration: 40, msg: 'http://localhost:8081/assets/src/assets/audioClip/In_my_free_time.m4a?platform=android&hash=77617fa2192e343ec73f1c4ff138a665' isPlaying: false, }, { id: 4, voice: true, duration: 20, msg: 'http://localhost:8081/assets/src/assets/audioClip/In_my_free_time.m4a?platform=android&hash=77617fa2192e343ec73f1c4ff138a665' isPlaying: false, }, { id: 5, voice: true, duration: 50, msg: 'http://localhost:8081/assets/src/assets/audioClip/In_my_free_time.m4a?platform=android&hash=77617fa2192e343ec73f1c4ff138a665' isPlaying: false, },
],
playIcon: false,
pauseIcon: false,
containerWidth: 0,
duration: '',
isPlaying: false,
isSeeking: false,
seek: 0,
percent: 0,
isPaused: false,
currentPositionSec: '',
currentDurationSec: '',
}
this.audioRecorderPlayer = new AudioRecorderPlayer();
}
// function to play audio
async playAudio(filePath, index) {
console.log('detail about clip', filePath, index)
await AudioManager.startPlayer(filePath, (res) => {
const { status } = res
switch (status) {
case AudioManager.AUDIO_STATUS.begin: {
console.log('BEGIN AUDIO')
let data = this.state.chatList
data[index].isPlaying = true
this.setState({
chatList: data
}, async () => { })
this.setState({ isPlaying: true });
break
}
case AudioManager.AUDIO_STATUS.play: {
const { currentPosition, duration } = res.data
console.log('PLAY AUDIO', currentPosition)
let percent = Math.round(
(Math.floor(currentPosition) / Math.floor(duration)) * 100,
);
this.setState({
isPlaying: true,
isPaused: false,
currentPositionSec: Math.round(
Math.round(Math.round(currentPosition / 10) / 100),
),
percent: percent
});
break
}
case AudioManager.AUDIO_STATUS.pause: {
console.log('PAUSE AUDIO')
this.setState({ isPaused: true });
break
}
case AudioManager.AUDIO_STATUS.resume: {
console.log('RESUME AUDIO')
this.setState({ isPaused: false })
break
}
case AudioManager.AUDIO_STATUS.stop: {
console.log('STOP AUDIO')
this.setState({ isPlaying: false, isPaused: false })
break
}
}
})
}
// function to pause audio
async pauseAudio() {
await AudioManager.pausePlayer()
}
// function to stop audio playback entirely via the shared AudioManager
// (the original comment incorrectly said "pause")
async stopAudio() {
await AudioManager.stopPlayer()
}
// Toggle handler for a voice row: pause it if it is playing, start it
// otherwise, and stop whichever OTHER clip was playing first — the
// one-clip-at-a-time behavior the screen is meant to have.
onStartPlay = async (item, index) => {
  const data = [...this.state.chatList]
  // Stop only rows that were actually playing (the original called
  // stopAudio() once per non-matching row, i.e. N-1 times per tap).
  data.forEach((chat, i) => {
    if (i !== index && chat.isPlaying) {
      chat.isPlaying = false
      this.stopAudio()
    }
  })
  if (data[index].isPlaying) {
    data[index].isPlaying = false
    this.pauseAudio()
  } else {
    data[index].isPlaying = true
    // BUG FIX: the original called this.playAudio(item.msg) without the
    // row index, so playAudio's begin-callback crashed updating
    // chatList[undefined].
    this.playAudio(item.msg, index)
  }
  this.setState({ chatList: data })
};
// Tear down audio playback when the screen is left.
componentWillUnmount = () => {
  this.audioRecorderPlayer.stopPlayer();
  this.audioRecorderPlayer.removePlayBackListener();
  // BUG FIX: playback actually runs through AudioManager's own player
  // instance, not this.audioRecorderPlayer — stop it too or the clip
  // keeps playing after the screen unmounts. Best-effort: ignore errors
  // (stopPlayer throws if nothing was ever started).
  AudioManager.stopPlayer().catch(() => { });
  // NOTE: the original called this.setState here; setState on an
  // unmounting component is a no-op that triggers a React warning,
  // so it was removed.
}
// Life cycle methods
componentDidMount = () => {
// Intentionally empty — nothing is loaded on mount; chatList is seeded
// in the constructor.
}
senderChat = (item, index) => {
if (item.id == odd) {
return (
<View>
{
item.voice == true ?
<View style={Styles.voiceSender}>
<TouchableOpacity activeOpacity={0.5}
onPress={() => {
this.onStartPlay(item, index)
}}
style={{ width: wp(8), aspectRatio: 1, }}>
<Image source={item.isPlaying == false ? images.iconPlayAudio : images.iconPauseAudio2}
style={{ width: '100%', height: '100%', }}
/>
</TouchableOpacity>
<View style={{
alignSelf: 'center',
marginHorizontal: wp(2),
flexDirection: 'row',
justifyContent: 'space-between',
}}>
<View
style={{
borderRadius: 20,
width: wp(45),
alignSelf: 'center',
}}>
<Slider
minimumValue={0}
maximumValue={100}
trackStyle={Styles.track}
thumbStyle={Styles.thumb}
value={item.isPlaying ? this.state.percent : 0}
minimumTrackTintColor={GColors.darkBlue}
onSlidingStart={(value) => {
this.setState({ isPaused: true, isPlaying: false })
this.audioRecorderPlayer.pausePlayer()
console.warn("changingggg automatic time ", value)
}}
onSlidingComplete={(value) => {
console.warn("total automatic time ", item.duration)
console.warn("changingggg automatic time ", value)
let seektime = (((item.duration / 100) * value) * 1000);
console.warn("changingggg automatic second ------- time ", seektime)
this.setState({ isPaused: false, isPlaying: true, percent: value }, () => {
this.audioRecorderPlayer.resumePlayer()
this.audioRecorderPlayer.seekToPlayer(seektime);
})
}}
/>
</View>
</View>
<Text style={{ width: wp(12) }}>{item.isPlaying ? secondsToTime(this.state.currentPositionSec)
.m.toString()
.padStart(2, 0) +
':' +
secondsToTime(this.state.currentPositionSec)
.s.toString()
.padStart(2, 0) : "00:00"}</Text>
</View>
:
<View style={Styles.senderView}>
<Text style={Styles.text} >{item.msg}</Text>
</View>
}
</View>
)
}
else {
return (
<View>
{
item.voice == true ?
<View style={Styles.voiceRecever}>
<TouchableOpacity activeOpacity={0.5}
onPress={() => {
this.onStartPlay(item, index)
}}
style={{ width: wp(8), aspectRatio: 1, marginRight: wp(2) }}>
<Image source={item.isPlaying == false ? images.iconPlayAudio2 : images.iconPauseAudio} style={{ width: '100%', height: '100%', resizeMode: 'contain' }} />
</TouchableOpacity>
<View style={{
alignSelf: 'center',
marginHorizontal: wp(2),
flexDirection: 'row',
justifyContent: 'space-between',
}}>
<View
style={{
borderRadius: 20,
width: wp(45),
alignSelf: 'center',
}}>
<Slider
minimumValue={0}
maximumValue={100}
trackStyle={Styles.track}
thumbStyle={Styles.thumb}
value={item.isPlaying ? this.state.percent : 0}
minimumTrackTintColor={GColors.darkBlue}
onSlidingStart={(value) => {
this.setState({ isPaused: true, isPlaying: false })
this.audioRecorderPlayer.pausePlayer()
console.warn("changingggg automatic time ", value)
}}
onSlidingComplete={(value) => {
console.warn("total automatic time ", item.duration)
console.warn("changingggg automatic time ", value)
let seektime = (((item.duration / 100) * value) * 1000);
console.warn("changingggg automatic second ------- time ", seektime)
this.setState({ isPaused: false, isPlaying: true, percent: value }, () => {
this.audioRecorderPlayer.resumePlayer()
this.audioRecorderPlayer.seekToPlayer(seektime);
})
}}
/>
</View>
</View>
<Text>{item.isPlaying ? secondsToTime(this.state.currentPositionSec)
.m.toString()
.padStart(2, 0) +
':' +
secondsToTime(this.state.currentPositionSec)
.s.toString()
.padStart(2, 0) : "00:00"}
</Text>
</View>
:
<View style={Styles.receverView}>
<Text style={Styles.text} >{item.msg}</Text>
</View>
}
</View>
)
}
};
render() {
return (
<SafeAreaView style={{ flex: 1, backgroundColor: Colors.white }}>
<View style={Styles.container}>
<TouchableOpacity
onPress={() => { this.props.navigation.popToTop(); }}
style={{ flex: 1 }}
>
<Image source={Images.arrowIconS}
style={{ marginHorizontal: wp(4.2666666667) }}
/>
</TouchableOpacity>
<TouchableOpacity
activeOpacity={0.8}
style={Styles.titleView} onPress={() =>
this.props.navigation.navigate("ChatAudioPlay")} >
<Image source={this.props.navigation.getParam('userVrImg')} resizeMode={'contain'} style={Styles.headerUserImg} />
<Text style={Styles.headerUsertext}>{this.state.userName}</Text>
{/* <Text style={Styles.headerUsertext}>{this.props.navigation.getParam('userVrName',)}</Text> */}
</TouchableOpacity>
<TouchableOpacity
onPress={() => this.UnMatchModelClick()}
style={{ flex: 1 }}
>
<Image
source={Images.dotImage}
style={{ marginHorizontal: wp(4.2666666667), tintColor: Colors.black }}
/>
</TouchableOpacity>
</View>
{/* <ScrollView bounces={false}
keyboardShouldPersistTaps="always"
contentContainerStyle={{ flex: 1 }}
> */}
<AutoScrollFlatList
style={{ backgroundColor: Colors.background }}
data={this.state.chatList}
renderItem={({ item, index }) => this.senderChat(item, index)}
showsVerticalScrollIndicator={false}
/>
<KeyboardAvoidingView
behavior={Platform.OS === "ios" ? "padding" : ""}
// keyboardBottomOffset={0}
// keyboardVerticalOffset={-500}
// enabled={false}
>
<View style={Styles.bottomContainer}>
<TextInput
placeholder='Type a Message'
placeholderTextColor={Colors.black}
style={Styles.textInput}
multiline={true}
value={this.state.chatMessage}
onChangeText={(text) => {
this.setState({ chatMessage: text });
}}
/>
<TouchableOpacity activeOpacity={0.5} >
<Text style={{ marginHorizontal: 12 }}>{this.state.chatMessage == '' ? 'GIF' : ''}</Text>
</TouchableOpacity>
<TouchableOpacity activeOpacity={0.5} onPress={() => this.getChatItem()} style={{ width: wp(13), aspectRatio: 1 }}>
<Image source={this.state.chatMessage == '' ? Images.audio : Images.sendIcon} style={{ width: '100%', height: '100%', resizeMode: 'contain' }} />
</TouchableOpacity>
</View>
</KeyboardAvoidingView>
</SafeAreaView >
)
}
// Screen styles. Colors come from GColors (imported here as both Colors
// and GColors); wp/hp scale against the device viewport.
const Styles = StyleSheet.create({
// Header bar: back arrow | user title | overflow dots.
container: {
justifyContent: 'space-between',
flexDirection: 'row',
alignItems: 'center',
backgroundColor: Colors.white,
// paddingVertical: wp(1.5),
borderBottomWidth: .2,
borderColor: Colors.DarkBlack,
// elevation: 1,
height: hp(11)
},
titleView: {
flex: 4, alignItems: 'center', justifyContent: 'center',
},
headerUserImg: {
width: hp(5.6),
height: hp(5.6)
},
headerUsertext: {
color: Colors.newChatText,
fontSize: Fonts.fontsize12,
fontFamily: FontFamily.semiBold,
fontWeight: '600'
},
// Composer row wrapping the TextInput and the mic/send button.
bottomContainer:
{
// height: hp(6.5),
backgroundColor: Colors.white,
marginVertical: hp(1),
marginHorizontal: wp(2),
justifyContent: 'space-between',
flexDirection: 'row',
alignItems: 'center',
borderWidth: .2,
borderColor: Colors.DarkBlack,
borderRadius: 10,
borderRightWidth: 0,
borderTopEndRadius: wp(10),
borderBottomEndRadius: wp(10),
},
textInput:
{
marginStart: wp(5),
flex: 1,
marginEnd: wp(4),
fontSize: Fonts.fontsize15,
color: GColors.black
},
text:
{
fontSize: Fonts.fontsize15,
},
// Outgoing (right-aligned) text bubble.
senderView:
{
backgroundColor: Colors.chatSender,
marginBottom: hp(1.5),
paddingVertical: hp(1.5),
paddingHorizontal: wp(2.5),
alignSelf: 'flex-end',
marginEnd: wp(2),
marginStart: wp(7),
borderTopStartRadius: hp(1.5),
borderTopEndRadius: hp(1.5),
borderBottomStartRadius: hp(1.5)
},
// Incoming (left-aligned) voice-message bubble.
voiceRecever: {
flexDirection: 'row',
justifyContent: 'space-between',
alignItems: 'center',
backgroundColor: Colors.white,
marginBottom: hp(1.5),
paddingVertical: hp(1),
paddingHorizontal: wp(2.5),
alignSelf: 'flex-start',
marginStart: wp(2),
marginEnd: wp(7),
borderBottomEndRadius: hp(1.5),
borderTopEndRadius: hp(1.5),
borderBottomStartRadius: hp(1.5)
},
// Incoming (left-aligned) text bubble.
receverView:
{
backgroundColor: Colors.white,
marginBottom: hp(1.5),
paddingVertical: hp(1.5),
paddingHorizontal: wp(2.5),
alignSelf: 'flex-start',
marginStart: wp(2),
marginEnd: wp(7),
borderBottomEndRadius: hp(1.5),
borderTopEndRadius: hp(1.5),
borderBottomStartRadius: hp(1.5)
},
// Outgoing (right-aligned) voice-message bubble; fixed width so the
// slider, icon and time label fit on one line.
voiceSender: {
flexDirection: 'row',
justifyContent: 'space-between',
alignItems: 'center',
backgroundColor: Colors.chatSender,
marginBottom: hp(1.5),
paddingVertical: hp(1),
paddingHorizontal: wp(2.5),
alignSelf: 'flex-end',
marginEnd: wp(2),
width: wp(75),
marginStart: wp(7),
borderTopStartRadius: hp(1.5),
borderTopEndRadius: hp(1.5),
borderBottomStartRadius: hp(1.5)
// Slider track/thumb for voice-message playback progress.
}, track: {
height: hp(0.6),
borderRadius: 6,
backgroundColor: GColors.gray,
},
thumb: {
width: hp(1),
height: hp(1),
backgroundColor: GColors.darkBlue
},
secondWaveView:
{
marginTop: hp(2),
marginHorizontal: wp(5.5),
flexDirection: 'row',
justifyContent: 'space-between',
},
})
+++++++++++++++++++++++ AudioManager.js file+++++++++++++++++++++++
import AudioRecorderPlayer from 'react-native-audio-recorder-player'
// Module-level playback state shared by the exported functions.
// BUG FIX: the pasted source collapsed these declarations onto one line
// with no statement separators ("let a = x let b = y ..."), which is a
// syntax error — rewritten as separate statements.
let audioRecorderPlayer = undefined;  // lazily-created player instance
let currentPath = undefined;          // path of the clip currently loaded
let currentCallback = () => { };      // status callback of the active caller
let currentPosition = 0;              // last reported position (ms); >0 means "resume"

// Status values delivered to the caller's callback.
const AUDIO_STATUS = {
  play: 'play',
  begin: 'begin',
  pause: 'pause',
  resume: 'resume',
  stop: 'stop',
};
// Start (or resume) playback of `path`, routing player status events to
// `callback` as { status, data? } objects (see AUDIO_STATUS). Keeps a
// single module-level player: starting a different path first stops the
// previous clip, and restarting the same path after a pause reports
// "resume" instead of "begin".
async function startPlayer(path, callback) { console.log({ currentPath, path })
// First-ever play: just remember the clip and its callback.
if (currentPath === undefined) {
currentPath = path
currentCallback = callback
} else if (currentPath !== path) {
// Switching clips: stop whatever was playing before swapping the path in.
if (audioRecorderPlayer !== undefined) {
try {
await stopPlayer()
} catch (error) {
console.log('ERROR STOP PLAYER TOP')
}
}
currentPath = path
currentCallback = callback
}
// Lazily create the shared player instance (stopPlayer discards it).
if (audioRecorderPlayer === undefined) {
audioRecorderPlayer = new AudioRecorderPlayer()
}
try {
const activePath = await audioRecorderPlayer.startPlayer(currentPath);
console.log({ activePath })
// A non-zero saved position on the same clip means this is a resume.
currentCallback({
status: (currentPath === path) && (currentPosition > 0) ? AUDIO_STATUS.resume : AUDIO_STATUS.begin
})
audioRecorderPlayer.addPlayBackListener(async (e) => {
// NOTE(review): end-of-clip detection relies on the library emitting a
// final event where currentPosition exactly equals duration — confirm
// this holds for the installed react-native-audio-recorder-player version.
if (e.currentPosition === e.duration) {
try {
await stopPlayer()
} catch (error) {
console.log('ERROR STOP PLAYER IN LISTENER')
}
} else {
// Remember progress so a later startPlayer on this path reports "resume".
currentPosition = e.currentPosition
currentCallback({
status: AUDIO_STATUS.play,
data: e
})
}
return
});
} catch (error) {
console.log({ 'ERROR PLAY PLAYER': error })
}
}
async function pausePlayer() { try { await audioRecorderPlayer.pausePlayer(); currentCallback({ status: AUDIO_STATUS.pause }) } catch (error) { console.log({ 'ERROR PAUSE PLAYER': error }) } }
async function stopPlayer() { const isStop = await audioRecorderPlayer.stopPlayer(); console.log({ isStop }) audioRecorderPlayer.removePlayBackListener() currentPosition = 0 currentCallback({ status: AUDIO_STATUS.stop }) audioRecorderPlayer = undefined } async function seekPlayer(seekTime) { const isSeek = await audioRecorderPlayer.seekPlayer(seekTime); }
export { AUDIO_STATUS, startPlayer, stopPlayer, pausePlayer, seekPlayer }
Hi, did you solve this?
I found https://react-native-track-player.js.org/ — it can handle this.
Check out how I achieved it in a WhatsApp clone app I've built.
In more depth, this is the AudioManager util I've created.