
Replacing the audio of a video for a specific time using mp4parser

Open jaydeepmca opened this issue 8 years ago • 4 comments

My video is 5 seconds long and my audio is 15 seconds long. Right now, when I merge the audio and video using mp4parser, the output takes the audio length (15 seconds), so the 5-second video plays, the picture stops, and the remaining audio keeps playing. I want the audio to stop when the video is finished. Below is my code:

File newFile = new File(Environment.getExternalStorageDirectory(), "/Stakes");
if (!newFile.exists()) {
    newFile.mkdirs();
}
File tempFile = new File(newFile, "stakes218.mp4");
String tempString = tempFile.getAbsolutePath();

File audioFile = new File(Environment.getExternalStorageDirectory(), "/SongList");
String FILE_PATH = audioFile.getAbsolutePath();
String vidPathTemp;

vidPathTemp = FILE_PATH + "/" + "songtest.mp3";
/* vidPathTemp = FILE_PATH + "/" + "song1.m4a"; */
/* vidPathTemp = FILE_PATH + "/" + "pitch" + ".mp3"; */

File mergedFile = new File(Environment.getExternalStorageDirectory(), "/Stakes_Merged_Video");
if (!mergedFile.exists()) {
    mergedFile.mkdirs();
}

File audioVideo = new File(mergedFile, "stakes" + randomInteger(0, 500) + ".mp4");

String output = audioVideo.getAbsolutePath();

Toast.makeText(getApplicationContext(), recordMediaPath, Toast.LENGTH_LONG).show();

// recordMediaPath, vPath_off1 and randomInteger(...) are defined elsewhere in my activity
mux(recordMediaPath, vPath_off1, output);

And here is the mux() method I used for merging the MP4 video and M4A audio:

public boolean mux(String videoFile, String audioFile, String outputFile) {
        Movie video;
        try {
            video = MovieCreator.build(videoFile);
        } catch (RuntimeException e) {
            e.printStackTrace();
            return false;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }

        Movie audio;
        try {
            audio = MovieCreator.build(audioFile);
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        } catch (NullPointerException e) {
            e.printStackTrace();
            return false;
        }
        /*int size = audio.getTracks().size();
        Track audioTrack = audio.getTracks().get((size - 1));*/
        Track audioTrack = audio.getTracks().get(0);

/*        TrimVideo trimVideo = new TrimVideo();
        trimVideo.correctTimeToNextSyncSample(audioTrack,20.0);*/

        video.addTrack(audioTrack);


        Container out = new DefaultMp4Builder().build(video);

        FileOutputStream fos;
        try {
            fos = new FileOutputStream(outputFile);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            return false;
        }
        BufferedWritableFileByteChannel byteBufferByteChannel = new BufferedWritableFileByteChannel(fos);
        try {
            out.writeContainer(byteBufferByteChannel);
            byteBufferByteChannel.close();
            fos.close();
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    private static class BufferedWritableFileByteChannel implements WritableByteChannel {
        private static final int BUFFER_CAPACITY = 1000000;

        private boolean isOpen = true;
        private final OutputStream outputStream;
        private final ByteBuffer byteBuffer;
        private final byte[] rawBuffer = new byte[BUFFER_CAPACITY];

        private BufferedWritableFileByteChannel(OutputStream outputStream) {
            this.outputStream = outputStream;
            this.byteBuffer = ByteBuffer.wrap(rawBuffer);
        }

        @Override
        public int write(ByteBuffer inputBuffer) throws IOException {
            int inputBytes = inputBuffer.remaining();

            if (inputBytes > byteBuffer.remaining()) {
                dumpToFile();
                byteBuffer.clear();

                if (inputBytes > byteBuffer.remaining()) {
                    throw new BufferOverflowException();
                }
            }

            byteBuffer.put(inputBuffer);

            return inputBytes;
        }

        @Override
        public boolean isOpen() {
            return isOpen;
        }

        @Override
        public void close() throws IOException {
            dumpToFile();
            isOpen = false;
        }

        private void dumpToFile() {
            try {
                outputStream.write(rawBuffer, 0, byteBuffer.position());
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }

jaydeepmca · Jun 28 '16 18:06

You should use CroppedTrack to crop the audio track. For example:

AACTrackImpl audioTrack = new AACTrackImpl(new FileDataSourceImpl(audioPath));
// audioTrack duration 15s => 5s
CroppedTrack croppedTrack = new CroppedTrack(audioTrack, 0, audioTrack.getSamples().size() / 3);

Here's my code:

static void muxAudioVideo(String videoPath,String audioPath,String outputPath){
    try {
        Movie movie = MovieCreator.build(videoPath);
        Movie outMovie = new Movie();
        int videoDuration=0;
        for (Track track:movie.getTracks()){
            if("vide".equals(track.getHandler())){
                outMovie.addTrack(track);
                videoDuration = (int) Math.ceil(trackDuration(track));
            }
        }
        System.out.println("videoDuration:"+videoDuration);

        AACTrackImpl audioTrack = new AACTrackImpl(new FileDataSourceImpl(audioPath));
        int audioDuration = (int) Math.ceil(trackDuration(audioTrack));
        System.out.println("audioDuration:"+audioDuration);

        if(audioDuration<videoDuration){
            double exactlyDuration = (double) videoDuration / audioDuration ;
            int minDuration = (int) Math.floor(exactlyDuration);
            double offsetDuration = exactlyDuration - minDuration ;
            Track[] audioTracks = new Track[minDuration+1];
            for (int i=0;i<minDuration;i++){
                audioTracks[i] = audioTrack ;
            }
            int offsetSize = (int) (audioTrack.getSamples().size()* offsetDuration);
            audioTracks[minDuration] = new CroppedTrack(audioTrack,0,offsetSize);
            outMovie.addTrack(new AppendTrack(audioTracks));
        }else if(audioDuration>videoDuration){
            double offsetDuration = (double) videoDuration / audioDuration ;
            int offsetSize = (int) (audioTrack.getSamples().size() * offsetDuration);
            // crop the audio track to the video length (e.g. 15s => 5s)
            CroppedTrack croppedTrack = new CroppedTrack(audioTrack, 0, offsetSize);
            outMovie.addTrack(croppedTrack);
        }else {
            outMovie.addTrack(audioTrack);
        }

        Container container = new DefaultMp4Builder().build(outMovie);
        File outFile = new File(outputPath);
        RandomAccessFile randomAccessFile = new RandomAccessFile(outFile,"rw");
        container.writeContainer(randomAccessFile.getChannel());
        randomAccessFile.close();

    } catch (IOException e) {
        e.printStackTrace();
    }
}
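The snippet above calls a trackDuration(...) helper that is not shown in the post. A minimal sketch of what it presumably does, assuming it returns the track duration in seconds derived from the track's timescale:

// Assumed helper (not part of the original post): returns the track duration in seconds.
static double trackDuration(Track track) {
    return (double) track.getDuration() / track.getTrackMetaData().getTimescale();
}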

liyzay · Jul 07 '16 03:07

Thank you Alex,

but I need the merged audio to stop at the length of the video, so please guide me if you have an idea how to do that.

Regards, Jaydeep.


jaydeepmca · Jul 07 '16 04:07

Did you find the solution, Jaydeep?

myarnav · May 08 '17 17:05

    // Method signature is assumed; the original snippet starts inside the method body.
    public static void muxVideoWithTrimmedAudio(String videoPath, String audioPath, String output) {
        try {

            Movie orig_movie = MovieCreator.build(videoPath);

            File audioFile = new File(audioPath);

            AACTrackImpl aacTrack = new AACTrackImpl(new FileDataSourceImpl(audioFile));

//get duration of video
            IsoFile isoFile = new IsoFile(videoPath);
            double lengthInSeconds = (double)
                    isoFile.getMovieBox().getMovieHeaderBox().getDuration() /
                    isoFile.getMovieBox().getMovieHeaderBox().getTimescale();


            Track track = (Track) orig_movie.getTracks().get(0);

            Track audioTrack = (Track) aacTrack;


            double startTime1 = 0;
            double endTime1 = lengthInSeconds;

            boolean timeCorrected = false;

            if (audioTrack.getSyncSamples() != null && audioTrack.getSyncSamples().length > 0) {
                if (timeCorrected) {

                    throw new RuntimeException("The startTime has already been corrected by another track with SyncSample. Not Supported.");
                }
                startTime1 = correctTimeToSyncSample(audioTrack, startTime1, false);
                endTime1 = correctTimeToSyncSample(audioTrack, endTime1, true);
                timeCorrected = true;
            }

            long currentSample = 0;
            double currentTime = 0;
            double lastTime = -1;
            long startSample1 = -1;
            long endSample1 = -1;


            for (int i = 0; i < audioTrack.getSampleDurations().length; i++) {
                long delta = audioTrack.getSampleDurations()[i];


                if (currentTime > lastTime && currentTime <= startTime1) {
                    // current sample is still before the new starttime
                    startSample1 = currentSample;
                }
                if (currentTime > lastTime && currentTime <= endTime1) {
                    // current sample is after the new start time and still before the new endtime
                    endSample1 = currentSample;
                }

                lastTime = currentTime;
                currentTime += (double) delta / (double) audioTrack.getTrackMetaData().getTimescale();
                currentSample++;
            }

            CroppedTrack cropperAacTrack = new CroppedTrack(aacTrack, startSample1, endSample1);

            Movie movie = new Movie();

            movie.addTrack(track);
            movie.addTrack(cropperAacTrack);

            Container mp4file = new DefaultMp4Builder().build(movie);

            FileChannel fc = new FileOutputStream(new File(output)).getChannel();
            mp4file.writeContainer(fc);
            fc.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
   private static double correctTimeToSyncSample(Track track, double cutHere, boolean next) {
        double[] timeOfSyncSamples = new double[track.getSyncSamples().length];
        long currentSample = 0;
        double currentTime = 0;
        for (int i = 0; i < track.getSampleDurations().length; i++) {
            long delta = track.getSampleDurations()[i];

            if (Arrays.binarySearch(track.getSyncSamples(), currentSample + 1) >= 0) {
                timeOfSyncSamples[Arrays.binarySearch(track.getSyncSamples(), currentSample + 1)] = currentTime;
            }
            currentTime += (double) delta / (double) track.getTrackMetaData().getTimescale();
            currentSample++;

        }
        double previous = 0;
        for (double timeOfSyncSample : timeOfSyncSamples) {
            if (timeOfSyncSample > cutHere) {
                if (next) {
                    return timeOfSyncSample;
                } else {
                    return previous;
                }
            }
            previous = timeOfSyncSample;
        }
        return timeOfSyncSamples[timeOfSyncSamples.length - 1];
    }
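For reference, a hypothetical call site for the method above (the method name, file names, and paths here are placeholders, not from the original post). Note that AACTrackImpl expects a raw AAC (ADTS) stream; for an .m4a file the audio track can be taken from MovieCreator.build as in the earlier mux() method.

    // Hypothetical usage; all paths are placeholders.
    String videoPath = "/sdcard/Stakes/stakes218.mp4";      // 5-second video
    String audioPath = "/sdcard/SongList/song.aac";         // 15-second raw AAC audio
    String output = "/sdcard/Stakes_Merged_Video/out.mp4";
    muxVideoWithTrimmedAudio(videoPath, audioPath, output); // audio is cropped to the video length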

@myarnav

kunall17 · May 09 '19 13:05