A recent project needed to split recorded videos. After a lot of searching I found that ffmpeg can do this, grabbed one of the open-source Android ports of ffmpeg, and eventually got ffmpeg.so to compile. But to actually use it you still have to dig through the ffmpeg API and write JNI bindings, which is a lot of trouble. Then I stumbled on the open-source mp4parser: an excellent library that can split and merge videos and is very easy to use. After checking it out via svn you will find a video-splitting example inside; it is written for desktop Java, but with a few small changes it runs on Android.
First I turned the example code into a utility class whose interface takes the path of the video file and the start and end times of the clip. Note that if you pass in a start time of, say, 10s, the clip usually cannot start at exactly 10s; the time has to be adjusted to the video's key frames (sync samples). The clipped video is written to the Clip directory on the SD card. The code is as follows:
package com.example.mp4clip;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

import android.os.Environment;
import android.util.Log;

import com.coremedia.iso.boxes.Container;
import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.Track;
import com.googlecode.mp4parser.authoring.builder.DefaultMp4Builder;
import com.googlecode.mp4parser.authoring.container.mp4.MovieCreator;
import com.googlecode.mp4parser.authoring.tracks.CroppedTrack;

public class ClipUtil {
    private static final String TAG = "ClipUtil";

    /**
     * Clip the segment between the given start and end time out of a video.
     *
     * @param path  path of the source video file
     * @param begin desired start time, in seconds
     * @param end   desired end time, in seconds
     * @throws IOException
     */
    public static void clipVideo(String path, double begin, double end)
            throws IOException {
        File sdCardDir = Environment.getExternalStorageDirectory();
        File outDir = new File(sdCardDir.getAbsolutePath() + File.separator
                + Util.SAVE_PATH);
        if (!outDir.exists()) {
            outDir.mkdir();
        }
        Movie movie = MovieCreator.build(path);

        List<Track> tracks = movie.getTracks();
        // Remove all tracks; new cropped tracks are created from the old ones below.
        movie.setTracks(new LinkedList<Track>());

        double startTime = begin;
        double endTime = end;
        boolean timeCorrected = false;

        // Try to find a track that has sync samples (key frames). Since decoding can
        // only start at such a sample, the start of the new fragment must be moved to
        // exactly such a frame.
        for (Track track : tracks) {
            if (track.getSyncSamples() != null
                    && track.getSyncSamples().length > 0) {
                if (timeCorrected) {
                    // This exception could be a false positive if multiple tracks have
                    // sync samples at exactly the same positions, e.g. a single movie
                    // containing several qualities of the same video (Microsoft Smooth
                    // Streaming file).
                    Log.e(TAG,
                            "The startTime has already been corrected by another track with SyncSample. Not Supported.");
                    throw new RuntimeException(
                            "The startTime has already been corrected by another track with SyncSample. Not Supported.");
                }
                startTime = correctTimeToSyncSample(track, startTime, false);
                endTime = correctTimeToSyncSample(track, endTime, true);
                timeCorrected = true;
            }
        }

        for (Track track : tracks) {
            long currentSample = 0;
            double currentTime = 0;
            double lastTime = 0;
            long startSample = -1;
            long endSample = -1;

            for (int i = 0; i < track.getSampleDurations().length; i++) {
                long delta = track.getSampleDurations()[i];

                if (currentTime > lastTime && currentTime <= startTime) {
                    // The current sample is still before the new start time.
                    startSample = currentSample;
                }
                if (currentTime > lastTime && currentTime <= endTime) {
                    // The current sample is after the new start time and still
                    // before the new end time.
                    endSample = currentSample;
                }
                lastTime = currentTime;
                currentTime += (double) delta
                        / (double) track.getTrackMetaData().getTimescale();
                currentSample++;
            }
            movie.addTrack(new CroppedTrack(track, startSample, endSample));
        }

        long start1 = System.currentTimeMillis();
        Container out = new DefaultMp4Builder().build(movie);
        long start2 = System.currentTimeMillis();
        File outFile = new File(outDir,
                String.format("output-%f-%f.mp4", startTime, endTime));
        FileOutputStream fos = new FileOutputStream(outFile);
        FileChannel fc = fos.getChannel();
        out.writeContainer(fc);

        fc.close();
        fos.close();
        long start3 = System.currentTimeMillis();
        Log.e(TAG, "Building IsoFile took : " + (start2 - start1) + "ms");
        Log.e(TAG, "Writing IsoFile took : " + (start3 - start2) + "ms");
        Log.e(TAG, "Writing IsoFile speed : "
                + (outFile.length() / (start3 - start2) / 1000) + "MB/s");
    }

    private static double correctTimeToSyncSample(Track track, double cutHere,
            boolean next) {
        double[] timeOfSyncSamples = new double[track.getSyncSamples().length];
        long currentSample = 0;
        double currentTime = 0;
        for (int i = 0; i < track.getSampleDurations().length; i++) {
            long delta = track.getSampleDurations()[i];

            if (Arrays.binarySearch(track.getSyncSamples(), currentSample + 1) >= 0) {
                // Sync sample numbers start at 1 while our counter starts at 0,
                // therefore the +1.
                timeOfSyncSamples[Arrays.binarySearch(track.getSyncSamples(),
                        currentSample + 1)] = currentTime;
            }
            currentTime += (double) delta
                    / (double) track.getTrackMetaData().getTimescale();
            currentSample++;
        }
        double previous = 0;
        for (double timeOfSyncSample : timeOfSyncSamples) {
            if (timeOfSyncSample > cutHere) {
                if (next) {
                    return timeOfSyncSample;
                } else {
                    return previous;
                }
            }
            previous = timeOfSyncSample;
        }
        return timeOfSyncSamples[timeOfSyncSamples.length - 1];
    }
}
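ClipUtil (and the activity shown below) also uses a small Util helper with a SAVE_PATH constant and two formatting methods that I haven't listed in this post. For reference, here is a minimal sketch of what it might look like: the SAVE_PATH value follows the Clip directory mentioned above, while the bodies of durationFormat and sizeFormat are only illustrative assumptions, not the original code.

package com.example.mp4clip;

import java.util.Locale;

/** Hypothetical helper; only a sketch of what the real Util class might contain. */
public class Util {
    /** Output directory under the SD card root, as described above. */
    public static final String SAVE_PATH = "Clip";

    /** Format a duration in milliseconds as mm:ss (assumed behavior). */
    public static String durationFormat(int durationMs) {
        int totalSeconds = durationMs / 1000;
        return String.format(Locale.US, "%02d:%02d",
                totalSeconds / 60, totalSeconds % 60);
    }

    /** Format a file size in bytes in a human-readable way (assumed behavior). */
    public static String sizeFormat(long sizeBytes) {
        if (sizeBytes >= 1024 * 1024) {
            return String.format(Locale.US, "%.1fMB", sizeBytes / (1024.0 * 1024.0));
        } else if (sizeBytes >= 1024) {
            return String.format(Locale.US, "%.1fKB", sizeBytes / 1024.0);
        }
        return sizeBytes + "B";
    }
}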
With the utility class in place, the next step is a simple UI. I use a ListView to show all the videos on the device; tapping an item clips out the segment from 5s to 15s (10 seconds in total) in the background. You can of course pass in whatever start and end times you need. The code is as follows:
package com.example.mp4clip;

import java.io.IOException;
import java.lang.ref.SoftReference;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.util.SparseArray;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.SimpleCursorAdapter;
import android.widget.TextView;
import edu.mit.mobile.android.imagecache.ImageCache;
import edu.mit.mobile.android.imagecache.ImageCache.OnImageLoadListener;

public class MainActivity extends Activity implements OnItemClickListener,
        OnImageLoadListener {

    private static final String TAG = "MainActivity";
    ListView mList;
    private Cursor mCursor;
    private final SparseArray<SoftReference<ImageView>> mImageViewsToLoad = new SparseArray<SoftReference<ImageView>>();
    private ImageCache mCache;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mCache = ImageCache.getInstance(this);
        mCache.registerOnImageLoadListener(this);
        mList = (ListView) findViewById(R.id.list);
        mList.setOnItemClickListener(this);
        // Query all videos in the media database, newest first.
        mCursor = getContentResolver().query(
                MediaStore.Video.Media.EXTERNAL_CONTENT_URI, null, null, null,
                MediaStore.Video.Media.DATE_MODIFIED + " desc");
        SimpleCursorAdapter adapter = new VideoListAdapter(this,
                R.layout.video_listitem, mCursor,
                new String[] { MediaStore.Video.Media.TITLE },
                new int[] { R.id.video_title });
        mList.setAdapter(adapter);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Scan for new media files and add them to the media database.
        sendBroadcast(new Intent(Intent.ACTION_MEDIA_MOUNTED,
                Uri.parse("file://"
                        + Environment.getExternalStorageDirectory()
                                .getAbsolutePath())));
        return false;
    }

    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position,
            long id) {
        if (mCursor.moveToPosition(position)) {
            int index = mCursor.getColumnIndex(MediaStore.Video.Media.DATA);
            if (index >= 0) {
                String path = mCursor.getString(index);
                try {
                    // Clip the segment from 5s to 15s of the selected video.
                    ClipUtil.clipVideo(path, 5, 15);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    private static final class ViewHolder {
        /** Video title */
        TextView titleView;
        /** Video duration */
        TextView durationView;
        /** File size */
        TextView sizeView;
    }

    private class VideoListAdapter extends SimpleCursorAdapter {

        public VideoListAdapter(Context context, int layout, Cursor c,
                String[] from, int[] to) {
            super(context, layout, c, from, to);
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            View view = super.getView(position, convertView, parent);
            Cursor cursor = getCursor();
            cursor.moveToPosition(position);
            ViewHolder holder = (ViewHolder) view.getTag();
            if (holder == null) {
                holder = new ViewHolder();
                holder.titleView = (TextView) view
                        .findViewById(R.id.video_title);
                holder.durationView = (TextView) view
                        .findViewById(R.id.video_duration);
                holder.sizeView = (TextView) view.findViewById(R.id.video_size);
                view.setTag(holder);
            }
            final ImageView iv = (ImageView) view.findViewById(R.id.thumbnail);
            int index = cursor.getColumnIndex(MediaStore.Video.Media.DATA);
            if (index >= 0) {
                String path = cursor.getString(index);
                try {
                    // Load the thumbnail; if it is not cached yet, remember the
                    // ImageView so it can be filled in later in onImageLoaded().
                    Drawable draw = mCache.loadImage(position, Uri.parse(path),
                            120, 120);
                    if (draw != null) {
                        iv.setBackground(draw);
                    } else {
                        mImageViewsToLoad.put(position,
                                new SoftReference<ImageView>(iv));
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            index = cursor.getColumnIndex(MediaStore.Video.Media.TITLE);
            if (index >= 0) {
                holder.titleView.setText(cursor.getString(index));
            }
            index = cursor.getColumnIndex(MediaStore.Video.Media.DURATION);
            if (index >= 0) {
                holder.durationView.setText(Util.durationFormat(cursor.getInt(index)));
            }
            index = cursor.getColumnIndex(MediaStore.Video.Media.SIZE);
            if (index >= 0) {
                holder.sizeView.setText(Util.sizeFormat(cursor.getLong(index)));
            }
            return view;
        }
    }

    @Override
    public void onImageLoaded(int id, Uri imageUri, Drawable image) {
        Log.d(TAG, "onImageLoaded:" + id);
        final SoftReference<ImageView> ivRef = mImageViewsToLoad.get(id);
        if (ivRef == null) {
            Log.d(TAG, "ivRef=null");
            return;
        }
        final ImageView iv = ivRef.get();
        if (iv == null) {
            Log.d(TAG, "iv=null");
            mImageViewsToLoad.remove(id);
            return;
        }
        iv.setBackground(image);
    }
}
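One note on the "background" part: the demo above calls ClipUtil.clipVideo directly inside onItemClick, i.e. on the UI thread. For longer videos the build and write steps take a noticeable amount of time, so in a real app you would probably move the call onto a worker thread. A minimal sketch using a plain Thread (this is illustrative, not part of the original code):

// Inside onItemClick, instead of calling ClipUtil.clipVideo directly:
final String videoPath = path; // must be final for the anonymous class
new Thread(new Runnable() {
    @Override
    public void run() {
        try {
            // Clip the 5s-15s segment off the UI thread.
            ClipUtil.clipVideo(videoPath, 5, 15);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}).start();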