
Android Video Capture

http://www.rosoo.net/a/201111/15259.html


The complete source code can be downloaded from here:

http://bbs.rosoo.net/forum.php?mod=viewthread&tid=8669

Update, evening of 2010-10-13 ~ This is a fairly practical program I wrote earlier that captures video on Android in real time and displays the captured video on a PC. It includes both the PC-side and the Android-side programs, is based on Android 1.5, and was tested on an HTC G3. The updated code comes after the divider.

I searched around for a long time without finding how to grab the Android video stream. Eventually I found that the video data can be intercepted during the camera preview: a callback function is invoked every time a frame is obtained.

My development platform is Android 1.5. This program obtains the video stream; as a simple demonstration it writes the 20th frame to a file when that frame arrives, so the data can be taken to a computer for analysis.

See the code below for the details.

package com.sunshine;

import java.io.File;
import java.io.RandomAccessFile;

import android.app.Activity;
import android.content.res.Configuration;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;

public class AndroidVideo extends Activity implements Callback,
        Camera.PictureCallback {

    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;
    private Camera mCamera = null;
    private boolean mPreviewRunning = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Full-screen, title-less, translucent window for the camera preview
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.main);
        mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void onPictureTaken(byte[] data, Camera camera) {
        try {
            Log.v("System.out", "get it!");
            File file = new File("/sdcard/camera.jpg");
            RandomAccessFile raf = new RandomAccessFile(file, "rw");
            raf.write(data);
            raf.close();
        } catch (Exception ex) {
            Log.v("System.out", ex.toString());
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        if (mPreviewRunning) {
            mCamera.stopPreview();
        }
        Camera.Parameters p = mCamera.getParameters();
        p.setPreviewSize(width, height);
        // StreamIt receives one callback per preview frame
        mCamera.setPreviewCallback(new StreamIt());
        mCamera.setParameters(p);
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (Exception ex) {
            Log.v("System.out", ex.toString());
        }
        mCamera.startPreview();
        mPreviewRunning = true;
    }

    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        mPreviewRunning = false;
        mCamera.release();
    }

    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
            // landscape
        } else if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
            // portrait
        }
    }
}

class StreamIt implements Camera.PreviewCallback {

    private int tick = 1;

    public void onPreviewFrame(byte[] data, Camera camera) {
        if (tick == 20) {
            System.out.println("data len: " + data.length);
            try {
                // Dump the 20th preview frame to a file for offline analysis
                File file = new File("/sdcard/pal.pal");
                if (!file.exists())
                    file.createNewFile();
                RandomAccessFile raf = new RandomAccessFile(file, "rw");
                raf.write(data);
                raf.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }
        tick++;
    }
}

The XML layout file:

<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent" android:layout_height="fill_parent"
    android:orientation="vertical">

    <SurfaceView android:id="@+id/surface_camera"
        android:layout_width="fill_parent" android:layout_height="fill_parent">
    </SurfaceView>

</LinearLayout>

Note that the camera permission also has to be declared in the project's manifest file:

<uses-permission android:name="android.permission.CAMERA" />
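The 2010-10-13 update below also opens a TCP socket to the PC, so network access must be declared as well; this entry is not in the original post but is required by the socket code:

<uses-permission android:name="android.permission.INTERNET" />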

From the documentation, the preview frames Android delivers are in YUV420 format (the default is the semi-planar NV21 layout, which is what the YUV420SP decoder below expects).

Here is a function that converts such a frame to RGB:

public static void decodeYUV420SP(byte[] rgbBuf, byte[] yuv420sp, int width, int height) {
    final int frameSize = width * height;
    if (rgbBuf == null)
        throw new NullPointerException("buffer 'rgbBuf' is null");
    if (rgbBuf.length < frameSize * 3)
        throw new IllegalArgumentException("buffer 'rgbBuf' size "
                + rgbBuf.length + " < minimum " + frameSize * 3);
    if (yuv420sp == null)
        throw new NullPointerException("buffer 'yuv420sp' is null");
    if (yuv420sp.length < frameSize * 3 / 2)
        throw new IllegalArgumentException("buffer 'yuv420sp' size "
                + yuv420sp.length + " < minimum " + frameSize * 3 / 2);

    int i = 0, y = 0;
    int uvp = 0, u = 0, v = 0;
    int y1192 = 0, r = 0, g = 0, b = 0;

    for (int j = 0, yp = 0; j < height; j++) {
        // The interleaved chroma plane starts after the Y plane;
        // one chroma row covers two luma rows.
        uvp = frameSize + (j >> 1) * width;
        u = 0;
        v = 0;
        for (i = 0; i < width; i++, yp++) {
            y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0)
                y = 0;
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }
            y1192 = 1192 * y;
            r = (y1192 + 1634 * v);
            g = (y1192 - 833 * v - 400 * u);
            b = (y1192 + 2066 * u);
            if (r < 0) r = 0; else if (r > 262143) r = 262143;
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            if (b < 0) b = 0; else if (b > 262143) b = 262143;
            rgbBuf[yp * 3] = (byte) (r >> 10);
            rgbBuf[yp * 3 + 1] = (byte) (g >> 10);
            rgbBuf[yp * 3 + 2] = (byte) (b >> 10);
        }
    }
}

The function comes from http://chenweihuacwh.javaeye.com/blog/571223. Thanks to cwh643.
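If you only want to inspect the frame that the first program dumps to /sdcard/pal.pal, a small desktop helper can decode it to a PNG. This is a minimal sketch, not part of the original post: the class name FrameToPng, the 240x160 frame size, and the file paths are assumptions (the real size follows from the data length the app prints, length = width * height * 3 / 2), and it assumes the decodeYUV420SP function above is copied into the same class.

import java.awt.image.BufferedImage;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import javax.imageio.ImageIO;

public class FrameToPng {
    public static void main(String[] args) throws Exception {
        final int width = 240, height = 160;            // assumed frame size
        byte[] yuv = new byte[width * height * 3 / 2];  // one YUV420SP frame
        byte[] rgb = new byte[width * height * 3];

        // Read the raw frame dumped by the Android program (path is an example)
        DataInputStream in = new DataInputStream(new FileInputStream(new File("pal.pal")));
        in.readFully(yuv);
        in.close();

        // decodeYUV420SP is the conversion function listed above, copied into this class
        decodeYUV420SP(rgb, yuv, width, height);

        // Pack the interleaved R,G,B bytes into a BufferedImage and save it as a PNG
        BufferedImage img = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int p = (y * width + x) * 3;
                int r = rgb[p] & 0xff, g = rgb[p + 1] & 0xff, b = rgb[p + 2] & 0xff;
                img.setRGB(x, y, (r << 16) | (g << 8) | b);
            }
        }
        ImageIO.write(img, "png", new File("frame.png"));
    }
}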

----------------------------- divider -------------------------------------------

----------------------------- 2010-10-13 update -------------------------------

Android side

package com.sunshine;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.net.Socket;

import android.app.Activity;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;

public class AndroidVideo extends Activity implements Callback, OnClickListener {

    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;
    private Camera mCamera = null;
    private boolean mPreviewRunning = false;

    // Connection-related widgets
    private EditText remoteIp = null;
    private Button connect = null;
    private String remoteIpStr = null;

    // Video data
    private StreamIt streamIt = null;
    public static Kit kit = null;

    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        setContentView(R.layout.main);
        mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

        remoteIp = (EditText) this.findViewById(R.id.remoteIP);
        connect = (Button) this.findViewById(R.id.connect);
        connect.setOnClickListener(this);

        streamIt = new StreamIt();
        kit = new Kit();
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        if (mPreviewRunning) mCamera.stopPreview();
        Camera.Parameters p = mCamera.getParameters();
        p.setPreviewSize(width, height);
        mCamera.setPreviewCallback(streamIt); // every preview frame goes to StreamIt
        mCamera.setParameters(p);
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        mCamera.startPreview();
        mPreviewRunning = true;
    }

    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        mPreviewRunning = false;
        mCamera.release();
    }

    // Sends the latest preview frame to the PC over TCP, one frame per loop iteration.
    class Kit implements Runnable {
        private boolean run = true;
        // private final int dataLen = 57600; // 307200 or 230400, 76800 or 57600
        private final int tt = 28800;

        public void run() {
            try {
                Socket socket = new Socket(remoteIpStr, 8899);
                DataOutputStream dos = new DataOutputStream(socket.getOutputStream());
                DataInputStream dis = new DataInputStream(socket.getInputStream());
                while (run) {
                    // Send the frame in two halves, then wait for the PC's acknowledgement
                    dos.write(streamIt.yuv420sp, 0, 28800);
                    dos.write(streamIt.yuv420sp, 28800, 28800);
                    dis.readBoolean();
                    Thread.sleep(155);
                }
            } catch (Exception ex) {
                run = false;
                ex.printStackTrace();
            }
        }
    }

    public void onClick(View view) {
        if (view == connect) { // "connect": read the PC's IP and start the sender thread
            remoteIpStr = remoteIp.getText().toString();
            new Thread(AndroidVideo.kit).start();
        }
    }
}

class StreamIt implements Camera.PreviewCallback {

    public byte[] yuv420sp = null;
    private boolean t = true;

    public void onPreviewFrame(byte[] data, Camera camera) {
        // if (t) {
        //     t = false;
        //     new Thread(AndroidVideo.kit).start();
        // }
        yuv420sp = data; // keep a reference to the most recent frame
    }
}
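The updated activity looks up two widgets, R.id.remoteIP and R.id.connect, that the layout file shown earlier does not contain. A minimal sketch of the extra elements (only the ids come from the code; the attributes and placement are assumptions, and they need to sit where the full-screen SurfaceView does not cover them):

<EditText android:id="@+id/remoteIP"
    android:layout_width="fill_parent" android:layout_height="wrap_content" />

<Button android:id="@+id/connect"
    android:layout_width="wrap_content" android:layout_height="wrap_content"
    android:text="Connect" />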

PC side

import java.awt.Frame;
import java.awt.Graphics;
import java.awt.Point;
import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.image.BufferedImage;
import java.awt.image.ComponentColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.DataBufferByte;
import java.awt.image.PixelInterleavedSampleModel;
import java.awt.image.Raster;
import java.awt.image.SampleModel;
import java.awt.image.WritableRaster;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.net.ServerSocket;
import java.net.Socket;

public class FlushMe extends Frame {

    private static final long serialVersionUID = 1L;

    private BufferedImage im;

    // Image parameters
    // private final int width = 480;
    // private final int height = 320;
    private static final int width = 240;
    private static final int height = 160;
    private static final int numBands = 3;
    private static final int dataLen = 57600; // 307200 or 230400; 76800 or 57600
    private static final int tt = 28800;      // 14400 or 28800

    // Pixel buffers
    private byte[] byteArray = new byte[width * height * numBands]; // RGB pixels
    private byte[] yuv420sp = new byte[dataLen];                    // raw YUV frame

    private static final int[] bandOffsets = new int[] { 0, 1, 2 };
    private static final SampleModel sampleModel = new PixelInterleavedSampleModel(
            DataBuffer.TYPE_BYTE, width, height, 3, width * 3, bandOffsets);

    // ColorModel
    private static final ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
    private static final ComponentColorModel cm = new ComponentColorModel(cs, false,
            false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);

    public FlushMe() {
        super("Flushing");
        updateIm();
        setSize(480, 320);
        // Exit the application when the window is closed
        this.addWindowListener(new java.awt.event.WindowAdapter() {
            public void windowClosing(java.awt.event.WindowEvent e) {
                System.exit(0);
            }
        });
        // Center the window
        this.setLocationRelativeTo(null);
        this.setResizable(false);
        this.setVisible(true);
        this.getData();
    }

    public void update(Graphics g) {
        paint(g);
    }

    public void paint(Graphics g) {
        g.drawImage(im, 0, 0, 480, 320, this);
    }

    public void getData() {
        try {
            ServerSocket server = new ServerSocket(8899);
            Socket socket = server.accept();
            DataInputStream dis = new DataInputStream(socket.getInputStream());
            DataOutputStream dos = new DataOutputStream(socket.getOutputStream());
            while (true) {
                // Read one frame in tt-sized chunks (readFully avoids short reads)
                for (int i = 0; i < dataLen / tt; i++) {
                    dis.readFully(yuv420sp, i * tt, tt);
                }
                // Refresh the display as soon as the frame has arrived
                updateIm();
                im.flush();
                repaint();
                dos.writeBoolean(true); // acknowledge the frame
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    private void updateIm() {
        // Decode the YUV frame into RGB
        decodeYUV420SP(byteArray, yuv420sp, width, height);
        DataBuffer dataBuffer = new DataBufferByte(byteArray, numBands);
        WritableRaster wr = Raster.createWritableRaster(sampleModel, dataBuffer,
                new Point(0, 0));
        im = new BufferedImage(cm, wr, false, null);
    }

    private static void decodeYUV420SP(byte[] rgbBuf, byte[] yuv420sp,
            int width, int height) {
        final int frameSize = width * height;
        if (rgbBuf == null)
            throw new NullPointerException("buffer 'rgbBuf' is null");
        if (rgbBuf.length < frameSize * 3)
            throw new IllegalArgumentException("buffer 'rgbBuf' size "
                    + rgbBuf.length + " < minimum " + frameSize * 3);
        if (yuv420sp == null)
            throw new NullPointerException("buffer 'yuv420sp' is null");
        if (yuv420sp.length < frameSize * 3 / 2)
            throw new IllegalArgumentException("buffer 'yuv420sp' size "
                    + yuv420sp.length + " < minimum " + frameSize * 3 / 2);

        int i = 0, y = 0;
        int uvp = 0, u = 0, v = 0;
        int y1192 = 0, r = 0, g = 0, b = 0;

        for (int j = 0, yp = 0; j < height; j++) {
            uvp = frameSize + (j >> 1) * width;
            u = 0;
            v = 0;
            for (i = 0; i < width; i++, yp++) {
                y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0)
                    y = 0;
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }
                y1192 = 1192 * y;
                r = (y1192 + 1634 * v);
                g = (y1192 - 833 * v - 400 * u);
                b = (y1192 + 2066 * u);
                if (r < 0) r = 0; else if (r > 262143) r = 262143;
                if (g < 0) g = 0; else if (g > 262143) g = 262143;
                if (b < 0) b = 0; else if (b > 262143) b = 262143;
                rgbBuf[yp * 3] = (byte) (r >> 10);
                rgbBuf[yp * 3 + 1] = (byte) (g >> 10);
                rgbBuf[yp * 3 + 2] = (byte) (b >> 10);
            }
        }
    }

    public static void main(String[] args) {
        Frame f = new FlushMe();
    }
}

[Screenshot: Android video capture in action]

(sundos)
