Android ICS和MJPEG使用AsyncTask

我修改了 Android 的 MJPEG 查看器代码,使其使用 AsyncTask 工作(因此能在 Ice Cream Sandwich(ICS)4.0.4 上运行),下面是我的代码。

如果有人有关于如何优化、清理或让代码更规范的建议,请告诉我。我尤其希望能解决下面两个问题:

  • 如果设备正在播放视频流,锁屏再解锁后它不会继续播放,除非杀掉并重启应用,或者旋转屏幕。我在 onResume() 里做的各种尝试都导致应用崩溃。

  • 另外,我特别想让 MjpegInputStream.java 里的读取逻辑也用上 AsyncTask,但一直没能成功。

MjpegActivity.java:

package com.demo.mjpeg;

import java.io.IOException;
import java.net.URI;

import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;

import com.demo.mjpeg.MjpegView.MjpegInputStream;
import com.demo.mjpeg.MjpegView.MjpegView;

import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;

/**
 * Full-screen activity that opens an MJPEG camera URL off the UI thread
 * (via AsyncTask — required on ICS, where network I/O on the main thread
 * throws NetworkOnMainThreadException) and hands the stream to MjpegView.
 */
public class MjpegActivity extends Activity {
    private static final String TAG = "MjpegActivity";

    private MjpegView mv;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Sample public camera. The query separator was previously
        // HTML-escaped ("&amp%3b"), which produced a malformed URL.
        String url = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&dummy=1333689998337";

        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);

        mv = new MjpegView(this);
        setContentView(mv);

        new DoRead().execute(url);
    }

    @Override
    public void onPause() {
        super.onPause();
        // Stop the render thread; the stream must be re-opened on resume.
        mv.stopPlayback();
    }

    /** Opens the HTTP connection in the background and wraps the body in an MjpegInputStream. */
    public class DoRead extends AsyncTask<String, Void, MjpegInputStream> {
        @Override
        protected MjpegInputStream doInBackground(String... url) {
            // TODO: if camera has authentication, deal with it instead of silently failing
            DefaultHttpClient httpclient = new DefaultHttpClient();
            Log.d(TAG, "1. Sending http request");
            try {
                HttpResponse res = httpclient.execute(new HttpGet(URI.create(url[0])));
                Log.d(TAG, "2. Request finished, status = " + res.getStatusLine().getStatusCode());
                if (res.getStatusLine().getStatusCode() == 401) {
                    // Camera User Access Control must be turned off; we cannot authenticate.
                    return null;
                }
                return new MjpegInputStream(res.getEntity().getContent());
            } catch (ClientProtocolException e) {
                // Error connecting to camera; Log.d(..., e) already prints the stack trace.
                Log.d(TAG, "Request failed-ClientProtocolException", e);
            } catch (IOException e) {
                Log.d(TAG, "Request failed-IOException", e); // error connecting to camera
            }
            return null;
        }

        @Override
        protected void onPostExecute(MjpegInputStream result) {
            if (result == null) {
                // Connection failed or 401: don't configure the view with a
                // null source; tell the user instead.
                Toast.makeText(MjpegActivity.this, "Unable to connect to camera",
                        Toast.LENGTH_LONG).show();
                return;
            }
            mv.setSource(result);
            mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
            mv.showFps(true);
        }
    }
}

MjpegInputStream.java:

 package com.demo.mjpeg.MjpegView; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Properties; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.util.Log; public class MjpegInputStream extends DataInputStream { private static final String TAG = "MjpegInputStream"; private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 }; private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 }; private final String CONTENT_LENGTH = "Content-Length"; private final static int HEADER_MAX_LENGTH = 100; private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH; private int mContentLength = -1; public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); } private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException { int seqIndex = 0; byte c; for(int i=0; i < FRAME_MAX_LENGTH; i++) { c = (byte) in.readUnsignedByte(); if(c == sequence[seqIndex]) { seqIndex++; if(seqIndex == sequence.length) { return i + 1; } } else { seqIndex = 0; } } return -1; } private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException { int end = getEndOfSeqeunce(in, sequence); return (end < 0) ? 
(-1) : (end - sequence.length); } private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException { ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes); Properties props = new Properties(); props.load(headerIn); return Integer.parseInt(props.getProperty(CONTENT_LENGTH)); } public Bitmap readMjpegFrame() throws IOException { mark(FRAME_MAX_LENGTH); int headerLen = getStartOfSequence(this, SOI_MARKER); reset(); byte[] header = new byte[headerLen]; readFully(header); try { mContentLength = parseContentLength(header); } catch (NumberFormatException nfe) { nfe.getStackTrace(); Log.d(TAG, "catch NumberFormatException hit", nfe); mContentLength = getEndOfSeqeunce(this, EOF_MARKER); } reset(); byte[] frameData = new byte[mContentLength]; skipBytes(headerLen); readFully(frameData); return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData)); } } 

MjpegView.java:

 package com.demo.mjpeg.MjpegView; import java.io.IOException; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.PorterDuffXfermode; import android.graphics.Rect; import android.graphics.Typeface; import android.util.AttributeSet; import android.util.Log; import android.view.SurfaceHolder; import android.view.SurfaceView; public class MjpegView extends SurfaceView implements SurfaceHolder.Callback { private static final String TAG = "MjpegView"; public final static int POSITION_UPPER_LEFT = 9; public final static int POSITION_UPPER_RIGHT = 3; public final static int POSITION_LOWER_LEFT = 12; public final static int POSITION_LOWER_RIGHT = 6; public final static int SIZE_STANDARD = 1; public final static int SIZE_BEST_FIT = 4; public final static int SIZE_FULLSCREEN = 8; private MjpegViewThread thread; private MjpegInputStream mIn = null; private boolean showFps = false; private boolean mRun = false; private boolean surfaceDone = false; private Paint overlayPaint; private int overlayTextColor; private int overlayBackgroundColor; private int ovlPos; private int dispWidth; private int dispHeight; private int displayMode; public class MjpegViewThread extends Thread { private SurfaceHolder mSurfaceHolder; private int frameCounter = 0; private long start; private Bitmap ovl; public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; } private Rect destRect(int bmw, int bmh) { int tempx; int tempy; if (displayMode == MjpegView.SIZE_STANDARD) { tempx = (dispWidth / 2) - (bmw / 2); tempy = (dispHeight / 2) - (bmh / 2); return new Rect(tempx, tempy, bmw + tempx, bmh + tempy); } if (displayMode == MjpegView.SIZE_BEST_FIT) { float bmasp = (float) bmw / (float) bmh; bmw = dispWidth; bmh = (int) (dispWidth / bmasp); if (bmh > dispHeight) { bmh = dispHeight; bmw = (int) 
(dispHeight * bmasp); } tempx = (dispWidth / 2) - (bmw / 2); tempy = (dispHeight / 2) - (bmh / 2); return new Rect(tempx, tempy, bmw + tempx, bmh + tempy); } if (displayMode == MjpegView.SIZE_FULLSCREEN){ return new Rect(0, 0, dispWidth, dispHeight); } return null; } public void setSurfaceSize(int width, int height) { synchronized(mSurfaceHolder) { dispWidth = width; dispHeight = height; } } private Bitmap makeFpsOverlay(Paint p, String text) { Rect b = new Rect(); p.getTextBounds(text, 0, text.length(), b); int bwidth = b.width()+2; int bheight = b.height()+2; Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888); Canvas c = new Canvas(bm); p.setColor(overlayBackgroundColor); c.drawRect(0, 0, bwidth, bheight, p); p.setColor(overlayTextColor); c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p); return bm; } public void run() { start = System.currentTimeMillis(); PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER); Bitmap bm; int width; int height; Rect destRect; Canvas c = null; Paint p = new Paint(); String fps; while (mRun) { if(surfaceDone) { try { c = mSurfaceHolder.lockCanvas(); synchronized (mSurfaceHolder) { try { bm = mIn.readMjpegFrame(); destRect = destRect(bm.getWidth(),bm.getHeight()); c.drawColor(Color.BLACK); c.drawBitmap(bm, null, destRect, p); if(showFps) { p.setXfermode(mode); if(ovl != null) { height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight(); width = ((ovlPos & 8) == 8) ? 
destRect.left : destRect.right -ovl.getWidth(); c.drawBitmap(ovl, width, height, null); } p.setXfermode(null); frameCounter++; if((System.currentTimeMillis() - start) >= 1000) { fps = String.valueOf(frameCounter)+" fps"; frameCounter = 0; start = System.currentTimeMillis(); ovl = makeFpsOverlay(overlayPaint, fps); } } } catch (IOException e) { e.getStackTrace(); Log.d(TAG, "catch IOException hit in run", e); } } } finally { if (c != null) { mSurfaceHolder.unlockCanvasAndPost(c); } } } } } } private void init(Context context) { SurfaceHolder holder = getHolder(); holder.addCallback(this); thread = new MjpegViewThread(holder, context); setFocusable(true); overlayPaint = new Paint(); overlayPaint.setTextAlign(Paint.Align.LEFT); overlayPaint.setTextSize(12); overlayPaint.setTypeface(Typeface.DEFAULT); overlayTextColor = Color.WHITE; overlayBackgroundColor = Color.BLACK; ovlPos = MjpegView.POSITION_LOWER_RIGHT; displayMode = MjpegView.SIZE_STANDARD; dispWidth = getWidth(); dispHeight = getHeight(); } public void startPlayback() { if(mIn != null) { mRun = true; thread.start(); } } public void stopPlayback() { mRun = false; boolean retry = true; while(retry) { try { thread.join(); retry = false; } catch (InterruptedException e) { e.getStackTrace(); Log.d(TAG, "catch IOException hit in stopPlayback", e); } } } public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); } public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); } public void surfaceDestroyed(SurfaceHolder holder) { surfaceDone = false; stopPlayback(); } public MjpegView(Context context) { super(context); init(context); } public void surfaceCreated(SurfaceHolder holder) { surfaceDone = true; } public void showFps(boolean b) { showFps = b; } public void setSource(MjpegInputStream source) { mIn = source; startPlayback(); } public void setOverlayPaint(Paint p) { overlayPaint = p; } public void setOverlayTextColor(int c) { 
overlayTextColor = c; } public void setOverlayBackgroundColor(int c) { overlayBackgroundColor = c; } public void setOverlayPosition(int p) { ovlPos = p; } public void setDisplayMode(int s) { displayMode = s; } } 

干得好!对于 onResume() 的问题,把下面这段代码从 onCreate() 移到 onResume() 是不是就够了?

  //sample public cam String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&amp%3bdummy=1333689998337"; mv = new MjpegView(this); setContentView(mv); new DoRead().execute(URL); 

这样,你只需重新创建视图和 AsyncTask 的新实例……我试过了,对我来说有效……

如果你的 IP 摄像机设置了用户名和密码,下面的代码会很有用:把它加到你的 DefaultHttpClient 上,上面的代码就可以用于需要身份验证的摄像机了。

  CredentialsProvider provider = new BasicCredentialsProvider(); UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("yourusername", "yourpassword"); provider.setCredentials(AuthScope.ANY, credentials); DefaultHttpClient httpclient = new DefaultHttpClient(); httpclient.setCredentialsProvider(provider); 

感谢代码,这是非常有帮助的

我想提出一些优化技巧,这些技巧已经在我的代码中使用过,整体性能可以轻松地提高几倍。

  1. 在可能的情况下,我在读取帧的过程中删除了内存分配

     private final static int HEADER_MAX_LENGTH = 100;
     private final static int FRAME_MAX_LENGTH = 200000 + HEADER_MAX_LENGTH;
     private final String CONTENT_LENGTH = "Content-Length:";
     private final String CONTENT_END = "\r\n";
     // Buffers are allocated once and reused so the per-frame hot path does
     // no heap allocation.
     private final static byte[] gFrameData = new byte[FRAME_MAX_LENGTH];
     private final static byte[] gHeader = new byte[HEADER_MAX_LENGTH];
     BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();

     /**
      * Reads and decodes the next frame into the reused buffers.
      *
      * @return the decoded frame, or null if no start-of-image marker was found
      */
     public Bitmap readMjpegFrame() throws IOException {
         mark(FRAME_MAX_LENGTH);
         int headerLen = getStartOfSequence(SOI_MARKER);
         if (headerLen < 0) {
             // Was "return false", which does not compile in a method
             // returning Bitmap; null signals "no frame".
             return null;
         }
         reset();
         readFully(gHeader, 0, headerLen);
         int contentLen;
         try {
             contentLen = parseContentLength(gHeader, headerLen);
         } catch (NumberFormatException nfe) {
             // Header missing/malformed: measure the frame by scanning for
             // the end-of-image marker instead.
             Log.d(TAG, "catch NumberFormatException hit", nfe);
             contentLen = getEndOfSequence(EOF_MARKER);
         }
         readFully(gFrameData, 0, contentLen);
         Bitmap bm = BitmapFactory.decodeByteArray(gFrameData, 0, contentLen, bitmapOptions);
         // Ask the decoder to reuse this bitmap's pixel memory for the next
         // frame (BitmapFactory.Options.inBitmap, API 11+).
         bitmapOptions.inBitmap = bm;
         return bm;
     }
  2. 优化了 parseContentLength,尽可能去掉了 String 操作

     byte[] CONTENT_LENGTH_BYTES;
     byte[] CONTENT_END_BYTES;

     public MjpegInputStream(InputStream in) {
         super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
         // Decode settings chosen for speed: RGB_565, no quality preference,
         // purgeable pixels.
         bitmapOptions.inSampleSize = 1;
         bitmapOptions.inPreferredConfig = Bitmap.Config.RGB_565;
         bitmapOptions.inPreferQualityOverSpeed = false;
         bitmapOptions.inPurgeable = true;
         try {
             // Pre-encode the header patterns once so frame parsing never
             // touches String.
             CONTENT_LENGTH_BYTES = CONTENT_LENGTH.getBytes("UTF-8");
             CONTENT_END_BYTES = CONTENT_END.getBytes("UTF-8");
         } catch (UnsupportedEncodingException e) {
             e.printStackTrace(); // UTF-8 is always supported; effectively unreachable
         }
     }

     /**
      * Scans {@code buffer[offset..bufferLen)} for {@code pattern}.
      *
      * @return the index one past the last byte of the match, or -1 if absent
      */
     private int findPattern(byte[] buffer, int bufferLen, byte[] pattern, int offset) {
         int seqIndex = 0;
         for (int i = offset; i < bufferLen; ++i) {
             if (buffer[i] == pattern[seqIndex]) {
                 ++seqIndex;
                 if (seqIndex == pattern.length) {
                     return i + 1;
                 }
             } else {
                 seqIndex = 0;
             }
         }
         return -1;
     }

     /**
      * Parses the numeric Content-Length value without allocating Strings.
      *
      * @throws NumberFormatException if the header or its CRLF terminator is
      *         missing, so the caller falls back to scanning for the EOF marker
      */
     private int parseContentLength(byte[] headerBytes, int length) throws IOException, NumberFormatException {
         int begin = findPattern(headerBytes, length, CONTENT_LENGTH_BYTES, 0);
         if (begin < 0) {
             // Previously -1 was fed straight back into findPattern as the
             // offset, indexing buffer[-1] and throwing
             // ArrayIndexOutOfBoundsException instead of the
             // NumberFormatException the caller handles.
             throw new NumberFormatException("Content-Length header not found");
         }
         int end = findPattern(headerBytes, length, CONTENT_END_BYTES, begin);
         if (end < 0) {
             throw new NumberFormatException("Content-Length terminator not found");
         }
         end -= CONTENT_END_BYTES.length;
         // Manual decimal conversion, least-significant digit first; bytes
         // outside '0'..'9' (e.g. a leading space) are skipped.
         int number = 0;
         int radix = 1;
         for (int i = end - 1; i >= begin; --i) {
             if (headerBytes[i] > 47 && headerBytes[i] < 58) {
                 number += (headerBytes[i] - 48) * radix;
                 radix *= 10;
             }
         }
         return number;
     }

代码中可能存在错误,因为这是我为 Stack Overflow 重写的版本;我原来的实现使用两个线程,一个负责读取帧,另一个负责渲染。

我希望这会有助于某人。