
Android ICS and MJPEG using AsyncTask

I modified the MJPEG viewer code from "Android and MJPEG" to work using an AsyncTask (and therefore to work on Ice Cream Sandwich (ICS), 4.0.4); here is my code.

If anyone has suggestions on how to optimize it, clean it up, or do anything more appropriate with the code, please let me know. Two issues I would appreciate help with:

  • If you have the device on a stream, lock the screen and then unlock it, playback does not resume until you kill and relaunch the app or rotate the screen. All my attempts to do something in onResume() have resulted in application crashes.

  • In particular, I would like to move the AsyncTask into MjpegInputStream.java, but I have not managed to get that working (one possible direction is sketched just after this list).
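
One possible direction for the second point, just a sketch and not something I have working, assuming the connection code stays on Apache HttpClient exactly as in DoRead below: give MjpegInputStream a static, blocking factory method, so the AsyncTask in the Activity shrinks to a single call. The read(String url) method below is hypothetical and not part of the class as posted.

    // Hypothetical addition to MjpegInputStream.java (needs the same
    // org.apache.http.* and java.net.URI imports that MjpegActivity.java uses).
    public static MjpegInputStream read(String url) {
        DefaultHttpClient httpclient = new DefaultHttpClient();
        try {
            HttpResponse res = httpclient.execute(new HttpGet(URI.create(url)));
            if (res.getStatusLine().getStatusCode() == 401) {
                return null; // camera requires authentication
            }
            return new MjpegInputStream(res.getEntity().getContent());
        } catch (IOException e) {
            Log.d(TAG, "read() failed", e);
            return null;
        }
    }

With that in place, doInBackground() in DoRead would reduce to return MjpegInputStream.read(url[0]);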

MjpegActivity.java:

package com.demo.mjpeg;

import java.io.IOException;
import java.net.URI;

import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;

import com.demo.mjpeg.MjpegView.MjpegInputStream;
import com.demo.mjpeg.MjpegView.MjpegView;
import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;

public class MjpegActivity extends Activity {
    private static final String TAG = "MjpegActivity";

    private MjpegView mv;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        //sample public cam
        String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&amp%3bdummy=1333689998337";

        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, 
        WindowManager.LayoutParams.FLAG_FULLSCREEN);

        mv = new MjpegView(this);
        setContentView(mv);        

        new DoRead().execute(URL);
    }

    @Override
    public void onPause() {
        super.onPause();
        mv.stopPlayback();
    }

    public class DoRead extends AsyncTask<String, Void, MjpegInputStream> {
        protected MjpegInputStream doInBackground(String... url) {
            //TODO: if camera has authentication deal with it and don't just not work
            HttpResponse res = null;
            DefaultHttpClient httpclient = new DefaultHttpClient();     
            Log.d(TAG, "1. Sending http request");
            try {
                res = httpclient.execute(new HttpGet(URI.create(url[0])));
                Log.d(TAG, "2. Request finished, status = " + res.getStatusLine().getStatusCode());
                if(res.getStatusLine().getStatusCode()==401){
                    //You must turn off camera User Access Control before this will work
                    return null;
                }
                return new MjpegInputStream(res.getEntity().getContent());  
            } catch (ClientProtocolException e) {
                e.printStackTrace();
                Log.d(TAG, "Request failed-ClientProtocolException", e);
                //Error connecting to camera
            } catch (IOException e) {
                e.printStackTrace();
                Log.d(TAG, "Request failed-IOException", e);
                //Error connecting to camera
            }

            return null;
        }

        protected void onPostExecute(MjpegInputStream result) {
            mv.setSource(result);
            mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
            mv.showFps(true);
        }
    }
}
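
A possible small addition, not in the code above: DoRead returns null on a 401 or on a connection error, and Toast is imported but never used, so onPostExecute() inside DoRead could report the failure instead of silently handing MjpegView a null source. For example:

        @Override
        protected void onPostExecute(MjpegInputStream result) {
            if (result == null) {
                // Connection or authentication failure reported by doInBackground().
                Toast.makeText(MjpegActivity.this, "Could not connect to the camera",
                        Toast.LENGTH_LONG).show();
                return;
            }
            mv.setSource(result);
            mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
            mv.showFps(true);
        }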

MjpegInputStream.java:

package com.demo.mjpeg.MjpegView;

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;

public class MjpegInputStream extends DataInputStream {
    private static final String TAG = "MjpegInputStream";

    private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
    private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
    private final String CONTENT_LENGTH = "Content-Length";
    private final static int HEADER_MAX_LENGTH = 100;
    private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
    private int mContentLength = -1;

    public MjpegInputStream(InputStream in) {
        super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
    }

    private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
        int seqIndex = 0;
        byte c;
        for(int i=0; i < FRAME_MAX_LENGTH; i++) {
            c = (byte) in.readUnsignedByte();
            if(c == sequence[seqIndex]) {
                seqIndex++;
                if(seqIndex == sequence.length) {
                    return i + 1;
                }
            } else {
                seqIndex = 0;
            }
        }
        return -1;
    }

    private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
        int end = getEndOfSeqeunce(in, sequence);
        return (end < 0) ? (-1) : (end - sequence.length);
    }

    private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
        ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
        Properties props = new Properties();
        props.load(headerIn);
        return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
    }   

    public Bitmap readMjpegFrame() throws IOException {
        mark(FRAME_MAX_LENGTH);
        int headerLen = getStartOfSequence(this, SOI_MARKER);
        reset();
        byte[] header = new byte[headerLen];
        readFully(header);
        try {
            mContentLength = parseContentLength(header);
        } catch (NumberFormatException nfe) { 
            nfe.getStackTrace();
            Log.d(TAG, "catch NumberFormatException hit", nfe);
            mContentLength = getEndOfSeqeunce(this, EOF_MARKER); 
        }
        reset();
        byte[] frameData = new byte[mContentLength];
        skipBytes(headerLen);
        readFully(frameData);
        return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
    }
}

MjpegView.java:

package com.demo.mjpeg.MjpegView;

import java.io.IOException;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
    private static final String TAG = "MjpegView";

    public final static int POSITION_UPPER_LEFT  = 9;
    public final static int POSITION_UPPER_RIGHT = 3;
    public final static int POSITION_LOWER_LEFT  = 12;
    public final static int POSITION_LOWER_RIGHT = 6;

    public final static int SIZE_STANDARD   = 1; 
    public final static int SIZE_BEST_FIT   = 4;
    public final static int SIZE_FULLSCREEN = 8;

    private MjpegViewThread thread;
    private MjpegInputStream mIn = null;    
    private boolean showFps = false;
    private boolean mRun = false;
    private boolean surfaceDone = false;    
    private Paint overlayPaint;
    private int overlayTextColor;
    private int overlayBackgroundColor;
    private int ovlPos;
    private int dispWidth;
    private int dispHeight;
    private int displayMode;

    public class MjpegViewThread extends Thread {
        private SurfaceHolder mSurfaceHolder;
        private int frameCounter = 0;
        private long start;
        private Bitmap ovl;

        public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) {
            mSurfaceHolder = surfaceHolder;
        }

        private Rect destRect(int bmw, int bmh) {
            int tempx;
            int tempy;
            if (displayMode == MjpegView.SIZE_STANDARD) {
                tempx = (dispWidth / 2) - (bmw / 2);
                tempy = (dispHeight / 2) - (bmh / 2);
                return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
            }
            if (displayMode == MjpegView.SIZE_BEST_FIT) {
                float bmasp = (float) bmw / (float) bmh;
                bmw = dispWidth;
                bmh = (int) (dispWidth / bmasp);
                if (bmh > dispHeight) {
                    bmh = dispHeight;
                    bmw = (int) (dispHeight * bmasp);
                }
                tempx = (dispWidth / 2) - (bmw / 2);
                tempy = (dispHeight / 2) - (bmh / 2);
                return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
            }
            if (displayMode == MjpegView.SIZE_FULLSCREEN){
                return new Rect(0, 0, dispWidth, dispHeight);
            }
            return null;
        }

        public void setSurfaceSize(int width, int height) {
            synchronized(mSurfaceHolder) {
                dispWidth = width;
                dispHeight = height;
            }
        }

        private Bitmap makeFpsOverlay(Paint p, String text) {
            Rect b = new Rect();
            p.getTextBounds(text, 0, text.length(), b);
            int bwidth  = b.width()+2;
            int bheight = b.height()+2;
            Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
            Canvas c = new Canvas(bm);
            p.setColor(overlayBackgroundColor);
            c.drawRect(0, 0, bwidth, bheight, p);
            p.setColor(overlayTextColor);
            c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
            return bm;           
        }

        public void run() {
            start = System.currentTimeMillis();
            PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
            Bitmap bm;
            int width;
            int height;
            Rect destRect;
            Canvas c = null;
            Paint p = new Paint();
            String fps;
            while (mRun) {
                if(surfaceDone) {
                    try {
                        c = mSurfaceHolder.lockCanvas();
                        synchronized (mSurfaceHolder) {
                            try {
                                bm = mIn.readMjpegFrame();
                                destRect = destRect(bm.getWidth(),bm.getHeight());
                                c.drawColor(Color.BLACK);
                                c.drawBitmap(bm, null, destRect, p);
                                if(showFps) {
                                    p.setXfermode(mode);
                                    if(ovl != null) {
                                        height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
                                        width  = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
                                        c.drawBitmap(ovl, width, height, null);
                                    }
                                    p.setXfermode(null);
                                    frameCounter++;
                                    if((System.currentTimeMillis() - start) >= 1000) {
                                        fps = String.valueOf(frameCounter)+" fps";
                                        frameCounter = 0; 
                                        start = System.currentTimeMillis();
                                        ovl = makeFpsOverlay(overlayPaint, fps);
                                    }
                                }
                            } catch (IOException e) {
                                e.getStackTrace();
                                Log.d(TAG, "catch IOException hit in run", e);
                            }
                        }
                    } finally { 
                        if (c != null) {
                            mSurfaceHolder.unlockCanvasAndPost(c); 
                        }
                    }
                }
            }
        }
    }

    private void init(Context context) {
        SurfaceHolder holder = getHolder();
        holder.addCallback(this);
        thread = new MjpegViewThread(holder, context);
        setFocusable(true);
        overlayPaint = new Paint();
        overlayPaint.setTextAlign(Paint.Align.LEFT);
        overlayPaint.setTextSize(12);
        overlayPaint.setTypeface(Typeface.DEFAULT);
        overlayTextColor = Color.WHITE;
        overlayBackgroundColor = Color.BLACK;
        ovlPos = MjpegView.POSITION_LOWER_RIGHT;
        displayMode = MjpegView.SIZE_STANDARD;
        dispWidth = getWidth();
        dispHeight = getHeight();
    }

    public void startPlayback() { 
        if(mIn != null) {
            mRun = true;
            thread.start();         
        }
    }

    public void stopPlayback() { 
        mRun = false;
        boolean retry = true;
        while(retry) {
            try {
                thread.join();
                retry = false;
            } catch (InterruptedException e) {
                e.getStackTrace();
                Log.d(TAG, "catch IOException hit in stopPlayback", e);
            }
        }
    }

    public MjpegView(Context context, AttributeSet attrs) { 
        super(context, attrs); init(context); 
    }

    public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { 
        thread.setSurfaceSize(w, h); 
    }

    public void surfaceDestroyed(SurfaceHolder holder) { 
        surfaceDone = false; 
        stopPlayback(); 
    }

    public MjpegView(Context context) { 
        super(context);
        init(context); 
    }

    public void surfaceCreated(SurfaceHolder holder) { 
        surfaceDone = true; 
    }

    public void showFps(boolean b) { 
        showFps = b; 
    }

    public void setSource(MjpegInputStream source) { 
        mIn = source;
        startPlayback();
    }

    public void setOverlayPaint(Paint p) { 
        overlayPaint = p; 
    }

    public void setOverlayTextColor(int c) { 
        overlayTextColor = c; 
    }

    public void setOverlayBackgroundColor(int c) { 
        overlayBackgroundColor = c; 
    }

    public void setOverlayPosition(int p) { 
        ovlPos = p; 
    }

    public void setDisplayMode(int s) { 
        displayMode = s; 
    }
}
29
bbodenmiller

A note that will be helpful for beginners: if you want to access your IP camera with a username and password, you can add the credentials to your DefaultHttpClient, and the code above will then work for cameras that require authentication:

    // Imports needed (in addition to those already in MjpegActivity.java):
    //   org.apache.http.auth.AuthScope
    //   org.apache.http.auth.UsernamePasswordCredentials
    //   org.apache.http.client.CredentialsProvider
    //   org.apache.http.impl.client.BasicCredentialsProvider

    CredentialsProvider provider = new BasicCredentialsProvider();
    UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("yourusername", "yourpassword");
    provider.setCredentials(AuthScope.ANY, credentials);
    DefaultHttpClient httpclient = new DefaultHttpClient();
    httpclient.setCredentialsProvider(provider);
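
To be clear about where this goes: the authenticated client simply replaces the plain new DefaultHttpClient() inside DoRead.doInBackground() from the question, and the rest of the request code stays the same. Roughly (credentials are placeholders):

    protected MjpegInputStream doInBackground(String... url) {
        // Send Basic credentials for any host/realm; replace the placeholders.
        CredentialsProvider provider = new BasicCredentialsProvider();
        provider.setCredentials(AuthScope.ANY,
                new UsernamePasswordCredentials("yourusername", "yourpassword"));
        DefaultHttpClient httpclient = new DefaultHttpClient();
        httpclient.setCredentialsProvider(provider);
        try {
            HttpResponse res = httpclient.execute(new HttpGet(URI.create(url[0])));
            if (res.getStatusLine().getStatusCode() == 401) {
                return null; // credentials rejected
            }
            return new MjpegInputStream(res.getEntity().getContent());
        } catch (IOException e) {
            Log.d(TAG, "Request failed", e);
            return null;
        }
    }
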
2
Ahmed

Good work! For your onResume() problem, isn't it enough to move the following code from onCreate() to onResume()?

    //sample public cam 
    String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&amp%3bdummy=1333689998337"; 

    mv = new MjpegView(this); 
    setContentView(mv);         

    new DoRead().execute(URL); 

Then you simply recreate the view and a new instance of the AsyncTask... I tried it and it works for me.
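
Put together, keeping the names from the question, it would look roughly like this in MjpegActivity:

    @Override
    public void onResume() {
        super.onResume();
        // Recreate the view and a fresh AsyncTask each time the activity returns
        // to the foreground (e.g. after the screen is unlocked).
        String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&amp%3bdummy=1333689998337";
        mv = new MjpegView(this);
        setContentView(mv);
        new DoRead().execute(URL);
    }

    @Override
    public void onPause() {
        super.onPause();
        mv.stopPlayback();
    }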

2
DC84

Thanks for the code, it is very useful.

I would like to suggest a few optimization tips that I already use in my own code; overall performance can easily be improved several times over.

  1. I removed memory allocations while reading a frame, where possible:

    private final static int HEADER_MAX_LENGTH = 100;
    private final static int FRAME_MAX_LENGTH = 200000 + HEADER_MAX_LENGTH;
    private final String CONTENT_LENGTH = "Content-Length:";
    private final String CONTENT_END = "\r\n";
    private final static byte[] gFrameData = new byte[FRAME_MAX_LENGTH];
    private final static byte[] gHeader = new byte[HEADER_MAX_LENGTH];
    BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
    
    public Bitmap readMjpegFrame() throws IOException {
    
        mark(FRAME_MAX_LENGTH);
        int headerLen = getStartOfSequence(SOI_MARKER);
    
        if(headerLen < 0)
            return null;
    
        reset();
        readFully(gHeader, 0, headerLen);
    
        int contentLen;
    
        try
        {
            contentLen = parseContentLength(gHeader, headerLen);
        } catch (NumberFormatException nfe) 
        {
            nfe.getStackTrace();
            Log.d(TAG, "catch NumberFormatException hit", nfe);
            contentLen = getEndOfSequence(EOF_MARKER);
        }
    
        readFully(gFrameData, 0, contentLen);
    
        Bitmap bm = BitmapFactory.decodeByteArray(gFrameData, 0, contentLen, bitmapOptions);
        bitmapOptions.inBitmap = bm;
    
        return bm;
    }
    
  2. I optimized parseContentLength(), removing String operations as much as possible:

    byte[] CONTENT_LENGTH_BYTES;
    byte[] CONTENT_END_BYTES;
    
    public MjpegInputStream(InputStream in)
    {
        super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
    
        bitmapOptions.inSampleSize = 1;
        bitmapOptions.inPreferredConfig = Bitmap.Config.RGB_565;
        bitmapOptions.inPreferQualityOverSpeed = false;
        bitmapOptions.inPurgeable = true;
        try
        {
            CONTENT_LENGTH_BYTES = CONTENT_LENGTH.getBytes("UTF-8");
            CONTENT_END_BYTES = CONTENT_END.getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) 
        {
            e.printStackTrace();
        }
    }
    
    private int findPattern(byte[] buffer, int bufferLen, byte[] pattern, int offset)
    {
        int seqIndex = 0;
        for(int i=offset; i < bufferLen; ++i)
        {
            if(buffer[i] == pattern[seqIndex])
            {
                ++seqIndex;
                if(seqIndex == pattern.length)
                {
                    return i + 1;
                }
            } else
            {
                seqIndex = 0;
            }
        }
    
        return -1;
    }
    
    
    
    private int parseContentLength(byte[] headerBytes, int length) throws IOException, NumberFormatException
    {
        int begin = findPattern(headerBytes, length, CONTENT_LENGTH_BYTES, 0);
        int end = findPattern(headerBytes, length, CONTENT_END_BYTES, begin) - CONTENT_END_BYTES.length;
    
        // converting string to int
        int number = 0;
        int radix = 1;
        for(int i = end - 1; i >= begin; --i)
        {
            if(headerBytes[i] > 47 && headerBytes[i] < 58)
            {
                number += (headerBytes[i] - 48) * radix;
                radix *= 10;
            }
        }
    
        return number;
    }
    

There may be errors in the code since I rewrote it for Stack Overflow; originally I use two threads, one reading frames and another rendering them.
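
For reference, a rough sketch of what such a reader/renderer split could look like (hypothetical names written for this answer rather than copied from my code, using java.util.concurrent and reusing the mRun, mIn, mSurfaceHolder fields and the destRect() helper from MjpegView above; a small bounded queue keeps the reader from running ahead of the renderer):

    // Sketch only: one thread decodes frames, the other draws them.
    private final BlockingQueue<Bitmap> frameQueue = new ArrayBlockingQueue<Bitmap>(2);

    class ReaderThread extends Thread {
        public void run() {
            while (mRun) {
                try {
                    Bitmap bm = mIn.readMjpegFrame();   // blocks on the network
                    if (bm != null) {
                        frameQueue.offer(bm);           // drop the frame if the queue is full
                    }
                } catch (IOException e) {
                    Log.d(TAG, "reader failed", e);
                }
            }
        }
    }

    class RenderThread extends Thread {
        public void run() {
            while (mRun) {
                try {
                    // Wait briefly for the next decoded frame so the loop can
                    // still notice mRun going false.
                    Bitmap bm = frameQueue.poll(500, TimeUnit.MILLISECONDS);
                    if (bm == null) continue;
                    Canvas c = mSurfaceHolder.lockCanvas();
                    if (c == null) continue;
                    try {
                        c.drawColor(Color.BLACK);
                        c.drawBitmap(bm, null, destRect(bm.getWidth(), bm.getHeight()), null);
                    } finally {
                        mSurfaceHolder.unlockCanvasAndPost(c);
                    }
                } catch (InterruptedException e) {
                    return; // exit if interrupted
                }
            }
        }
    }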

I hope this helps someone.

0
Rusty