Android c++屏幕实时录制

Home / C++ 百晓生 2019-5-29 3370

功能:沿用Android cmds下的screenrecord代码,整理成可以供native程序调用的一个可以获取实时screen h264码流的一个库,分享一下!!

头文件screenrecord.h

#include <stdint.h>
 
// Callback interface implemented by clients of ScreenRecord.  onData() is
// invoked from the thread that called ScreenRecord::start(), once per
// encoded buffer.
class IScreenRecordCallback
{
public:
	virtual ~IScreenRecordCallback(){}
	// Receives one encoded H.264 buffer (codec-config SPS/PPS buffers are
	// delivered through here as well -- see runEncoder).  pData is only
	// valid for the duration of the call, because the codec output buffer
	// is released immediately afterwards; copy the data if you keep it.
	virtual void onData(void* pData, size_t size) = 0;
	//virtual void onCodecConfig(void* pData, size_t size) = 0;
};
 
class ScreenRecordImp;
class ScreenRecord
{
public:
	ScreenRecord();
	~ScreenRecord();
	int start(IScreenRecordCallback* callback);
	void stop();
private:
	ScreenRecordImp* m_pImp;
};

实现文件:screenrecord.cpp

#include "screenrecord.h"
#include <iostream>
#include <utils/Log.h>
#include <binder/IPCThreadState.h>
#include <utils/Errors.h>
#include <utils/Thread.h>
#include <utils/Timers.h>
#include <media/openmax/OMX_IVCommon.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayInfo.h>
 
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaErrors.h>
#include <media/ICrypto.h>
#include <utils/Errors.h>
 
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <stdio.h>
#include <fcntl.h>
#include <signal.h>
#include <getopt.h>
#include <sys/wait.h>
 
using namespace android;
 
using namespace std;
// NOTE(review): LOG_TAG is defined here, after <utils/Log.h> has already
// been included above -- the ALOG* macros may not pick it up; normally it
// is defined before the include.  TODO confirm.
#define LOG_TAG "LibScreenRecord"
//#define LOG_NDEBUG 0
// Fallback encode size used when the encoder rejects the display's
// native resolution (see ScreenRecordImp::start()).
static const uint32_t kFallbackWidth = 1280;        // 720p
static const uint32_t kFallbackHeight = 720;
 
// Private implementation of ScreenRecord.  Mirrors the main display into
// a virtual display whose sink is a MediaCodec H.264 input surface, and
// pushes the encoded output to the client callback.
class ScreenRecordImp
{
public:
	ScreenRecordImp();
	// Runs the whole capture/encode loop on the calling thread; blocks
	// until stop() is called or the encoder signals end-of-stream.
	int start(IScreenRecordCallback* callback);
	// Asks the loop in start() to exit.
	void stop();
private:
	// True for 90/270-degree orientations (width/height swapped).
	bool isDeviceRotated(int orientation);
	// Creates, configures and starts the H.264 encoder and its input surface.
	status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer);
	// Creates a virtual display that mirrors the main display into
	// |bufferProducer| (the encoder's input surface).
	status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle);
	// Re-applies the virtual display projection (used after rotation).
	status_t setDisplayProjection(const sp<IBinder>& dpy,
        const DisplayInfo& mainDpyInfo);
	// Drains encoder output, invoking |callback| for each encoded buffer.
	status_t runEncoder(const sp<MediaCodec>& encoder, IScreenRecordCallback* callback, sp<IBinder>& mainDpy, sp<IBinder>& dpy, uint8_t orientation);
private:
	bool mRotate;                // rotate 90 degrees
	//bool mSizeSpecified;         // was size explicitly requested?
	uint32_t mVideoWidth;            // default width+height
	uint32_t mVideoHeight;
	uint32_t mBitRate;               // encoder bit rate in bits/second
	//uint32_t mTimeLimitSec;
	// Polled by the loop in runEncoder(); set by stop().
	// NOTE(review): written and read from different threads without
	// synchronization -- consider an atomic.  TODO confirm threading model.
	bool mStopRequested;
};
 
// Construct the facade together with its private implementation object.
ScreenRecord::ScreenRecord()
	: m_pImp(new ScreenRecordImp)
{
}
 
// Ask any running capture loop to exit, then destroy the implementation.
ScreenRecord::~ScreenRecord()
{
	m_pImp->stop();     // equivalent to calling this->stop()
	delete m_pImp;
	m_pImp = NULL;
}
 
 
// Forward to the implementation.  Blocks the caller until recording
// stops; returns 0 on success or an Android status_t error.
int ScreenRecord::start(IScreenRecordCallback* callback)
{
	return m_pImp->start(callback);
}
// Request the capture loop started by start() to exit.
void ScreenRecord::stop()
{
	m_pImp->stop();
}
	
 
// Defaults: video size 0x0 (replaced by the display size in start()),
// 4 Mbps bit rate, no 90-degree rotation, not currently recording.
ScreenRecordImp::ScreenRecordImp():mRotate(false),
							mVideoWidth(0),
							mVideoHeight(0),
							mBitRate(4000000),         // 4 Mbps
							mStopRequested(true)
{
}
 
 
bool ScreenRecordImp::isDeviceRotated(int orientation) {
    return orientation != DISPLAY_ORIENTATION_0 &&
            orientation != DISPLAY_ORIENTATION_180;
}
 
status_t ScreenRecordImp::setDisplayProjection(const sp<IBinder>& dpy,
        const DisplayInfo& mainDpyInfo) {
    status_t err;
 
    // Set the region of the layer stack we're interested in, which in our
    // case is "all of it".  If the app is rotated (so that the width of the
    // app is based on the height of the display), reverse width/height.
    bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
    uint32_t sourceWidth, sourceHeight;
    if (!deviceRotated) {
        sourceWidth = mainDpyInfo.w;
        sourceHeight = mainDpyInfo.h;
    } else {
        ALOGV("using rotated width/height");
        sourceHeight = mainDpyInfo.w;
        sourceWidth = mainDpyInfo.h;
    }
    Rect layerStackRect(sourceWidth, sourceHeight);
 
    // We need to preserve the aspect ratio of the display.
    float displayAspect = (float) sourceHeight / (float) sourceWidth;
 
 
    // Set the way we map the output onto the display surface (which will
    // be e.g. 1280x720 for a 720p video).  The rect is interpreted
    // post-rotation, so if the display is rotated 90 degrees we need to
    // "pre-rotate" it by flipping width/height, so that the orientation
    // adjustment changes it back.
    //
    // We might want to encode a portrait display as landscape to use more
    // of the screen real estate.  (If players respect a 90-degree rotation
    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
    // In that case, we swap the configured video width/height and then
    // supply a rotation value to the display projection.
    uint32_t videoWidth, videoHeight;
    uint32_t outWidth, outHeight;
    if (!mRotate) {
        videoWidth = mVideoWidth;
        videoHeight = mVideoHeight;
    } else {
        videoWidth = mVideoHeight;
        videoHeight = mVideoWidth;
    }
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // limited by narrow width; reduce height
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // limited by short height; restrict width
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }
    uint32_t offX, offY;
    offX = (videoWidth - outWidth) / 2;
    offY = (videoHeight - outHeight) / 2;
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
 
    /*if (gVerbose) {
        if (mRotate) {
            printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
                    outHeight, outWidth, offY, offX);
        } else {
            printf("Content area is %ux%u at offset x=%d y=%d\n",
                    outWidth, outHeight, offX, offY);
        }
    }*/
 
    SurfaceComposerClient::setDisplayProjection(dpy,
            mRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
            layerStackRect, displayRect);
    return NO_ERROR;
}
 
status_t ScreenRecordImp::prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer)
{        
    cout<<"ScreenRecordImp prepareEncoder enter"<<endl;
    status_t err;
    sp<AMessage> format = new AMessage;
    format->setInt32("width", mVideoWidth);
    format->setInt32("height", mVideoHeight);
    format->setString("mime", "video/avc");
    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
    format->setInt32("bitrate", mBitRate);
    format->setFloat("frame-rate", displayFps);
    format->setInt32("i-frame-interval", 10);
    sp<ALooper> looper = new ALooper;
    looper->setName("libscreenrecord_looper");
    looper->start();
    ALOGV("Creating codec");
	cout<<"ScreenRecordImp prepareEncoder Creating codec"<<endl;
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, "video/avc", true);
    if (codec == NULL) {
        fprintf(stderr, "ERROR: unable to create video/avc codec instance\n");
        return UNKNOWN_ERROR;
    }
    err = codec->configure(format, NULL, NULL,
            MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) {
        codec->release();
        codec.clear();
        fprintf(stderr, "ERROR: unable to configure codec (err=%d)\n", err);
        return err;
    }
    ALOGV("Creating buffer producer");
	cout<<"ScreenRecordImp prepareEncoder Creating buffer producer"<<endl;
    sp<IGraphicBufferProducer> bufferProducer;
    err = codec->createInputSurface(&bufferProducer);
    if (err != NO_ERROR) {
        codec->release();
        codec.clear();
        fprintf(stderr,
            "ERROR: unable to create encoder input surface (err=%d)\n", err);
        return err;
    }
    ALOGV("Starting codec");
	cout<<"ScreenRecordImp prepareEncoder Starting codec"<<endl;
    err = codec->start();
    if (err != NO_ERROR) {
        codec->release();
        codec.clear();
        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
        return err;
    }
    ALOGV("Codec prepared");
	cout<<"ScreenRecordImp prepareEncoder Codec prepared"<<endl;
    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return 0;
}
 
status_t ScreenRecordImp::prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle) {
    cout<<"ScreenRecordImp prepareVirtualDisplay enter"<<endl;
	
    status_t err;
    // Set the region of the layer stack we're interested in, which in our
    // case is "all of it".  If the app is rotated (so that the width of the
    // app is based on the height of the display), reverse width/height.
    bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
    uint32_t sourceWidth, sourceHeight;
    if (!deviceRotated) {
        sourceWidth = mainDpyInfo.w;
        sourceHeight = mainDpyInfo.h;
    } else {
        ALOGV("using rotated width/height");
        sourceHeight = mainDpyInfo.w;
        sourceWidth = mainDpyInfo.h;
    }
    Rect layerStackRect(sourceWidth, sourceHeight);
    // We need to preserve the aspect ratio of the display.
    float displayAspect = (float) sourceHeight / (float) sourceWidth;
    // Set the way we map the output onto the display surface (which will
    // be e.g. 1280x720 for a 720p video).  The rect is interpreted
    // post-rotation, so if the display is rotated 90 degrees we need to
    // "pre-rotate" it by flipping width/height, so that the orientation
    // adjustment changes it back.
    //
    // We might want to encode a portrait display as landscape to use more
    // of the screen real estate.  (If players respect a 90-degree rotation
    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
    // In that case, we swap the configured video width/height and then
    // supply a rotation value to the display projection.
    uint32_t videoWidth, videoHeight;
    uint32_t outWidth, outHeight;
    if (!mRotate) {
        videoWidth = mVideoWidth;
        videoHeight = mVideoHeight;
    } else {
        videoWidth = mVideoHeight;
        videoHeight = mVideoWidth;
    }
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // limited by narrow width; reduce height
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // limited by short height; restrict width
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }
    uint32_t offX, offY;
    offX = (videoWidth - outWidth) / 2;
    offY = (videoHeight - outHeight) / 2;
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
 
	cout<<"ScreenRecordImp prepareVirtualDisplay offX:"<<offX<<" offY:"<<offY<<" outWidth:"<<outWidth<<" outHeight"<<outHeight<<endl;	 
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
            String8("LibScreenRecorder"), false /* secure */);
    SurfaceComposerClient::openGlobalTransaction();
    SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
    SurfaceComposerClient::setDisplayProjection(dpy,
            mRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
            layerStackRect, displayRect);
    SurfaceComposerClient::setDisplayLayerStack(dpy, 0);    // default stack
    SurfaceComposerClient::closeGlobalTransaction();
    *pDisplayHandle = dpy;
    return NO_ERROR;
}
 
 
status_t ScreenRecordImp::runEncoder(const sp<MediaCodec>& encoder, IScreenRecordCallback* callback, sp<IBinder>& mainDpy, sp<IBinder>& virtualDpy, 
	uint8_t orientation)
{
	cout<<"ScreenRecordImp runEncoder enter"<<endl;
    static int kTimeout = 250000;   // be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    //int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(180);
    Vector<sp<ABuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }
	DisplayInfo mainDpyInfo;
	cout<<"ScreenRecordImp runEncoder start looping"<<endl;
    // This is set by the signal handler.
    mStopRequested = false;
    // Run until we're signaled.
    while (!mStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;
        /*if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            break;
        }*/
        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
		cout<<"ScreenRecordImp runEncoder dequeueOutputBuffer returned:"<<err<<endl;
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0)
			{
                // ignore this -- we passed the CSD into MediaMuxer when
                // we got the format change notification
                
                ALOGV("Got codec config buffer (%u bytes); ignoring", size);
				cout<<"ScreenRecordImp runEncoder Got codec config buffer bytes:"<<size<<endl;
                //size = 0;
            }
            if (size != 0)
			{
                ALOGV("Got data in buffer %d, size=%d, pts=%lld",
                        bufIndex, size, ptsUsec);
                //CHECK(trackIdx != -1);
                // If the virtual display isn't providing us with timestamps,
                // use the current time.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }
                // The MediaMuxer docs are unclear, but it appears that we
                // need to pass either the full set of BufferInfo flags, or
                // (flags & BUFFER_FLAG_SYNCFRAME).
                /*err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
                        ptsUsec, flags);
                if (err != NO_ERROR) {
                    fprintf(stderr, "Failed writing data to muxer (err=%d)\n",
                            err);
                    return err;
                }*/
                { // scope
                    //ATRACE_NAME("orientation");
                    // Check orientation, update if it has changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                            &mainDpyInfo);
                    if (err != NO_ERROR) {
                        ALOGW("getDisplayInfo(main) failed: %d", err);
                    } else if (orientation != mainDpyInfo.orientation) {
                        ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
                        SurfaceComposerClient::openGlobalTransaction();
                        setDisplayProjection(virtualDpy, mainDpyInfo);
                        SurfaceComposerClient::closeGlobalTransaction();
                        orientation = mainDpyInfo.orientation;
                    }
                }
				
                debugNumFrames++;
				ALOGV("Got codec NumFrames:%d", debugNumFrames);
				cout<<"ScreenRecordImp runEncoder Got codec NumFrames:"<<debugNumFrames<<endl;
				callback->onData(buffers[bufIndex]->data(), size);
				//if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)
				//{
                    //fflush(rawFp);
                    //callback->FFlush();
                //}
				
            }
			cout<<"ScreenRecordImp runEncoder releaseOutputBuffer bufIndex:"<<bufIndex<<endl;
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
			cout<<"ScreenRecordImp runEncoder releaseOutputBuffer done"<<endl;
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger.  Go with it.
                ALOGD("Received end-of-stream");
				cout<<"ScreenRecordImp runEncoder Received end-of-stream"<<endl;
                mStopRequested = true;
            }
            break;
        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
            ALOGV("Got -EAGAIN, looping");
			cout<<"ScreenRecordImp runEncoder Got -EAGAIN, looping"<<endl;
            break;
        case INFO_FORMAT_CHANGED:           // INFO_OUTPUT_FORMAT_CHANGED -1012
            {
                // format includes CSD, which we must provide to muxer
                //sp<AMessage> newFormat;
                //encoder->getOutputFormat(&newFormat);
                //callback->formatChanged(newFormat);
                ALOGV("Encoder format changed");
            }
            break;
        case INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED -1014
            // not expected for an encoder; handle it anyway
            ALOGV("Encoder buffers changed");
			cout<<"ScreenRecordImp runEncoder Encoder buffers changed"<<endl;
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            fprintf(stderr, "Request for encoder buffer failed\n");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }
    ALOGV("Encoder stopping (req=%d)", mStopRequested);
	cout<<"ScreenRecordImp runEncoder Encoder stopping mStopRequested:"<<mStopRequested<<endl;
    return NO_ERROR;
}
 
 
int ScreenRecordImp::start(IScreenRecordCallback* callback)
{
	cout<<"ScreenRecordImp start enter"<<endl;
	status_t err;
	// Start Binder thread pool.  MediaCodec needs to be able to receive
    // messages from mediaserver.
	sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();
 
	cout<<"ScreenRecordImp startThreadPool"<<endl;
	
	// Get main display parameters.
    sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
            ISurfaceComposer::eDisplayIdMain);
    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display characteristics\n");
        return err;
    }
	
	bool rotated = isDeviceRotated(mainDpyInfo.orientation);
    if (mVideoWidth == 0) {
        mVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
    }
    if (mVideoHeight == 0) {
        mVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
    }
 
	cout<<"ScreenRecordImp mVideoWidth:"<<mVideoWidth<<" mVideoHeight:"<<mVideoHeight<<endl;
	
	// Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<IGraphicBufferProducer> bufferProducer;
	err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
 
	if (err != NO_ERROR) {
        // fallback is defined for landscape; swap if we're in portrait
        bool needSwap = mVideoWidth < mVideoHeight;
        uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
        uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
        if (mVideoWidth != newWidth && mVideoHeight != newHeight) {
            ALOGV("Retrying with 720p");
            fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                    mVideoWidth, mVideoHeight, newWidth, newHeight);
            mVideoWidth = newWidth;
            mVideoHeight = newHeight;
            err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
        }
    }
 
	if (err != NO_ERROR) {
        return err;
    }
 
	sp<IBinder> dpy;
    err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        encoder->release();
        encoder.clear();
        return err;
    }
 
	 // Main encoder loop.
	
	 
    err = runEncoder(encoder,callback, mainDpy, dpy, mainDpyInfo.orientation);
    if (err != NO_ERROR) {
        encoder->release();
        encoder.clear();
        return err;
    }
 
	bufferProducer = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    encoder->stop();
    //muxer->stop();
    encoder->release();
	
	return 0;
}
 
// Ask the loop in runEncoder() to exit; it notices the flag on its next
// dequeue iteration (up to ~250 ms later, per kTimeout).
// NOTE(review): plain bool written here and polled by the recording
// thread without synchronization -- this is a data race; consider an
// atomic flag.  TODO confirm intended threading model.
void ScreenRecordImp::stop()
{
	mStopRequested = true;
}

用法

// Example IScreenRecordCallback implementation.  A real client would
// save or forward the H.264 data received in onData().
class ScreenRecordCallback:public IScreenRecordCallback
{
public:
	virtual ~ScreenRecordCallback(){}
	// pData/size hold one encoded H.264 buffer; only valid during the call.
	virtual void onData(void* pData, size_t size)
	{
 
	}
	
};
 
 
int main(int arg, char** arv)
{
	ScreenRecordCallback callback;
	ScreenRecord recorder
	recorder.start(&callback);
	return 0;
}

抓到屏幕码流后ScreenRecordCallback的onData会被回调,收到的数据存下来就是h264码流

Android.mk(makefile)

# Android.mk build rules producing the libscreenrecord shared library.
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
	screenrecord.cpp \
	
# Android framework libraries this module links against.
LOCAL_SHARED_LIBRARIES := \
	libstagefright libmedia libutils libbinder libstagefright_foundation \
	libjpeg libgui libcutils liblog libEGL libGLESv2
	
# Header search paths (stagefright internals and OpenMAX headers).
LOCAL_C_INCLUDES := \
	frameworks/av/media/libstagefright \
	frameworks/av/media/libstagefright/include \
	$(TOP)/frameworks/native/include/media/openmax \
	external/jpeg
	
LOCAL_CFLAGS += -Wno-multichar
LOCAL_MODULE_TAGS := optional
LOCAL_MODULE:= libscreenrecord
include $(BUILD_SHARED_LIBRARY)

后续继续实现Android推送码流到PC机,PC机能够解码显示。

原文:https://blog.csdn.net/star_ni/article/details/54948723 

本文链接:https://www.it72.com/12548.htm

推荐阅读
最新回复 (0)
返回