
A problem playing video with SDL 1.3 on Android

2013-10-10 

A problem playing video with SDL 1.3 on Android.
No matter how I adjust things, the video picture only ever appears on the right side of the screen. Is this a problem with SDL_Surface, with SDL_Overlay, or with something else entirely? Any guidance would be appreciated. JNI code:

int SDL_Play(JNIEnv *env, jclass cls)
{
    LOGI("ready to play using sdl");
    AVFormatContext *pFormatCtx;
#define SDL_AUDIO_BUFFER_SIZE 1024
    static int sws_flags = SWS_BICUBIC;
    int i;
    int videoStream = -1;
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    AVFrame *pFrame;
    AVPacket packet;
    int frameFinished;
    float aspect_ratio;
    AVCodecContext *aCodecCtx;
    SDL_Overlay *bmp;
    SDL_Surface *m_screen;
    SDL_Rect rect;
    SDL_Event event;


    av_register_all();
    if(av_open_input_file(&pFormatCtx, "/mnt/sdcard/test.avi", NULL, 0, NULL)!=0)
        return -1; // Couldn't open file
    LOGI("open successful");
    if(av_find_stream_info(pFormatCtx)<0)
        return -1; // Couldn't find stream information
    // Dump information about file onto standard error
    dump_format(pFormatCtx, 0, "/mnt/sdcard/test.avi", 0);

    // Find the first video stream
    for(i=0; i<pFormatCtx->nb_streams; i++)
    {
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO && videoStream<0)
        {
            videoStream=i;
        }
    }
    if(videoStream==-1)
      return -1; // Didn't find a video stream

    // Get a pointer to the codec context for the video stream

    pCodecCtx=pFormatCtx->streams[videoStream]->codec;
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
    LOGI("find decoder successful");
    if(pCodec==NULL)
    {
        fprintf(stderr, "Unsupported codec!\n");
        LOGI("Unsupported codec!\n");
        return -1; // Codec not found
    }
    // Open codec
    if(avcodec_open(pCodecCtx, pCodec)<0)
        return -1; // Could not open codec
    LOGI("open decoder successful");
    // Allocate video frame
    pFrame=avcodec_alloc_frame();

    uint8_t *buffer;
    int numBytes;
    // Determine required buffer size and allocate buffer
    numBytes=avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
        pCodecCtx->height);
    buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
    // This call is required; without it there is a fatal signal and m_screen cannot be allocated
    SDL_Android_Init(env, cls);

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
    {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        LOGI("Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }
    LOGI("SDL init successful");

    LOGI("pCodecCtx addr:0x%x",pCodecCtx);
    m_screen = SDL_SetVideoMode(/*pCodecCtx->width*/320, /*pCodecCtx->height*/480, 24, SDL_HWSURFACE | SDL_DOUBLEBUF);
    LOGI("m_screen addr:0x%x",m_screen);

    if(!m_screen)
    {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        LOGI("SDL: could not set video mode - exiting\n");
        exit(1);
    }
    LOGI("set video mode successful");


    bmp = SDL_CreateYUVOverlay(/*pCodecCtx->width*/320, /*pCodecCtx->height*/480,
        SDL_YV12_OVERLAY, m_screen);
    // second overlay (experiment)
    //SDL_Overlay * overlay2 = SDL_CreateYUVOverlay(/*pCodecCtx->width*/320, /*pCodecCtx->height*/480,
    //    SDL_YV12_OVERLAY, m_screen);
    LOGI("Create YUV Overlay successful");
    static struct SwsContext *img_convert_ctx;
    if (img_convert_ctx == NULL)
    {
        img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                                         pCodecCtx->pix_fmt,
                                         /*pCodecCtx->width*/320, /*pCodecCtx->height*/480,
                                         PIX_FMT_YUV420P,
                                         sws_flags, NULL, NULL, NULL);
        if (img_convert_ctx == NULL)
        {
            fprintf(stderr, "Cannot initialize the conversion context\n");
            LOGI("Cannot initialize the conversion context\n");
            exit(1);
        }
    }
    i=0;
    while(av_read_frame(pFormatCtx, &packet)>=0)
    {
        // Is this a packet from the video stream?
        if(packet.stream_index==videoStream)
        {
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            // Did we get a video frame?
            if(frameFinished)
            {
                // Convert the image from its native format to RGB
                /*sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
                      0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);*/
                // Save the frame to disk
                /*if(++i<=5)
                    SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, i);*/
                SDL_LockYUVOverlay(bmp);
                //SDL_LockYUVOverlay(overlay2);
                AVPicture pict;
                //AVPicture pic2;
                pict.data[0] = bmp->pixels[0];
                pict.data[1] = bmp->pixels[2];
                pict.data[2] = bmp->pixels[1];

                pict.linesize[0] = bmp->pitches[0];
                pict.linesize[1] = bmp->pitches[2];
                pict.linesize[2] = bmp->pitches[1];
                ///
                //pic2.data[0] = overlay2->pixels[0];
                //pic2.data[1] = overlay2->pixels[2];
                //pic2.data[2] = overlay2->pixels[1];

                //pic2.linesize[0] = overlay2->pitches[0];
                //pic2.linesize[1] = overlay2->pitches[2];
                //pic2.linesize[2] = overlay2->pitches[1];
                // Convert the image into YUV format that SDL uses
                /*img_convert(&pict, PIX_FMT_YUV420P,
                    (AVPicture *)pFrame, pCodecCtx->pix_fmt,
                    pCodecCtx->width, pCodecCtx->height);*/
                sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
                    0, pCodecCtx->height, pict.data, pict.linesize);
                // second overlay
                // sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
                // 0, pCodecCtx->height, pic2.data, pic2.linesize);
                //SDL_UnlockYUVOverlay(overlay2);
                SDL_UnlockYUVOverlay(bmp);
                // first overlay
                rect.x = 0;
                rect.y = 100;
                rect.w = 320;
                rect.h = 320;
                //rect.w = 100;
                //rect.h = 100;
                // second overlay
                //SDL_Rect R;
                //R.x = 120;
                //R.y = 120;
                //rect.w = pCodecCtx->width;
                //rect.h = pCodecCtx->height;
                //R.w = 100;
                //R.h = 100;
                SDL_DisplayYUVOverlay(bmp, &rect);
                //SDL_DisplayYUVOverlay(overlay2, &R);
                //Sleep(60);
            }
        }

        // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);

        SDL_PollEvent(&event);
        switch(event.type)
        {
        case SDL_QUIT:
            SDL_Quit();
            exit(0);
            break;
        default: break;
        }
    };
    // Free the RGB image
    av_free(buffer);


    //av_free(pFrameRGB);
    // Free the YUV frame
    av_free(pFrame);
    // Close the codec
    avcodec_close(pCodecCtx);
    // Close the video file
    av_close_input_file(pFormatCtx);
    return 0;

}
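
The thread never shows the final fix, so the following is only a guess at the cause: the video mode, the overlay and the destination rect are all hard-coded (a 320x480 surface, a rect at x=0, y=100 of size 320x320), which ignores the surface SDL actually hands back on the device. Below is a minimal sketch, assuming the misplacement comes from that mismatch, of deriving the display rect from the returned m_screen and the decoded stream's real dimensions. center_rect is an invented helper name, not part of SDL or of the poster's code.

#include "SDL.h"

/* A guess, not the confirmed fix: compute the destination rect from the
 * surface SDL returned instead of hard-coding it.  m_screen, bmp and
 * pCodecCtx are the variables from the question above. */
static SDL_Rect center_rect(const SDL_Surface *screen, int src_w, int src_h)
{
    SDL_Rect r;
    float sw = (float)screen->w / src_w;
    float sh = (float)screen->h / src_h;
    float scale = (sw < sh) ? sw : sh;   /* fit inside the screen, keep the aspect ratio */

    r.w = (int)(src_w * scale);
    r.h = (int)(src_h * scale);
    r.x = (screen->w - r.w) / 2;         /* center horizontally */
    r.y = (screen->h - r.h) / 2;         /* center vertically   */
    return r;
}

/* Usage at the display step of the question's loop:
 *
 *     SDL_Rect rect = center_rect(m_screen, pCodecCtx->width, pCodecCtx->height);
 *     SDL_DisplayYUVOverlay(bmp, &rect);
 */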

Android sdl
[Solution]
How is your layout file set up? Mine has already been solved.
