// dllmain.cpp : Defines the entry point for the DLL application.
#include <windows.h>
#include <cstdio>
#include <cstdlib>
#include <cinttypes>
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
#include "libavutil/log.h"
}
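
// The MYLIBDLL macro and the Rtmp_tool type are not defined in this file; in the
// original project they presumably come from a separate header. The sketch below
// is an assumption reconstructed from how pre_save()/save_frame() use them, so the
// listing is self-contained.
#ifndef MYLIBDLL
#define MYLIBDLL extern "C" __declspec(dllexport)  // assumed: exported with C linkage
#endif

struct Rtmp_tool {
    int nWidth = 0;                        // frame width in pixels
    int nHeight = 0;                       // frame height in pixels
    uint8_t *yuv_buff = nullptr;           // backing buffer for the YUV420P frame
    FILE *f = nullptr;                     // raw .h264 output file
    struct SwsContext *scxt = nullptr;     // RGB24 -> YUV420P converter
    AVFrame *m_pRGBFrame = nullptr;        // packed RGB24 source frame
    AVFrame *m_pYUVFrame = nullptr;        // YUV420P frame handed to the encoder
    AVCodecContext *c = nullptr;           // H.264 encoder context

    Rtmp_tool() {
        // save_frame() uses m_pRGBFrame but pre_save() never allocates it, so the
        // constructor is assumed to do it.
        m_pRGBFrame = av_frame_alloc();
    }
};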
BOOL APIENTRY DllMain( HMODULE hModule,
                       DWORD  ul_reason_for_call,
                       LPVOID lpReserved
                     )
{
    switch (ul_reason_for_call)
    {
    case DLL_PROCESS_ATTACH:
    case DLL_THREAD_ATTACH:
    case DLL_THREAD_DETACH:
    case DLL_PROCESS_DETACH:
        break;
    }
    return TRUE;
}
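
// Overview: pre_save() configures an H.264 encoder, opens the output file and
// returns an opaque Rtmp_tool* handle; save_frame() takes one frame as an int
// array of packed RGB byte values (3 ints per pixel), converts it to YUV420P
// with swscale, encodes it, and appends the resulting packets to the file as a
// raw .h264 elementary stream.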
MYLIBDLL void* pre_save(char* filepath, int width, int height, int frame_rate);
MYLIBDLL int save_frame(int* plus1, int len, void* vp);
void* pre_save(char* filepath, int width, int height, int frame_rate) {
    av_log_set_level(AV_LOG_ERROR);
    Rtmp_tool *rtmp_tool = new Rtmp_tool();
    rtmp_tool->nWidth = width;
    rtmp_tool->nHeight = height;
    // Deprecated since FFmpeg 4.0 (no-ops on newer builds), but harmless here.
    av_register_all();
    avcodec_register_all();
    AVCodecContext *c = NULL;
    AVCodec *pCodecH264; // encoder
    pCodecH264 = avcodec_find_encoder(AV_CODEC_ID_H264); // look up the H.264 encoder
    if (!pCodecH264)
    {
        printf("H.264 encoder not found\n");
        exit(1);
    }
    c = avcodec_alloc_context3(pCodecH264);
    if (!c)
    {
        printf("Could not allocate video codec context\n");
        exit(1);
    }
    c->bit_rate = 1024 * 1024;        // target bitrate: 1 Mbit/s
    c->width = width;
    c->height = height;
    c->time_base.den = frame_rate;    // time base = 1 / frames per second
    c->time_base.num = 1;
    c->framerate.den = 1;             // nominal frame rate
    c->framerate.num = frame_rate;
    c->gop_size = 20;                 // emit one intra frame every 20 frames
    c->max_b_frames = 1;
    c->thread_count = 1;
    c->pix_fmt = AV_PIX_FMT_YUV420P;  // encoder input format
    // open the encoder
    if (avcodec_open2(c, pCodecH264, NULL) < 0)
    {
        printf("Could not open codec\n");
        exit(1);
    }
    int size = c->width * c->height;
    rtmp_tool->yuv_buff = (uint8_t *)malloc((size * 3) / 2); // size for YUV 420
    rtmp_tool->f = fopen(filepath, "wb");
    if (!rtmp_tool->f)
    {
        printf("could not open %s\n", filepath);
        exit(1);
    }
    printf("context3 w h %d %d\n", c->width, c->height);
    AVFrame *frame = av_frame_alloc();
    if (!frame) {
        fprintf(stderr, "Could not allocate video frame\n");
        exit(1);
    }
    frame->format = c->pix_fmt;
    frame->width = c->width;
    frame->height = c->height;
    frame->pts = 0; // start timestamps at 0; save_frame() advances this per frame
    int ret = av_frame_get_buffer(frame, 32);
    if (ret < 0)
    {
        printf("Could not allocate the video frame data\n");
        exit(1);
    }
    // Initialize the SwsContext format converter (packed RGB24 -> planar YUV420P).
    // The source format must match the AV_PIX_FMT_RGB24 layout filled in save_frame();
    // if the caller actually supplies BGR-ordered pixels, use AV_PIX_FMT_BGR24 here.
    rtmp_tool->scxt = sws_getContext(c->width, c->height, AV_PIX_FMT_RGB24, c->width, c->height, AV_PIX_FMT_YUV420P, SWS_POINT, NULL, NULL, NULL);
    rtmp_tool->m_pYUVFrame = frame;
    rtmp_tool->c = c;
    return rtmp_tool;
}
int save_frame(int* plus1, int len, void* vp) {
    Rtmp_tool *rtmp_tool = (Rtmp_tool *)vp;
    av_log_set_level(AV_LOG_DEBUG);
    // Redundant: the per-pixel copy below casts each value to uint8_t anyway.
    for (int i = 0; i < len; i++) {
        plus1[i] = (uint8_t)plus1[i];
    }
    AVCodecContext *c = rtmp_tool->c;
    AVPacket *avpkt;
    AVFrame *m_pRGBFrame = rtmp_tool->m_pRGBFrame;
    AVFrame *m_pYUVFrame = rtmp_tool->m_pYUVFrame;
    /*unsigned char *pBmpBuf;
    pBmpBuf = new unsigned char[len];*/
    //memcpy(rgb_buff, (uint8_t*)plus1, nDataLen);
    // Point the RGB frame's data/linesize at the caller's buffer (packed RGB24).
    av_image_fill_arrays(m_pRGBFrame->data, m_pRGBFrame->linesize, (uint8_t*)plus1, AV_PIX_FMT_RGB24, rtmp_tool->nWidth, rtmp_tool->nHeight, 1);
    m_pRGBFrame->width = rtmp_tool->nWidth;
    m_pRGBFrame->height = rtmp_tool->nHeight;
    // Repack the one-int-per-byte input into tightly packed RGB24 bytes, in place:
    // data[0] aliases plus1, but destination byte k is written only after the source
    // int at byte offset 4*k has been read, so nothing is clobbered.
    uint8_t *p = m_pRGBFrame->data[0];
    int y = 0, x = 0;
    for (y = 0; y < rtmp_tool->nHeight; y++) {
        for (x = 0; x < rtmp_tool->nWidth; x++) {
            *p++ = (uint8_t)plus1[(y*rtmp_tool->nWidth + x) * 3];     // R
            *p++ = (uint8_t)plus1[(y*rtmp_tool->nWidth + x) * 3 + 1]; // G
            *p++ = (uint8_t)plus1[(y*rtmp_tool->nWidth + x) * 3 + 2]; // B
        }
    }
    // Attach the YUV buffer to the YUV frame.
    //avpicture_fill((AVPicture*)m_pYUVFrame, (uint8_t*)rtmp_tool->yuv_buff, AV_PIX_FMT_YUV420P, rtmp_tool->nWidth, rtmp_tool->nHeight);
    av_image_fill_arrays(m_pYUVFrame->data, m_pYUVFrame->linesize, (uint8_t*)rtmp_tool->yuv_buff, AV_PIX_FMT_YUV420P, c->width, c->height, 1);
    // Flip the RGB image vertically (disabled).
    //m_pRGBFrame->data[0] += m_pRGBFrame->linesize[0] * (rtmp_tool->nHeight - 1);
    //m_pRGBFrame->linesize[0] *= -1;
    //m_pRGBFrame->data[1] += m_pRGBFrame->linesize[1] * (rtmp_tool->nHeight / 2 - 1);
    //m_pRGBFrame->linesize[1] *= -1;
    //m_pRGBFrame->data[2] += m_pRGBFrame->linesize[2] * (rtmp_tool->nHeight / 2 - 1);
    //m_pRGBFrame->linesize[2] *= -1;
    int ret = av_frame_make_writable(m_pYUVFrame); // make sure the frame data is writable
    if (ret < 0) {
        printf("av_frame_make_writable %d\n", ret);
        exit(1);
    }
    // Convert RGB24 to YUV420P.
    sws_scale(rtmp_tool->scxt, (uint8_t * const *)m_pRGBFrame->data, m_pRGBFrame->linesize, 0, c->height, m_pYUVFrame->data, m_pYUVFrame->linesize);
    //m_pYUVFrame->pts++;
    avpkt = av_packet_alloc();
    if (!avpkt) {
        printf("Could not allocate packet\n");
        exit(1);
    }
    /*if (m_pYUVFrame->pts % 10 == 0) {
        m_pYUVFrame->key_frame = 1;
        m_pYUVFrame->pict_type = AV_PICTURE_TYPE_I;
    }
    else {
        m_pYUVFrame->key_frame = 0;
        m_pYUVFrame->pict_type = AV_PICTURE_TYPE_P;
    }*/
    // Submit the frame to the encoder (send/receive API), then advance its pts so
    // the next call gets the next timestamp.
    ret = avcodec_send_frame(c, m_pYUVFrame);
    m_pYUVFrame->pts++;
    if (ret < 0)
    {
        printf("Error sending a frame for encoding\n");
        exit(1);
    }
    // Drain every packet the encoder has ready and append it to the output file.
    while (ret >= 0)
    {
        ret = avcodec_receive_packet(c, avpkt);
        if (ret != 0) {
            av_packet_free(&avpkt);
            if (ret == AVERROR(EAGAIN)) {
                // The encoder needs more input before it can emit another packet;
                // this is the normal exit path, not an error.
                return 0;
            }
            else if (ret == AVERROR(EINVAL)) {
                printf("avcodec_receive_packet AVERROR(EINVAL)\n");
            }
            else {
                printf("avcodec_receive_packet error %d\n", ret);
            }
            return ret;
        }
        printf("Write packet %3" PRId64 " (size=%5d)\n", avpkt->pts, avpkt->size);
        fwrite(avpkt->data, 1, avpkt->size, rtmp_tool->f);
        av_packet_unref(avpkt);
    }
    return 0;
}
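
// Example (hypothetical caller, not part of the DLL): a host application allocates
// one int per RGB byte, fills it with pixel values 0-255 in R,G,B order, and calls
// the two exports once per frame. Note that the DLL exposes no teardown entry point,
// so nothing flushes the encoder (avcodec_send_frame with NULL) or closes the file.
#if 0
static void example_usage()
{
    const int width = 640, height = 360, frame_rate = 25, num_frames = 100;
    void *handle = pre_save((char*)"out.h264", width, height, frame_rate);
    int *rgb = (int *)malloc(sizeof(int) * width * height * 3);
    for (int n = 0; n < num_frames; n++) {
        // Fill one solid-colour frame; a real caller would copy its pixels here.
        for (int i = 0; i < width * height * 3; i += 3) {
            rgb[i]     = (n * 2) & 0xFF;         // R
            rgb[i + 1] = 128;                    // G
            rgb[i + 2] = 255 - ((n * 2) & 0xFF); // B
        }
        save_frame(rgb, width * height * 3, handle);
    }
    free(rgb);
}
#endif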