I'd like to convert an NSImage/UIImage to an AVFrame (FFmpeg). I found some example code at http://lists.mplayerhq.hu/pipermail/libav-user/2010-April/004550.html but it doesn't work.
I tried another approach.
AVFrame *frame = avcodec_alloc_frame();
int numBytes = avpicture_get_size(PIX_FMT_YUV420P, outputWidth, outputHeight);
uint8_t *buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
avpicture_fill((AVPicture *)frame, buffer, PIX_FMT_YUV420P, outputWidth, outputHeight);
//UIImage *image = … something … ;
NSImage *image = … something … ;
//CGImageRef newCgImage = image.CGImage;
CGImageRef newCgImage = [image CGImageForProposedRect:nil context:nil hints:nil];
//NSBitmapImageRep* bm = [NSBitmapImageRep imageRepWithData:[image TIFFRepresentation]];
//CGImageRef newCgImage = [bm CGImage];
size_t w = CGImageGetWidth(newCgImage);
size_t h = CGImageGetHeight(newCgImage);
CGDataProviderRef dataProvider = CGImageGetDataProvider(newCgImage);
CFDataRef bitmapData = CGDataProviderCopyData(dataProvider);
uint8_t *bitmapBuffer = (uint8_t *)CFDataGetBytePtr(bitmapData); // renamed: "buffer" is already declared above
frame->linesize[0] = w;
int y, x;
for (y = 0; y < h; y++) {
    for (x = 0; x < w; x++) {
        int z = y * w + x;
        frame->data[0][z] = bitmapBuffer[z]; // copies raw (A)RGB bytes straight into the Y plane
    }
}
But this AVFrame gives me a green picture.
Please let me know how I can get this working.
Thanks.
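A solid green frame is the classic symptom of YUV420P chroma planes holding zeros, since U = V = 0 decodes to a strong green; here only the Y plane is written, and with raw (A)RGB bytes at that. A minimal sanity-check sketch, reusing frame, bitmapBuffer, w and h from above and assuming a tightly packed 32-bit RGBA bitmap (check CGImageGetBitmapInfo and CGImageGetAlphaInfo): fill Y with a luma approximation and set both chroma planes to the neutral value 128; if the output turns grayscale instead of green, the plane layout is the problem.

size_t bytesPerRow = CGImageGetBytesPerRow(newCgImage);
int x, y;
for (y = 0; y < h; y++) {
    for (x = 0; x < w; x++) {
        const uint8_t *px = bitmapBuffer + y * bytesPerRow + x * 4; // RGBA order assumed
        frame->data[0][y * frame->linesize[0] + x] =
            (uint8_t)(0.299 * px[0] + 0.587 * px[1] + 0.114 * px[2]); // BT.601 luma
    }
}
// In YUV420P each chroma plane is (w/2) x (h/2); 128 means "no color".
memset(frame->data[1], 128, frame->linesize[1] * h / 2);
memset(frame->data[2], 128, frame->linesize[2] * h / 2);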
The following is additional.
I tried again, paying attention to the color format. I found an example of converting RGB to YUV: How to perform RGB->YUV conversion in C/C++?
The new code is like this, but it still doesn't work…
#import <Foundation/Foundation.h>
#import <AppKit/AppKit.h>
#import <libavutil/avstring.h>
#import <libavcodec/avcodec.h>
#import <libavformat/avformat.h>
#import <libswscale/swscale.h>
int main(int argc, char *argv[]) {
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    int i;
    int outputWidth = 480;  // must match the size of the input images
    int outputHeight = 360; // must match the size of the input images
    av_register_all();
    AVOutputFormat *format = av_guess_format("mp4", NULL, NULL);
    if(!format) return -1;
    AVFormatContext *outFormatCtx = avformat_alloc_context();
    if(!outFormatCtx) return -1;
    outFormatCtx->oformat = format;
    av_strlcpy(outFormatCtx->filename, "test.mov", sizeof(outFormatCtx->filename));
    AVStream *vstream = av_new_stream(outFormatCtx, 0);
    if(!vstream) return -1;
    enum CodecID codec_id = av_guess_codec(outFormatCtx->oformat, NULL,
                                           outFormatCtx->filename, NULL,
                                           CODEC_TYPE_VIDEO);
    AVCodec *ovCodec = avcodec_find_encoder(codec_id);
    if(!ovCodec) return -1;
    AVCodecContext *ovCodecCtx = vstream->codec;
    ovCodecCtx->codec_id = ovCodec->id;
    ovCodecCtx->codec_type = CODEC_TYPE_VIDEO;
    ovCodecCtx->width = outputWidth;
    ovCodecCtx->height = outputHeight;
    ovCodecCtx->pix_fmt = PIX_FMT_NONE;
    if(ovCodec && ovCodec->pix_fmts){
        // walk the codec's supported pixel formats; fall back to the first one
        // (the original while(*p++ != -1) skipped the first entry and could
        // read past the terminator)
        const enum PixelFormat *p = ovCodec->pix_fmts;
        while(*p != -1){
            if(*p == ovCodecCtx->pix_fmt) break;
            p++;
        }
        if(*p == -1) ovCodecCtx->pix_fmt = ovCodec->pix_fmts[0];
    }
    ovCodecCtx->time_base.num = 1;
    ovCodecCtx->time_base.den = 30;
    if(format->flags & AVFMT_GLOBALHEADER)
        ovCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
    if(avcodec_open(ovCodecCtx, ovCodec) != 0) return -1;
    if(!(format->flags & AVFMT_NOFILE)){
        if(url_fopen(&outFormatCtx->pb, outFormatCtx->filename, URL_WRONLY) < 0) return -1;
    }
    av_write_header(outFormatCtx);
    int buf_size = ovCodecCtx->width * ovCodecCtx->height * 4;
    uint8_t *buf = av_malloc(buf_size);
    AVFrame *buffer_frame = avcodec_alloc_frame();
    if(!buffer_frame) return -1;
    AVFrame *frame = avcodec_alloc_frame();
    if(!frame) return -1;
    int numBytes = avpicture_get_size(PIX_FMT_YUV420P, outputWidth, outputHeight);
    uint8_t *buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
    avpicture_fill((AVPicture *)frame, buffer, PIX_FMT_YUV420P, outputWidth, outputHeight);
    for(i = 1; i < argc; i++){
        NSAutoreleasePool *innerPool = [[NSAutoreleasePool alloc] init];
        NSImage *image = [[NSImage alloc] initWithContentsOfFile:[NSString stringWithCString:argv[i] encoding:NSUTF8StringEncoding]];
        CGImageRef imageRef = [image CGImageForProposedRect:nil context:nil hints:nil];
        size_t w = CGImageGetWidth(imageRef);
        size_t h = CGImageGetHeight(imageRef);
        size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
        CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
        CFDataRef bitmapData = CGDataProviderCopyData(dataProvider);
        uint8_t *buff = (uint8_t *)CFDataGetBytePtr(bitmapData);
        uint8_t R,G,B,Y,U,V;
        int x,y;
        for(y = 0; y < h; y++){
            for(x = 0; x < w; x++){
                uint8_t *tmp = buff + y * bytesPerRow + x * 4;
                R = *(tmp + 3);
                G = *(tmp + 2);
                B = *(tmp + 1);
                Y = (0.257 * R) + (0.504 * G) + (0.098 * B) + 16;
                U = -(0.148 * R) - (0.291 * G) + (0.439 * B) + 128;
                V = (0.439 * R) - (0.368 * G) - (0.071 * B) + 128;
                //printf("y:%d x:%d R:%d,G:%d,B:%d Y:%d,U:%d,V:%d \n",y,x,R,G,B,Y,U,V);
                frame->data[0][y*frame->linesize[0]+x] = Y;
                //frame->data[1][y*frame->linesize[1]+x] = U; // uncommenting this crashes with a bus error (see the note after the program)
                //frame->data[2][y*frame->linesize[2]+x] = V; // uncommenting this crashes with a bus error (see the note after the program)
            }
        }
        CGImageRelease(imageRef);
        CFRelease(bitmapData);
        int out_size = avcodec_encode_video(ovCodecCtx, buf, buf_size, frame);
        AVPacket outPacket;
        av_init_packet(&outPacket);
        outPacket.stream_index = vstream->index;
        outPacket.data = buf;
        outPacket.size = out_size;
        //outPacket.pts = ?;
        //outPacket.dts = ?;
        if(ovCodecCtx->coded_frame->key_frame)
            outPacket.flags |= PKT_FLAG_KEY;
        if(av_interleaved_write_frame(outFormatCtx, &outPacket) != 0) return -1;
        [image release];
        [innerPool release];
    }
    av_write_trailer(outFormatCtx);
    if(!(format->flags & AVFMT_NOFILE))
        if(url_fclose(outFormatCtx->pb) < 0) return -1;
    avcodec_close(vstream->codec);
    for(i = 0; i < outFormatCtx->nb_streams; i++){
        av_freep(&outFormatCtx->streams[i]->codec);
        av_freep(&outFormatCtx->streams[i]);
    }
    av_freep(&outFormatCtx);
    av_free(buffer);
    av_free(frame);
    av_free(buffer_frame);
    [pool release];
    return 0;
}
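A note on the two commented-out chroma writes that crash: in YUV420P the chroma planes are subsampled 2x2, so frame->data[1] and frame->data[2] each hold only (width/2) x (height/2) bytes, and indexing them with the full-resolution x and y runs past the end of the buffer, hence the bus error. A minimal sketch of corrected inner-loop writes, using the same variables as above (taking the top-left sample of each 2x2 block instead of averaging, for brevity):

frame->data[0][y * frame->linesize[0] + x] = Y;
// one U and one V sample per 2x2 block of pixels
if((x % 2 == 0) && (y % 2 == 0)){
    frame->data[1][(y / 2) * frame->linesize[1] + (x / 2)] = U;
    frame->data[2][(y / 2) * frame->linesize[2] + (x / 2)] = V;
}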
And the makefile is like this.
CC = /usr/bin/gcc
CFLAGS = -O4 -Wall -I/usr/local/include
LDFLAGS =
LDLIBS = -L/usr/local/bin -lavutil -lavformat -lavcodec -lswscale
FRAMEWORK = -framework Foundation -framework AppKit #-framework CoreGraphics
OBJS = test.o
test: $(OBJS)
	$(CC) -o $@ $(LDFLAGS) $(OBJS) $(LDLIBS) $(FRAMEWORK) -lz -lbz2 -arch x86_64
Please somebody help me.
There is a colorspace mismatch between the data of the CGImage and the destination AVFrame. To fix it, you need to convert the CGImage data (probably ARGB) into the YUV420P format; FFmpeg has a built-in format converter, libswscale. You can get information about the layout of a CGImage's pixel data with the CGImageGetBitsPerComponent, CGImageGetBitsPerPixel and CGImageGetBytesPerRow functions. A sketch of that conversion follows.
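For instance, a minimal sketch using libswscale in place of the hand-written per-pixel loop. The variables (frame, imageRef, outputWidth, outputHeight) are the ones from the question's code, and PIX_FMT_ARGB is an assumption about the CGImage's byte order that you should verify with the functions above (and CGImageGetBitmapInfo), substituting PIX_FMT_RGBA or PIX_FMT_BGRA as needed:

CGDataProviderRef provider = CGImageGetDataProvider(imageRef);
CFDataRef bitmapData = CGDataProviderCopyData(provider);
const uint8_t *src = CFDataGetBytePtr(bitmapData);
int srcW = (int)CGImageGetWidth(imageRef);
int srcH = (int)CGImageGetHeight(imageRef);
int srcStride = (int)CGImageGetBytesPerRow(imageRef);

// one context: ARGB in, YUV420P out, scaled to the output size
struct SwsContext *sws = sws_getContext(srcW, srcH, PIX_FMT_ARGB,
                                        outputWidth, outputHeight, PIX_FMT_YUV420P,
                                        SWS_BILINEAR, NULL, NULL, NULL);
uint8_t *srcSlice[4]  = { (uint8_t *)src, NULL, NULL, NULL };
int srcLinesize[4]    = { srcStride, 0, 0, 0 };
sws_scale(sws, srcSlice, srcLinesize, 0, srcH, frame->data, frame->linesize);
sws_freeContext(sws);
CFRelease(bitmapData);

This also removes the need for the manual RGB->YUV formulas and handles the chroma subsampling that caused the bus errors in the question's loop.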