Commit dc7c79ac authored by libb

Write RGB frames to file

Change-Id: I354729e0f5a60e35524f9bad759cbe1d8b0ee418
Parent 5e2fbb79
@@ -31,6 +31,7 @@
841255E516B14E45001749D9 /* RTSPClientConnection.mm in Sources */ = {isa = PBXBuildFile; fileRef = 841255E416B14E45001749D9 /* RTSPClientConnection.mm */; };
841399FA16B1842B00FAD610 /* RTSPMessage.m in Sources */ = {isa = PBXBuildFile; fileRef = 841399F916B1842B00FAD610 /* RTSPMessage.m */; };
846119C716D3BF8D00468D98 /* CameraServer.m in Sources */ = {isa = PBXBuildFile; fileRef = 846119C616D3BF8D00468D98 /* CameraServer.m */; };
F381C2FA24454F1300B72E4E /* CoreImage.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = F381C2F924454F1300B72E4E /* CoreImage.framework */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
@@ -71,6 +72,7 @@
841399F916B1842B00FAD610 /* RTSPMessage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RTSPMessage.m; sourceTree = "<group>"; };
846119C516D3BF8D00468D98 /* CameraServer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraServer.h; sourceTree = "<group>"; };
846119C616D3BF8D00468D98 /* CameraServer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraServer.m; sourceTree = "<group>"; };
F381C2F924454F1300B72E4E /* CoreImage.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreImage.framework; path = System/Library/Frameworks/CoreImage.framework; sourceTree = SDKROOT; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -78,6 +80,7 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
F381C2FA24454F1300B72E4E /* CoreImage.framework in Frameworks */,
04A9F7A822FADB6900B593DA /* OpenGLES.framework in Frameworks */,
841255CB16A09114001749D9 /* CoreVideo.framework in Frameworks */,
841255C916A035F9001749D9 /* CoreMedia.framework in Frameworks */,
@@ -114,6 +117,7 @@
8412559F16A035E3001749D9 /* Frameworks */ = {
isa = PBXGroup;
children = (
F381C2F924454F1300B72E4E /* CoreImage.framework */,
04A9F7A722FADB6900B593DA /* OpenGLES.framework */,
841255A016A035E3001749D9 /* UIKit.framework */,
841255A216A035E3001749D9 /* Foundation.framework */,
@@ -213,7 +217,7 @@
ORGANIZATIONNAME = "Geraint Davies";
TargetAttributes = {
8412559B16A035E3001749D9 = {
-DevelopmentTeam = 6TZ958XNMU;
+DevelopmentTeam = 7448PDV238;
};
};
};
@@ -222,6 +226,7 @@
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
English,
en,
);
mainGroup = 8412559116A035E3001749D9;
@@ -354,11 +359,11 @@
841255C416A035E3001749D9 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
-DEVELOPMENT_TEAM = 6TZ958XNMU;
+DEVELOPMENT_TEAM = 7448PDV238;
GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = "Encoder Demo/Encoder Demo-Prefix.pch";
INFOPLIST_FILE = "Encoder Demo/Encoder Demo-Info.plist";
-PRODUCT_BUNDLE_IDENTIFIER = com.bairuitech.test;
+PRODUCT_BUNDLE_IDENTIFIER = "com.bairuitech.test-rtsp";
PRODUCT_NAME = "$(TARGET_NAME)";
WRAPPER_EXTENSION = app;
};
@@ -367,11 +372,11 @@
841255C516A035E3001749D9 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
-DEVELOPMENT_TEAM = 6TZ958XNMU;
+DEVELOPMENT_TEAM = 7448PDV238;
GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = "Encoder Demo/Encoder Demo-Prefix.pch";
INFOPLIST_FILE = "Encoder Demo/Encoder Demo-Info.plist";
-PRODUCT_BUNDLE_IDENTIFIER = com.bairuitech.test;
+PRODUCT_BUNDLE_IDENTIFIER = "com.bairuitech.test-rtsp";
PRODUCT_NAME = "$(TARGET_NAME)";
WRAPPER_EXTENSION = app;
};
@@ -9,6 +9,7 @@
#import "CameraServer.h"
#import "AVEncoder.h"
#import "RTSPServer.h"
#import <CoreImage/CoreImage.h>
static CameraServer* theServer;
@@ -23,6 +24,8 @@ static CameraServer* theServer;
RTSPServer* _rtsp;
}
@property (nonatomic, strong) CIContext *ciContext;
@end
@@ -89,10 +92,174 @@ static CameraServer* theServer;
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
[self dealWithSampleBuffer:sampleBuffer];
// pass frame to encoder
[_encoder encodeFrame:sampleBuffer];
}
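Note that the new dealWithSampleBuffer: call runs synchronously inside the capture callback, ahead of the encoder. If the CoreImage conversion ever becomes heavy enough to stall capture, the frame can be retained and handed to a worker queue instead; a minimal sketch, assuming a serial _processingQueue created elsewhere (the queue is hypothetical, not part of this commit):

    // Sketch: offload the debug conversion so the capture callback stays fast.
    // _processingQueue is a hypothetical serial queue, e.g. created once with
    // dispatch_queue_create("framedump", DISPATCH_QUEUE_SERIAL).
    CFRetain(sampleBuffer); // keep the CMSampleBuffer alive across the async hop
    dispatch_async(_processingQueue, ^{
        [self dealWithSampleBuffer:sampleBuffer];
        CFRelease(sampleBuffer); // balance the retain above
    });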
- (void)dealWithSampleBuffer:(CMSampleBufferRef)buffer {
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(buffer);
CIImage *ciimage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
NSLog(@"pixelBuffer width:%d height:%d",width,height);
CGFloat widthScale = width/720.0;
CGFloat heightScale = height/1280.0;
CGFloat realWidthScale = 1;
CGFloat realHeightScale = 1;
if (widthScale > 1 || heightScale > 1) {
if (widthScale < heightScale) {
realHeightScale = 1280.0/height;
CGFloat nowWidth = width * 1280 / height;
height = 1280;
realWidthScale = nowWidth/width;
width = nowWidth;
} else {
realWidthScale = 720.0/width;
CGFloat nowHeight = 720 * height / width;
width = 720;
realHeightScale = nowHeight/height;
height = nowHeight;
}
}
{
if (_ciContext == nil) {
// create the context once; building a CIContext per frame is expensive
_ciContext = [CIContext contextWithOptions:nil];
}
CIImage *newImage = [ciimage imageByApplyingTransform:CGAffineTransformMakeScale(realWidthScale, realHeightScale)];
// UIImage *tmpImage = [self imageWithColor:[UIColor redColor] AndRect:CGRectMake(0, 0, width, height)];
// CIImage *newImage = [CIImage imageWithCGImage:tmpImage.CGImage];
CVPixelBufferRef newPixcelBuffer = NULL;
CVReturn ret = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, NULL, &newPixcelBuffer);
if (ret != kCVReturnSuccess || newPixcelBuffer == NULL) {
return; // allocation failed; skip this frame
}
[_ciContext render:newImage toCVPixelBuffer:newPixcelBuffer];
// [self.videoEncoder encodeVideoData:newPixcelBuffer timeStamp:(CACurrentMediaTime()*1000)];
size_t newWidth = CVPixelBufferGetWidth(newPixcelBuffer);
size_t newHeight = CVPixelBufferGetHeight(newPixcelBuffer);
NSLog(@"newPixcelBuffer width:%zu height:%zu", newWidth, newHeight);
UIImage *sampleImage = [self imageFromSamplePlanerPixelBuffer:newPixcelBuffer]; // also dumps raw RGB frames to disk
(void)sampleImage; // silence the unused-variable warning; kept for debugger inspection
CVPixelBufferRelease(newPixcelBuffer);
}
}
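The branch above is an aspect-fit cap to 720x1280: the dimension that overshoots its bound by the larger factor drives the scale, so both real scale factors come out (effectively) equal. The same computation as a standalone sketch (the helper name is hypothetical):

    // Shrink `size` to fit within 720x1280, preserving aspect ratio;
    // sizes already within bounds pass through unchanged.
    static CGSize FitTo720x1280(CGSize size) {
        CGFloat widthScale = size.width / 720.0;
        CGFloat heightScale = size.height / 1280.0;
        CGFloat overshoot = MAX(widthScale, heightScale);
        if (overshoot <= 1.0) {
            return size; // already fits
        }
        // e.g. 1920x1080 -> 720x405, 1080x1920 -> 720x1280
        return CGSizeMake(size.width / overshoot, size.height / overshoot);
    }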
- (UIImage *)imageWithColor:(UIColor *)color AndRect:(CGRect)rect{
UIGraphicsBeginImageContext(rect.size);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, [color CGColor]);
CGContextFillRect(context, rect);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return image;
}
- (UIImage *) imageFromSamplePlanerPixelBuffer:(CVPixelBufferRef)imageBuffer{
@autoreleasepool {
// // Get a CMSampleBuffer's Core Video image buffer for the media data
// CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get the base address of the pixel data (the buffer is non-planar 32BGRA)
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Get the number of bytes per row (may exceed width*4 due to row padding)
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the pixel buffer width and height
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
size_t size = CVPixelBufferGetDataSize(imageBuffer);
OSType type = CVPixelBufferGetPixelFormatType(imageBuffer);
NSLog(@"buffer type:%u size:%zu", (unsigned int)type, size);
// Dump the first few frames as raw BGRA for offline inspection.
static int frameCount = 0;
frameCount++;
if (frameCount < 4) {
size_t rowBytes = width * 4; // tightly packed output rows
size_t len = rowBytes * height;
uint8_t *rgb_frame = (uint8_t *)malloc(len);
uint8_t *src = (uint8_t *)baseAddress;
for (size_t y = 0; y < height; y++) {
// copy one row, dropping any padding bytes beyond width*4
memcpy(rgb_frame + y * rowBytes, src + y * bytesPerRow, rowBytes);
}
NSString *path2 = [self getHome2Path];
FILE *fp = fopen(path2.UTF8String, "wb");
if (fp != NULL) {
fwrite(rgb_frame, 1, len, fp); // one bulk write instead of a byte-at-a-time loop
fclose(fp);
}
free(rgb_frame);
}
// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Create a bitmap graphics context with the sample buffer data
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
bytesPerRow, colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
// Create a Quartz image from the pixel data in the bitmap graphics context
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
// Free up the context and color space
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
// Create an image object from the Quartz image
UIImage *image = [UIImage imageWithCGImage:quartzImage];
// Release the Quartz image
CGImageRelease(quartzImage);
return (image);
}
}
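The dump is headerless, tightly packed BGRA, so it can be reloaded with the same bitmap-context parameters used above; a sketch for inspection, assuming the dimensions logged when the frame was written (the helper name is made up). Off-device, ffplay -f rawvideo -pixel_format bgra -video_size 720x1280 0.rgb plays such a dump directly.

    // Sketch: reload a raw BGRA dump into a UIImage for inspection.
    static UIImage *ImageFromRawBGRAFile(NSString *path, size_t width, size_t height) {
        NSMutableData *data = [NSMutableData dataWithContentsOfFile:path];
        if (data.length < width * height * 4) return nil; // missing or truncated
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(data.mutableBytes, width, height, 8,
            width * 4, colorSpace,
            kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little); // flags match the dump
        CGColorSpaceRelease(colorSpace);
        if (context == NULL) return nil;
        CGImageRef cgImage = CGBitmapContextCreateImage(context);
        CGContextRelease(context);
        UIImage *image = [UIImage imageWithCGImage:cgImage];
        CGImageRelease(cgImage);
        return image;
    }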
- (void) shutdown
{
NSLog(@"shutting down server");
@@ -124,4 +291,13 @@ static CameraServer* theServer;
return _preview;
}
- (NSString *)getHome2Path{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docDir = [paths objectAtIndex:0];
static int i = 0; // per-run counter: files land in Documents as 0.rgb, 1.rgb, ...
return [NSString stringWithFormat:@"%@/%d.rgb", docDir, i++];
}
@end
@@ -339,7 +339,7 @@ static void onRTCP(CFSocketRef s,
nalu = ppsData;
}
int cBytes = (int)[nalu length];
-BOOL bLast = 1;//(i == nNALUs-1);
+BOOL bLast = (i == nNALUs-1);
const unsigned char* pSource = (unsigned char*)[nalu bytes];