Ios 通过NSInputStream/NSOutputStream传输多个图像
我有两个NSInputStream和NSOutputStream通过网络相互连接。我想将核心数据对象和相关图像从一台设备传输到另一台设备。我已成功地将核心数据对象转换为JSON,并传输到流的另一端,然后从JSON填充核心数据。现在有了与每条记录关联的图像。图像在磁盘上,只有路径存储在核心数据对象中。在写入输出流时,必须手头有完整的数据。我已经准备好了XML(其中包含JSON)。
NSInputStream
和NSOutputStream
通过网络相互连接。我想将核心数据对象和相关图像从一台设备传输到另一台设备。我已成功地将核心数据对象转换为JSON
,并传输到流的另一端,然后从JSON
填充核心数据。现在有了与每个录制的图像关联的图像。图像在光盘上,只有路径存储在核心数据对象中。现在,在写入输出流时,您必须手头有完整的数据。我已经准备好了XML
(其中包含JSON
)
1.但是如何将图像(NSData*
)与XML
(也就是NSData*
)一起传输呢?在读取端(NSInputStream
)如何区分XML和图像
2.另外,我必须传输多个图像,我们如何在NSInputStream
结尾判断一个图像的字节已完成,下一个图像的字节已开始
3.我们如何知道传输了哪个图像(名称)?
感谢将NSData
(每个UIImage
)转换为NSString
表示,然后将所有NSString
对象放入NSDictionary
并序列化该字典。通过这种方式,当您通过网络传输数据时,您可以反转提取图像的过程,从而知道哪个关键点指向哪个图像。这样,您应该能够传输多个图像
希望这有帮助
干杯我通过以下步骤解决了这个问题:
1.将每个托管对象转换为NSDictionary
2.将所有词典放入NSArray
3.使用NSKeyedArchiver
将NSArray
转换为NSData
4.通过流传输数据
在接收器的一端,我颠倒了上述步骤。
谢谢马吕斯·库戈纳斯,每个答案听起来都很可笑。试着这样做:
// Stream-event handler: accumulate incoming bytes into `mdata` and treat a
// short read as "image complete", then render the image and reset the buffer.
case NSStreamEventHasBytesAvailable: {
NSLog(@"NSStreamEventHasBytesAvailable");
// BUG FIX: the original declared `uint8_t * mbuf[DATA_LENGTH]` — an array of
// DATA_LENGTH *pointers*, not bytes. read:maxLength: expects a plain byte buffer.
uint8_t mbuf[DATA_LENGTH];
mlen = [(NSInputStream *)stream read:mbuf maxLength:DATA_LENGTH];
NSLog(@"mlen == %ld", (long)mlen);
if (mlen > 0) {
// Only append on a successful read; read: returns a negative value on error,
// which the original would have cast to a huge NSUInteger length.
[mdata appendBytes:(const void *)mbuf length:(NSUInteger)mlen];
}
NSLog(@"mdata length == %lu", (unsigned long)mdata.length);
// NOTE(review): a short read is NOT a reliable end-of-image marker — TCP may
// deliver fewer bytes than requested at any time. A length-prefixed framing
// protocol is the robust fix; the heuristic is kept to preserve the
// surrounding sender/receiver contract.
if (mlen < DATA_LENGTH) {
NSLog(@"displayImage");
UIImage *image = [UIImage imageWithData:mdata];
[self.peerConnectionViewController.view.subviews[0].layer setContents:(__bridge id)image.CGImage];
mdata = nil;
mlen = DATA_LENGTH;
mdata = [[NSMutableData alloc] init];
}
} break;
case NSStreamEventHasBytesAvailable: {
NSLog(@"NSStreamEventHasBytesAvailable");
uint8_t * mbuf[DATA_LENGTH];
mlen = [(NSInputStream *)stream read:(uint8_t *)mbuf maxLength:DATA_LENGTH];
NSLog(@"mlen == %lu", mlen);
[mdata appendBytes:(const void *)mbuf length:mlen];
NSLog(@"mdata length == %lu", mdata.length);
if (mlen < DATA_LENGTH) {
NSLog(@"displayImage");
UIImage *image = [UIImage imageWithData:mdata];
[self.peerConnectionViewController.view.subviews[0].layer setContents:(__bridge id)image.CGImage];
mdata = nil;
mlen = DATA_LENGTH;
mdata = [[NSMutableData alloc] init];
}
} break;
...
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer,0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
UIImage *image = [[UIImage alloc] initWithCGImage:newImage scale:1 orientation:UIImageOrientationUp];
CGImageRelease(newImage);
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
NSData *data = [NSData dataWithData:UIImageJPEGRepresentation(image, 0.25)];
__block BOOL baseCaseCondition = NO; // obviously this should be data driven, not hardcoded
__block NSInteger _len = DATA_LENGTH;
__block NSInteger _byteIndex = 0;
typedef void (^RecursiveBlock)(void (^)());
RecursiveBlock aRecursiveBlock;
aRecursiveBlock = ^(RecursiveBlock block) {
NSLog(@"Block called...");
baseCaseCondition = (data.length > 0 && _byteIndex < data.length) ? TRUE : FALSE;
谢谢,亲爱的。你的回答使我找到了解决办法。
// AVCapture delegate: converts each captured frame to a JPEG and streams it
// out in DATA_LENGTH-sized chunks via a self-rescheduling block on the main queue.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
UIImage *image = [[UIImage alloc] initWithCGImage:newImage scale:1 orientation:UIImageOrientationUp];
CGImageRelease(newImage);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
// UIImageJPEGRepresentation already returns an immutable NSData;
// the original's extra dataWithData: copy was redundant.
NSData *data = UIImageJPEGRepresentation(image, 0.25);
__block BOOL baseCaseCondition = NO; // obviously this should be data driven, not hardcoded
__block NSInteger _len = DATA_LENGTH;
__block NSInteger _byteIndex = 0;
typedef void (^RecursiveBlock)(void (^)());
RecursiveBlock aRecursiveBlock;
aRecursiveBlock = ^(RecursiveBlock block) {
NSLog(@"Block called...");
// Recurse while unsent bytes remain (YES/NO, not TRUE/FALSE, per ObjC idiom).
baseCaseCondition = (data.length > 0 && _byteIndex < data.length) ? YES : NO;
if (baseCaseCondition && block)
{
// Remaining is guaranteed > 0 here, so the original's dead
// `== 0 ? 1` ternary branch is removed.
NSInteger remaining = (NSInteger)data.length - _byteIndex;
_len = remaining < DATA_LENGTH ? remaining : DATA_LENGTH;
//
NSLog(@"START | byteIndex: %ld/%lu writing len: %ld", (long)_byteIndex, (unsigned long)data.length, (long)_len);
//
// BUG FIX: the original declared `uint8_t * bytes[_len]` — a VLA of
// *pointers* — and passed `&bytes` to getBytes:. A plain byte buffer
// passed directly is what both APIs expect.
uint8_t chunk[_len];
[data getBytes:chunk range:NSMakeRange((NSUInteger)_byteIndex, (NSUInteger)_len)];
NSInteger written = [self.outputStream write:(const uint8_t *)chunk maxLength:(NSUInteger)_len];
if (written <= 0) {
// Stream error (-1) or zero-capacity write: stop recursing instead of
// adding a negative value into _byteIndex as the original did.
NSLog(@"write failed (%ld); aborting frame", (long)written);
return;
}
_byteIndex += written;
//
NSLog(@"END | byteIndex: %ld/%lu wrote len: %ld", (long)_byteIndex, (unsigned long)data.length, (long)_len);
//
// A barrier on the main queue is a no-op (it is a serial queue);
// plain dispatch_async schedules the next chunk identically.
dispatch_async(dispatch_get_main_queue(), ^{
block(block);
});
}
};
if (self.outputStream.hasSpaceAvailable)
aRecursiveBlock(aRecursiveBlock);
}