Objective c GCDAsyncSocket未接收所有传输的数据,缺少“last”;“块”;
我试图将一些字符串和图像数据从python脚本发送到运行在OSX上的objective C应用程序 我使用GCDAsyncSocket收集传输的数据,并将其附加到NSMutableData,直到服务器断开连接。然后我处理这些数据,并将其拆分为原始部分 传输的数据包括以下内容: ID字符串,填充为16字节 图像编号字符串,填充为16字节 原始图像数据 终止字符串,填写为16字节 问题是,我没有接收/获取最后一块数据,最终丢失了JPEG图像的结尾,导致图像损坏(尽管大部分显示)和终止字符串丢失 以下是我使用GCDAsyncSocket获取数据并对其进行处理的代码: 插座连接:Objective c GCDAsyncSocket未接收所有传输的数据,缺少“last”;“块”;,objective-c,sockets,osx-mountain-lion,asyncsocket,gcdasyncsocket,Objective C,Sockets,Osx Mountain Lion,Asyncsocket,Gcdasyncsocket,我试图将一些字符串和图像数据从python脚本发送到运行在OSX上的objective C应用程序 我使用GCDAsyncSocket收集传输的数据,并将其附加到NSMutableData,直到服务器断开连接。然后我处理这些数据,并将其拆分为原始部分 传输的数据包括以下内容: ID字符串,填充为16字节 图像编号字符串,填充为16字节 原始图像数据 终止字符串,填写为16字节 问题是,我没有接收/获取最后一块数据,最终丢失了JPEG图像的结尾,导致图像损坏(尽管大部分显示)和终止字符串丢失 以下
- (void)socket:(GCDAsyncSocket *)sock didAcceptNewSocket:(GCDAsyncSocket *)newSocket
{
// This method is executed on the socketQueue (not the main thread)
@synchronized(connectedSockets)
{
[connectedSockets addObject:newSocket];
}
NSString *host = [newSocket connectedHost];
UInt16 port = [newSocket connectedPort];
dispatch_async(dispatch_get_main_queue(), ^{
@autoreleasepool {
[self logInfo:FORMAT(@"Accepted client %@:%hu", host, port)];
}
});
[newSocket readDataToData:[GCDAsyncSocket CRLFData] withTimeout:-1 tag:0];
}
接收到套接字数据
- (void)socket:(GCDAsyncSocket *)sock didReadData:(NSData *)data withTag:(long)tag
{
// This method is executed on the socketQueue (not the main thread)
dispatch_async(dispatch_get_main_queue(), ^{
@autoreleasepool {
NSLog(@"Thread Data Length is %lu", (unsigned long)[data length]);
if (!imageBuffer){
imageBuffer = [[NSMutableData alloc]init];
}
[imageBuffer appendData:[data subdataWithRange:NSMakeRange(0, [data length])]];
NSLog(@"Total Data Length is %lu", (unsigned long)[imageBuffer length]);
}
});
// Echo message back to client
[sock writeData:data withTimeout:-1 tag:ECHO_MSG];
[sock readDataToData:[GCDAsyncSocket CRLFData] withTimeout:-1 tag:0];
}
- (void)socketDidDisconnect:(GCDAsyncSocket *)sock withError:(NSError *)err
{
if (sock != listenSocket)
{
dispatch_async(dispatch_get_main_queue(), ^{
@autoreleasepool {
[self logInfo:FORMAT(@"Client Disconnected")];
NSData *cameraNumberData;
NSData *imageNumberData;
NSData *imageData;
NSData *endCommandData;
//if ([data length] > 40){
cameraNumberData = [imageBuffer subdataWithRange:NSMakeRange(0, 16)];
imageNumberData = [imageBuffer subdataWithRange:NSMakeRange(16, 16)];
imageData = [imageBuffer subdataWithRange:NSMakeRange(32, [imageBuffer length]-34)];
endCommandData = [imageBuffer subdataWithRange:NSMakeRange([imageBuffer length]-16, 16)];
//}
NSString *cameraNumberString = [[NSString alloc] initWithData:cameraNumberData encoding:NSUTF8StringEncoding];
NSString *imageNumberString = [[NSString alloc] initWithData:imageNumberData encoding:NSUTF8StringEncoding];
NSString *endCommandString = [[NSString alloc] initWithData:endCommandData encoding:NSUTF8StringEncoding];
NSImage* image = [[NSImage alloc]initWithData:imageData];
if (cameraNumberString)
{
NSLog(@"Image recieved from Camera no %@", cameraNumberString);
[self logMessage:cameraNumberString];
}
else
{
[self logError:@"Error converting received data into UTF-8 String"];
}
if (imageNumberString)
{
NSLog(@"Image is number %@", imageNumberString);
[self logMessage:imageNumberString];
}
else
{
[self logError:@"Error converting received data into UTF-8 String"];
}
if (image)
{
NSLog(@"We have an image");
[self.imageView setImage:image];
}
else
{
[self logError:@"Error converting received data into image"];
}
if (endCommandString)
{
NSLog(@"Command String is %@", endCommandString);
[self logMessage:endCommandString];
}
else
{
[self logError:@"No command string"];
}
//self.imageBuffer = nil;
}
});
@synchronized(connectedSockets)
{
[connectedSockets removeObject:sock];
}
}
}
插座断开连接
- (void)socket:(GCDAsyncSocket *)sock didReadData:(NSData *)data withTag:(long)tag
{
// This method is executed on the socketQueue (not the main thread)
dispatch_async(dispatch_get_main_queue(), ^{
@autoreleasepool {
NSLog(@"Thread Data Length is %lu", (unsigned long)[data length]);
if (!imageBuffer){
imageBuffer = [[NSMutableData alloc]init];
}
[imageBuffer appendData:[data subdataWithRange:NSMakeRange(0, [data length])]];
NSLog(@"Total Data Length is %lu", (unsigned long)[imageBuffer length]);
}
});
// Echo message back to client
[sock writeData:data withTimeout:-1 tag:ECHO_MSG];
[sock readDataToData:[GCDAsyncSocket CRLFData] withTimeout:-1 tag:0];
}
- (void)socketDidDisconnect:(GCDAsyncSocket *)sock withError:(NSError *)err
{
if (sock != listenSocket)
{
dispatch_async(dispatch_get_main_queue(), ^{
@autoreleasepool {
[self logInfo:FORMAT(@"Client Disconnected")];
NSData *cameraNumberData;
NSData *imageNumberData;
NSData *imageData;
NSData *endCommandData;
//if ([data length] > 40){
cameraNumberData = [imageBuffer subdataWithRange:NSMakeRange(0, 16)];
imageNumberData = [imageBuffer subdataWithRange:NSMakeRange(16, 16)];
imageData = [imageBuffer subdataWithRange:NSMakeRange(32, [imageBuffer length]-34)];
endCommandData = [imageBuffer subdataWithRange:NSMakeRange([imageBuffer length]-16, 16)];
//}
NSString *cameraNumberString = [[NSString alloc] initWithData:cameraNumberData encoding:NSUTF8StringEncoding];
NSString *imageNumberString = [[NSString alloc] initWithData:imageNumberData encoding:NSUTF8StringEncoding];
NSString *endCommandString = [[NSString alloc] initWithData:endCommandData encoding:NSUTF8StringEncoding];
NSImage* image = [[NSImage alloc]initWithData:imageData];
if (cameraNumberString)
{
NSLog(@"Image recieved from Camera no %@", cameraNumberString);
[self logMessage:cameraNumberString];
}
else
{
[self logError:@"Error converting received data into UTF-8 String"];
}
if (imageNumberString)
{
NSLog(@"Image is number %@", imageNumberString);
[self logMessage:imageNumberString];
}
else
{
[self logError:@"Error converting received data into UTF-8 String"];
}
if (image)
{
NSLog(@"We have an image");
[self.imageView setImage:image];
}
else
{
[self logError:@"Error converting received data into image"];
}
if (endCommandString)
{
NSLog(@"Command String is %@", endCommandString);
[self logMessage:endCommandString];
}
else
{
[self logError:@"No command string"];
}
//self.imageBuffer = nil;
}
});
@synchronized(connectedSockets)
{
[connectedSockets removeObject:sock];
}
}
}
我用过wireshark,数据正在传输,只是没有通过GCDAsynSocket
所以,我显然错过了一些东西。像这样的套接字编程和数据编码/解码对我来说是比较新的,所以我可能是个白痴
非常感谢您的帮助
谢谢
Gareth好的,我终于让它工作了。它涉及修改Python中的传输代码,以便在数据的末尾发送一个完成字符串,并对此进行监视。最大的收获是,每次套接字读取一些数据时,我都需要重新调用readDataToData:方法,否则它会坐在那里等待,传输套接字也会坐在那里 我还必须实现使用标记重新调用第二个接收,以便将接收到的数据存储在NSMutableArray中正确的NSMutableData对象中,否则我无法知道第一次接收后数据来自哪个传输套接字,因为ID仅位于第一条消息的开头 以下是didReadData代码:
- (void)socket:(GCDAsyncSocket *)sock didReadData:(NSData *)data withTag:(long)tag
{
dispatch_async(dispatch_get_main_queue(), ^{
@autoreleasepool {
NSInteger cameraNumberNumber = 0;
NSString *cameraNumberString = [[NSString alloc]init];
if (tag > 10){
cameraNumberNumber = tag-11;
DDLogVerbose(@"Second data loop, tag is %ld", tag);
} else {
NSData *cameraNumberData;
//if ([data length] > 40){
cameraNumberData = [data subdataWithRange:NSMakeRange(0, 16)];
NSString *cameraNumberString = [[NSString alloc] initWithData:cameraNumberData encoding:NSUTF8StringEncoding];
cameraNumberString = [cameraNumberString stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]];
cameraNumberNumber = [cameraNumberString intValue]-1;
}
if (cameraNumberNumber+1 <= self.images.count){
if ([self.images objectAtIndex:cameraNumberNumber] == [NSNull null]){
image* cameraImage = [[image alloc]init];
[self.images replaceObjectAtIndex: cameraNumberNumber withObject:cameraImage];
}
image* cameraImage = [self.images objectAtIndex:cameraNumberNumber];
[cameraImage.imageData appendData:[data subdataWithRange:NSMakeRange(0, [data length])]];
cameraImage.cameraNumber = cameraNumberString;
if (!imageBuffer){
imageBuffer = [[NSMutableData alloc]init];
}
[imageBuffer appendData:[data subdataWithRange:NSMakeRange(0, [data length])]];
DDLogVerbose(@"Total Data Length is %lu", (unsigned long)[imageBuffer length]);
} else {
DDLogInfo(@"Wrong camera quantity!");
NSAlert *testAlert = [NSAlert alertWithMessageText:@"Wrong camera quantity!"
defaultButton:@"Ok"
alternateButton:nil
otherButton:nil
informativeTextWithFormat:@"We have recieved more images than cameras, please set No.Cameras correctly!"];
[testAlert beginSheetModalForWindow:[self window]
modalDelegate:self
didEndSelector:@selector(stop)
contextInfo:nil];
}
[sock readDataToData:[@"end" dataUsingEncoding:NSUTF8StringEncoding] withTimeout:-1 tag:cameraNumberNumber + 11];
}
});
}
这里还有我如何修改Python代码以添加额外的“end”标记
希望这能帮助其他陷入同样处境的人 也许你不需要发送图像的原始字节,你可以使用一些更粗糙的传输编码,例如base64。Hi Hyperboreus,这是一个很好的观点,可能会提高效率。然而,它不能解释为什么字符串解码失败?是的,它不能回答你的问题(因为我不知道答案),这就是我写评论的原因。事实上,有效点:)嗨,Hyperboreus,我已经用更多信息更新了我上面的问题,任何进一步的输入都将不胜感激!