A quick write-up of the past couple of days spent using GCDAsyncUdpSocket to receive multicast video data: receiving, sorting, reassembling the packets, and finally handing the frames off for decoding and rendering.
1. Initialize the udpSocket, bind the port, join the multicast group, and receive data
1.1 Initialize the udpSocket
//Grab a global queue, create the GCDAsyncUdpSocket and set its delegate
dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT,0);
_udpSocket = [[GCDAsyncUdpSocket alloc] initWithDelegate:self delegateQueue:queue];
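The code above assumes the socket is kept in a property whose backing ivar is _udpSocket; a minimal sketch of the assumed declarations (the class name and import path are assumptions, not from the original post):
// Assumed declarations, not shown in the original post
#import <CocoaAsyncSocket/GCDAsyncUdpSocket.h>   // or #import "GCDAsyncUdpSocket.h" if added manually

@interface ViewController () <GCDAsyncUdpSocketDelegate>   // class name assumed
@property (nonatomic, strong) GCDAsyncUdpSocket *udpSocket; // backing ivar _udpSocket
@end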
1.2 Bind the port and join the multicast group
//225.18.1.10
NSError *error = nil;
// Check each call's BOOL return value; the error object is only meaningful when a call fails
if (![_udpSocket bindToPort:19827 error:&error]) {
    NSLog(@"bindToPort failed: %@", error);
}
if (![_udpSocket enableBroadcast:YES error:&error]) {
    NSLog(@"enableBroadcast failed: %@", error);
}
// Join multicast group 224.0.0.2; for group addresses outside the link-local 224.0.0.x range
// you also need to raise the TTL on GCDAsyncUdpSocket (the default TTL is 1)
if (![_udpSocket joinMulticastGroup:@"224.0.0.2" error:&error]) {
    NSLog(@"joinMulticastGroup failed: %@", error);
}
if (![_udpSocket beginReceiving:&error]) {
    NSLog(@"beginReceiving failed: %@", error);
}
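When the stream is no longer needed, the socket should leave the group and close; a minimal teardown sketch (not shown in the original post):
// Teardown sketch (assumed; the original post does not include it)
NSError *leaveError = nil;
if (![_udpSocket leaveMulticastGroup:@"224.0.0.2" error:&leaveError]) {
    NSLog(@"leaveMulticastGroup failed: %@", leaveError);
}
[_udpSocket close]; // triggers udpSocketDidClose:withError: on the delegate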
1.3 Receive data
#pragma mark - GCDAsyncUdpSocketDelegate
- (void)udpSocket:(GCDAsyncUdpSocket *)sock didReceiveData:(NSData *)data fromAddress:(NSData *)address withFilterContext:(id)filterContext
{
[self.analyseObject AnalyseMulticastData:data];
// NSLog(@"接收到%@的消息:%@",address,data);//自行转换格式吧
}
- (void)udpSocketDidClose:(GCDAsyncUdpSocket *)sock withError:(NSError *)error
{// NSLog(@"udpSocketDidClose Error:%@",[error description]);
}
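The commented-out log in didReceiveData notes that address still needs converting; GCDAsyncUdpSocket provides a class helper for that, for example:
// Convert the sockaddr-wrapped NSData into a readable host/port for logging
NSString *host = nil;
uint16_t port = 0;
[GCDAsyncUdpSocket getHost:&host port:&port fromAddress:address];
// NSLog(@"Received %lu bytes from %@:%hu", (unsigned long)data.length, host, port);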
2. Sorting and reassembling the data

The sender splits each video frame into packets of 7392 bytes and prepends a fixed-size private header to every packet.
When the receiver is on a direct wired link, it simply receives the packets, sorts them, strips the header from each one, and concatenates the payloads into a complete frame that can go straight to the decoder.
When the traffic passes through a router, the router further splits each 7392-byte packet into several 1472-byte packets and sends them out together. The receiver then has to buffer those 1472-byte packets, sort them, strip the headers, and reassemble them into a complete frame.
The parsing class works like this: on init it starts a background loop, stores every received packet, and keeps reading the stored packets. For each packet it checks whether a packet was lost; if so, it discards the fragments of that frame that were already stored. Otherwise it checks whether the packet belongs to the same frame as the previous one; if it does, the packet goes into the same NSMutableDictionary. When the end-of-frame flag is reached, the fragments collected in the NSMutableDictionary are sorted, the sorted values are read back in order, each fragment's header is stripped, the payloads are concatenated into a complete frame, and the result is passed out through the callback.
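The .h is not shown, so the exact header layout is not in the post. The code below only relies on the field names and on the constants VTM_HEADER_LEN and VIDEO_FRAME_SIZE; a rough sketch of what it assumes (sizes, order and packing are guesses):
// Assumed private header layout; field names come from the code below, everything else is a guess
// (struct packing/alignment is omitted for brevity)
typedef struct {
    uint8_t  type;          // 0 = video data
    uint16_t length;        // payload length, network byte order on the wire
    uint16_t sequence_id;   // frame sequence number
    uint16_t fragment;      // high 2 bits: flags (0 = last fragment), low 14 bits: fragment id
    uint32_t destination;   // destination id
} VDataMsgHeader;

#define VTM_HEADER_LEN   sizeof(VDataMsgHeader)  // assumed: size of the fixed private header
#define VIDEO_FRAME_SIZE (4 * 1024 * 1024)       // assumed: upper bound for one reassembled frame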
- (instancetype)init
{
if(self = [super init]){
[self frameInit];
self.muDataArr = @[].mutableCopy;
self.m_isCanRun = 1;
__weak typeof(self) weakSelf = self;
/* Run the packet-handling loop off the main thread, on a global concurrent queue */
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
/* long-running loop... */
[weakSelf handleDataThread];
});
}
return self;
}
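For context, a rough usage sketch of how the receiving side might wire this parser to the socket delegate from section 1.3 (class and property names are assumptions for illustration):
// Names assumed for illustration; the original post does not show this wiring
self.analyseObject = [[MulticastDataAnalyse alloc] init]; // parser class name assumed
self.analyseObject.delegate = self;                       // receives the reassembled frames
// ...then bind/join/beginReceiving as in section 1.2; every packet that arrives in
// udpSocket:didReceiveData:fromAddress:withFilterContext: is handed to AnalyseMulticastData: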
- (void)frameInit
{// (Re)initialize the frame-related state
self.m_frameseq = 0;
if(!self.sortDataDic){
self.sortDataDic = @{}.mutableCopy;
}else{
if(self.sortDataDic.count != 0){
[self.sortDataDic removeAllObjects];
}
}
self.m_frameid = 0;
self.m_framelength = 0;
self.m_tempflags = -1;
self.m_curtempsize = 0;
}
- (NSLock *)theLock
{// Lazily create the lock
if (!_theLock) {
_theLock = [[NSLock alloc] init];
}
return _theLock;
}
- (void)AnalyseMulticastData:(NSData *)data
{// Called for every received packet; store it for the handling loop
[self.theLock lock];
[self.muDataArr addObject:data];
[self.theLock unlock];
}
- (void)msgNtoh:(VDataMsgHeader *)header
{// Convert the header fields from network to host byte order
header->length = ntohs(header->length);
header->sequence_id = ntohs(header->sequence_id);
header->fragment = ntohs(header->fragment);
header->destination = ntohl(header->destination);
}
- (void)handleDataThread
{// Loop that drains the stored packets one by one
while (_m_isCanRun) {
[self.theLock lock];
if(self.muDataArr.count != 0){
NSData *data = self.muDataArr.firstObject;
uint8_t *frame = (uint8_t *)data.bytes;
uint32_t frame_size = (uint32_t)data.length;
VDataMsgHeader *header = (VDataMsgHeader *)frame;
[self msgNtoh:header];
int frame_id = header->fragment & 0x3fff;
int frame_flag = header->fragment >> 14;
if (header->type == 0) {// video data
if(0==frame_id && 0==frame_flag){// the whole frame fits in one packet: no sorting or reassembly needed, call back directly
[self frameInit];
if(self.delegate && [self.delegate respondsToSelector:@selector(multicastDataAnalyseCompleteCallback:withSize:)]){
[self.delegate multicastDataAnalyseCompleteCallback:&frame[VTM_HEADER_LEN] withSize:frame_size-VTM_HEADER_LEN];
}
}
if(self.m_frameseq != header->sequence_id)
{// this packet does not belong to the frame currently being assembled; reset the previous frame's state
if(self.m_frameseq>(header->sequence_id+10) || self.m_frameseq<header->sequence_id){
// LOG_DEBUG << "m_videoelement->m_frameseq= "<<m_videoelement->m_frameseq<<"header->sequence_id ====== "<<header->sequence_id;
[self frameInit];
self.m_frameseq = header->sequence_id;
}
}
if(0==frame_flag){// this packet carries the end-of-frame flag; remember its fragment id
self.m_tempflags = frame_id;
}
if(((self.m_framelength) + frame_size - VTM_HEADER_LEN) < VIDEO_FRAME_SIZE)
{
self.m_framelength += frame_size - VTM_HEADER_LEN;
[self.sortDataDic setObject:data forKey:@(frame_id)];
if(self.m_tempflags == self.sortDataDic.count-1){// all fragments of this frame have arrived
NSMutableData *muData = [self handleVideoDataSortFunction:self.sortDataDic];
[self.delegate multicastDataAnalyseCompleteCallback:(uint8_t *)[muData bytes] withSize:(uint32_t)muData.length];
}
}else{
// qDebug()<<m_framelength <<"dataLen = "<<dataLen <<" seq="<<m_frameseq;
}
}
// NSLog(@"frame_id====>%d======>frame_flag======>%d",frame_id,frame_flag);
[self.muDataArr removeObject:data];
}
[self.theLock unlock];
usleep(10);
}
}
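To make the bit twiddling above concrete: with the layout assumed earlier, the low 14 bits of fragment carry the fragment id and the top 2 bits carry the flag, with 0 marking the last fragment of a frame. For example:
// Worked example of the fragment field (values chosen for illustration)
uint16_t fragment = 0x4005;
int frame_id   = fragment & 0x3fff;  // 5 -> the sixth fragment of the frame
int frame_flag = fragment >> 14;     // 1 -> not the end-of-frame packet
// fragment == 0x0007 would give frame_id = 7, frame_flag = 0: the frame's last fragment,
// so the dictionary holding fragments 0..7 is sorted and concatenated into one frame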
- (NSMutableData *)handleVideoDataSortFunction:(NSMutableDictionary *)sortDataDic
{// Sort the fragments of one frame by fragment id and concatenate their payloads
NSMutableArray *mutableArray = [sortDataDic.allKeys mutableCopy];
[mutableArray sortUsingComparator:^NSComparisonResult(id _Nonnull obj1, id _Nonnull obj2)
{
//Return NSOrderedDescending when obj1 > obj2, so the keys end up sorted in ascending order (keys are unique here, so the equal case never occurs)
if ([obj1 integerValue] > [obj2 integerValue])
{
return NSOrderedDescending;
}
else
{
return NSOrderedAscending;
}
}];
NSMutableData *muData = [[NSMutableData alloc]init];
for (id key in mutableArray) {
NSData *data = sortDataDic[key];
// NSLog(@"循环结果:====>%@",key);
NSData *originalData = [data subdataWithRange:NSMakeRange(VTM_HEADER_LEN, data.length-VTM_HEADER_LEN)];//取得长度数据
[muData appendData:originalData];
}
// NSLog(@"排序结果:====>%@",mutableArray);
return muData;
}
Only the .m implementation is shown above. Decoding and rendering will be covered in a later post.
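Since the .h is not shown, here is a rough sketch of what the parser's interface might declare, inferred from the properties and callback used in the .m above (the names come from the code, but the exact interface is an assumption):
// Sketch of the assumed parser interface, inferred from the .m above, not taken from the original .h
@protocol MulticastDataAnalyseDelegate <NSObject>            // protocol name assumed
- (void)multicastDataAnalyseCompleteCallback:(uint8_t *)frameData withSize:(uint32_t)size;
@end

@interface MulticastDataAnalyse : NSObject                   // class name assumed
@property (nonatomic, weak) id<MulticastDataAnalyseDelegate> delegate;
@property (nonatomic, strong) NSMutableArray *muDataArr;        // received raw packets
@property (nonatomic, strong) NSMutableDictionary *sortDataDic; // fragments of the current frame, keyed by fragment id
@property (nonatomic, strong) NSLock *theLock;
@property (nonatomic, assign) int m_isCanRun;
@property (nonatomic, assign) int m_frameseq;
@property (nonatomic, assign) int m_frameid;
@property (nonatomic, assign) int m_framelength;
@property (nonatomic, assign) int m_tempflags;
@property (nonatomic, assign) int m_curtempsize;
- (void)AnalyseMulticastData:(NSData *)data;
@end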