C++ armv7s上的Live555(RTP/RTSP)

C++ armv7s上的Live555(RTP/RTSP),c++,ios,rtsp,rtp,live555,C++,Ios,Rtsp,Rtp,Live555,是否可以将live555库与armv7s一起使用?因为我试着用config编译它 # Change the following version number, if necessary, before running "genMakefiles iphoneos" IOS_VERSION = 7.0 DEVELOPER_PATH = /Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/D

是否可以将live555库与armv7s一起使用?因为我试着用config编译它

# Change the following version number, if necessary, before running "genMakefiles iphoneos"
IOS_VERSION =       7.0

# Xcode toolchain locations for the iPhoneOS (device) platform.
DEVELOPER_PATH =    /Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer
TOOL_PATH =     $(DEVELOPER_PATH)/usr/bin
SDK_PATH =      $(DEVELOPER_PATH)/SDKs
SDK =           $(SDK_PATH)/iPhoneOS$(IOS_VERSION).sdk
# NOTE(review): "-arch armv7s" builds ONLY an armv7s slice.  Linking these
# .a libraries into an app whose ARCHS include other architectures (armv7,
# arm64) fails with "Undefined symbols for architecture ...".  Either build
# one slice per architecture and merge them with lipo, or make the app's
# Architectures / Valid Architectures match this single slice.
# NOTE(review): $(EXTRA_LDFLAGS) is conventionally a *linker* flag bundle;
# its presence in the compile options is unusual — confirm it is intended.
COMPILE_OPTS =          $(INCLUDES) -I. $(EXTRA_LDFLAGS) -DBSD=1 -O2 -DSOCKLEN_T=socklen_t -DHAVE_SOCKADDR_LEN=1 -D_LARGEFILE_SOURCE=1 -D_FILE_OFFSET_BITS=64 -fPIC -arch armv7s --sysroot=$(SDK)
# Tool and suffix settings consumed by live555's generated Makefiles.
C =                     c
C_COMPILER =            $(TOOL_PATH)/gcc
C_FLAGS =               $(COMPILE_OPTS)
CPP =                   cpp
CPLUSPLUS_COMPILER =    $(TOOL_PATH)/g++
CPLUSPLUS_FLAGS =       $(COMPILE_OPTS) -Wall
OBJ =                   o
LINK =                  $(TOOL_PATH)/g++ -o 
LINK_OPTS =             -L. -arch armv7s --sysroot=$(SDK) -L$(SDK)/usr/lib/system
CONSOLE_LINK_OPTS =     $(LINK_OPTS)
# Static libraries are produced with libtool; LIB_SUFFIX "a" => lib*.a archives.
LIBRARY_LINK =          libtool -s -o 
LIBRARY_LINK_OPTS =
LIB_SUFFIX =            a
LIBS_FOR_CONSOLE_APPLICATION =
LIBS_FOR_GUI_APPLICATION =
EXE =
然后我尝试将示例程序 testMP3Receiver 复制并粘贴到我的 iOS 项目中(是的,我把文件后缀改成了 .mm 而不是 .m,并且包含了所需的每个头文件),但我仍然收到了 14 个链接错误:
架构armv7s的未定义符号

我的代码:

#import "TViewController.h"
#include "liveMedia.hh"
#include "GroupsockHelper.hh"
#include "BasicUsageEnvironment.hh"

// Private class extension for TViewController.
// Empty here; private properties/ivars would be declared in this extension.
@interface TViewController ()

@end

@implementation TViewController

// live555 usage environment, shared between -startButton and afterPlaying().
UsageEnvironment* env;

// Forward declaration: afterPlaying() is passed to startPlaying() inside
// -startButton, before its definition below.  C++/Objective-C++ has no
// implicit function declarations, so this is required to compile.
void afterPlaying(void* clientData);

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

// Shared session state so afterPlaying() can close the media objects
// created in -startButton.
struct sessionState_t {
    FramedSource* source;
    FileSink* sink;
    RTCPInstance* rtcpInstance;
} sessionState;

// Toggle button action: first tap starts receiving, second tap stops.
// NOTE(review): `wasClicked` is not declared in this file — presumably an
// ivar/BOOL in TViewController.h; confirm it exists and starts false.
- (IBAction)start:(id)sender {
    if (!wasClicked) {
        // start receiving
        [self startButton];
        wasClicked = true;
    } else {
        // stop receiving
        [self stopButton];
        wasClicked = false;
    }
}

// Sets up the live555 receiving chain (port of live555's testMP3Receiver):
// scheduler/environment -> groupsocks -> RTP source (+RTCP) -> file sink.
-(void)startButton{
    // Begin by setting up our usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    // Create the data sink for 'stdout':
    sessionState.sink = FileSink::createNew(*env, "stdout");
    // Note: The string "stdout" is handled as a special case.
    // A real file name could have been used instead.

    // Create 'groupsocks' for RTP and RTCP:
    char const* sessionAddressStr
#ifdef USE_SSM
    = "232.255.42.42";
#else
    = "239.255.42.42";
    // Note: If the session is unicast rather than multicast,
    // then replace this string with "0.0.0.0"
#endif
    const unsigned short rtpPortNum = 6666;
    const unsigned short rtcpPortNum = rtpPortNum+1; // RTCP = RTP port + 1, per convention
#ifndef USE_SSM
    const unsigned char ttl = 1; // low, in case routers don't admin scope
#endif

    struct in_addr sessionAddress;
    sessionAddress.s_addr = our_inet_addr(sessionAddressStr);
    const Port rtpPort(rtpPortNum);
    const Port rtcpPort(rtcpPortNum);

#ifdef USE_SSM
    char* sourceAddressStr = "aaa.bbb.ccc.ddd";
    // replace this with the real source address
    struct in_addr sourceFilterAddress;
    sourceFilterAddress.s_addr = our_inet_addr(sourceAddressStr);

    Groupsock rtpGroupsock(*env, sessionAddress, sourceFilterAddress, rtpPort);
    Groupsock rtcpGroupsock(*env, sessionAddress, sourceFilterAddress, rtcpPort);
    rtcpGroupsock.changeDestinationParameters(sourceFilterAddress,0,~0);
    // our RTCP "RR"s are sent back using unicast
#else
    Groupsock rtpGroupsock(*env, sessionAddress, rtpPort, ttl);
    Groupsock rtcpGroupsock(*env, sessionAddress, rtcpPort, ttl);
#endif

    RTPSource* rtpSource;
#ifndef STREAM_USING_ADUS
    // Create the data source: a "MPEG Audio RTP source"
    rtpSource = MPEG1or2AudioRTPSource::createNew(*env, &rtpGroupsock);
#else
    // Create the data source: a "MP3 *ADU* RTP source"
    unsigned char rtpPayloadFormat = 96; // a dynamic payload type
    rtpSource
    = MP3ADURTPSource::createNew(*env, &rtpGroupsock, rtpPayloadFormat);
#endif

    // Create (and start) a 'RTCP instance' for the RTP source:
    const unsigned estimatedSessionBandwidth = 160; // in kbps; for RTCP b/w share
    const unsigned maxCNAMElen = 100;
    unsigned char CNAME[maxCNAMElen+1];
    gethostname((char*)CNAME, maxCNAMElen);
    CNAME[maxCNAMElen] = '\0'; // just in case gethostname didn't NUL-terminate
    sessionState.rtcpInstance
    = RTCPInstance::createNew(*env, &rtcpGroupsock,
                              estimatedSessionBandwidth, CNAME,
                              NULL /* we're a client */, rtpSource);
    // Note: This starts RTCP running automatically

    sessionState.source = rtpSource;
#ifdef STREAM_USING_ADUS
    // Add a filter that deinterleaves the ADUs after depacketizing them:
    sessionState.source
    = MP3ADUdeinterleaver::createNew(*env, sessionState.source);
    if (sessionState.source == NULL) {
        *env << "Unable to create an ADU deinterleaving filter for the source\n";
        exit(1);
    }

    // Add another filter that converts these ADUs to MP3s:
    sessionState.source
    = MP3FromADUSource::createNew(*env, sessionState.source);
    if (sessionState.source == NULL) {
        *env << "Unable to create an ADU->MP3 filter for the source\n";
        exit(1);
    }
#endif

    // Finally, start receiving the multicast stream:
    *env << "Beginning receiving multicast stream...\n";
    sessionState.sink->startPlaying(*sessionState.source, afterPlaying, NULL);

    // WARNING: doEventLoop() never returns, so this blocks the calling
    // thread (the main/UI thread when invoked from an IBAction).  In a real
    // app, run the event loop on a background thread or pass a "watch"
    // variable so it can be stopped.
    env->taskScheduler().doEventLoop(); // does not return
}

// Completion callback: closes the RTCP instance, sink, and source.
// Declared as a plain C++ function (live555 callback signature), so it can
// only reach the file-scope `env` and `sessionState`.
void afterPlaying(void* /*clientData*/) {
    *env << "...done receiving\n";

    // End by closing the media:
    Medium::close(sessionState.rtcpInstance); // Note: Sends a RTCP BYE
    Medium::close(sessionState.sink);
    Medium::close(sessionState.source);
}

-(void)stopButton{
    // BUG FIX: the original statement `afterPlaying;` merely named the
    // function without calling it (a discarded-value expression, i.e. a
    // no-op), so teardown never ran.  Invoke it with a NULL clientData,
    // matching how startPlaying() would call it.
    afterPlaying(NULL);
}

@end
#导入“TViewController.h”
#包括“liveMedia.hh”
#包括“GroupsockHelper.hh”
#包括“BasicUsageEnvironment.hh”
@接口TViewController()
@结束
@TViewController的实现
使用环境*env;
-(无效)viewDidLoad
{
[超级视图下载];
//加载视图后,通常从nib执行任何其他设置。
}
-(无效)未收到记忆警告
{
[超级记忆警告];
//处置所有可以重新创建的资源。
}
结构会话状态{
FramedSource*源;
文件墨水*水槽;
RTCPInstance*RTCPInstance;
}会期状态;
-(iAction)开始:(id)发送方{
如果(!已单击){
//开始
[自启动按钮];
wasClicked=true;
}否则{
//停止
[自动停止按钮];
wasClicked=false;
}
}
-(无效)开始按钮{
//首先设置我们的使用环境:
TaskScheduler*scheduler=BasicTaskScheduler::createNew();
env=BasicUsageEnvironment::createNew(*调度器);
//为“stdout”创建数据接收器:
sessionState.sink=FileSink::createNew(*env,“stdout”);
//注意:字符串“stdout”作为特例处理。
//可以使用真实的文件名来代替。
//为RTP和RTCP创建“groupsocks”:
字符常量*sessionAddressStr
#ifdef使用\u SSM
= "232.255.42.42";
#否则
= "239.255.42.42";
//注意:如果会话是单播而不是多播,
//然后将此字符串替换为“0.0.0.0”
#恩迪夫
const unsigned short rtpPortNum=6666;
const unsigned short rtcportnum=rtpPortNum+1;
#ifndef使用
const unsigned char ttl=1;//低,以防路由器没有管理作用域
#恩迪夫
会话地址中的结构;
sessionAddress.s_addr=我们的inet_addr(sessionAddress str);
常量端口rtpPort(rtpPortNum);
常量端口rtcpPort(rtcpPortNum);
#ifdef使用\u SSM
char*sourceAddressStr=“aaa.bbb.ccc.ddd”;
//将其替换为真实的源地址
地址中的结构sourceFilterAddress;
sourceFilterAddress.s\u addr=我们的\u inet\u addr(sourceAddressStr);
Groupsock rtpGroupsock(*env,sessionAddress,sourceFilterAddress,rtpPort);
Groupsock rtcpGroupsock(*env,sessionAddress,sourceFilterAddress,rtcport);
rtcpGroupsock.changeDestinationParameters(sourceFilterAddress,0,~0);
//我们的RTCP“RR”使用单播发送回
#否则
Groupsock rtpGroupsock(*env,sessionAddress,rtpPort,ttl);
Groupsock rtcpGroupsock(*env,sessionAddress,rtcpPort,ttl);
#恩迪夫
RTPSource*RTPSource;
#使用ADUS的ifndef流
//创建数据源:“MPEG音频RTP源”
rtpSource=mpeg1or2auditorpsource::createNew(*env,&rtpGroupsock);
#否则
//创建数据源:“MP3*ADU*RTP源”
unsigned char rtpPayloadFormat=96;//动态负载类型
RTP资源
=MP3ADURTPSource::createNew(*env,&rtpGroupsock,rtpPayloadFormat);
#恩迪夫
//为RTP源创建(并启动)一个“RTCP实例”:
常数unsigned estimatedSessionBandwidth=160;//以kbps为单位;用于RTCP b/w共享
常量无符号maxCNAMElen=100;
无符号字符CNAME[maxCNAMElen+1];
gethostname((char*)CNAME,maxCNAMElen);
CNAME[maxCNAMElen]='\0';//以防万一
sessionState.rtcpInstance
=RTCPInstance::createNew(*env,&rtcpGroupsock,
估计会话带宽,CNAME,
NULL/*我们是客户*/,rtpSource);
//注意:这会自动启动RTCP运行
sessionState.source=rtpSource;
#使用ADUS的ifdef流
//添加一个过滤器,用于在ADU卸包后将其逐行扫描:
sessionState.source
=MP3ADUdeinterleaver::createNew(*env,sessionState.source);
if(sessionState.source==NULL){
*env taskScheduler().doEventLoop();//不返回
}
播放后作废(作废*/*客户端数据*/){

可能是这样,但何必麻烦呢?使用 armv7s 并不能带来多少好处,只需进入"构建设置(Build Settings)",从架构列表中删除 armv7s 即可。
@MichelleCannon 我已经从
构建设置 > 架构(Architectures) > Architectures 和
构建设置 > 架构(Architectures) > Valid Architectures
中删除了 armv7s,但仍然出现相同的错误;现在换成 armv7 也有同样的问题。
@MichelleCannon 好吧,是我的错,我添加了错误的文件。我之前把源文件直接加进了项目——我是不是应该改为添加由这些源文件编译生成的静态库?
供您参考:我 fork 了原始项目,并用 live555 的最新源代码对其进行了升级,还把项目配置更新为支持 iOS 8(新的 arm 体系结构)。这是我的仓库。