iOS OpenGL ES在2d世界中执行收缩缩放
我正在 iPad 上制作一个 2D OpenGL 应用程序。我需要实现一个捏合/缩放（pinch/zoom）手势。我想在
(x,y)
平面内移动相机,并用捏手势控制相机x,y
和z
值
update
方法中的每一帧我都是这样制作视图矩阵(摄影机)
lookAt = GLKMatrix4MakeLookAt(view_x, view_y, view_z, view_x, view_y, 0.0f, 0.0f, 1.0f, 0.0f);
其中,视图x、视图y和视图z在程序开始时定义如下:
view_x = view_y = 0.0f; view_z = kStartZoom;
现在是3000。
因此，摄像机位于 (0, 0, 3000) 并朝向 (0, 0, 0)。
处理夹点事件的几乎有效的解决方案是
// Pinch-gesture handler (the questioner's "almost working" version).
// Zooms by scaling the camera's z position and pans x/y toward the moving
// pinch midpoint; the camera ivars (view_x, view_y, view_z) feed the lookAt
// matrix rebuilt each frame elsewhere in the file.
- (IBAction) handlePinch:(UIPinchGestureRecognizer*) recognizer {
switch (recognizer.state)
{
case UIGestureRecognizerStateBegan:
{
if (recognizer.numberOfTouches == 2)
{
// Cache both finger positions (view coordinates, in points) so the
// Changed phase can compute deltas against them.
prevTouchOrigin1 = [recognizer locationOfTouch:0 inView:self.view];
prevTouchOrigin2 = [recognizer locationOfTouch:1 inView:self.view];
}
} break;
case UIGestureRecognizerStateChanged:
{
if (recognizer.numberOfTouches == 2)
{
CGFloat newDistance, oldDistance;
// Finger separation before and after this event.
oldDistance = distanceBetweenTwoCGPoints(&prevTouchOrigin1, &prevTouchOrigin2);
currTouchOrigin1 = [recognizer locationOfTouch:0 inView:self.view];
currTouchOrigin2 = [recognizer locationOfTouch:1 inView:self.view];
newDistance = distanceBetweenTwoCGPoints(&currTouchOrigin1, &currTouchOrigin2);
if (newDistance == 0 || oldDistance == 0)
{
// Degenerate pinch (coincident fingers) — treat as no zoom change.
scaleFactor = 1;
} else {
// old/new: spreading the fingers gives a factor < 1, shrinking z
// (zoom in). NOTE(review): the corrected version later in this
// document uses newDistance / oldDistance and divides view_z instead.
scaleFactor = oldDistance / newDistance;
}
// Reject the whole event if the resulting zoom would leave [kMinZoom, kMaxZoom].
GLfloat check = view_z * scaleFactor;
if (check < kMinZoom || check > kMaxZoom)
return;
view_z *= scaleFactor;
// Translate the camera so the content under the pinch appears anchored.
// formula: newPos = currTouchOrigin + (objectOrigin - prevTouchOrigin) * scaleFactor
// NOTE(review): `static` makes this persist across calls, but it is fully
// overwritten before use each time, so the qualifier is harmless here.
static CGPoint translationDelta;
// NOTE(review): view_z_old is never assigned in this block and z_ratio is
// never read — presumably leftovers from an earlier attempt; confirm
// against the full file (this likely does not compile as shown).
GLfloat z_ratio = view_z_old / view_z;
// Project each finger's previous position through the scale change,
// relative to the current camera x/y.
newPos1.x = currTouchOrigin1.x - ((prevTouchOrigin1.x - view_x) * scaleFactor);
newPos1.y = currTouchOrigin1.y - ((prevTouchOrigin1.y - view_y) * scaleFactor);
newPos2.x = currTouchOrigin2.x - ((prevTouchOrigin2.x - view_x) * scaleFactor);
newPos2.y = currTouchOrigin2.y - ((prevTouchOrigin2.y - view_y) * scaleFactor);
midpoint = CGPointMidpoint(&newPos1, &newPos2);
// Pan the camera toward the scaled midpoint. NOTE(review): these deltas
// are in screen points, not world units — the questioner later identifies
// this missing screen-to-world conversion as the source of the drift.
translationDelta = CGPointMake(midpoint.x - view_x, midpoint.y - view_y);
view_x += translationDelta.x;
// y is subtracted because UIKit's y axis points down while GL's points up.
view_y -= translationDelta.y;
// Store current positions as the baseline for the next Changed event.
prevTouchOrigin1 = currTouchOrigin1;
prevTouchOrigin2 = currTouchOrigin2;
}
} break;
case UIGestureRecognizerStateEnded:
{
} break;
default :
{
}
}}
- (IBAction) handlePinch:(UIPinchGestureRecognizer*) recognizer {
switch (recognizer.state)
{
case UIGestureRecognizerStateBegan:
{
if (recognizer.numberOfTouches == 2)
{
prevTouchOrigin1 = [recognizer locationOfTouch:0 inView:self.view];
prevTouchOrigin2 = [recognizer locationOfTouch:1 inView:self.view];
}
} break;
case UIGestureRecognizerStateChanged:
{
if (recognizer.numberOfTouches == 2)
{
CGFloat newDistance, oldDistance;
oldDistance = distanceBetweenTwoCGPoints(&prevTouchOrigin1, &prevTouchOrigin2);
currTouchOrigin1 = [recognizer locationOfTouch:0 inView:self.view];
currTouchOrigin2 = [recognizer locationOfTouch:1 inView:self.view];
newDistance = distanceBetweenTwoCGPoints(&currTouchOrigin1, &currTouchOrigin2);
if (newDistance == 0 || oldDistance == 0)
{
scaleFactor = 1;
} else {
scaleFactor = oldDistance / newDistance;
}
GLfloat check = view_z * scaleFactor;
if (check < kMinZoom || check > kMaxZoom)
return;
view_z *= scaleFactor;
// translate
// formula: newPos = currTouchOrigin + (objectOrigin - prevTouchOrigin) * scaleFactor
static CGPoint translationDelta;
GLfloat z_ratio = view_z_old / view_z;
newPos1.x = currTouchOrigin1.x - ((prevTouchOrigin1.x - view_x) * scaleFactor);
newPos1.y = currTouchOrigin1.y - ((prevTouchOrigin1.y - view_y) * scaleFactor);
newPos2.x = currTouchOrigin2.x - ((prevTouchOrigin2.x - view_x) * scaleFactor);
newPos2.y = currTouchOrigin2.y - ((prevTouchOrigin2.y - view_y) * scaleFactor);
midpoint = CGPointMidpoint(&newPos1, &newPos2);
translationDelta = CGPointMake(midpoint.x - view_x, midpoint.y - view_y);
view_x += translationDelta.x;
view_y -= translationDelta.y;
prevTouchOrigin1 = currTouchOrigin1;
prevTouchOrigin2 = currTouchOrigin2;
}
} break;
case UIGestureRecognizerStateEnded:
{
} break;
default :
{
}
}}
一切正常
我在x,y上有更多的运动,然后我需要,所以相机在旋转
问题是我没有应用一些从屏幕坐标到世界坐标的转换吗
问题出在哪里？我正在研究的其他示例仅根据上一个手指位置和当前手指位置之间的距离来修改相机位置，这也正是我在做的。我不太确定是否理解了您的问题，但对于捏合手势识别，我使用 UIPinchGestureRecognizer：
这是我的解决方案:
我有两个类,一个负责所有OpenGL的东西(RenderViewController),另一个负责所有手势识别器以及OpenGL部分和应用程序其他部分(EditViewController)之间的通信
以下是有关手势的代码:
EdtorViewController
它捕获手势并将其信息发送到RenderViewController。你必须小心,因为坐标系不同
- (void) generateGestureRecognizers {
    // Build the three gesture recognizers used by the editor (rotate, pinch,
    // pan), attach each to the hit view, and make self their delegate so
    // they can be recognized simultaneously.
    UIRotationGestureRecognizer *rotationRecognizer =
        [[UIRotationGestureRecognizer alloc] initWithTarget:self
                                                     action:@selector(twoFingersRotate:)];
    UIPinchGestureRecognizer *pinchRecognizer =
        [[UIPinchGestureRecognizer alloc] initWithTarget:self
                                                  action:@selector(twoFingersScale:)];
    UIPanGestureRecognizer *panRecognizer =
        [[UIPanGestureRecognizer alloc] initWithTarget:self
                                                action:@selector(oneFingerPan:)];

    for (UIGestureRecognizer *recognizer in @[ rotationRecognizer, pinchRecognizer, panRecognizer ]) {
        [self.hitView addGestureRecognizer:recognizer];
        recognizer.delegate = self;
    }
}
- (void) oneFingerPan:(UIPanGestureRecognizer *) recognizer {
    // Handle a one-finger pan: forward the pan delta (in hitView points)
    // to the OpenGL render view controller.
    CGPoint translation = [recognizer translationInView:self.hitView];
    // Fix: the original passed the misspelled, undeclared `traslation`,
    // which would not compile. (The unused `location` local was removed.)
    [self.renderViewController translate:translation];
    // Reset the recognizer so each event reports an incremental delta
    // instead of the accumulated translation since the gesture began.
    [recognizer setTranslation:CGPointZero inView:self.hitView];
}
- (void) twoFingersRotate:(UIRotationGestureRecognizer *) recognizer {
    // Two-finger rotation: express the gesture centroid in view-centered
    // coordinates (origin at the middle of hitView) and forward the
    // incremental rotation angle to the renderer.
    CGPoint center = [recognizer locationInView:self.hitView];
    CGFloat halfWidth = self.hitView.bounds.size.width / 2;
    CGFloat halfHeight = self.hitView.bounds.size.height / 2;
    center = CGPointMake(center.x - halfWidth, center.y - halfHeight);

    UIGestureRecognizerState state = [recognizer state];
    if (state != UIGestureRecognizerStateBegan && state != UIGestureRecognizerStateChanged)
        return;

    [self.renderViewController rotate:center degrees:recognizer.rotation];
    // Zero the recognizer so rotation is reported incrementally.
    [recognizer setRotation:0.0];
}
- (void) twoFingersScale:(UIPinchGestureRecognizer *) recognizer {
    // Two-finger pinch: express the gesture centroid in view-centered
    // coordinates (origin at the middle of hitView) and forward the
    // incremental scale factor to the renderer.
    CGPoint center = [recognizer locationInView:self.hitView];
    CGFloat halfWidth = self.hitView.bounds.size.width / 2;
    CGFloat halfHeight = self.hitView.bounds.size.height / 2;
    center = CGPointMake(center.x - halfWidth, center.y - halfHeight);

    UIGestureRecognizerState state = [recognizer state];
    if (state != UIGestureRecognizerStateBegan && state != UIGestureRecognizerStateChanged)
        return;

    // NOTE(review): "ammount" is misspelled, but it is part of the selector
    // the render view controller implements, so it must stay as-is.
    [self.renderViewController scale:center ammount:recognizer.scale];
    // Reset to 1.0 so scale is reported incrementally rather than cumulatively.
    [recognizer setScale:1.0];
}
//This allows gestures recognizers to happen simultaniously
//This allows gestures recognizers to happen simultaniously
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer {
    // Permit simultaneous recognition only when both recognizers are attached
    // to the same view, and never when a long press is involved.
    BOOL sameView = (gestureRecognizer.view == otherGestureRecognizer.view);
    BOOL involvesLongPress =
        [gestureRecognizer isKindOfClass:[UILongPressGestureRecognizer class]] ||
        [otherGestureRecognizer isKindOfClass:[UILongPressGestureRecognizer class]];
    return sameView && !involvesLongPress;
}
RenderViewController
对于每一帧,modelViewMatrix由其他三个临时矩阵(平移、缩放和旋转)计算得出
此解决方案使用在x、y平面上方移动摄影机。
世界是静止的。
我们处于某个z位置,我们通过挤压动作控制摄像机的移动
所需要的是将单位从以点表示的屏幕空间转换为以离散单位表示的开放gl空间
我通过在x,y平面上以opengl单位绘制50x50平方米得到了这个常数,
相机居中时x=0,y=0,z=100
然后我添加了两个UIView
s,并设置平移手势来移动这两个视图。
通过平移手势,我将它们的帧原点物理居中到opengl正方形的右上角和右下角。
点击手势被设置为NSLog
其原点。
这就是我得到这个常数的原因:
static const GLfloat k = 0.125f; // 445 CG units is 50 discrete OpenGL units at Z = 100; 0.112359f
所以,对于任意的摄像机z位置,我可以计算出我们从手指位置得到的缩放增量
某些init方法中的矩阵初始化:
aspect = fabsf(self.view.bounds.size.width / self.view.bounds.size.height);
projectionMatrix = GLKMatrix4MakePerspective(GLKMathDegreesToRadians(65.0f), aspect, 0.1f, 5000.0f);
lookAt = GLKMatrix4MakeLookAt(view_x, view_y, view_z, view_x, view_y, 0.0f, 0.0f, 1.0f, 0.0f);
所有ivar:
GLfloat view_x;
GLfloat view_y;
GLfloat view_z;
GLfloat view_z_base;
CGPoint currTouchOrigin1, currTouchOrigin2;
CGPoint prevTouchOrigin1, prevTouchOrigin2;
CGPoint newPos1, newPos2;
CGPoint midpoint;
CGFloat scaleFactor;
我们通过视图x、视图y和视图z来移动相机
所有常数:
static const GLfloat k = 0.125f; // 445 CG units is 50 discrete OpenGL units at Z = 100; 0.112359f
static const GLfloat default_z = 100.0f;
static const GLfloat kMinZoom = 30.0f;
static const GLfloat kMaxZoom = 4000.0f;
static const GLfloat kStartZoom = 200.0f;
这是完全收缩缩放处理程序:
// Final working pinch handler. Zooming moves the camera along z; panning
// anchors the world under the pinch midpoint by converting the screen-space
// midpoint drift into OpenGL world units via the empirical constant k,
// scaled by the current zoom ratio (view_z / default_z).
- (IBAction) handlePinch:(UIPinchGestureRecognizer*) recognizer {
switch (recognizer.state)
{
case UIGestureRecognizerStateBegan:
{
if (recognizer.numberOfTouches == 2)
{
// Cache both finger positions (view coordinates, in points) as the
// baseline for delta computation in the Changed phase.
prevTouchOrigin1 = [recognizer locationOfTouch:0 inView:self.view];
prevTouchOrigin2 = [recognizer locationOfTouch:1 inView:self.view];
}
} break;
case UIGestureRecognizerStateChanged:
{
if (recognizer.numberOfTouches == 2)
{
CGFloat newDistance, oldDistance;
// Finger separation before and after this event.
oldDistance = distanceBetweenTwoCGPoints(&prevTouchOrigin1, &prevTouchOrigin2);
currTouchOrigin1 = [recognizer locationOfTouch:0 inView:self.view];
currTouchOrigin2 = [recognizer locationOfTouch:1 inView:self.view];
newDistance = distanceBetweenTwoCGPoints(&currTouchOrigin1, &currTouchOrigin2);
if (newDistance == 0 || oldDistance == 0)
{
// Degenerate pinch (coincident fingers) — no zoom change.
scaleFactor = 1;
//break;
} else {
//scaleFactor = oldDistance / newDistance;
// new/old: spreading the fingers gives a factor > 1.
scaleFactor = newDistance / oldDistance;
}
// Apply the zoom tentatively; restore and bail if it leaves [kMinZoom, kMaxZoom].
GLfloat view_z_old = view_z;
view_z /= scaleFactor;
if (view_z < kMinZoom || view_z > kMaxZoom)
{
view_z = view_z_old;
return;
}
// translate
// formula: newPos = currTouchOrigin + (objectOrigin - prevTouchOrigin) * scaleFactor
//static CGPoint tmp1, tmp2;
// NOTE(review): these statics persist across calls but are fully
// overwritten before each use, so the qualifier is harmless.
static CGPoint translationDelta;
// Project each finger's previous position through the scale change,
// relative to the screen center (not the camera position, as in the
// earlier broken version). NOTE(review): screenRect is not declared in
// this excerpt — presumably an ivar holding the view bounds; confirm.
newPos1.x = currTouchOrigin1.x - ((prevTouchOrigin1.x - (screenRect.size.width / 2)) * scaleFactor);
newPos1.y = currTouchOrigin1.y - ((prevTouchOrigin1.y - (screenRect.size.height / 2)) * scaleFactor);
newPos2.x = currTouchOrigin2.x - ((prevTouchOrigin2.x - (screenRect.size.width / 2)) * scaleFactor);
newPos2.y = currTouchOrigin2.y - ((prevTouchOrigin2.y - (screenRect.size.height / 2)) * scaleFactor);
midpoint = CGPointMidpoint(&newPos1, &newPos2);
// Screen-space drift of the pinch midpoint from the screen center.
translationDelta = CGPointMake(midpoint.x - (screenRect.size.width / 2), midpoint.y - (screenRect.size.height / 2));
static GLfloat r = 0.0f;
static GLfloat k2 = 0.0f;
// Convert screen points to world units: k was calibrated at z = default_z,
// so scale it by the current zoom ratio.
r = view_z / default_z;
k2 = k * r;
// In openGL, coord sys if first quadrant based
// Signs flip because UIKit's origin is top-left (y down) while GL's y is up.
view_x += -translationDelta.x * k2;
view_y += translationDelta.y * k2;
// store current coords for next event
prevTouchOrigin1 = currTouchOrigin1;
prevTouchOrigin2 = currTouchOrigin2;
}
} break;
case UIGestureRecognizerStateEnded:
{
} break;
default :
{
}
}
}
- (IBAction) handlePinch:(UIPinchGestureRecognizer*) recognizer {
switch (recognizer.state)
{
case UIGestureRecognizerStateBegan:
{
if (recognizer.numberOfTouches == 2)
{
prevTouchOrigin1 = [recognizer locationOfTouch:0 inView:self.view];
prevTouchOrigin2 = [recognizer locationOfTouch:1 inView:self.view];
}
} break;
case UIGestureRecognizerStateChanged:
{
if (recognizer.numberOfTouches == 2)
{
CGFloat newDistance, oldDistance;
oldDistance = distanceBetweenTwoCGPoints(&prevTouchOrigin1, &prevTouchOrigin2);
// Final working pinch handler (duplicate listing). Zooming moves the camera
// along z; panning anchors the world under the pinch midpoint by converting
// the screen-space midpoint drift into OpenGL world units via the empirical
// constant k, scaled by the current zoom ratio (view_z / default_z).
- (IBAction) handlePinch:(UIPinchGestureRecognizer*) recognizer {
switch (recognizer.state)
{
case UIGestureRecognizerStateBegan:
{
if (recognizer.numberOfTouches == 2)
{
// Cache both finger positions (view coordinates, in points) as the
// baseline for delta computation in the Changed phase.
prevTouchOrigin1 = [recognizer locationOfTouch:0 inView:self.view];
prevTouchOrigin2 = [recognizer locationOfTouch:1 inView:self.view];
}
} break;
case UIGestureRecognizerStateChanged:
{
if (recognizer.numberOfTouches == 2)
{
CGFloat newDistance, oldDistance;
// Finger separation before and after this event.
oldDistance = distanceBetweenTwoCGPoints(&prevTouchOrigin1, &prevTouchOrigin2);
currTouchOrigin1 = [recognizer locationOfTouch:0 inView:self.view];
currTouchOrigin2 = [recognizer locationOfTouch:1 inView:self.view];
newDistance = distanceBetweenTwoCGPoints(&currTouchOrigin1, &currTouchOrigin2);
if (newDistance == 0 || oldDistance == 0)
{
// Degenerate pinch (coincident fingers) — no zoom change.
scaleFactor = 1;
//break;
} else {
//scaleFactor = oldDistance / newDistance;
// new/old: spreading the fingers gives a factor > 1.
scaleFactor = newDistance / oldDistance;
}
// Apply the zoom tentatively; restore and bail if it leaves [kMinZoom, kMaxZoom].
GLfloat view_z_old = view_z;
view_z /= scaleFactor;
if (view_z < kMinZoom || view_z > kMaxZoom)
{
view_z = view_z_old;
return;
}
// translate
// formula: newPos = currTouchOrigin + (objectOrigin - prevTouchOrigin) * scaleFactor
//static CGPoint tmp1, tmp2;
// NOTE(review): these statics persist across calls but are fully
// overwritten before each use, so the qualifier is harmless.
static CGPoint translationDelta;
// Project each finger's previous position through the scale change,
// relative to the screen center. NOTE(review): screenRect is not declared
// in this excerpt — presumably an ivar holding the view bounds; confirm.
newPos1.x = currTouchOrigin1.x - ((prevTouchOrigin1.x - (screenRect.size.width / 2)) * scaleFactor);
newPos1.y = currTouchOrigin1.y - ((prevTouchOrigin1.y - (screenRect.size.height / 2)) * scaleFactor);
newPos2.x = currTouchOrigin2.x - ((prevTouchOrigin2.x - (screenRect.size.width / 2)) * scaleFactor);
newPos2.y = currTouchOrigin2.y - ((prevTouchOrigin2.y - (screenRect.size.height / 2)) * scaleFactor);
midpoint = CGPointMidpoint(&newPos1, &newPos2);
// Screen-space drift of the pinch midpoint from the screen center.
translationDelta = CGPointMake(midpoint.x - (screenRect.size.width / 2), midpoint.y - (screenRect.size.height / 2));
static GLfloat r = 0.0f;
static GLfloat k2 = 0.0f;
// Convert screen points to world units: k was calibrated at z = default_z,
// so scale it by the current zoom ratio.
r = view_z / default_z;
k2 = k * r;
// In openGL, coord sys if first quadrant based
// Signs flip because UIKit's origin is top-left (y down) while GL's y is up.
view_x += -translationDelta.x * k2;
view_y += translationDelta.y * k2;
// store current coords for next event
prevTouchOrigin1 = currTouchOrigin1;
prevTouchOrigin2 = currTouchOrigin2;
}
} break;
case UIGestureRecognizerStateEnded:
{
} break;
default :
{
}
}
}