Sending frames from Processing to VPT using Syphon


I'm working with FaceOSCSyphon in Processing and need to send frames from Processing to VPT using Syphon. What do I need to add to my code to make this work?

I haven't used FaceOSCSyphon, but I have played with the FaceTracker library. Looking at its examples, the sketch acts as a Syphon client.
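For context, a Syphon client receives frames that some other application publishes. A minimal client sketch, assuming the Syphon library for Processing is installed, looks roughly like this (the no-name constructor simply connects to the first server it finds):

import codeanticode.syphon.*;

PImage img;
SyphonClient client;

void setup() {
  size(640, 480, P2D);
  // no server name given: connect to the first Syphon server found
  client = new SyphonClient(this);
}

void draw() {
  background(0);
  // fetch a frame whenever the server has published a new one
  if (client.newFrame()) {
    img = client.getImage(img);
  }
  if (img != null) {
    image(img, 0, 0, width, height);
  }
}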

In your case, according to page 33 of (large pdf link), your Processing sketch needs to be a Syphon server.

In Processing, run Examples > Contributed Libraries > Syphon > Send Frames.
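If you don't have the examples at hand, the Send Frames sketch boils down to something like the following (paraphrased from memory, so details may differ; the server name string is arbitrary and is what clients will list):

import codeanticode.syphon.*;

PGraphics canvas;
SyphonServer server;

void setup() {
  size(400, 400, P3D);
  canvas = createGraphics(400, 400, P3D);
  // the name is arbitrary; it is how clients such as VPT identify this server
  server = new SyphonServer(this, "Processing Syphon");
}

void draw() {
  // render a spinning box into an offscreen canvas
  canvas.beginDraw();
  canvas.background(127);
  canvas.lights();
  canvas.translate(width / 2, height / 2);
  canvas.rotateX(frameCount * 0.01);
  canvas.rotateY(frameCount * 0.01);
  canvas.box(150);
  canvas.endDraw();
  // show the canvas locally and publish it over Syphon
  image(canvas, 0, 0);
  server.sendImage(canvas);
}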

In VPT, scroll down the layer list on the right until you find the syph section. There you should be able to select the Processing Syphon server that the Send Frames example is running.

At this point it should be clear how to get frames from Processing into VPT.

As for the FaceOSC part, I suggest merging the Send Frames example with the FaceOSC example: keep the code that reads the FaceOSC data, but instead of setting up a Syphon client, set up a Syphon server.

For example:

//
// a template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker
//
// this example includes a class to abstract the Face data
//
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
//
// adapted from Greg Borenstein's 2011 example
// http://www.gregborenstein.com/
// https://gist.github.com/1603230
//
import codeanticode.syphon.*;
import oscP5.*;
OscP5 oscP5;

PGraphics canvas;
SyphonServer server;


// our FaceOSC tracked face data
Face face = new Face();

void setup() {
  size(640, 480, P2D);
  frameRate(30);

  // listen for FaceOSC messages on port 8338 (FaceOSC's default)
  oscP5 = new OscP5(this, 8338);

  canvas = createGraphics(640, 480, P2D);

  // create a Syphon server to send frames out
  server = new SyphonServer(this, "FaceOSC Processing Syphon");
}

void draw() {
  canvas.beginDraw();
  canvas.background(255);
  canvas.stroke(0);

  if (face.found > 0) {
    // move to the tracked head position and scale with it
    canvas.translate(face.posePosition.x, face.posePosition.y);
    canvas.scale(face.poseScale);
    canvas.noFill();
    // eyes
    canvas.ellipse(-20, face.eyeLeft * -9, 20, 7);
    canvas.ellipse(20, face.eyeRight * -9, 20, 7);
    // mouth
    canvas.ellipse(0, 20, face.mouthWidth * 3, face.mouthHeight * 3);
    // nostrils
    canvas.ellipse(-5, face.nostrils * -1, 7, 3);
    canvas.ellipse(5, face.nostrils * -1, 7, 3);
    // eyebrows
    canvas.rectMode(CENTER);
    canvas.fill(0);
    canvas.rect(-20, face.eyebrowLeft * -5, 25, 5);
    canvas.rect(20, face.eyebrowRight * -5, 25, 5);

    print(face.toString());
  }
  canvas.endDraw();
  // show the canvas locally and publish it over Syphon
  image(canvas, 0, 0);
  server.sendImage(canvas);
}

// OSC CALLBACK FUNCTIONS

void oscEvent(OscMessage m) {
  face.parseOSC(m);
}

// a single tracked face from FaceOSC
class Face {

  // num faces found
  int found;

  // pose
  float poseScale;
  PVector posePosition = new PVector();
  PVector poseOrientation = new PVector();

  // gesture
  float mouthHeight, mouthWidth;
  float eyeLeft, eyeRight;
  float eyebrowLeft, eyebrowRight;
  float jaw;
  float nostrils;

  Face() {}

  // parse an OSC message from FaceOSC
  // returns true if a message was handled
  boolean parseOSC(OscMessage m) {

    if(m.checkAddrPattern("/found")) {
        found = m.get(0).intValue();
        return true;
    }      

    // pose
    else if(m.checkAddrPattern("/pose/scale")) {
        poseScale = m.get(0).floatValue();
        return true;
    }
    else if(m.checkAddrPattern("/pose/position")) {
        posePosition.x = m.get(0).floatValue();
        posePosition.y = m.get(1).floatValue();
        return true;
    }
    else if(m.checkAddrPattern("/pose/orientation")) {
        poseOrientation.x = m.get(0).floatValue();
        poseOrientation.y = m.get(1).floatValue();
        poseOrientation.z = m.get(2).floatValue();
        return true;
    }

    // gesture
    else if(m.checkAddrPattern("/gesture/mouth/width")) {
        mouthWidth = m.get(0).floatValue();
        return true;
    }
    else if(m.checkAddrPattern("/gesture/mouth/height")) {
        mouthHeight = m.get(0).floatValue();
        return true;
    }
    else if(m.checkAddrPattern("/gesture/eye/left")) {
        eyeLeft = m.get(0).floatValue();
        return true;
    }
    else if(m.checkAddrPattern("/gesture/eye/right")) {
        eyeRight = m.get(0).floatValue();
        return true;
    }
    else if(m.checkAddrPattern("/gesture/eyebrow/left")) {
        eyebrowLeft = m.get(0).floatValue();
        return true;
    }
    else if(m.checkAddrPattern("/gesture/eyebrow/right")) {
        eyebrowRight = m.get(0).floatValue();
        return true;
    }
    else if(m.checkAddrPattern("/gesture/jaw")) {
        jaw = m.get(0).floatValue();
        return true;
    }
    else if(m.checkAddrPattern("/gesture/nostrils")) {
        nostrils = m.get(0).floatValue();
        return true;
    }

    return false;
  }

  // get the current face values as a string (includes end lines)
  String toString() {
    return "found: " + found + "\n"
           + "pose" + "\n"
           + " scale: " + poseScale + "\n"
           + " position: " + posePosition.toString() + "\n"
           + " orientation: " + poseOrientation.toString() + "\n"
           + "gesture" + "\n"
           + " mouth: " + mouthWidth + " " + mouthHeight + "\n"
           + " eye: " + eyeLeft + " " + eyeRight + "\n"
           + " eyebrow: " + eyebrowLeft + " " + eyebrowRight + "\n"
           + " jaw: " + jaw + "\n"
           + " nostrils: " + nostrils + "\n";
  }

}
Note that this is untested merged code. It should get the idea across, but I can't test it at the moment.

Make sure the sketch is running before launching VPT (otherwise, restart VPT).