SharpNow Vidoo SDK 2.02
Vidoo Developer Guide

This chapter introduces several of the sample projects found in the source directory:

SampleC

This sample uses the C-style interface of the VidooSDK. It mainly demonstrates loading the dynamic-link library with LoadLibraryA, resolving the C-style interface functions with GetProcAddress, detecting device connection and disconnection, and querying the current hand gesture and its hold counter.

The sample source code follows for reference:

#include <sharpnow/sharpnow.h>
#include <iostream>
#include <Windows.h>
int main(int argc, char** argv)
{
// Load the dynamic-link library
HMODULE dll = NULL;
#ifdef _DEBUG
dll = LoadLibraryA("../../lib/sharpnowD.dll");
#else
dll = LoadLibraryA("../../lib/sharpnow.dll");
#endif
if (!dll) return -1;
// Resolve the interface functions
sharpnow::Proto_RetrieveFrame RetrieveFrame = (sharpnow::Proto_RetrieveFrame)GetProcAddress(dll, "RetrieveFrame");
sharpnow::Proto_GetFrameInfo GetFrameInfo = (sharpnow::Proto_GetFrameInfo)GetProcAddress(dll, "GetFrameInfo");
sharpnow::Proto_GetHand GetHand = (sharpnow::Proto_GetHand)GetProcAddress(dll, "GetHand");
if (!RetrieveFrame || !GetFrameInfo || !GetHand) return -1;
std::cout << "Waiting for a device to connect ... " << std::endl;
bool connected[] = { false, false };
while (true)
{
for (int device = 0; device < 2; ++device)
{
if (RetrieveFrame(device))
{
// Check whether this is a newly connected device
if (!connected[device])
{
std::cout << "Vidoo device " << device << " connected" << std::endl;
connected[device] = true;
}
// Make sure the latest data has been received
const sharpnow::Frame* frame = GetFrameInfo();
if (!frame->updated) continue;
// Check the current gesture type
const sharpnow::Hand* hand = GetHand(frame->hand_focused);
if (hand
&& hand->gesture != sharpnow::HG_Unknown
&& hand->gesture_counter <= 25)
{
// Report the recognized gesture
std::cout << "\rCurrent gesture: ";
switch (hand->gesture)
{
case sharpnow::HG_Grip: std::cout << "Grip "; break;
case sharpnow::HG_Point: std::cout << "Point "; break;
case sharpnow::HG_Gun: std::cout << "Gun "; break;
case sharpnow::HG_Victory: std::cout << "Victory "; break;
case sharpnow::HG_Metal: std::cout << "Metal "; break;
case sharpnow::HG_Phone: std::cout << "Phone "; break;
case sharpnow::HG_Fork: std::cout << "Fork "; break;
case sharpnow::HG_Three: std::cout << "Three "; break;
case sharpnow::HG_Love: std::cout << "Love "; break;
case sharpnow::HG_Four: std::cout << "Four "; break;
case sharpnow::HG_Five: std::cout << "Five "; break;
case sharpnow::HG_Good: std::cout << "Good "; break;
case sharpnow::HG_Draw: std::cout << "Draw "; break;
case sharpnow::HG_OK: std::cout << "OK "; break;
case sharpnow::HG_Stop: std::cout << "Stop "; break;
}
for (int i = 0; i < 25; ++i) std::cout << (i < hand->gesture_counter ? "." : " ");
if (hand->gesture_counter == 25) std::cout << " OK\n";
}
}
else
{
// Detect device disconnection
if (connected[device])
{
std::cout << "Vidoo device " << device << " disconnected" << std::endl;
connected[device] = false;
}
continue;
}
}
::Sleep(1);
}
return 0;
}
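
The sample above never unloads the DLL because it polls in an endless loop. If an application needs to load the interface on demand and release it later, the GetProcAddress boilerplate can be wrapped in a small helper. The following is only an illustrative sketch: the VidooApi struct and the LoadVidoo/UnloadVidoo functions are not part of the SDK, while the Proto_* typedefs and export names are the ones already used above.

#include <sharpnow/sharpnow.h>
#include <Windows.h>
// Hypothetical bundle of the resolved entry points; not part of the SDK.
struct VidooApi
{
    HMODULE dll = NULL;
    sharpnow::Proto_RetrieveFrame RetrieveFrame = NULL;
    sharpnow::Proto_GetFrameInfo GetFrameInfo = NULL;
    sharpnow::Proto_GetHand GetHand = NULL;
};
// Load the DLL and resolve the three functions used by SampleC.
bool LoadVidoo(VidooApi& api, const char* path)
{
    api.dll = LoadLibraryA(path);
    if (!api.dll) return false;
    api.RetrieveFrame = (sharpnow::Proto_RetrieveFrame)GetProcAddress(api.dll, "RetrieveFrame");
    api.GetFrameInfo = (sharpnow::Proto_GetFrameInfo)GetProcAddress(api.dll, "GetFrameInfo");
    api.GetHand = (sharpnow::Proto_GetHand)GetProcAddress(api.dll, "GetHand");
    if (api.RetrieveFrame && api.GetFrameInfo && api.GetHand) return true;
    FreeLibrary(api.dll);
    api = VidooApi();
    return false;
}
// Release the DLL once the interface is no longer needed.
void UnloadVidoo(VidooApi& api)
{
    if (api.dll) FreeLibrary(api.dll);
    api = VidooApi();
}

With such a helper the loading section of the sample reduces to VidooApi api; if (!LoadVidoo(api, "../../lib/sharpnow.dll")) return -1; followed by a matching UnloadVidoo(api) on exit.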

SampleCPP

This sample uses the C++-style interface of the VidooSDK. It mainly demonstrates loading the library through the sharpnow::SDK wrapper, detecting device connection and disconnection, reading the focused finger, and moving the Windows cursor with SetCursorPos.

The sample source code follows for reference:

#include <sharpnow/sharpnow.hpp>
#include <iostream>
#include <Windows.h>
// C++-style SDK wrapper instance used throughout the sample
static sharpnow::SDK sdk;
int main(int argc, char** argv)
{
// Load the dynamic-link library
if (!sdk.LoadInPath("../../lib")) return -1;
std::cout << "Waiting for a device to connect ... " << std::endl;
bool connected[] = { false, false };
while (true)
{
for (int device = 0; device < 2; ++device)
{
if (sdk.RetrieveFrame(device))
{
// Check whether this is a newly connected device
if (!connected[device])
{
std::cout << "Vidoo device " << device << " connected" << std::endl;
std::cout << "Extend a single finger to move the screen cursor" << std::endl;
connected[device] = true;
}
// Make sure the latest data has been received
const sharpnow::Frame* frame = sdk.GetFrameInfo();
if (!frame->updated) continue;
// With a single finger stretched out, drive the screen cursor
const sharpnow::Finger* finger = sdk.GetFinger(frame->finger_focused, 0);
if (frame->finger_stretched_number <= 2 && finger)
{
std::cout << "\r" << finger->cursor.x << " " << finger->cursor.y;
SetCursorPos(
int(finger->cursor.x * ::GetSystemMetrics(SM_CXSCREEN)),
int(finger->cursor.y * ::GetSystemMetrics(SM_CYSCREEN)));
}
}
else
{
// Detect device disconnection
if (connected[device])
{
std::cout << "Vidoo device " << device << " disconnected" << std::endl;
connected[device] = false;
}
continue;
}
}
::Sleep(1);
}
return 0;
}
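
Mapping the normalized cursor position straight to SetCursorPos moves the pointer on every tracking frame, which can look jittery. A common refinement is to low-pass filter the coordinates first. The sketch below is illustrative only; it assumes, as the sample does, that finger->cursor holds normalized [0,1] screen coordinates, and the MoveCursorSmoothed helper and its smoothing factor are not part of the SDK.

#include <Windows.h>
// Exponentially smooth the normalized cursor before mapping it to pixels.
// 'alpha' near 1.0 follows the finger closely; smaller values smooth more
// at the cost of a little latency.
static void MoveCursorSmoothed(float x, float y, float alpha = 0.35f)
{
    static float sx = 0.5f, sy = 0.5f; // last smoothed position (normalized)
    sx += alpha * (x - sx);
    sy += alpha * (y - sy);
    ::SetCursorPos(
        int(sx * ::GetSystemMetrics(SM_CXSCREEN)),
        int(sy * ::GetSystemMetrics(SM_CYSCREEN)));
}

In the sample's inner loop this would replace the direct SetCursorPos call with MoveCursorSmoothed(finger->cursor.x, finger->cursor.y).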

SampleGL

This sample renders 3D hand data in OpenGL. It mainly demonstrates reading the hand, finger-skeleton and trajectory data, and drawing joints, bones, palm axes and fingertip trails in a GLUT render loop.

Part of the sample source code follows for reference:

// Draw the current scene: spheres mark fingertip positions, lines mark fingertip directions
void Draw()
{
const sharpnow::Frame* frame = sdk.GetFrameInfo();
for (int w = 0; w < frame->hand_number; ++w)
{
const sharpnow::Hand& h = *sdk.GetHand(frame->hand[w]);
sharpnow::Vector3 p = h.position, q1, q2;
float r = show_skeleton ? 0.002f : 0.006f;
float r1 = r * 1.1f, r2 = r * 1.5f;
// Draw the finger skeleton
for (int i = 0; i < h.finger_skeleton_number; ++i)
{
const sharpnow::Finger& f = *sdk.GetFinger(h.finger_skeleton, i);
if (f.focused)
glColor3d(1.0, 1.0, 0.6);
else
glColor3d(0.6, 0.6, 1.0);
if (f.role_id == 0)
{
DrawTube(f.joint1, f.joint2, r);
DrawTube(f.joint2, f.joint3, r);
DrawSphere(f.joint1, r1);
DrawSphere(f.joint2, r1);
DrawSphere(f.joint3, r2);
}
else
{
DrawTube(f.joint0, f.joint1, r);
DrawTube(f.joint1, f.joint2, r);
DrawTube(f.joint2, f.joint3, r);
DrawSphere(f.joint0, r1);
DrawSphere(f.joint1, r1);
DrawSphere(f.joint2, r1);
DrawSphere(f.joint3, r2);
}
glColor3d(0.6, 0.6, 1.0);
if (show_skeleton)
DrawTube(f.joint3, p, r);
else
{
switch (f.role_id)
{
case 0: case 1:
DrawTube(h.wrist_thumb, f.joint3, 0.005f);
break;
case 2: case 3:
DrawTube(sdk.GetFinger(h.finger_skeleton, i - 1)->joint3, f.joint3, 0.005f);
break;
case 4:
DrawTube(sdk.GetFinger(h.finger_skeleton, i - 1)->joint3, f.joint3, 0.005f);
DrawTube(h.wrist_pinky, f.joint3, 0.005f);
break;
}
}
if (show_skeleton)
{
glColor3d(0.8, 0.8, 0.8);
DrawTube(p, p + h.arm_direction * 0.06f, 0.005f);
}
else
{
glColor3d(0.6, 0.6, 1.0);
DrawTube(h.wrist_thumb, h.wrist_pinky, 0.005f);
DrawSphere(h.wrist_thumb, 0.006f);
DrawSphere(h.wrist_pinky, 0.006f);
}
}
// Draw the palm center
glColor3d(0.9, 0.9, 0.9);
DrawSphere(p, 0.01f);
// Draw the hand coordinate axes
glLineWidth(3);
glColor3d(1,0,0); DrawLine(p, p + h.rotation.GetAxisX() * 0.04f);
glColor3d(0,1,0); DrawLine(p, p + h.rotation.GetAxisY() * 0.04f);
glColor3d(0,0,1); DrawLine(p, p + h.rotation.GetAxisZ() * 0.04f);
}
// Draw each fingertip's trajectory
if (show_traj)
{
glLineWidth(6);
const int traj_size = 100;
sharpnow::Vector3 traj[traj_size];
for (int i = 0; i < frame->finger_stretched_number; ++i)
{
int tid = sdk.GetFinger(frame->finger_stretched, i)->track_id;
int len = sdk.GetTrajectory(tid, traj, traj_size);
double r = tid % 2, g = tid % 3 * 0.33, b = 1 - tid % 5 * 0.2;
for (int k = len - 1; k > 0; --k)
{
glColor3d(r, g, b);
DrawLine(traj[k], traj[k-1]);
}
}
}
}
// Update frame data
void UpdateFrameData()
{
// Poll each device slot for a new frame
for (int i = 0; i < 3; ++i) if (sdk.RetrieveFrame(i))
{
if (!sdk.GetFrameInfo()->updated) break;
const sharpnow::Frame* frame = sdk.GetFrameInfo();
glutPostWindowRedisplay(window);
break;
}
}
int main(int argc, char** argv)
{
// Load the dynamic-link library
sdk.LoadInPath("../../lib");
// Initialize the OpenGL rendering environment
glutInit(&argc, argv);
glutInitWindowPosition(10, 10);
glutInitWindowSize(window_width, window_height);
glutInitDisplayMode(GLUT_RGB | GLUT_DEPTH | GLUT_DOUBLE);
window = glutCreateWindow("SharpNow Vidoo OpenGL Sample");
glutSetWindow(window);
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
glEnable(GL_COLOR_MATERIAL);
glEnable(GL_NORMALIZE);
// Register the OpenGL callbacks
glutDisplayFunc(CALLBACK_Display);
glutReshapeFunc(CALLBACK_CallReshape);
glutKeyboardFunc(CALLBACK_Keyboard);
char_table = glGenLists(128);
HFONT hFont = CreateFontA(32, 0, 0, 0, FW_MEDIUM, 0, 0, 0, ANSI_CHARSET, OUT_DEFAULT_PRECIS,
CLIP_DEFAULT_PRECIS, PROOF_QUALITY, DEFAULT_PITCH | FF_SWISS, "Consolas");
HFONT hOldFont = (HFONT)SelectObject(wglGetCurrentDC(), hFont);
DeleteObject(hOldFont);
wglUseFontBitmaps(wglGetCurrentDC(), 0, 128, char_table);
while (true)
{
UpdateFrameData();
glutMainLoopEvent();
::Sleep(1);
}
}
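
The excerpt above calls DrawSphere, DrawTube and DrawLine, which are defined elsewhere in the project and not shown here. Below is a minimal sketch of how such helpers could be implemented with GLU quadrics and immediate-mode lines; it assumes sharpnow::Vector3 exposes x, y and z members (as the other samples suggest) and may differ from the helpers actually shipped with the sample.

#include <sharpnow/sharpnow.hpp>
#include <GL/glut.h>
#include <cmath>
// Shared GLU quadric used for spheres and cylinders.
static GLUquadric* Quadric()
{
    static GLUquadric* q = gluNewQuadric();
    return q;
}
// Draw a straight line segment from a to b.
static void DrawLine(const sharpnow::Vector3& a, const sharpnow::Vector3& b)
{
    glBegin(GL_LINES);
    glVertex3f(a.x, a.y, a.z);
    glVertex3f(b.x, b.y, b.z);
    glEnd();
}
// Draw a sphere of radius r centered at p.
static void DrawSphere(const sharpnow::Vector3& p, float r)
{
    glPushMatrix();
    glTranslatef(p.x, p.y, p.z);
    gluSphere(Quadric(), r, 12, 12);
    glPopMatrix();
}
// Draw a cylinder of radius r from a to b by rotating the quadric's
// +Z axis onto the segment direction.
static void DrawTube(const sharpnow::Vector3& a, const sharpnow::Vector3& b, float r)
{
    float dx = b.x - a.x, dy = b.y - a.y, dz = b.z - a.z;
    float len = std::sqrt(dx * dx + dy * dy + dz * dz);
    if (len <= 0.0f) return;
    float angle = std::acos(dz / len) * 180.0f / 3.14159265f;
    float ax = -dy, ay = dx;                 // rotation axis = Z cross (b - a)
    if (ax == 0.0f && ay == 0.0f) ax = 1.0f; // segment parallel to Z: any perpendicular axis works
    glPushMatrix();
    glTranslatef(a.x, a.y, a.z);
    glRotatef(angle, ax, ay, 0.0f);
    gluCylinder(Quadric(), r, r, len, 10, 1);
    glPopMatrix();
}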

SampleOculus

This sample uses the augmented camera image inside an Oculus VR scene. Install the Oculus SDK before building and running it. It mainly demonstrates retrieving the left/right AR images from the frame, uploading them as OpenGL textures, and compositing them over the scene rendered for each eye.

Part of the sample source code follows for reference:

sharpnow::SDK vid_sdk;
sharpnow::Vector2 _tex_cood[2][2];
GLuint handTex[2];
void initHand()
{
glGenTextures(2, handTex);
for (unsigned int i = 0; i < 2; ++i)
{
glBindTexture(GL_TEXTURE_2D, handTex[i]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
void updateHand()
{
for (int i = 0; i < 3; ++i) if (vid_sdk.RetrieveFrame(i))
{
const sharpnow::Frame* frame = vid_sdk.GetFrameInfo();
if (frame && frame->updated)
{
// Fetch the image data
if (frame->ar_image_left && frame->ar_image_right)
{
glBindTexture(GL_TEXTURE_2D, handTex[0]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, frame->ar_image_width, frame->ar_image_height,
0, GL_RGBA, GL_UNSIGNED_BYTE, frame->ar_image_left);
glBindTexture(GL_TEXTURE_2D, handTex[1]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, frame->ar_image_width, frame->ar_image_height,
0, GL_RGBA, GL_UNSIGNED_BYTE, frame->ar_image_right);
_tex_cood[0][0] = frame->tex_coord1_left;
_tex_cood[0][1] = frame->tex_coord2_left;
_tex_cood[1][0] = frame->tex_coord1_right;
_tex_cood[1][1] = frame->tex_coord2_right;
}
}
break;
}
}
int main(int argc, char** argv)
{
// ...
// Load the dynamic-link library
vid_sdk.LoadInPath("../../../lib");
vid_sdk.ConfigVR(NULL);
ovrHmd_DismissHSWDisplay(HMD);
// Main loop
while (!(Platform.Key['Q'] && Platform.Key[VK_CONTROL]) && !Platform.Key[VK_ESCAPE])
{
Platform.HandleMessages();
ovrHmd_BeginFrame(HMD, 0);
//Get eye poses, feeding in correct IPD offset
ovrVector3f ViewOffset[2] = { EyeRenderDesc[0].HmdToEyeViewOffset, EyeRenderDesc[1].HmdToEyeViewOffset };
ovrPosef EyeRenderPose[2];
ovrHmd_GetEyePoses(HMD, 0, ViewOffset, EyeRenderPose, NULL);
updateHand();
// ...
for (int eye = 0; eye < 2; eye++)
{
Matrix4f rollPitchYaw = Matrix4f::RotationY(Yaw);
Matrix4f finalRollPitchYaw = rollPitchYaw * Matrix4f(EyeRenderPose[eye].Orientation);
Vector3f finalUp = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
Vector3f finalForward = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
Vector3f shiftedEyePos = Pos2 + rollPitchYaw.Transform(EyeRenderPose[eye].Position);
Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
Matrix4f proj = ovrMatrix4f_Projection(HMD->DefaultEyeFov[eye], 0.1f, 1000.0f, ovrProjection_RightHanded);
glActiveTexture(GL_TEXTURE0);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, sceneRenderTexture[eye]->texId);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_COMBINE);
glTexEnvi(GL_TEXTURE_ENV, GL_COMBINE_RGB, GL_REPLACE);
glTexEnvi(GL_TEXTURE_ENV, GL_SRC0_RGB, GL_TEXTURE0);
glActiveTexture(GL_TEXTURE1);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, handTex[eye]);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_COMBINE);
glTexEnvi(GL_TEXTURE_ENV, GL_COMBINE_RGB, GL_INTERPOLATE);
glTexEnvi(GL_TEXTURE_ENV, GL_SRC0_RGB, GL_TEXTURE1);
glTexEnvi(GL_TEXTURE_ENV, GL_OPERAND0_RGB, GL_SRC_COLOR);
glTexEnvi(GL_TEXTURE_ENV, GL_SRC1_RGB, GL_PREVIOUS);
glTexEnvi(GL_TEXTURE_ENV, GL_OPERAND1_RGB, GL_SRC_COLOR);
glTexEnvi(GL_TEXTURE_ENV, GL_SRC2_RGB, GL_TEXTURE1);
glTexEnvi(GL_TEXTURE_ENV, GL_OPERAND2_RGB, GL_SRC_ALPHA);
sharpnow::Vector2 d1 = _tex_cood[eye][0];
sharpnow::Vector2 d2 = _tex_cood[eye][1];
// Texture overlay
glBegin(GL_QUADS);
glNormal3f(0, 0, 1);
glMultiTexCoord2d(GL_TEXTURE0, 0, 0);
glMultiTexCoord2d(GL_TEXTURE1, d1.x, d2.y);
glMultiTexCoord2d(GL_TEXTURE2, 0, 0);
glVertex2d(-1, -1);
glMultiTexCoord2d(GL_TEXTURE0, 0, 1);
glMultiTexCoord2d(GL_TEXTURE1, d1.x, d1.y);
glMultiTexCoord2d(GL_TEXTURE2, 0, 1);
glVertex2d(-1, 1);
glMultiTexCoord2d(GL_TEXTURE0, 1, 1);
glMultiTexCoord2d(GL_TEXTURE1, d2.x, d1.y);
glMultiTexCoord2d(GL_TEXTURE2, 1, 1);
glVertex2d(1, 1);
glMultiTexCoord2d(GL_TEXTURE0, 1, 0);
glMultiTexCoord2d(GL_TEXTURE1, d2.x, d2.y);
glMultiTexCoord2d(GL_TEXTURE2, 1, 0);
glVertex2d(1, -1);
glEnd();
glActiveTexture(GL_TEXTURE1);
glDisable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE2);
glDisable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, 0);
}
}
//Release
ovrHmd_Destroy(HMD);
ovr_Shutdown();
Platform.ReleaseWindow(hInst);
return 0;
}
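
The GL_COMBINE setup above interpolates between the rendered scene (on texture unit 0) and the hand image (on texture unit 1) using the hand image's alpha channel. When the scene has already been written to the eye framebuffer, a similar result can often be achieved with plain alpha blending and a single texture. The sketch below is an illustrative alternative rather than code from the sample; it assumes the AR image's alpha channel marks where the hands should be visible.

#include <Windows.h>
#include <GL/gl.h>
#include <sharpnow/sharpnow.hpp>
// Draw the AR hand image for one eye as an alpha-blended full-screen quad.
// Assumes the scene for this eye has already been rendered to the current
// framebuffer. d1/d2 are the texture-coordinate corners taken from the frame.
void OverlayHandImage(GLuint tex, const sharpnow::Vector2& d1, const sharpnow::Vector2& d2)
{
    glPushAttrib(GL_ENABLE_BIT | GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, tex);
    glColor4f(1, 1, 1, 1);
    glBegin(GL_QUADS);
    glTexCoord2f(d1.x, d2.y); glVertex2f(-1, -1);
    glTexCoord2f(d1.x, d1.y); glVertex2f(-1, 1);
    glTexCoord2f(d2.x, d1.y); glVertex2f(1, 1);
    glTexCoord2f(d2.x, d2.y); glVertex2f(1, -1);
    glEnd();
    glBindTexture(GL_TEXTURE_2D, 0);
    glPopAttrib();
}

The texture-coordinate corners follow the same mapping the sample uses for its quad, so the image orientation stays unchanged.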

SampleDecode

This sample receives Vidoo message data on a local UDP port and decodes it. It mainly demonstrates setting up a UDP socket, decoding the received packets with sharpnow::Vidoo, and reading the resulting frame, hand and finger information.

The sample source code follows for reference:

#include <Winsock2.h>
#include <stdio.h>
#include "../../sdk-source/vidoo.h"
#pragma comment(lib, "Ws2_32.lib")
static sharpnow::Vidoo vid;
using namespace sharpnow;
int main(int argc, char** argv)
{
const int udp_buffer_len = 16384;
const unsigned short udp_port = 15670;
char buffer[udp_buffer_len];
// Initialize Winsock
WSADATA wsadata;
if (::WSAStartup(MAKEWORD(2, 0), &wsadata) != 0) return -1;
// Create a UDP socket
SOCKET sock = ::socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
if (sock == INVALID_SOCKET) return -1;
// Bind the port
sockaddr_in addr;
addr.sin_family = AF_INET;
addr.sin_port = htons(udp_port);
addr.sin_addr.s_addr = htonl(INADDR_ANY);
if (bind(sock, (sockaddr*)&addr, sizeof(addr)) < 0) return -1;
// Receive broadcast messages in a loop
while (true)
{
int sender_size = sizeof(addr);
int n = ::recvfrom(sock, buffer, udp_buffer_len, 0, (sockaddr*)&addr, &sender_size);
if (n < 0) break;
vid.Decode(buffer + 2, n - 2); // the first two bytes are a sequence number and checksum
// Print the current info
const sharpnow::Frame* frame = vid.GetFrameInfo();
printf("hand_num=%d stretched_finger=%d ", frame->hand_number, frame->finger_stretched_number);
const sharpnow::Finger* finger = vid.GetFinger(frame->finger_focused, 0);
if (finger) printf("position=%f,%f,%f", finger->position.x, finger->position.y, finger->position.z);
printf("\n");
}
// Shut down networking
shutdown(sock, SD_BOTH);
closesocket(sock);
::WSACleanup();
return 0;
}
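
recvfrom blocks indefinitely, so the loop above cannot notice when the device stops broadcasting. One way to add a receive timeout, shown here as an illustrative sketch rather than part of the sample, is to wait on the socket with select() before each call:

#include <Winsock2.h>
// Wait up to timeout_ms for a datagram to arrive on 'sock'.
// Returns true if data is ready, false on timeout or error.
static bool WaitForPacket(SOCKET sock, int timeout_ms)
{
    fd_set readfds;
    FD_ZERO(&readfds);
    FD_SET(sock, &readfds);
    timeval tv;
    tv.tv_sec = timeout_ms / 1000;
    tv.tv_usec = (timeout_ms % 1000) * 1000;
    // The first parameter is ignored by Winsock's select().
    return ::select(0, &readfds, NULL, NULL, &tv) == 1;
}

Inside the receive loop this could guard the recvfrom call, for example: if (!WaitForPacket(sock, 500)) continue;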