Microsoft HoloLens Academy Tutorial: Holograms 211 - Gestures
When interacting with holograms on HoloLens, the user relies on gestures. The air-tap gesture is similar to a mouse click on a PC: while gazing at a hologram, an air tap triggers the corresponding feedback. This tutorial explains how to track the user's hands and respond to gesture input.
In this chapter you will add several new gesture features:
1. Detect when a hand is being tracked and provide feedback to the user.
2. Use the Navigation gesture to rotate holograms.
3. Provide feedback when the hand is about to move out of the trackable range.
4. Use Manipulation events to move holograms.
Errata and notes
"Just My Code" needs to be disabled in Visual Studio. You can find "Enable Just My Code" under Tools -> Options -> Debugging.
Objectives:
1. Subscribe to hand tracking events.
2. Use cursor feedback to show the user when a hand is being tracked.
Instructions:
The HandsManager.cs script implements the following behavior:
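The chapter-1 listing is not reproduced in this post (the extended chapter-2 version appears further below). At its core, hand detection is just a matter of subscribing to the source events of the legacy UnityEngine.VR.WSA.Input InteractionManager used throughout this tutorial. A minimal sketch of that idea (the class name below is a placeholder, not the tutorial's script):

using UnityEngine;
using UnityEngine.VR.WSA.Input;

// Placeholder sketch: subscribe to InteractionManager source events and expose
// a HandDetected flag that other scripts (such as the cursor) can read.
public class HandDetectionSketch : MonoBehaviour
{
    public bool HandDetected { get; private set; }

    void Awake()
    {
        InteractionManager.SourceDetected += OnSourceDetected;
        InteractionManager.SourceLost += OnSourceLost;
    }

    void OnDestroy()
    {
        InteractionManager.SourceDetected -= OnSourceDetected;
        InteractionManager.SourceLost -= OnSourceLost;
    }

    private void OnSourceDetected(InteractionSourceState state)
    {
        // A hand (or other interaction source) has entered the gesture frame.
        HandDetected = true;
    }

    private void OnSourceLost(InteractionSourceState state)
    {
        // The source left the gesture frame; tracking is lost.
        HandDetected = false;
    }
}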
The CursorFeedback.cs script implements the following behavior:
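CursorFeedback.cs is also not reproduced in this post. The general idea, as a rough sketch rather than the actual project script (the class and field names below are placeholders): each frame, check HandsManager.Instance.HandDetected and show or hide a small "hand detected" indicator parented to the cursor.

using UnityEngine;

// Placeholder sketch of cursor feedback; the real CursorFeedback.cs in the
// tutorial project differs in detail.
public class CursorFeedbackSketch : MonoBehaviour
{
    [Tooltip("Indicator object to show while a hand is being tracked (placeholder field).")]
    public GameObject HandDetectedIndicator;

    void Update()
    {
        if (HandDetectedIndicator == null)
        {
            return;
        }

        // Show the indicator only while HandsManager reports a tracked hand.
        bool handDetected = HandsManager.Instance.HandDetected;
        if (HandDetectedIndicator.activeSelf != handDetected)
        {
            HandDetectedIndicator.SetActive(handDetected);
        }
    }
}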
Build and Deploy
Use the Navigation gesture (pinching the index finger and thumb together) to rotate holograms.
Instructions:
To use Navigation gesture events, you need to edit the following four scripts:
using HoloToolkit;
using UnityEngine.VR.WSA.Input;
using UnityEngine;

/// <summary>
/// HandsManager keeps track of when a hand is detected.
/// </summary>
public class HandsManager : Singleton<HandsManager>
{
    [Tooltip("Audio clip to play when Finger Pressed.")]
    public AudioClip FingerPressedSound;
    private AudioSource audioSource;

    /// <summary>
    /// Tracks the hand detected state.
    /// </summary>
    public bool HandDetected { get; private set; }

    // Keeps track of the GameObject that the hand is interacting with.
    public GameObject FocusedGameObject { get; private set; }

    void Awake()
    {
        EnableAudioHapticFeedback();

        InteractionManager.SourceDetected += InteractionManager_SourceDetected;
        InteractionManager.SourceLost += InteractionManager_SourceLost;

        /* TODO: DEVELOPER CODE ALONG 2.a */

        // 2.a: Register for SourceManager.SourcePressed event.
        InteractionManager.SourcePressed += InteractionManager_SourcePressed;

        // 2.a: Register for SourceManager.SourceReleased event.
        InteractionManager.SourceReleased += InteractionManager_SourceReleased;

        // 2.a: Initialize FocusedGameObject as null.
        FocusedGameObject = null;
    }

    private void EnableAudioHapticFeedback()
    {
        // If this hologram has an audio clip, add an AudioSource with this clip.
        if (FingerPressedSound != null)
        {
            audioSource = GetComponent<AudioSource>();
            if (audioSource == null)
            {
                audioSource = gameObject.AddComponent<AudioSource>();
            }

            audioSource.clip = FingerPressedSound;
            audioSource.playOnAwake = false;
            audioSource.spatialBlend = 1;
            audioSource.dopplerLevel = 0;
        }
    }

    private void InteractionManager_SourceDetected(InteractionSourceState hand)
    {
        HandDetected = true;
    }

    private void InteractionManager_SourceLost(InteractionSourceState hand)
    {
        HandDetected = false;

        // 2.a: Reset FocusedGameObject.
        ResetFocusedGameObject();
    }

    private void InteractionManager_SourcePressed(InteractionSourceState hand)
    {
        if (InteractibleManager.Instance.FocusedGameObject != null)
        {
            // Play a select sound if we have an audio source and are not targeting an asset with a select sound.
            if (audioSource != null && !audioSource.isPlaying &&
                (InteractibleManager.Instance.FocusedGameObject.GetComponent<Interactible>() != null &&
                 InteractibleManager.Instance.FocusedGameObject.GetComponent<Interactible>().TargetFeedbackSound == null))
            {
                audioSource.Play();
            }

            // 2.a: Cache InteractibleManager's FocusedGameObject in FocusedGameObject.
            FocusedGameObject = InteractibleManager.Instance.FocusedGameObject;
        }
    }

    private void InteractionManager_SourceReleased(InteractionSourceState hand)
    {
        // 2.a: Reset FocusedGameObject.
        ResetFocusedGameObject();
    }

    private void ResetFocusedGameObject()
    {
        // 2.a: Set FocusedGameObject to be null.
        FocusedGameObject = null;

        // 2.a: On GestureManager call ResetGestureRecognizers
        // to complete any currently active gestures.
        GestureManager.Instance.ResetGestureRecognizers();
    }

    void OnDestroy()
    {
        InteractionManager.SourceDetected -= InteractionManager_SourceDetected;
        InteractionManager.SourceLost -= InteractionManager_SourceLost;

        // 2.a: Unregister the SourceManager.SourceReleased event.
        InteractionManager.SourceReleased -= InteractionManager_SourceReleased;

        // 2.a: Unregister for SourceManager.SourcePressed event.
        InteractionManager.SourcePressed -= InteractionManager_SourcePressed;
    }
}
Next, we need to edit the GestureManager.cs script to complete the following steps:
using HoloToolkit;
using UnityEngine;
using UnityEngine.VR.WSA.Input;

public class GestureManager : Singleton<GestureManager>
{
    // Tap and Navigation gesture recognizer.
    public GestureRecognizer NavigationRecognizer { get; private set; }

    // Manipulation gesture recognizer.
    public GestureRecognizer ManipulationRecognizer { get; private set; }

    // Currently active gesture recognizer.
    public GestureRecognizer ActiveRecognizer { get; private set; }

    public bool IsNavigating { get; private set; }
    public Vector3 NavigationPosition { get; private set; }

    public bool IsManipulating { get; private set; }
    public Vector3 ManipulationPosition { get; private set; }

    void Awake()
    {
        /* TODO: DEVELOPER CODING EXERCISE 2.b */

        // 2.b: Instantiate the NavigationRecognizer.
        NavigationRecognizer = new GestureRecognizer();

        // 2.b: Add Tap and NavigationX GestureSettings to the NavigationRecognizer's RecognizableGestures.
        NavigationRecognizer.SetRecognizableGestures(
            GestureSettings.Tap |
            GestureSettings.NavigationX);

        // 2.b: Register for the TappedEvent with the NavigationRecognizer_TappedEvent function.
        NavigationRecognizer.TappedEvent += NavigationRecognizer_TappedEvent;

        // 2.b: Register for the NavigationStartedEvent with the NavigationRecognizer_NavigationStartedEvent function.
        NavigationRecognizer.NavigationStartedEvent += NavigationRecognizer_NavigationStartedEvent;

        // 2.b: Register for the NavigationUpdatedEvent with the NavigationRecognizer_NavigationUpdatedEvent function.
        NavigationRecognizer.NavigationUpdatedEvent += NavigationRecognizer_NavigationUpdatedEvent;

        // 2.b: Register for the NavigationCompletedEvent with the NavigationRecognizer_NavigationCompletedEvent function.
        NavigationRecognizer.NavigationCompletedEvent += NavigationRecognizer_NavigationCompletedEvent;

        // 2.b: Register for the NavigationCanceledEvent with the NavigationRecognizer_NavigationCanceledEvent function.
        NavigationRecognizer.NavigationCanceledEvent += NavigationRecognizer_NavigationCanceledEvent;

        // Instantiate the ManipulationRecognizer.
        ManipulationRecognizer = new GestureRecognizer();

        // Add the ManipulationTranslate GestureSetting to the ManipulationRecognizer's RecognizableGestures.
        ManipulationRecognizer.SetRecognizableGestures(
            GestureSettings.ManipulationTranslate);

        // Register for the Manipulation events on the ManipulationRecognizer.
        ManipulationRecognizer.ManipulationStartedEvent += ManipulationRecognizer_ManipulationStartedEvent;
        ManipulationRecognizer.ManipulationUpdatedEvent += ManipulationRecognizer_ManipulationUpdatedEvent;
        ManipulationRecognizer.ManipulationCompletedEvent += ManipulationRecognizer_ManipulationCompletedEvent;
        ManipulationRecognizer.ManipulationCanceledEvent += ManipulationRecognizer_ManipulationCanceledEvent;

        ResetGestureRecognizers();
    }

    void OnDestroy()
    {
        // 2.b: Unregister the Tapped and Navigation events on the NavigationRecognizer.
        NavigationRecognizer.TappedEvent -= NavigationRecognizer_TappedEvent;

        NavigationRecognizer.NavigationStartedEvent -= NavigationRecognizer_NavigationStartedEvent;
        NavigationRecognizer.NavigationUpdatedEvent -= NavigationRecognizer_NavigationUpdatedEvent;
        NavigationRecognizer.NavigationCompletedEvent -= NavigationRecognizer_NavigationCompletedEvent;
        NavigationRecognizer.NavigationCanceledEvent -= NavigationRecognizer_NavigationCanceledEvent;

        // Unregister the Manipulation events on the ManipulationRecognizer.
        ManipulationRecognizer.ManipulationStartedEvent -= ManipulationRecognizer_ManipulationStartedEvent;
        ManipulationRecognizer.ManipulationUpdatedEvent -= ManipulationRecognizer_ManipulationUpdatedEvent;
        ManipulationRecognizer.ManipulationCompletedEvent -= ManipulationRecognizer_ManipulationCompletedEvent;
        ManipulationRecognizer.ManipulationCanceledEvent -= ManipulationRecognizer_ManipulationCanceledEvent;
    }

    /// <summary>
    /// Revert back to the default GestureRecognizer.
    /// </summary>
    public void ResetGestureRecognizers()
    {
        // Default to the navigation gestures.
        Transition(NavigationRecognizer);
    }

    /// <summary>
    /// Transition to a new GestureRecognizer.
    /// </summary>
    /// <param name="newRecognizer">The GestureRecognizer to transition to.</param>
    public void Transition(GestureRecognizer newRecognizer)
    {
        if (newRecognizer == null)
        {
            return;
        }

        if (ActiveRecognizer != null)
        {
            if (ActiveRecognizer == newRecognizer)
            {
                return;
            }

            ActiveRecognizer.CancelGestures();
            ActiveRecognizer.StopCapturingGestures();
        }

        newRecognizer.StartCapturingGestures();
        ActiveRecognizer = newRecognizer;
    }

    private void NavigationRecognizer_NavigationStartedEvent(InteractionSourceKind source, Vector3 relativePosition, Ray ray)
    {
        // 2.b: Set IsNavigating to be true.
        IsNavigating = true;

        // 2.b: Set NavigationPosition to be relativePosition.
        NavigationPosition = relativePosition;
    }

    private void NavigationRecognizer_NavigationUpdatedEvent(InteractionSourceKind source, Vector3 relativePosition, Ray ray)
    {
        // 2.b: Set IsNavigating to be true.
        IsNavigating = true;

        // 2.b: Set NavigationPosition to be relativePosition.
        NavigationPosition = relativePosition;
    }

    private void NavigationRecognizer_NavigationCompletedEvent(InteractionSourceKind source, Vector3 relativePosition, Ray ray)
    {
        // 2.b: Set IsNavigating to be false.
        IsNavigating = false;
    }

    private void NavigationRecognizer_NavigationCanceledEvent(InteractionSourceKind source, Vector3 relativePosition, Ray ray)
    {
        // 2.b: Set IsNavigating to be false.
        IsNavigating = false;
    }

    private void ManipulationRecognizer_ManipulationStartedEvent(InteractionSourceKind source, Vector3 position, Ray ray)
    {
        if (HandsManager.Instance.FocusedGameObject != null)
        {
            IsManipulating = true;
            ManipulationPosition = position;

            HandsManager.Instance.FocusedGameObject.SendMessageUpwards("PerformManipulationStart", position);
        }
    }

    private void ManipulationRecognizer_ManipulationUpdatedEvent(InteractionSourceKind source, Vector3 position, Ray ray)
    {
        if (HandsManager.Instance.FocusedGameObject != null)
        {
            IsManipulating = true;
            ManipulationPosition = position;

            HandsManager.Instance.FocusedGameObject.SendMessageUpwards("PerformManipulationUpdate", position);
        }
    }

    private void ManipulationRecognizer_ManipulationCompletedEvent(InteractionSourceKind source, Vector3 position, Ray ray)
    {
        IsManipulating = false;
    }

    private void ManipulationRecognizer_ManipulationCanceledEvent(InteractionSourceKind source, Vector3 position, Ray ray)
    {
        IsManipulating = false;
    }

    private void NavigationRecognizer_TappedEvent(InteractionSourceKind source, int tapCount, Ray ray)
    {
        GameObject focusedObject = InteractibleManager.Instance.FocusedGameObject;

        if (focusedObject != null)
        {
            focusedObject.SendMessageUpwards("OnSelect");
        }
    }
}
Next, open the GestureAction.cs script in Visual Studio and implement the following:
using UnityEngine;

/// <summary>
/// GestureAction performs custom actions based on
/// which gesture is being performed.
/// </summary>
public class GestureAction : MonoBehaviour
{
    [Tooltip("Rotation max speed controls amount of rotation.")]
    public float RotationSensitivity = 10.0f;

    private Vector3 manipulationPreviousPosition;
    private float rotationFactor;

    void Update()
    {
        PerformRotation();
    }

    private void PerformRotation()
    {
        if (GestureManager.Instance.IsNavigating &&
            (!ExpandModel.Instance.IsModelExpanded ||
            (ExpandModel.Instance.IsModelExpanded && HandsManager.Instance.FocusedGameObject == gameObject)))
        {
            /* TODO: DEVELOPER CODING EXERCISE 2.c */

            // 2.c: Calculate rotationFactor based on GestureManager's NavigationPosition.X and multiply by RotationSensitivity.
            // This will help control the amount of rotation.
            rotationFactor = GestureManager.Instance.NavigationPosition.x * RotationSensitivity;

            // 2.c: transform.Rotate along the Y axis using rotationFactor.
            transform.Rotate(new Vector3(0, -1 * rotationFactor, 0));
        }
    }

    void PerformManipulationStart(Vector3 position)
    {
        manipulationPreviousPosition = position;
    }

    void PerformManipulationUpdate(Vector3 position)
    {
        if (GestureManager.Instance.IsManipulating)
        {
            /* TODO: DEVELOPER CODING EXERCISE 4.a */

            Vector3 moveVector = Vector3.zero;

            // 4.a: Calculate the moveVector as position - manipulationPreviousPosition.

            // 4.a: Update the manipulationPreviousPosition with the current position.

            // 4.a: Increment this transform's position by the moveVector.
        }
    }
}
Build and Deploy:
1. Gaze at the astronaut. Two arrows should appear on either side of the cursor. This new cursor indicates that the astronaut can be rotated.
2. Raise your hand into the ready position (index finger pointing toward the sky) and HoloLens will begin tracking it.
3. To rotate the astronaut, lower your index finger onto your thumb, then move your hand left or right to trigger the NavigationX gesture.
Use the hand guidance score to help predict when hand tracking is about to be lost, and provide cursor feedback as the user's hand moves within the device's field of view.
Instructions:
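The original chapter's HandGuidance.cs listing and scene setup steps are not reproduced in this post. As a rough sketch of the underlying API only (assuming the legacy UnityEngine.VR.WSA.Input namespace used throughout this tutorial; the class name and threshold field below are placeholders, and the real script also positions a guidance indicator around the cursor):

using UnityEngine;
using UnityEngine.VR.WSA.Input;

// Placeholder sketch: read the hand guidance score from source updates.
public class HandGuidanceSketch : MonoBehaviour
{
    [Tooltip("React when the loss risk rises above this value (placeholder threshold).")]
    public float HandGuidanceThreshold = 0.5f;

    void Awake()
    {
        InteractionManager.SourceUpdated += OnSourceUpdated;
    }

    void OnDestroy()
    {
        InteractionManager.SourceUpdated -= OnSourceUpdated;
    }

    private void OnSourceUpdated(InteractionSourceState state)
    {
        // sourceLossRisk ranges from 0 (well inside the gesture frame) to 1
        // (about to lose tracking); sourceLossMitigationDirection points the
        // way the user should move their hand to stay tracked.
        if (state.properties.sourceLossRisk > HandGuidanceThreshold)
        {
            Vector3 guidanceDirection = state.properties.sourceLossMitigationDirection;
            Debug.Log("Hand is close to leaving the gesture frame; guide the user toward " + guidanceDirection);
        }
    }
}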
Build and Deploy:
You should see a small hand icon appear when your hand comes into the camera's view, indicating that your hand is being tracked.
Use Manipulation events to move holograms, and give the cursor visual feedback so the user knows when Manipulation is active.
Instructions:
The GestureManager.cs and AstronautManager.cs scripts provide the following functionality:
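The complete AstronautManager.cs listing appears in the model-expansion section below; the part that matters for this chapter is registering a "Move Astronaut" speech keyword and transitioning the GestureManager over to its ManipulationRecognizer. A trimmed-down sketch of just that wiring (the class name is a placeholder):

using UnityEngine;
using UnityEngine.Windows.Speech;

// Placeholder sketch: only the "Move Astronaut" keyword from the full
// AstronautManager.cs shown later in this post.
public class MoveAstronautSketch : MonoBehaviour
{
    private KeywordRecognizer keywordRecognizer;

    void Start()
    {
        keywordRecognizer = new KeywordRecognizer(new[] { "Move Astronaut" });
        keywordRecognizer.OnPhraseRecognized += OnPhraseRecognized;
        keywordRecognizer.Start();
    }

    private void OnPhraseRecognized(PhraseRecognizedEventArgs args)
    {
        // Switch from the default NavigationRecognizer to the
        // ManipulationRecognizer so hand movement translates the hologram
        // instead of rotating it.
        GestureManager.Instance.Transition(GestureManager.Instance.ManipulationRecognizer);
    }

    void OnDestroy()
    {
        if (keywordRecognizer != null)
        {
            keywordRecognizer.OnPhraseRecognized -= OnPhraseRecognized;
            keywordRecognizer.Dispose();
        }
    }
}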
Let's start implementing.
Next, edit the GestureAction.cs script again:
using UnityEngine;

/// <summary>
/// GestureAction performs custom actions based on
/// which gesture is being performed.
/// </summary>
public class GestureAction : MonoBehaviour
{
    [Tooltip("Rotation max speed controls amount of rotation.")]
    public float RotationSensitivity = 10.0f;

    private Vector3 manipulationPreviousPosition;
    private float rotationFactor;

    void Update()
    {
        PerformRotation();
    }

    private void PerformRotation()
    {
        if (GestureManager.Instance.IsNavigating &&
            (!ExpandModel.Instance.IsModelExpanded ||
            (ExpandModel.Instance.IsModelExpanded && HandsManager.Instance.FocusedGameObject == gameObject)))
        {
            /* TODO: DEVELOPER CODING EXERCISE 2.c */

            // 2.c: Calculate rotationFactor based on GestureManager's NavigationPosition.X and multiply by RotationSensitivity.
            // This will help control the amount of rotation.
            rotationFactor = GestureManager.Instance.NavigationPosition.x * RotationSensitivity;

            // 2.c: transform.Rotate along the Y axis using rotationFactor.
            transform.Rotate(new Vector3(0, -1 * rotationFactor, 0));
        }
    }

    void PerformManipulationStart(Vector3 position)
    {
        manipulationPreviousPosition = position;
    }

    void PerformManipulationUpdate(Vector3 position)
    {
        if (GestureManager.Instance.IsManipulating)
        {
            /* TODO: DEVELOPER CODING EXERCISE 4.a */

            Vector3 moveVector = Vector3.zero;

            // 4.a: Calculate the moveVector as position - manipulationPreviousPosition.
            moveVector = position - manipulationPreviousPosition;

            // 4.a: Update the manipulationPreviousPosition with the current position.
            manipulationPreviousPosition = position;

            // 4.a: Increment this transform's position by the moveVector.
            transform.position += moveVector;
        }
    }
}
Build and Deploy:
1. After deployment, move your hand in front of the device with your index finger pointing toward the sky so that your hand is tracked.
2. Move the gaze cursor onto the astronaut.
3. Say "Move Astronaut" to move the astronaut with the Manipulation gesture.
4. Four arrows should appear around the cursor, indicating that the application will now respond to Manipulation events.
5. Lower your index finger onto your thumb and keep them pinched together.
6. As you move your hand, the astronaut moves with it (this is Manipulation).
7. Raise your index finger to stop manipulating the astronaut.
8. Note: if you do not say "Move Astronaut" before moving your hand, the Navigation gesture is used instead.
Use an explosion animation to break the astronaut apart into many small pieces; each piece can then be moved and rotated on its own.
The expand (explode) action is triggered with the voice command "Expand Model".
The original model is restored with the voice command "Reset Model".
Edit the AstronautManager.cs script:
using HoloToolkit;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Windows.Speech;

public class AstronautManager : Singleton<AstronautManager>
{
    float expandAnimationCompletionTime;

    // Store a bool for whether our astronaut model is expanded or not.
    bool isModelExpanding = false;

    // KeywordRecognizer object.
    KeywordRecognizer keywordRecognizer;

    // Defines which function to call when a keyword is recognized.
    delegate void KeywordAction(PhraseRecognizedEventArgs args);
    Dictionary<string, KeywordAction> keywordCollection;

    void Start()
    {
        keywordCollection = new Dictionary<string, KeywordAction>();

        // Add keyword to start manipulation.
        keywordCollection.Add("Move Astronaut", MoveAstronautCommand);

        // Add keyword Expand Model to call the ExpandModelCommand function.
        keywordCollection.Add("Expand Model", ExpandModelCommand);

        // Add keyword Reset Model to call the ResetModelCommand function.
        keywordCollection.Add("Reset Model", ResetModelCommand);

        // Initialize KeywordRecognizer with the previously added keywords.
        keywordRecognizer = new KeywordRecognizer(keywordCollection.Keys.ToArray());
        keywordRecognizer.OnPhraseRecognized += KeywordRecognizer_OnPhraseRecognized;
        keywordRecognizer.Start();
    }

    private void KeywordRecognizer_OnPhraseRecognized(PhraseRecognizedEventArgs args)
    {
        KeywordAction keywordAction;

        if (keywordCollection.TryGetValue(args.text, out keywordAction))
        {
            keywordAction.Invoke(args);
        }
    }

    private void MoveAstronautCommand(PhraseRecognizedEventArgs args)
    {
        GestureManager.Instance.Transition(GestureManager.Instance.ManipulationRecognizer);
    }

    private void ResetModelCommand(PhraseRecognizedEventArgs args)
    {
        // Reset local variables.
        isModelExpanding = false;

        // Disable the expanded model.
        ExpandModel.Instance.ExpandedModel.SetActive(false);

        // Enable the idle model.
        ExpandModel.Instance.gameObject.SetActive(true);

        // Enable the animators for the next time the model is expanded.
        Animator[] expandedAnimators = ExpandModel.Instance.ExpandedModel.GetComponentsInChildren<Animator>();
        foreach (Animator animator in expandedAnimators)
        {
            animator.enabled = true;
        }

        ExpandModel.Instance.Reset();
    }

    private void ExpandModelCommand(PhraseRecognizedEventArgs args)
    {
        // Swap out the current model for the expanded model.
        GameObject currentModel = ExpandModel.Instance.gameObject;

        ExpandModel.Instance.ExpandedModel.transform.position = currentModel.transform.position;
        ExpandModel.Instance.ExpandedModel.transform.rotation = currentModel.transform.rotation;
        ExpandModel.Instance.ExpandedModel.transform.localScale = currentModel.transform.localScale;

        currentModel.SetActive(false);
        ExpandModel.Instance.ExpandedModel.SetActive(true);

        // Play animation. Ensure the Loop Time check box is disabled in the inspector for this animation to play it once.
        Animator[] expandedAnimators = ExpandModel.Instance.ExpandedModel.GetComponentsInChildren<Animator>();

        // Set local variables for disabling the animation.
        if (expandedAnimators.Length > 0)
        {
            expandAnimationCompletionTime = Time.realtimeSinceStartup +
                expandedAnimators[0].runtimeAnimatorController.animationClips[0].length * 0.9f;
        }

        // Set the expand model flag.
        isModelExpanding = true;

        ExpandModel.Instance.Expand();
    }

    public void Update()
    {
        if (isModelExpanding && Time.realtimeSinceStartup >= expandAnimationCompletionTime)
        {
            isModelExpanding = false;

            Animator[] expandedAnimators = ExpandModel.Instance.ExpandedModel.GetComponentsInChildren<Animator>();
            foreach (Animator animator in expandedAnimators)
            {
                animator.enabled = false;
            }
        }
    }
}
Build and deploy to the device:
Original tutorial: https://developer.microsoft.com/EN-US/WINDOWS/HOLOGRAPHIC/holograms_211
If you spot any mistakes in the translation, please point them out. Thanks!