SteamVR: Unity Move/Rotate With Motion Controller Sticks

The [SteamVR] API exposes the motion controller touchpad input.

Attach `StickManipulation.cs` to the SteamVR `Player` object.

using UnityEngine;
using Valve.VR;
using Valve.VR.InteractionSystem;

public class StickManipulation : MonoBehaviour
{
    private float _mMoveSpeed = 2.5f;
    private float _mHorizontalTurnSpeed = 180f;
    private float _mVerticalTurnSpeed = 2.5f;
    private bool _mInverted = false;
    private const float VERTICAL_LIMIT = 60f;

    private void OnGUI()
    {
        Player player = Player.instance;
        if (!player)
        {
            return;
        }

        EVRButtonId touchPad = EVRButtonId.k_EButton_SteamVR_Touchpad;

        if (null != player.leftController)
        {
            var touchPadVector = player.leftController.GetAxis(touchPad);
            GUILayout.Label(string.Format("Left X: {0:F2}, {1:F2}", touchPadVector.x, touchPadVector.y));
        }

        if (null != player.rightController)
        {
            var touchPadVector = player.rightController.GetAxis(touchPad);
            GUILayout.Label(string.Format("Right X: {0:F2}, {1:F2}", touchPadVector.x, touchPadVector.y));
        }
    }

    float GetAngle(float input)
    {
        if (input < 0f)
        {
            return -Mathf.LerpAngle(0, VERTICAL_LIMIT, -input);
        }
        else if (input > 0f)
        {
            return Mathf.LerpAngle(0, VERTICAL_LIMIT, input);
        }
        return 0f;
    }

    // Update is called once per frame
    void Update()
    {
        Player player = Player.instance;
        if (!player)
        {
            return;
        }

        EVRButtonId touchPad = EVRButtonId.k_EButton_SteamVR_Touchpad;

        if (null != player.leftController)
        {
            Quaternion orientation = Camera.main.transform.rotation;
            var touchPadVector = player.leftController.GetAxis(touchPad);
            Vector3 moveDirection = orientation * Vector3.forward * touchPadVector.y + orientation * Vector3.right * touchPadVector.x;
            Vector3 pos = player.transform.position;
            pos.x += moveDirection.x * _mMoveSpeed * Time.deltaTime;
            pos.z += moveDirection.z * _mMoveSpeed * Time.deltaTime;
            player.transform.position = pos;
        }

        if (null != player.rightController)
        {
            var touchPadVector = player.rightController.GetAxis(touchPad);

            Vector3 euler = player.transform.rotation.eulerAngles;
            float angle;
            if (_mInverted)
            {
                angle = GetAngle(touchPadVector.y);
            }
            else
            {
                angle = GetAngle(-touchPadVector.y);
            }
            euler.x = Mathf.LerpAngle(euler.x, angle, _mVerticalTurnSpeed * Time.deltaTime);
            euler.y += touchPadVector.x * _mHorizontalTurnSpeed * Time.deltaTime;
            player.transform.rotation = Quaternion.Euler(euler);
        }
    }
}

Unity: Speech Detection and Synthesis Together

To be able to call out “fire” and “stop”, I made some edits to the `F3DPlayerTurretController.cs` script.

using UnityEngine;
using System.Collections;
using UnityWebGLSpeechDetection;

namespace Forge3D
{
    public class F3DPlayerTurretController : MonoBehaviour
    {
        RaycastHit hitInfo; // Raycast structure
        public F3DTurret turret;
        bool isFiring; // Is turret currently in firing state
        public F3DFXController fxController;

        // reference to the proxy
        private ISpeechDetectionPlugin _mSpeechDetectionPlugin = null;

        enum FireState
        {
            IDLE,
            DETECTED_FIRE,
            FIRE_ONCE,
            FIRE_IDLE,
            DETECTED_STOP,
            STOP_ONCE
        }

        // detect the word once in all updates
        private static FireState _sFireState = FireState.IDLE;

        // make sure all turrets detect the async word in their update event
        private static bool _sReadyForLateUpdate = false;

        // init the speech proxy
        private IEnumerator Start()
        {
            // get the singleton instance
            _mSpeechDetectionPlugin = ProxySpeechDetectionPlugin.GetInstance();

            // check the reference to the plugin
            if (null == _mSpeechDetectionPlugin)
            {
                Debug.LogError("Proxy Speech Detection Plugin is not set!");
                yield break;
            }

            // wait for plugin to become available
            while (!_mSpeechDetectionPlugin.IsAvailable())
            {
                yield return null;
            }

            // subscribe to events
            _mSpeechDetectionPlugin.AddListenerOnDetectionResult(HandleDetectionResult);

            // abort and clear existing words
            _mSpeechDetectionPlugin.Abort();
        }

        // Handler for speech detection events
        void HandleDetectionResult(object sender, SpeechDetectionEventArgs args)
        {
            if (null == args.detectionResult)
            {
                return;
            }
            SpeechRecognitionResult[] results = args.detectionResult.results;
            if (null == results)
            {
                return;
            }
            bool doAbort = false;
            foreach (SpeechRecognitionResult result in results)
            {
                SpeechRecognitionAlternative[] alternatives = result.alternatives;
                if (null == alternatives)
                {
                    continue;
                }
                foreach (SpeechRecognitionAlternative alternative in alternatives)
                {
                    if (string.IsNullOrEmpty(alternative.transcript))
                    {
                        continue;
                    }
                    string lower = alternative.transcript.ToLower();
                    Debug.LogFormat("Detected: {0}", lower);
                    if (lower.Contains("fire"))
                    {
                        if (_sFireState == FireState.IDLE)
                        {
                            _sFireState = FireState.DETECTED_FIRE;
                        }
                        doAbort = true;
                    }

                    if (lower.Contains("stop"))
                    {
                        if (_sFireState == FireState.FIRE_IDLE)
                        {
                            _sFireState = FireState.DETECTED_STOP;
                        }
                        doAbort = true;
                    }
                }
            }

            // abort detection on match for faster matching on words instead of complete sentences
            if (doAbort)
            {
                _mSpeechDetectionPlugin.Abort();
            }
        }

        // make the async detected word, detectable at the start of all the update events
        void LateUpdate()
        {
            if (_sReadyForLateUpdate)
            {
                _sReadyForLateUpdate = false;
                switch (_sFireState)
                {
                    case FireState.DETECTED_FIRE:
                        _sFireState = FireState.FIRE_ONCE;
                        break;
                    case FireState.FIRE_ONCE:
                        _sFireState = FireState.FIRE_IDLE;
                        break;
                    case FireState.DETECTED_STOP:
                        _sFireState = FireState.STOP_ONCE;
                        break;
                    case FireState.STOP_ONCE:
                        _sFireState = FireState.IDLE;
                        break;
                }
            }
        }

        void Update()
        {
            CheckForTurn();
            CheckForFire();

            // After update, use one late update to detect the async word
            _sReadyForLateUpdate = true;
        }

        void CheckForFire()
        {
            // Fire turret
            //if (!isFiring && Input.GetKeyDown(KeyCode.Mouse0))
            if (!isFiring && _sFireState == FireState.FIRE_ONCE)
            {
                isFiring = true;
                fxController.Fire();
            }

            // Stop firing
            //if (isFiring && Input.GetKeyUp(KeyCode.Mouse0))
            if (isFiring && _sFireState == FireState.STOP_ONCE)
            {
                isFiring = false;
                fxController.Stop();
            }
        }

        // ... the rest of the original script (CheckForTurn, etc.) is omitted here
    }
}

To be able to call out the names of weapons and to add speech synthesis, I made some edits to the `F3DFXController` script.

using System.Collections;
using System;
using UnityEngine;
using UnityEngine.UI;
using UnityWebGLSpeechDetection;
using UnityWebGLSpeechSynthesis;

namespace Forge3D
{
// Weapon types
    public enum F3DFXType
    {
        Vulcan,
        SoloGun,
        Sniper,
        ShotGun,
        Seeker,
        RailGun,
        PlasmaGun,
        PlasmaBeam,
        PlasmaBeamHeavy,
        LightningGun,
        FlameRed,
        LaserImpulse
    }

    public class F3DFXController : MonoBehaviour
    {
        /// <summary>
        /// Voices drop down
        /// </summary>
        public Dropdown _mDropdownVoices = null;

        /// <summary>
        /// Reference to the proxy
        /// </summary>
        private ISpeechDetectionPlugin _mSpeechDetectionPlugin = null;

        /// <summary>
        /// Reference to the proxy
        /// </summary>
        private ISpeechSynthesisPlugin _mSpeechSynthesisPlugin = null;

        /// <summary>
        /// Reference to the supported voices
        /// </summary>
        private VoiceResult _mVoiceResult = null;

        /// <summary>
        /// Reference to the utterance, voice, and text to speak
        /// </summary>
        private SpeechSynthesisUtterance _mSpeechSynthesisUtterance = null;

        /// <summary>
        /// Track when the utterance is created
        /// </summary>
        private bool _mUtteranceSet = false;

        /// <summary>
        /// Track when the voices are created
        /// </summary>
        private bool _mVoicesSet = false;

        enum WeaponState
        {
            IDLE,
            DETECTED_LEFT,
            LEFT_ONCE,
            DETECTED_RIGHT,
            RIGHT_ONCE
        }

        // detect the word once in all updates
        private static WeaponState _sWeaponState = WeaponState.IDLE;

        // make sure all turrets detect the async word in their update event
        private static bool _sReadyForLateUpdate = false;

        // Singleton instance
        public static F3DFXController instance;

        // init the speech proxy
        private IEnumerator Start()
        {
            // get the singleton instance
            _mSpeechDetectionPlugin = ProxySpeechDetectionPlugin.GetInstance();

            // check the reference to the plugin
            if (null == _mSpeechDetectionPlugin)
            {
                Debug.LogError("Proxy Speech Detection Plugin is not set!");
                yield break;
            }

            // wait for plugin to become available
            while (!_mSpeechDetectionPlugin.IsAvailable())
            {
                yield return null;
            }

            _mSpeechSynthesisPlugin = ProxySpeechSynthesisPlugin.GetInstance();
            if (null == _mSpeechSynthesisPlugin)
            {
                Debug.LogError("Proxy Speech Synthesis Plugin is not set!");
                yield break;
            }

            // wait for proxy to become available
            while (!_mSpeechSynthesisPlugin.IsAvailable())
            {
                yield return null;
            }

            // subscribe to events
            _mSpeechDetectionPlugin.AddListenerOnDetectionResult(HandleDetectionResult);

            // abort and clear existing words
            _mSpeechDetectionPlugin.Abort();

            // Get voices from proxy
            GetVoices();

            // Create an instance of SpeechSynthesisUtterance
            _mSpeechSynthesisPlugin.CreateSpeechSynthesisUtterance((utterance) =>
            {
                //Debug.LogFormat("Utterance created: {0}", utterance._mReference);
                _mSpeechSynthesisUtterance = utterance;

                // The utterance is set
                _mUtteranceSet = true;

                // Set the default voice if ready
                SetIfReadyForDefaultVoice();
            });
        }

        /// <summary>
        /// Get voices from the proxy
        /// </summary>
        /// <returns></returns>
        private void GetVoices()
        {
            // get voices from the proxy
            _mSpeechSynthesisPlugin.GetVoices((voiceResult) =>
            {
                _mVoiceResult = voiceResult;

                // prepare the voices drop down items
                SpeechSynthesisUtils.PopulateVoicesDropdown(_mDropdownVoices, _mVoiceResult);

                // The voices are set
                _mVoicesSet = true;

                // Set the default voice if ready
                SetIfReadyForDefaultVoice();
            });
        }

        /// <summary>
        /// Set the default voice if voices and utterance are ready
        /// </summary>
        private void SetIfReadyForDefaultVoice()
        {
            if (_mVoicesSet &&
                _mUtteranceSet)
            {
                // set the default voice
                SpeechSynthesisUtils.SetDefaultVoice(_mDropdownVoices);

                // enable voices dropdown
                SpeechSynthesisUtils.SetInteractable(true, _mDropdownVoices);

                Voice voice = SpeechSynthesisUtils.GetVoice(_mVoiceResult, SpeechSynthesisUtils.GetDefaultVoice());

                _mSpeechSynthesisPlugin.SetVoice(_mSpeechSynthesisUtterance, voice);

                // drop down reference must be set
                if (_mDropdownVoices)
                {
                    // set up the drop down change listener
                    _mDropdownVoices.onValueChanged.AddListener(delegate {
                        // handle the voice change event, and set the voice on the utterance
                        SpeechSynthesisUtils.HandleVoiceChanged(_mDropdownVoices,
                            _mVoiceResult,
                            _mSpeechSynthesisUtterance,
                            _mSpeechSynthesisPlugin);
                    });
                }
            }
        }

        /// <summary>
        /// Speak the utterance
        /// </summary>
        private void Speak(string text)
        {
            if (!_mVoicesSet ||
                !_mUtteranceSet)
            {
                // not ready
                return;
            }

            // Cancel if already speaking
            _mSpeechSynthesisPlugin.Cancel();

            // Set the text that will be spoken
            _mSpeechSynthesisPlugin.SetText(_mSpeechSynthesisUtterance, text);

            // Use the plugin to speak the utterance
            _mSpeechSynthesisPlugin.Speak(_mSpeechSynthesisUtterance);
        }

        // Handler for speech detection events
        void HandleDetectionResult(object sender, SpeechDetectionEventArgs args)
        {
            if (null == args.detectionResult)
            {
                return;
            }
            SpeechRecognitionResult[] results = args.detectionResult.results;
            if (null == results)
            {
                return;
            }
            bool doAbort = false;
            foreach (SpeechRecognitionResult result in results)
            {
                SpeechRecognitionAlternative[] alternatives = result.alternatives;
                if (null == alternatives)
                {
                    continue;
                }
                foreach (SpeechRecognitionAlternative alternative in alternatives)
                {
                    if (string.IsNullOrEmpty(alternative.transcript))
                    {
                        continue;
                    }
                    string lower = alternative.transcript.ToLower();
                    Debug.LogFormat("Detected: {0}", lower);
                    if (lower.Contains("left"))
                    {
                        if (_sWeaponState == WeaponState.IDLE)
                        {
                            _sWeaponState = WeaponState.DETECTED_LEFT;
                        }
                        doAbort = true;
                        break;
                    }

                    else if (lower.Contains("right"))
                    {
                        if (_sWeaponState == WeaponState.IDLE)
                        {
                            _sWeaponState = WeaponState.DETECTED_RIGHT;
                        }
                        doAbort = true;
                        break;
                    }

                    else if (lower.Contains("lightning"))
                    {
                        if (DefaultFXType != F3DFXType.LightningGun)
                        {
                            DefaultFXType = F3DFXType.LightningGun;
                            Speak(string.Format("{0} is active, sir", DefaultFXType));
                        }
                        doAbort = true;
                        break;
                    }

                    else if (lower.Contains("beam"))
                    {
                        if (DefaultFXType != F3DFXType.PlasmaBeam)
                        {
                            DefaultFXType = F3DFXType.PlasmaBeam;
                            Speak(string.Format("{0} is active, sir", DefaultFXType));
                        }
                        doAbort = true;
                        break;
                    }
                }
            }

            // abort detection on match for faster matching on words instead of complete sentences
            if (doAbort)
            {
                _mSpeechDetectionPlugin.Abort();
            }
        }

        // make the async detected word, detectable at the start of all the update events
        void LateUpdate()
        {
            if (_sReadyForLateUpdate)
            {
                _sReadyForLateUpdate = false;
                switch (_sWeaponState)
                {
                    case WeaponState.DETECTED_LEFT:
                        _sWeaponState = WeaponState.LEFT_ONCE;
                        break;
                    case WeaponState.LEFT_ONCE:
                        _sWeaponState = WeaponState.IDLE;
                        break;
                    case WeaponState.DETECTED_RIGHT:
                        _sWeaponState = WeaponState.RIGHT_ONCE;
                        break;
                    case WeaponState.RIGHT_ONCE:
                        _sWeaponState = WeaponState.IDLE;
                        break;
                }
            }
        }

        void Update()
        {
            // Switch weapon types using keyboard keys
            //if (Input.GetKeyDown(KeyCode.RightArrow))
            if (_sWeaponState == WeaponState.LEFT_ONCE)
                NextWeapon();
            //else if (Input.GetKeyDown(KeyCode.LeftArrow))
            if (_sWeaponState == WeaponState.RIGHT_ONCE)
                PrevWeapon();

            // After update, use one late update to detect the async word
            _sReadyForLateUpdate = true;
        }

        // ... the rest of the original script (NextWeapon, PrevWeapon, Fire, Stop, etc.) is omitted here
    }
}

Unity: OSVR Rendering Plugin For Unity

[OSVR-Unity-Rendering]

It is similar to:

[Unity Sample Plugin Source]

Dependencies:

[OSVR SDK for Windows]

* (32-bit) Includes: Add `C:\Program Files\OSVR\SDK\x86\include`

* (64-bit) Includes: Add `C:\Program Files\OSVR\SDK\x64\include`

* (32-bit) Libraries: Add `C:\Program Files\OSVR\SDK\x86\lib`

* (64-bit) Libraries: Add `C:\Program Files\OSVR\SDK\x64\lib`

* Add Library: osvrRenderManager.lib

[Boost C++ Libraries] [binaries]

* (32-bit) Includes: Add `C:\local\boost_1_63_0_x86`

* (64-bit) Includes: Add `C:\local\boost_1_63_0_x64`

* (32-bit) Libraries: Add `C:\local\boost_1_63_0_x86\libs`

* (64-bit) Libraries: Add `C:\local\boost_1_63_0_x64\libs`

[Glew]

* Includes: Add `C:\local\glew-2.0.0\include`

* (32-bit) Libraries: Add `C:\local\glew-2.0.0\lib\Release\Win32`

* (64-bit) Libraries: Add `C:\local\glew-2.0.0\lib\Release\x64`
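
With those dependencies in place, the native rendering plugin is driven from the managed side through Unity's low-level native plugin interface: a script imports the plugin's render-event callback and issues it with `GL.IssuePluginEvent` after each frame finishes rendering. The sketch below is a minimal example of that pattern; the DLL name `osvrUnityRenderingPlugin`, the `GetRenderEventFunc` export, and the `OsvrRenderEventPump` class are assumptions modeled on the Unity sample plugin, so match them to the actual build.

using System;
using System.Collections;
using System.Runtime.InteropServices;
using UnityEngine;

public class OsvrRenderEventPump : MonoBehaviour
{
    // Assumed export from the native rendering plugin; adjust the DLL name
    // and entry point to match whatever the built plugin actually exports.
    [DllImport("osvrUnityRenderingPlugin")]
    private static extern IntPtr GetRenderEventFunc();

    private IEnumerator Start()
    {
        while (true)
        {
            // Wait until the frame has finished rendering, then hand
            // control to the native plugin on the render thread.
            yield return new WaitForEndOfFrame();
            GL.IssuePluginEvent(GetRenderEventFunc(), 1);
        }
    }
}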

Unity: SteamVR Center View

The Unity [SteamVR Plugin] has an API to center the VR view. It’s great when VR apps have a key mapped to do this.

using UnityEngine;
using Valve.VR;

public class SteamVRRecenter : MonoBehaviour
{
    // Keep the script around
    private void Start()
    {
        DontDestroyOnLoad(gameObject);
    }

    // Update is called once per frame
    void Update()
    {
        if (Input.GetKeyUp(KeyCode.L))
        {
            var system = OpenVR.System;
            if (system != null)
            {
                system.ResetSeatedZeroPose();
            }
        }
    }
}

Unity: Drive Turrets With Speech

The [Sci-Fi Effects] asset comes with some great-looking turrets and effects.

I used the [WebGL Speech Detection] package to add speech commands.

And to make speech work in the Unity editor, I added the [Chrome Speech Proxy].

To make Speech Detection work in the Turret example, I made some edits to the `F3DPlayerTurretController.cs` script.

        // reference to the proxy
        private ProxySpeechDetectionPlugin _mProxySpeechDetectionPlugin = null;

        enum FireState
        {
            IDLE,
            DETECTED_FIRE,
            FIRE_ONCE,
            FIRE_IDLE,
            DETECTED_STOP,
            STOP_ONCE
        }

        // detect the word once in all updates
        private static FireState _sFireState = FireState.IDLE;

        // make sure all turrets detect the async word in their update event
        private static bool _sReadyForLateUpdate = false;

        // init the speech proxy
        private IEnumerator Start()
        {
            while (null == WebGLSpeechDetectionPlugin.GetInstance() ||
                null == ProxySpeechDetectionPlugin.GetInstance() ||
                !ProxySpeechDetectionPlugin.GetInstance().IsAvailable())
            {
                yield return null;
            }

            // reference to the plugin
            WebGLSpeechDetectionPlugin plugin = WebGLSpeechDetectionPlugin.GetInstance();

            // subscribe to events
            plugin.OnDetectionResult += HandleDetectionResult;

            // reference to the proxy
            _mProxySpeechDetectionPlugin = ProxySpeechDetectionPlugin.GetInstance();

            // abort and clear existing words
            _mProxySpeechDetectionPlugin.Abort();
        }

        // Handler for speech detection events
        void HandleDetectionResult(object sender, WebGLSpeechDetectionPlugin.SpeechDetectionEventArgs args)
        {
            if (null == args.detectionResult)
            {
                return;
            }
            WebGLSpeechDetectionPlugin.SpeechRecognitionResult[] results = args.detectionResult.results;
            if (null == results)
            {
                return;
            }
            bool doAbort = false;
            foreach (WebGLSpeechDetectionPlugin.SpeechRecognitionResult result in results)
            {
                WebGLSpeechDetectionPlugin.SpeechRecognitionAlternative[] alternatives = result.alternatives;
                if (null == alternatives)
                {
                    continue;
                }
                foreach (WebGLSpeechDetectionPlugin.SpeechRecognitionAlternative alternative in alternatives)
                {
                    if (string.IsNullOrEmpty(alternative.transcript))
                    {
                        continue;
                    }
                    string lower = alternative.transcript.ToLower();
                    Debug.LogFormat("Detected: {0}", lower);
                    if (lower.Contains("fire"))
                    {
                        if (_sFireState == FireState.IDLE)
                        {
                            _sFireState = FireState.DETECTED_FIRE;
                        }
                        doAbort = true;
                    }

                    if (lower.Contains("stop"))
                    {
                        if (_sFireState == FireState.FIRE_IDLE)
                        {
                            _sFireState = FireState.DETECTED_STOP;
                        }
                        doAbort = true;
                    }
                }
            }

            // abort detection on match for faster matching on words instead of complete sentences
            if (doAbort)
            {
                _mProxySpeechDetectionPlugin.Abort();
            }
        }

        // make the async detected word, detectable at the start of all the update events
        void LateUpdate()
        {
            if (_sReadyForLateUpdate)
            {
                _sReadyForLateUpdate = false;
                switch (_sFireState)
                {
                    case FireState.DETECTED_FIRE:
                        _sFireState = FireState.FIRE_ONCE;
                        break;
                    case FireState.FIRE_ONCE:
                        _sFireState = FireState.FIRE_IDLE;
                        break;
                    case FireState.DETECTED_STOP:
                        _sFireState = FireState.STOP_ONCE;
                        break;
                    case FireState.STOP_ONCE:
                        _sFireState = FireState.IDLE;
                        break;
                }
            }
        }

        void Update()
        {
            CheckForTurn();
            CheckForFire();

            // After update, use one late update to detect the async word
            _sReadyForLateUpdate = true;
        }

        void CheckForFire()
        {
            // Fire turret
            //if (!isFiring && Input.GetKeyDown(KeyCode.Mouse0))
            if (!isFiring && _sFireState == FireState.FIRE_ONCE)
            {
                isFiring = true;
                fxController.Fire();
            }

            // Stop firing
            //if (isFiring && Input.GetKeyUp(KeyCode.Mouse0))
            if (isFiring && _sFireState == FireState.STOP_ONCE)
            {
                isFiring = false;
                fxController.Stop();
            }
        }

To be able to call out the names of weapons, I made some edits to the `F3DFXController` script.

        // reference to the proxy
        private ProxySpeechDetectionPlugin _mProxySpeechDetectionPlugin = null;

        enum WeaponState
        {
            IDLE,
            DETECTED_LEFT,
            LEFT_ONCE,
            DETECTED_RIGHT,
            RIGHT_ONCE
        }

        // detect the word once in all updates
        private static WeaponState _sWeaponState = WeaponState.IDLE;

        // make sure all turrets detect the async word in their update event
        private static bool _sReadyForLateUpdate = false;

        // Singleton instance
        public static F3DFXController instance;

        // init the speech proxy
        private IEnumerator Start()
        {
            while (null == WebGLSpeechDetectionPlugin.GetInstance() ||
                null == ProxySpeechDetectionPlugin.GetInstance() ||
                !ProxySpeechDetectionPlugin.GetInstance().IsAvailable())
            {
                yield return null;
            }

            // reference to the plugin
            WebGLSpeechDetectionPlugin plugin = WebGLSpeechDetectionPlugin.GetInstance();

            // subscribe to events
            plugin.OnDetectionResult += HandleDetectionResult;

            // reference to the proxy
            _mProxySpeechDetectionPlugin = ProxySpeechDetectionPlugin.GetInstance();

            // abort and clear existing words
            _mProxySpeechDetectionPlugin.Abort();
        }

        // Handler for speech detection events
        void HandleDetectionResult(object sender, WebGLSpeechDetectionPlugin.SpeechDetectionEventArgs args)
        {
            if (null == args.detectionResult)
            {
                return;
            }
            WebGLSpeechDetectionPlugin.SpeechRecognitionResult[] results = args.detectionResult.results;
            if (null == results)
            {
                return;
            }
            bool doAbort = false;
            foreach (WebGLSpeechDetectionPlugin.SpeechRecognitionResult result in results)
            {
                WebGLSpeechDetectionPlugin.SpeechRecognitionAlternative[] alternatives = result.alternatives;
                if (null == alternatives)
                {
                    continue;
                }
                foreach (WebGLSpeechDetectionPlugin.SpeechRecognitionAlternative alternative in alternatives)
                {
                    if (string.IsNullOrEmpty(alternative.transcript))
                    {
                        continue;
                    }
                    string lower = alternative.transcript.ToLower();
                    Debug.LogFormat("Detected: {0}", lower);
                    if (lower.Contains("left"))
                    {
                        if (_sWeaponState == WeaponState.IDLE)
                        {
                            _sWeaponState = WeaponState.DETECTED_LEFT;
                        }
                        doAbort = true;
                    }

                    else if (lower.Contains("right"))
                    {
                        if (_sWeaponState == WeaponState.IDLE)
                        {
                            _sWeaponState = WeaponState.DETECTED_RIGHT;
                        }
                        doAbort = true;
                    }

                    else if (lower.Contains("lightning"))
                    {
                        DefaultFXType = F3DFXType.LightningGun;
                        doAbort = true;
                    }

                    else if (lower.Contains("beam"))
                    {
                        DefaultFXType = F3DFXType.PlasmaBeam;
                        doAbort = true;
                    }
                }
            }

            // abort detection on match for faster matching on words instead of complete sentences
            if (doAbort)
            {
                _mProxySpeechDetectionPlugin.Abort();
            }
        }

        // make the async detected word, detectable at the start of all the update events
        void LateUpdate()
        {
            if (_sReadyForLateUpdate)
            {
                _sReadyForLateUpdate = false;
                switch (_sWeaponState)
                {
                    case WeaponState.DETECTED_LEFT:
                        _sWeaponState = WeaponState.LEFT_ONCE;
                        break;
                    case WeaponState.LEFT_ONCE:
                        _sWeaponState = WeaponState.IDLE;
                        break;
                    case WeaponState.DETECTED_RIGHT:
                        _sWeaponState = WeaponState.RIGHT_ONCE;
                        break;
                    case WeaponState.RIGHT_ONCE:
                        _sWeaponState = WeaponState.IDLE;
                        break;
                }
            }
        }

        void Update()
        {
            // Switch weapon types using keyboard keys
            //if (Input.GetKeyDown(KeyCode.RightArrow))
            if (_sWeaponState == WeaponState.LEFT_ONCE)
                NextWeapon();
            //else if (Input.GetKeyDown(KeyCode.LeftArrow))
            if (_sWeaponState == WeaponState.RIGHT_ONCE)
                PrevWeapon();

            // After update, use one late update to detect the async word
            _sReadyForLateUpdate = true;
        }

Unity: CEF Proxy

I used a proxy with a custom build of [CEF] to send speech data to the [Word Detection Unity Plugin].

Copy a recent TestApp build from `Branch 2704` (Windows 64-bit) from [Chromium Embedded Framework 3 Builds] into the `CefGlue.Demo.WinFormsProxy` output folder to get all the dependent libraries.

Or download a build of the [SpeechProxy].

Quick Start Guide:

1. Launch the proxy with the command-line argument that enables speech detection:

CefGlue.Demo.WinFormsProxy.exe --enable-speech-input

2. Use the help menu to [acquire keys] for the Speech API

3. Enable the `Speech API Private API`

4. The Speech API has a `queries per day` quota.

5. Assign the Speech API keys

6. Open the [Chrome Speech Demo] to verify that the Speech API is working

7. The [Google Developers Console] shows traffic hitting the Speech API.

8. The Unity Proxy Speech Detection Plugin communicates with the Speech Proxy (email support@theylovegames.com to get access to the beta)

9. Use a proxy port that matches the port in Unity (see the connectivity sketch after this list)

10. When connecting from Unity, point the proxy URL at the HTTPS proxy script https://theylovegames.com/speech_proxy/
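
As a quick way to confirm that the port and URL configured in Unity match what the Speech Proxy is listening on, a simple request from a test script will show whether the proxy is reachable. This is only a sketch, not part of the plugin API; the `SpeechProxyPing` class and the `proxyUrl` value (host and port) below are assumptions and should be replaced with whatever the proxy is actually configured to use.

using System.Collections;
using UnityEngine;
using UnityEngine.Networking;

public class SpeechProxyPing : MonoBehaviour
{
    // Hypothetical default; set this to the host and port the proxy listens on.
    public string proxyUrl = "http://localhost:8080/";

    private IEnumerator Start()
    {
        using (UnityWebRequest request = UnityWebRequest.Get(proxyUrl))
        {
            // Send the request and wait for a response or an error.
            yield return request.SendWebRequest();
            if (request.isNetworkError || request.isHttpError)
            {
                Debug.LogWarningFormat("Speech Proxy not reachable at {0}: {1}", proxyUrl, request.error);
            }
            else
            {
                Debug.LogFormat("Speech Proxy responded from {0}", proxyUrl);
            }
        }
    }
}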