本文整理汇总了C#中UnityEngine.WebCamTexture.Stop方法的典型用法代码示例。如果您正苦于以下问题:C# WebCamTexture.Stop方法的具体用法?C# WebCamTexture.Stop怎么用?C# WebCamTexture.Stop使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类UnityEngine.WebCamTexture
的用法示例。
在下文中一共展示了WebCamTexture.Stop方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Start
/// <summary>
/// Opens the "Remote Front Camera" device and binds its feed to the display.
/// </summary>
void Start()
{
    CameraTexture = new WebCamTexture("Remote Front Camera");

    // The original called Stop() here, but a freshly constructed
    // WebCamTexture is never playing, so that call was a no-op; removed.
    CameraTexture.Play();

    // Bind the feed both to the UI texture slot and to the material so
    // either rendering path shows the camera output.
    Display.texture = CameraTexture;
    Display.material.mainTexture = CameraTexture;
}
示例2: createInstance
/// <summary>
/// This function creates WebCamTexture wraped Sensor.
/// </summary>
/// <param name="i_wtx">
/// A <see cref="WebCamTexture"/>
/// </param>
/// <returns>
/// A <see cref="NyARUnityWebCam"/>
/// </returns>
/// <summary>
/// Creates a sensor that wraps the given <see cref="WebCamTexture"/>.
/// </summary>
/// <param name="i_wtx">
/// A <see cref="WebCamTexture"/> to wrap; it may or may not be playing.
/// </param>
/// <returns>
/// A <see cref="NyARUnityWebCam"/> wrapping <paramref name="i_wtx"/>.
/// </returns>
public static NyARUnityWebCam createInstance(WebCamTexture i_wtx)
{
    // Already running: wrap it directly.
    if (i_wtx.isPlaying)
    {
        return new NyARUnityWebCam(i_wtx);
    }

    // Temporarily start the texture so the wrapper can be constructed
    // against a live camera, then restore the stopped state before returning.
    i_wtx.Play();
    NyARUnityWebCam sensor = new NyARUnityWebCam(i_wtx);
    i_wtx.Stop();
    return sensor;
}
示例3: WebCam
/// <summary>
/// Starts streaming the webcam at index <paramref name="CameraNum"/> onto
/// this object's material.
/// </summary>
/// <param name="CameraNum">Index into <see cref="WebCamTexture.devices"/>.</param>
void WebCam(int CameraNum)
{
    WebCamDevice[] devices = WebCamTexture.devices;
    print(devices.Length);

    // Bug fix: the original printed devices[CameraNum].name BEFORE checking
    // devices.Length and never validated CameraNum, throwing
    // IndexOutOfRangeException when no (or too few) cameras are attached.
    if (devices.Length > 0 && CameraNum >= 0 && CameraNum < devices.Length)
    {
        print(devices[CameraNum].name);
        webcamTexture = new WebCamTexture(devices[CameraNum].name, Width, Height, FPS);
        renderer.material.mainTexture = webcamTexture;

        // A just-constructed texture is never playing; this guard only
        // matters if the field is reused elsewhere, so it is kept for safety.
        if (webcamTexture.isPlaying)
        {
            print(webcamTexture.isPlaying);
            webcamTexture.Stop();
        }
        webcamTexture.Play();
    }
    else
    {
        Debug.Log("no camera");
    }
}
示例4: Start
/// <summary>
/// Logs all available webcams and starts streaming the first one onto this
/// object's renderer.
/// </summary>
void Start()
{
    WebCamDevice[] devices = WebCamTexture.devices;
    for (int i = 0; i < devices.Length; i++)
    {
        Debug.Log(devices[i].name);
    }

    // The original re-queried WebCamTexture.devices immediately after the
    // loop; nothing could have changed it, so the redundant fetch is removed.
    webcamTexture = new WebCamTexture();
    if (devices.Length > 0)
    {
        Renderer renderer = GetComponent<Renderer>();
        webcamTexture.deviceName = devices[0].name;
        renderer.material.mainTexture = webcamTexture;

        // Defensive: a just-constructed texture is never playing, but stop
        // before (re)starting in case this field is assigned elsewhere.
        if (webcamTexture.isPlaying)
        {
            webcamTexture.Stop();
        }
        webcamTexture.Play();
    }
}
示例5: Start
/// <summary>
/// Initializes the camera scene: acquires (or reuses) a back-facing webcam
/// feed, scales the display plane to fill the screen, and starts the QR
/// decoding thread.
/// </summary>
protected virtual void Start()
{
    GlobalState.Instance.SceneToSwitchTo = Config.Scenes.None;
    // Keep a static reference to this class to be able to display toast messages from other components (namely QrCodeCollection.cs).
    CameraScriptInstance = this;
    _coin = GetComponent<AudioSource>();
    _qrCodeCollection = new QrCodeCollection();
    // Reset the global state current question and coin every time we restart the camera scene.
    GlobalState.Instance.Reset();
    // NOTE(review): RequestUserAuthorization is asynchronous on some
    // platforms, yet HasUserAuthorization is checked on the very next line —
    // the first run may report "no authorization" before the user answers
    // the permission dialog. Confirm whether a coroutine wait is needed.
    Application.RequestUserAuthorization(UserAuthorization.WebCam);
    if (Application.HasUserAuthorization(UserAuthorization.WebCam))
    {
        Debug.Log("access to webcam granted!");
        Debug.Log("#WebCamDevices: " + WebCamTexture.devices.GetLength(0));
        if (GlobalState.Instance.WebCamTexture == null)
        {
            // Find a backfacing camera device.
            // NOTE(review): the loop does not break, so when several
            // back-facing devices exist the LAST one wins — confirm intended.
            foreach (WebCamDevice device in WebCamTexture.devices)
            {
                Debug.Log("WebCamDevice: " + device.name);
                Debug.Log("FrontFacing? " + device.isFrontFacing);
                if (!device.isFrontFacing)
                {
                    _backFacing = device;
                }
            }
            // Try to obtain a 1024x768 texture from the webcam.
            _webcamTexture = new WebCamTexture(_backFacing.name, 1024, 768);
            _webcamTexture.Play();
            // The device might not support the requested resolution, so we try again with a lower one.
            if (_webcamTexture.width != 1024 || _webcamTexture.height != 768)
            {
                _webcamTexture.Stop();
                _webcamTexture = new WebCamTexture(_backFacing.name, 640, 480);
                _webcamTexture.Play();
            }
            // Keep a global reference to the WebCamTexture to speed up scene initialization next time.
            GlobalState.Instance.WebCamTexture = _webcamTexture;
        }
        else
        {
            // Reuse the cached texture from a previous visit to this scene.
            _webcamTexture = GlobalState.Instance.WebCamTexture;
            _webcamTexture.Play();
        }
        GetComponent<Renderer>().material.SetTexture("_MainTex", _webcamTexture);
        Debug.Log(string.Format("Actual camera dimens: {0}x{1}", _webcamTexture.width, _webcamTexture.height));
        // Publish the actual (possibly downgraded) camera resolution for other components.
        GlobalState.Instance.CamWidth = _webcamTexture.width;
        GlobalState.Instance.CamHeight = _webcamTexture.height;
        float camRatio = (float) _webcamTexture.width/_webcamTexture.height;
        float screenRatio = (float) Screen.width/Screen.height;
        // Scale plane so it fills the screen while keeping the camera's aspect ratio.
        // If the camera's aspect ratio differs from the screen's,
        // one side will match exactly and the other side will be larger than the screen's dimension.
        var idealHeight = 0.7f;
        if (screenRatio > camRatio)
        {
            gameObject.transform.localScale = new Vector3(screenRatio*idealHeight, 1,
                screenRatio*idealHeight/camRatio);
        }
        else
        {
            gameObject.transform.localScale = new Vector3(camRatio*idealHeight, 1, idealHeight);
        }
        // NOTE(review): the *10 factor presumably converts the plane's unit
        // size (Unity's default plane is 10x10 units) to world dimensions —
        // confirm against the mesh actually used.
        GlobalState.Instance.PlaneWidth = gameObject.transform.localScale.x*10;
        GlobalState.Instance.PlaneHeight = gameObject.transform.localScale.z*10;
        // Decode QR codes off the main thread to keep rendering smooth.
        _qrCodeThread = new Thread(DecodeQr);
        _qrCodeThread.Start();
    }
    else
    {
        Debug.LogError("No User Authorization for Camera Device.");
    }
    // Check win condition: if the user has already collected all the coins, show a toast.
    if (GlobalState.Instance.AllQuestions.questions.Length == GlobalState.Instance.CollectedCoinCount())
    {
        SetToastToShow(StringResources.WinToastMessage, ToastLengthLong);
    }
}
示例6: Start
/// <summary>
/// Clears the previously captured texture and starts the default webcam
/// feed, mirroring the preview on iOS.
/// </summary>
void Start()
{
    // Reset whichever texture slot this capture mode writes to.
    if (Data.Instance.isPhoto4Room)
        Data.Instance.lastPhotoTexture = null;
    else
        Data.Instance.lastArtTexture = null;

    // Guard added: the original indexed devices[0] unconditionally and threw
    // IndexOutOfRangeException on hardware with no camera.
    if (WebCamTexture.devices.Length == 0)
    {
        Debug.LogError("no camera");
        return;
    }
    webCamTexture = new WebCamTexture(WebCamTexture.devices[0].name, (int)Data.Instance.defaultCamSize.x, (int)Data.Instance.defaultCamSize.y, 30);

    // Bug fix: the original 'if (isPlaying) Stop(); else Play();' left the
    // camera stopped whenever the texture was already playing. Stop first
    // (a no-op for a fresh texture), then always start playback.
    if (webCamTexture.isPlaying)
    {
        webCamTexture.Stop();
    }
    webCamTexture.Play();

    Vector3 scale = rawImage.transform.localScale;
#if UNITY_IOS
    // Mirror horizontally and rotate 180 degrees so the iOS preview is upright.
    scale.x *= -1;
    rawImage.transform.localEulerAngles = new Vector3(0, 0, 180);
#endif
    rawImage.transform.localScale = scale;
}