// FixedImageSource — C# / Unity (≈240 lines, 7.9 KiB)
using System.Collections;
|
|
using System.Collections.Generic;
|
|
using System.Threading.Tasks;
|
|
using Cysharp.Threading.Tasks;
|
|
using Cysharp.Threading.Tasks.Linq;
|
|
using UnityEngine;
|
|
using UnityEngine.Networking;
|
|
using UnityEngine.Video;
|
|
|
|
/// <summary>
/// Feeds a RenderTexture ("the output buffer") from one of several image
/// sources: a webcam, a VideoPlayer on the same GameObject, a shader-generated
/// test card, or an asynchronously downloaded web texture.
/// </summary>
public class FixedImageSource : MonoBehaviour
{
    #region Public property

    /// <summary>Current output: the explicit override texture if assigned, else the internal buffer.</summary>
    public Texture Texture => OutputBuffer;

    #endregion

    #region Editable attributes

    // Source type options
    public enum SourceType { Texture, Video, Webcam, Card, Gradient, Camera }

    // NOTE(review): intentionally (?) not [SerializeField], so the inspector can
    // never change it — every instance runs as Webcam. Confirm before exposing.
    SourceType _sourceType = SourceType.Webcam;

    // Webcam options
    [SerializeField] string _webcamName = "";          // empty = first available device
    [SerializeField] Vector2Int _webcamResolution;     // (0,0) lets Unity pick a default
    [SerializeField] int _webcamFrameRate = 30;

    // Output options
    [SerializeField] RenderTexture _outputTexture = null;

    #endregion

    #region Package asset reference

    [SerializeField, HideInInspector] Shader _shader = null;

    #endregion

    #region Private members

    UnityWebRequest _webTexture;     // in-flight texture download, null when idle
    WebCamTexture _webcam;
    Material _material;
    RenderTexture _buffer;           // NOTE(review): never allocated in this class — verify an external owner assigns it
    VideoPlayer _videoPlayer;        // cached so Update does not call GetComponent every frame

    RenderTexture OutputBuffer
        => _outputTexture != null ? _outputTexture : _buffer;

    /// <summary>
    /// Crops <paramref name="reductionPixels"/> columns from BOTH the left and
    /// right edge of the webcam frame and returns the result as a new Texture2D.
    /// </summary>
    /// <param name="webcamTexture">Live webcam texture to read pixels from.</param>
    /// <param name="reductionPixels">Columns trimmed from each side (total width loss = 2x this).</param>
    /// <returns>
    /// A freshly allocated Texture2D of size (width - 2*reductionPixels) x height,
    /// or null when the reduction exceeds the source width.
    /// The CALLER owns the returned texture and must Destroy() it to avoid leaks.
    /// </returns>
    private Texture2D AdjustWebCamTextureWidth(WebCamTexture webcamTexture, int reductionPixels)
    {
        int width = webcamTexture.width;
        int height = webcamTexture.height;

        int newWidth = width - (reductionPixels * 2);
        if (newWidth <= 0)
        {
            Debug.LogError("Reduction pixels exceed the width of the texture.");
            return null;
        }

        Color32[] pixels = webcamTexture.GetPixels32();
        Color32[] adjustedPixels = new Color32[newWidth * height];

        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < newWidth; x++)
            {
                // Shift into the source image, skipping the cropped left margin.
                int originalX = x + reductionPixels;

                // Guard against reading outside the source width.
                if (originalX >= 0 && originalX < width)
                {
                    adjustedPixels[y * newWidth + x] = pixels[y * width + originalX];
                }
            }
        }

        Texture2D adjustedTexture = new Texture2D(newWidth, height);
        adjustedTexture.SetPixels32(adjustedPixels);
        adjustedTexture.Apply();

        Debug.LogWarning($"{width}x{height} / {adjustedTexture.width}x{adjustedTexture.height}");

        return adjustedTexture;
    }

    /// <summary>
    /// Returns a new Texture2D built from the webcam frame with its pixel grid
    /// remapped by the index formula below (a transpose combined with a 180°
    /// flip — the output texture is height x width).
    /// </summary>
    /// <param name="texture">Source webcam texture.</param>
    /// <param name="clockwiseRotation">
    /// Currently UNUSED by the active code path — the same remap is applied
    /// regardless. Kept for interface compatibility.
    /// </param>
    /// <returns>A freshly allocated Texture2D; the caller must Destroy() it.</returns>
    private Texture FlipTexture(WebCamTexture texture, bool clockwiseRotation = false)
    {
        int width = texture.width;
        int height = texture.height;

        Color32[] pixels = texture.GetPixels32();
        Color32[] rotatedPixels = new Color32[pixels.Length];
        // Note the swapped dimensions: output is height x width.
        Texture2D rotatedTexture = new Texture2D(height, width);

        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                // Index math preserved verbatim from the original implementation:
                // both sides stay within [0, width*height) for all (x, y).
                rotatedPixels[width * height - (height * x + height) + y] =
                    pixels[width * height - (width * y + width) + x];
            }
        }

        rotatedTexture.SetPixels32(rotatedPixels);
        rotatedTexture.Apply();

        Debug.LogWarning($"input: {width}x{height}, output: {rotatedTexture.width}x{rotatedTexture.height}");

        return rotatedTexture;
    }

    /// <summary>
    /// Blits <paramref name="source"/> into the output buffer, letterboxing /
    /// pillarboxing so the source aspect ratio is preserved.
    /// </summary>
    /// <param name="source">Texture to copy; silently ignored when null.</param>
    /// <param name="vflip">Flip vertically (webcam frames are often mirrored).</param>
    void Blit(Texture source, bool vflip = false)
    {
        if (source == null) return;
        // Guard: with no override texture and no allocated buffer there is
        // nowhere to blit to (the original code would NRE here).
        if (OutputBuffer == null) return;

        var aspect1 = (float)source.width / source.height;
        var aspect2 = (float)OutputBuffer.width / OutputBuffer.height;

        // Shrink (never enlarge) one axis so the image fits without stretching.
        var scale = new Vector2(aspect2 / aspect1, aspect1 / aspect2);
        scale = Vector2.Min(Vector2.one, scale);
        if (vflip) scale.y *= -1;

        // Center the scaled image inside the buffer.
        var offset = (Vector2.one - scale) / 2;

        Graphics.Blit(source, OutputBuffer, scale, offset);
    }

    #endregion

    #region MonoBehaviour implementation

    void Start()
    {
        // Create a material for the shader (only on Card and Gradient)
        if (_sourceType == SourceType.Card || _sourceType == SourceType.Gradient)
            _material = new Material(_shader);

        // Webcam source type:
        // Create a WebCamTexture and start capturing.
        if (_sourceType == SourceType.Webcam)
        {
            if (_webcamName == "")
            {
                // Guard: devices[0] throws on machines with no camera attached.
                if (WebCamTexture.devices.Length == 0)
                {
                    Debug.LogError("No webcam devices found.");
                    return;
                }
                _webcamName = WebCamTexture.devices[0].name;
            }

            _webcam = new WebCamTexture
                (_webcamName,
                 _webcamResolution.x, _webcamResolution.y, _webcamFrameRate);
            _webcam.Play();
        }

        // Card source type:
        // Run the card shader to generate a test card image.
        if (_sourceType == SourceType.Card)
        {
            var dims = new Vector2(OutputBuffer.width, OutputBuffer.height);
            _material.SetVector("_Resolution", dims);
            Graphics.Blit(null, OutputBuffer, _material, 0);
        }
    }

    void OnDestroy()
    {
        if (_webcam != null)
        {
            // Stop capturing before releasing the texture (recommended by Unity).
            _webcam.Stop();
            Destroy(_webcam);
        }
        if (_buffer != null) Destroy(_buffer);
        if (_material != null) Destroy(_material);
    }

    void Update()
    {
        if (_sourceType == SourceType.Video)
        {
            // Cache the VideoPlayer once instead of GetComponent every frame;
            // skip quietly when the component is missing (original would NRE).
            if (_videoPlayer == null) _videoPlayer = GetComponent<VideoPlayer>();
            if (_videoPlayer != null) Blit(_videoPlayer.texture);
        }

        if (_sourceType == SourceType.Webcam && _webcam != null && _webcam.didUpdateThisFrame)
        {
            //Texture texture = AdjustWebCamTextureWidth(_webcam, 800);
            //Blit(texture, _webcam.videoVerticallyMirrored);
            Blit(_webcam, _webcam.videoVerticallyMirrored);
        }

        // Asynchronous image downloading: consume and dispose a finished request.
        if (_webTexture != null && _webTexture.isDone)
        {
            var texture = DownloadHandlerTexture.GetContent(_webTexture);
            _webTexture.Dispose();
            _webTexture = null;
            Blit(texture);
            // The decoded Texture2D is ours; destroy it after blitting to avoid a leak.
            Destroy(texture);
        }
    }

    #endregion
}
|