Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
using System.Collections;
using System.Collections.Generic;
using Unity.Burst;
using Unity.Collections;
using Unity.Jobs;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// Receives grayscale frames from a ZED camera retriever and runs a Burst-compiled
/// parallel job over the raw pixel bytes, keeping a rolling history buffer of frames.
/// The job body (<see cref="ConvertJobParallel.Execute"/>) is currently a stub.
/// </summary>
public class TestParallel : MonoBehaviour
{
    // Frame source; resolved via singleton in Awake when not wired in the Inspector.
    public ZEDToOpenCVRetriever imageRetriever;

    // Job tuning parameters, exposed for the Inspector (public interface preserved).
    public int history_scan_depth = 50;
    public int data_delta = 50;

    // Fixed frame geometry of the incoming ZED texture.
    private const int width = 1280;
    private const int height = 720;

    private int history_capacity = 3;          // number of frames kept in historyArray
    private int layer_size = width * height * 6; // bytes per frame layer — TODO confirm 6-byte-per-pixel assumption against the texture format
    private int layer = 0;                     // index of the layer currently being written

    private Texture2D _outputTexture;

    // inputArray is a view over the incoming texture's memory; the texture owns and
    // disposes it, so it must NOT be disposed here.
    private NativeArray<byte> inputArray;
    private NativeArray<byte> historyArray;
    private NativeArray<byte> averageArray;
    private NativeArray<byte> outputArray;

    private void Awake()
    {
        if (!imageRetriever) imageRetriever = ZEDToOpenCVRetriever.GetInstance();
        imageRetriever.OnImageUpdated_LeftGrayscale += ImageUpdated;

        // Use the shared constants instead of repeating the magic numbers 1280/720.
        _outputTexture = new Texture2D(width, height);

        historyArray = new NativeArray<byte>(history_capacity * layer_size, Allocator.Persistent);
        averageArray = new NativeArray<byte>(layer_size, Allocator.Persistent);
        outputArray = new NativeArray<byte>(layer_size, Allocator.Persistent);
    }

    private void OnDestroy()
    {
        // Unsubscribe so the retriever does not keep invoking a destroyed component.
        if (imageRetriever) imageRetriever.OnImageUpdated_LeftGrayscale -= ImageUpdated;

        // Guard with IsCreated: calling Dispose() on a never-allocated NativeArray
        // throws, e.g. if the component is destroyed before Awake ran.
        if (historyArray.IsCreated) historyArray.Dispose();
        if (averageArray.IsCreated) averageArray.Dispose();
        if (outputArray.IsCreated) outputArray.Dispose();
        // inputArray is owned by the source texture and is intentionally not disposed.
    }

    /// <summary>
    /// Per-pixel job scheduled once per incoming frame. [BurstCompile] is optional
    /// but gives a large speedup (requires the Burst package).
    /// </summary>
    [BurstCompile]
    public struct ConvertJobParallel : IJobParallelFor
    {
        // ReadOnlyAttribute takes no constructor arguments; the original
        // [ReadOnly("input_texture")] form does not compile.
        [ReadOnly] public NativeArray<byte> input_texture;
        public NativeArray<byte> output;

        // History/average are indexed across the whole buffer (not just [index]),
        // so the parallel-for write restriction must be lifted.
        [NativeDisableParallelForRestriction] public NativeArray<byte> history;
        [NativeDisableParallelForRestriction] public NativeArray<byte> average;

        public int _history_scan_depth;
        public int _history_capacity;
        public int _layer_size;
        public int _data_delta;
        public int _layer;

        /// <summary>Called by the job system for each index in [0, arrayLength).</summary>
        public void Execute(int index)
        {
            // Per-pixel processing logic goes here (currently a stub).
        }
    }

    /// <summary>
    /// Event handler: grabs the raw bytes of the new frame, schedules the parallel
    /// job over them, and blocks the main thread until it completes.
    /// </summary>
    private void ImageUpdated(ref Texture2D zedTextureDepth)
    {
        // Could use GetPixelData instead when working with mipmaps.
        inputArray = zedTextureDepth.GetRawTextureData<byte>();

        var job = new ConvertJobParallel
        {
            input_texture = inputArray,
            output = outputArray,
            history = historyArray,
            average = averageArray,
            _history_scan_depth = history_scan_depth,
            _history_capacity = history_capacity,
            _layer_size = layer_size,
            _data_delta = data_delta,
            _layer = layer
        };

        // 100 = batch size: how many indices each work-stealing batch processes.
        JobHandle handle = job.Schedule(inputArray.Length, 100);
        JobHandle.ScheduleBatchedJobs(); // start running all previously scheduled jobs
        handle.Complete();               // wait on the main thread for this job to finish
        //_outputTexture.Apply();        // copy changes to GPU once Execute writes output
    }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement