cameraPub.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Unity.Robotics.ROSTCPConnector;
// Publishes frames from a Unity camera to ROS as sensor_msgs/Image via the ROS-TCP-Connector.
public class cameraPub : MonoBehaviour
{
    ROSConnection m_Ros;
    public Camera sensorCamera;        // camera to capture from
    public RenderTexture targetTex;    // render target the camera draws into
    string topicName = "/XRCam";
    int resolutionWidth = 256;
    int resolutionHeight = 256;
    public int qualityLevel = 50;      // JPEG quality, only used when publishing compressed images (see the sketch below SendImage)
    WaitForEndOfFrame frameEnd = new WaitForEndOfFrame();
    // Start is called before the first frame update
    void Start()
    {
        m_Ros = ROSConnection.GetOrCreateInstance();
        // Register the topic so the ROS side advertises it as sensor_msgs/Image.
        m_Ros.RegisterPublisher<RosMessageTypes.Sensor.ImageMsg>(topicName);
        Debug.Log(m_Ros.RosIPAddress);
        // Make sure the camera has a render target at the requested resolution.
        if (targetTex == null)
        {
            targetTex = new RenderTexture(resolutionWidth, resolutionHeight, 24);
        }
        sensorCamera.targetTexture = targetTex;
    }

    // Update is called once per frame; the capture itself is deferred to the end of the
    // frame so ReadPixels runs only after the camera has finished rendering.
    void Update()
    {
        StartCoroutine(SendImage());
    }
    // Capture one frame from the camera's render target and publish it as a raw RGB image.
    IEnumerator SendImage()
    {
        // Wait for rendering to finish before reading pixels back from the GPU.
        yield return frameEnd;

        // Copy the pixels from the render target into a Texture2D.
        // For more efficiency this texture could be allocated once and reused.
        Texture2D texture2D = new Texture2D(targetTex.width, targetTex.height, TextureFormat.RGB24, false);
        Rect rect = new Rect(0, 0, targetTex.width, targetTex.height);
        RenderTexture previousRT = RenderTexture.active;
        RenderTexture.active = targetTex;
        texture2D.ReadPixels(rect, 0, 0);
        texture2D.Apply();
        RenderTexture.active = previousRT;

        // Fill in the sensor_msgs/Image. TextureFormat.RGB24 corresponds to the ROS "rgb8"
        // encoding (3 bytes per pixel). Unity's pixel rows run bottom-to-top, so the image
        // arrives vertically flipped unless the consumer flips it.
        RosMessageTypes.Sensor.ImageMsg message = new RosMessageTypes.Sensor.ImageMsg();
        message.header.frame_id = "xrcamera_out";
        message.width = (uint)targetTex.width;
        message.height = (uint)targetTex.height;
        message.encoding = "rgb8";
        message.is_bigendian = 0;
        message.step = (uint)targetTex.width * 3;
        message.data = texture2D.GetRawTextureData();

        m_Ros.Publish(topicName, message);

        // Release the temporary texture so one is not leaked every frame.
        Destroy(texture2D);
    }
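
    // Sketch (not part of the original script): if JPEG output is preferred over raw RGB,
    // sensor_msgs/CompressedImage is the matching message type and would make use of
    // qualityLevel. This assumes the generated RosMessageTypes.Sensor.CompressedImageMsg
    // class is available and that the hypothetical topic "/XRCam/compressed" has been
    // registered with RegisterPublisher in Start().
    void SendCompressedImage(Texture2D texture2D)
    {
        RosMessageTypes.Sensor.CompressedImageMsg compressed = new RosMessageTypes.Sensor.CompressedImageMsg();
        compressed.header.frame_id = "xrcamera_out";
        compressed.format = "jpeg";
        compressed.data = texture2D.EncodeToJPG(qualityLevel);  // qualityLevel: 1 (smallest) to 100 (best)
        m_Ros.Publish("/XRCam/compressed", compressed);
    }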
}
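
// Sketch (not part of the original script): a minimal subscriber that can be attached to another
// GameObject to check that frames published on "/XRCam" come back through the ros_tcp_endpoint.
// The class name is made up for illustration; rqt_image_view on the ROS side is the usual way
// to inspect the stream.
public class cameraPubCheck : MonoBehaviour
{
    void Start()
    {
        ROSConnection.GetOrCreateInstance().Subscribe<RosMessageTypes.Sensor.ImageMsg>(
            "/XRCam",
            msg => Debug.Log("Received " + msg.width + "x" + msg.height + " image, " + msg.data.Length + " bytes"));
    }
}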