diff --git a/ZMQ Examples/CarServerMQ.cs b/ZMQ Examples/CarServerMQ.cs
new file mode 100644
index 0000000..edd0b2b
--- /dev/null
+++ b/ZMQ Examples/CarServerMQ.cs
@@ -0,0 +1,52 @@
+using UnityEngine;
+using NetMQ;
+using NetMQ.Sockets;
+using System.Threading;
+using System.Threading.Tasks;
+
+public class CarServer : MonoBehaviour
+{
+    // ... (existing code)
+
+    private void Start()
+    {
+        // ... (existing code)
+
+        // Set useZMQ to true to enable ZMQ
+        useZMQ = true;
+
+        if (useZMQ)
+        {
+            Task.Run(() => Publish());
+        }
+        else
+        {
+            // ... (existing code)
+        }
+    }
+
+    private void Publish()
+    {
+        using (var publisher = new PublisherSocket())
+        {
+            publisher.Bind("tcp://*:25005");
+
+            while (true)
+            {
+                // Publish simulated data
+                publisher.SendMoreFrame("/car/steering").SendFrame(Steering.ToString());
+                publisher.SendMoreFrame("/car/velocity").SendFrame(Speed.ToString());
+                publisher.SendMoreFrame("/car/frontsensor/right").SendFrame(sensorsClass.distanceRight.ToString());
+                publisher.SendMoreFrame("/car/frontsensor/middle").SendFrame(sensorsClass.distanceCenter.ToString());
+                publisher.SendMoreFrame("/car/frontsensor/left").SendFrame(sensorsClass.distanceLeft.ToString());
+                publisher.SendMoreFrame("/car/frontsensor/config/angle").SendFrame(sensor_angle.ToString());
+                publisher.SendMoreFrame("/car/camera/rgb").SendFrame(cameraSens.cameraImageString);
+                publisher.SendMoreFrame("/car/speed").SendFrame(current_speed.ToString());
+
+                Thread.Sleep(100); // Adjust the publish interval as needed
+            }
+        }
+    }
+
+    // ... (existing code)
+}
\ No newline at end of file
diff --git a/ZMQ Examples/IMU/IMU.cs b/ZMQ Examples/IMU/IMU.cs
new file mode 100644
index 0000000..660aae6
--- /dev/null
+++ b/ZMQ Examples/IMU/IMU.cs
@@ -0,0 +1,53 @@
+using UnityEngine;
+
+public class IMUSimulator : MonoBehaviour
+{
+    public Vector3 accelerometerNoise = new Vector3(0.05f, 0.05f, 0.05f); // Adjust accelerometer noise
+    public Vector3 gyroscopeNoise = new Vector3(0.1f, 0.1f, 0.1f); // Adjust gyroscope noise
+
+    private Vector3 accelerometer;
+    private Vector3 gyroscope;
+
+    void Start()
+    {
+        // The gyroscope must be enabled explicitly, otherwise Input.gyro.rotationRate stays zero
+        Input.gyro.enabled = true;
+    }
+
+    void Update()
+    {
+        SimulateIMU();
+    }
+
+    void SimulateIMU()
+    {
+        // Simulate accelerometer data with noise
+        accelerometer = new Vector3(
+            -Input.acceleration.x + Random.Range(-accelerometerNoise.x, accelerometerNoise.x),
+            -Input.acceleration.y + Random.Range(-accelerometerNoise.y, accelerometerNoise.y),
+            -Input.acceleration.z + Random.Range(-accelerometerNoise.z, accelerometerNoise.z)
+        );
+
+        // Simulate gyroscope data with noise
+        gyroscope = new Vector3(
+            -Input.gyro.rotationRate.x + Random.Range(-gyroscopeNoise.x, gyroscopeNoise.x),
+            -Input.gyro.rotationRate.y + Random.Range(-gyroscopeNoise.y, gyroscopeNoise.y),
+            -Input.gyro.rotationRate.z + Random.Range(-gyroscopeNoise.z, gyroscopeNoise.z)
+        );
+
+        // Use the simulated data in your application as needed.
+        // For example, you can integrate accelerometer data to calculate velocity or position.
+        // Gyroscope data can be used to update orientation.
+    }
+
+    // Accessors for getting simulated data
+    public Vector3 GetAccelerometerData()
+    {
+        return accelerometer;
+    }
+
+    public Vector3 GetGyroscopeData()
+    {
+        return gyroscope;
+    }
+}
\ No newline at end of file
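
Note: the comment at the end of SimulateIMU() suggests integrating the accelerometer samples to estimate velocity. A minimal, illustrative consumer of GetAccelerometerData() is sketched below. It is not part of the changeset above; the class name IMUVelocityEstimator is hypothetical, and a real estimator would convert Input.acceleration from units of g to m/s^2 and remove the gravity component before integrating.

using UnityEngine;

// Hypothetical example component (not part of the diff above).
public class IMUVelocityEstimator : MonoBehaviour
{
    public IMUSimulator imu; // assign the IMUSimulator component in the Inspector

    private Vector3 velocity = Vector3.zero;

    void Update()
    {
        // Naive Euler integration of the simulated accelerometer samples.
        // Input.acceleration is reported in units of g, so a real estimator would
        // scale by ~9.81 and compensate for gravity before integrating.
        velocity += imu.GetAccelerometerData() * Time.deltaTime;
    }

    public Vector3 GetVelocity()
    {
        return velocity;
    }
}
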
diff --git a/ZMQ Examples/Image/netmq.cs b/ZMQ Examples/Image/netmq.cs
new file mode 100644
index 0000000..2150c42
--- /dev/null
+++ b/ZMQ Examples/Image/netmq.cs
@@ -0,0 +1,75 @@
+using UnityEngine;
+using NetMQ;
+using NetMQ.Sockets;
+
+public class FramePublisher : MonoBehaviour
+{
+    public Camera captureCamera;
+    public RenderTexture renderTexture;
+
+    private PublisherSocket publisherSocket;
+    private Texture2D texture2D;
+
+    private void Start()
+    {
+        // Required so NetMQ works reliably inside Unity/Mono
+        AsyncIO.ForceDotNet.Force();
+
+        // Create and bind the ZeroMQ publisher socket
+        publisherSocket = new PublisherSocket();
+        publisherSocket.Bind("tcp://*:5555");
+
+        // Create a reusable Texture2D object
+        texture2D = new Texture2D(renderTexture.width, renderTexture.height, TextureFormat.RGB24, false);
+    }
+
+    private void LateUpdate()
+    {
+        // Capture frames from the camera (assumes captureCamera renders into a RenderTexture) and publish them
+        Graphics.Blit(captureCamera.targetTexture, renderTexture);
+
+        byte[] encodedBytes = EncodeFrame();
+
+        // Publish the encoded frame with a topic
+        string topic = "CameraFrames";
+        publisherSocket.SendMoreFrame(topic).SendFrame(encodedBytes);
+    }
+
+    private byte[] EncodeFrame()
+    {
+        // Encode the frame in a more compact format, such as WebP or PNG.
+        // WebPEncoder below stands in for a third-party encoder; Unity has no built-in WebP support.
+
+        // Example: Encoding as WebP
+        RenderTexture.active = renderTexture;
+        texture2D.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
+        texture2D.Apply();
+
+        byte[] encodedBytes = WebPEncoder.Encode(texture2D, quality: 80);
+
+        return encodedBytes;
+    }
+
+    public byte[] EncodeFrameJPG()
+    {
+        // Read pixel data from RenderTexture into Texture2D
+        RenderTexture.active = renderTexture;
+        texture2D.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
+        texture2D.Apply();
+
+        // Encode Texture2D as JPEG
+        byte[] encodedBytes = texture2D.EncodeToJPG();
+
+        return encodedBytes;
+    }
+
+    private void OnDestroy()
+    {
+        // Close the ZeroMQ publisher socket and shut NetMQ down cleanly
+        publisherSocket.Close();
+        NetMQConfig.Cleanup(false);
+
+        // Clean up the Texture2D object
+        Destroy(texture2D);
+    }
+}
\ No newline at end of file
diff --git a/ZMQ Examples/LIDAR/LIDAR.cs b/ZMQ Examples/LIDAR/LIDAR.cs
new file mode 100644
index 0000000..0b6359c
--- /dev/null
+++ b/ZMQ Examples/LIDAR/LIDAR.cs
@@ -0,0 +1,63 @@
+using UnityEngine;
+using System.Collections.Generic;
+using System.IO;
+
+public class LidarSimulator : MonoBehaviour
+{
+    public int numRays = 360;
+    public float maxDistance = 10f;
+
+    private List<Vector3> pointCloud = new List<Vector3>();
+
+    void Update()
+    {
+        ClearPoints();
+        SimulateLidar();
+        CreatePointCloud();
+    }
+
+    void SimulateLidar()
+    {
+        for (int i = 0; i < numRays; i++)
+        {
+            float angle = i * 360f / numRays;
+            Vector3 direction = Quaternion.Euler(0, angle, 0) * transform.forward;
+
+            RaycastHit hit;
+
+            if (Physics.Raycast(transform.position, direction, out hit, maxDistance))
+            {
+                Debug.DrawLine(transform.position, hit.point, Color.red);
+                pointCloud.Add(hit.point);
+            }
+            else
+            {
+                Vector3 endPoint = transform.position + direction * maxDistance;
+                Debug.DrawLine(transform.position, endPoint, Color.green);
+            }
+        }
+    }
+
+    void CreatePointCloud()
+    {
+        // Export the point cloud to a CSV file (rewriting the file every frame is expensive; fine for a demo)
+        string filePath = Application.dataPath + "/pointCloud.csv";
+
+        using (StreamWriter writer = new StreamWriter(filePath))
+        {
+            foreach (Vector3 point in pointCloud)
+            {
+                writer.WriteLine($"{point.x},{point.y},{point.z}");
+            }
+        }
+
+        Debug.Log($"Point cloud saved to: {filePath}");
+    }
+
+    void ClearPoints()
+    {
+        // Debug.DrawLine segments expire on their own; clear last frame's points
+        // so the exported point cloud does not accumulate duplicates
+        pointCloud.Clear();
+    }
+}
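
Note: LidarSimulator above writes the scan to a CSV file every frame, which suits the offline pointcloud.py viewer below but is costly at runtime. The sketch here streams the same points over NetMQ, matching the pub/sub pattern used by CarServerMQ and FramePublisher. It is not part of the changeset; the topic "/car/lidar", port 25006, and the GetPoints() accessor are assumptions (GetPoints() would have to be added to LidarSimulator, e.g. as a method returning the pointCloud list).

using System.Text;
using UnityEngine;
using NetMQ;
using NetMQ.Sockets;

// Hypothetical example component (not part of the diff above).
public class LidarPublisher : MonoBehaviour
{
    // Assumes LidarSimulator exposes its points, e.g.:
    //   public List<Vector3> GetPoints() { return pointCloud; }
    public LidarSimulator lidar;

    private PublisherSocket publisher;

    private void Start()
    {
        AsyncIO.ForceDotNet.Force();
        publisher = new PublisherSocket();
        publisher.Bind("tcp://*:25006"); // arbitrary port, chosen to avoid 25005/5555 used above
    }

    private void LateUpdate()
    {
        // Serialize the current scan as "x,y,z;x,y,z;..." and publish it as one message
        var sb = new StringBuilder();
        foreach (Vector3 p in lidar.GetPoints())
        {
            sb.Append(p.x).Append(',').Append(p.y).Append(',').Append(p.z).Append(';');
        }
        publisher.SendMoreFrame("/car/lidar").SendFrame(sb.ToString());
    }

    private void OnDestroy()
    {
        publisher.Close();
        NetMQConfig.Cleanup(false);
    }
}

A Python subscriber can rebuild the scan by splitting on ';' and ',' and then plot it the same way pointcloud.py does.
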
diff --git a/ZMQ Examples/LIDAR/pointcloud.py b/ZMQ Examples/LIDAR/pointcloud.py
new file mode 100644
index 0000000..9be181c
--- /dev/null
+++ b/ZMQ Examples/LIDAR/pointcloud.py
@@ -0,0 +1,19 @@
+import numpy as np
+import matplotlib.pyplot as plt
+from mpl_toolkits.mplot3d import Axes3D
+
+# Read point cloud data from the CSV file
+file_path = "path/to/your/pointCloud.csv"
+point_cloud = np.genfromtxt(file_path, delimiter=',')
+
+# Visualize the point cloud
+fig = plt.figure()
+ax = fig.add_subplot(111, projection='3d')
+ax.scatter(point_cloud[:, 0], point_cloud[:, 1], point_cloud[:, 2], c='b', marker='o')
+
+ax.set_xlabel('X')
+ax.set_ylabel('Y')
+ax.set_zlabel('Z')
+ax.set_title('Point Cloud Visualization')
+
+plt.show()
diff --git a/ZMQ Examples/zmq_client.py b/ZMQ Examples/zmq_client.py
new file mode 100644
index 0000000..03d89dd
--- /dev/null
+++ b/ZMQ Examples/zmq_client.py
@@ -0,0 +1,16 @@
+import zmq
+
+def subscribe(topic):
+    context = zmq.Context()
+    subscriber = context.socket(zmq.SUB)
+    subscriber.connect("tcp://127.0.0.1:25005")
+    subscriber.setsockopt_string(zmq.SUBSCRIBE, topic)
+
+    while True:
+        topic = subscriber.recv_string()
+        message = subscriber.recv_string()
+        print(f"Received message on topic {topic}: {message}")
+
+if __name__ == "__main__":
+    subscribe("/car/steering")
+    # Add more topics as needed
\ No newline at end of file
diff --git a/ZMQ Examples/zmq_test.py b/ZMQ Examples/zmq_test.py
new file mode 100644
index 0000000..5510551
--- /dev/null
+++ b/ZMQ Examples/zmq_test.py
@@ -0,0 +1,23 @@
+import zmq
+import cv2
+import numpy as np
+
+context = zmq.Context()
+subscriber_socket = context.socket(zmq.SUB)
+subscriber_socket.connect("tcp://localhost:5555")
+
+# Subscribe to a specific topic
+topic = "CameraFrames"
+subscriber_socket.setsockopt_string(zmq.SUBSCRIBE, topic)
+
+while True:
+    # Receive the topic and encoded frame
+    [received_topic, encoded_frame] = subscriber_socket.recv_multipart()
+
+    # Decode the frame
+    nparr = np.frombuffer(encoded_frame, np.uint8)
+    frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
+
+    # Process the frame (e.g., display or save it)
+    cv2.imshow("Frame", frame)
+    cv2.waitKey(1)
\ No newline at end of file
diff --git a/avisengine.py b/avisengine.py
index 6c7fa7f..86308f8 100644
--- a/avisengine.py
+++ b/avisengine.py
@@ -69,25 +69,24 @@ def connect(self,server,port):
             print("Failed to connect to ", server, port)
             return False
-
     def recvall(self, socket):
         '''
         Function to receive all the data chunks
         '''
-        BUFFER_SIZE = 65536 # Increased buffer size for better performance
-        data = bytearray()  # Use a bytearray for better performance
+        BUFFER_SIZE = 131072
+        data_parts = []
+        eof_found = False
 
-        while True:
+        while not eof_found:
             part = socket.recv(BUFFER_SIZE)
-            data.extend(part)
+            data_parts.append(part)
+            data = b"".join(data_parts).decode("utf-8")
 
-            # Use KMP search to find the <EOF>, KMPSearch() returns -1 if the pattern was not found
-            # It is 9 times faster than the simple python search
-            if utils.KMPSearch(b"<EOF>", data) > -1: # Convert "<EOF>" to bytes
-                break
-
-        return data.decode("utf-8")
+            # Use string search to find the <EOF>
+            if "<EOF>" in data:
+                eof_found = True
+        return data
 
     def setSteering(self,steering):
         '''
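
Note: the rewritten recvall() above keeps appending TCP chunks until the "<EOF>" marker appears in the decoded buffer, so correctness depends entirely on the sender terminating every message with that marker. The server-side send code is not part of this diff; the sketch below only illustrates the framing convention, and the class and method names are hypothetical.

using System.Net.Sockets;
using System.Text;

// Hypothetical illustration of delimiter framing (not part of the diff above).
public static class EofFramedSender
{
    public static void SendMessage(NetworkStream stream, string payload)
    {
        // Terminate the message with "<EOF>" so the Python client's recvall() knows where it ends
        byte[] bytes = Encoding.UTF8.GetBytes(payload + "<EOF>");
        stream.Write(bytes, 0, bytes.Length);
    }
}

For very large messages such as base64 camera frames, re-decoding the whole accumulated buffer on every chunk grows roughly quadratically; checking only the tail of the raw byte buffer for b"<EOF>" would avoid that at the cost of slightly more bookkeeping.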