diff --git a/android/app/build.gradle b/android/app/build.gradle index 9baa20414..c6d37193c 100755 --- a/android/app/build.gradle +++ b/android/app/build.gradle @@ -80,6 +80,9 @@ dependencies { implementation 'com.github.pedroSG94:RTSP-Server:1.0.8' implementation 'com.github.pedroSG94.rtmp-rtsp-stream-client-java:rtplibrary:2.0.2' + //RTP Client + implementation 'com.github.ar-android:libstreaming:1.0.0' + // RxJava implementation 'io.reactivex.rxjava3:rxjava:3.0.0' implementation 'io.reactivex.rxjava3:rxandroid:3.0.0' diff --git a/android/app/src/main/java/org/openbot/env/ConnectionSelector.java b/android/app/src/main/java/org/openbot/env/ConnectionSelector.java index 46a7d28ba..fc77abffb 100644 --- a/android/app/src/main/java/org/openbot/env/ConnectionSelector.java +++ b/android/app/src/main/java/org/openbot/env/ConnectionSelector.java @@ -3,6 +3,7 @@ import android.content.Context; import android.net.ConnectivityManager; import android.net.NetworkInfo; +import android.util.Log; public class ConnectionSelector { private static final String TAG = "ConnectionManager"; @@ -11,6 +12,7 @@ public class ConnectionSelector { private ILocalConnection connection; private final ILocalConnection networkConnection = new NetworkServiceConnection(); + private final ILocalConnection mobileConnection = new MobileNetworkConnection(); private final ILocalConnection nearbyConnection = new NearbyConnection(); private ConnectionSelector() { @@ -38,8 +40,13 @@ ILocalConnection getConnection() { } if (isConnectedViaWifi()) { + Log.i(TAG, "Connected via Wifi"); connection = networkConnection; + } else if (isConnectedViaMobile()) { + Log.i(TAG, "Connected via mobile network"); + connection = mobileConnection; } else { + Log.i(TAG, "Connected via Peer-to-Peer"); connection = nearbyConnection; } @@ -52,4 +59,11 @@ private boolean isConnectedViaWifi() { NetworkInfo mWifi = connectivityManager.getNetworkInfo(ConnectivityManager.TYPE_WIFI); return mWifi.isConnected(); } + + private boolean 
isConnectedViaMobile() { + ConnectivityManager connectivityManager = + (ConnectivityManager) _context.getSystemService(Context.CONNECTIVITY_SERVICE); + NetworkInfo mMobile = connectivityManager.getNetworkInfo(ConnectivityManager.TYPE_MOBILE); + return mMobile.isConnected(); + } } diff --git a/android/app/src/main/java/org/openbot/env/ControllerConfig.java b/android/app/src/main/java/org/openbot/env/ControllerConfig.java index 85ae4091d..f5cce8022 100644 --- a/android/app/src/main/java/org/openbot/env/ControllerConfig.java +++ b/android/app/src/main/java/org/openbot/env/ControllerConfig.java @@ -10,7 +10,8 @@ public class ControllerConfig { enum VIDEO_SERVER_TYPE { WEBRTC, - RTSP + RTSP, + RTP } private String currentServerType; @@ -27,7 +28,7 @@ public static ControllerConfig getInstance() { void init(Context context) { preferences = PreferenceManager.getDefaultSharedPreferences(context); - currentServerType = get("video_server", "WEBRTC"); + currentServerType = get("video_server", "RTP"); } private void set(String name, String value) { @@ -59,7 +60,14 @@ private void setBoolean(String name, boolean value) { } public String getVideoServerType() { - return get("video_server", "WEBRTC"); + return get("video_server", "RTP"); + } + + public String[] getVideoServerAddress() { + String ip = get("ip", "127.0.0.1"); + String port_stream = get("port_stream", "8046"); + String port_control = get("port_control", "8040"); + return new String[] {ip, port_stream, port_control}; } public void setVideoServerType(String type) { diff --git a/android/app/src/main/java/org/openbot/env/ILocalConnection.java b/android/app/src/main/java/org/openbot/env/ILocalConnection.java index 6137ed397..da7310bab 100644 --- a/android/app/src/main/java/org/openbot/env/ILocalConnection.java +++ b/android/app/src/main/java/org/openbot/env/ILocalConnection.java @@ -20,5 +20,7 @@ public interface ILocalConnection { void start(); + void setServerAddress(String ip, String port); + boolean isVideoCapable(); 
} diff --git a/android/app/src/main/java/org/openbot/env/IVideoServer.java b/android/app/src/main/java/org/openbot/env/IVideoServer.java index 9e3ed966f..ae7af09bf 100644 --- a/android/app/src/main/java/org/openbot/env/IVideoServer.java +++ b/android/app/src/main/java/org/openbot/env/IVideoServer.java @@ -29,4 +29,6 @@ public interface IVideoServer { void setView(com.pedro.rtplibrary.view.OpenGlView view); void setCanStart(boolean canStart); + + void setServerAddress(String ip, String port); } diff --git a/android/app/src/main/java/org/openbot/env/MobileNetworkConnection.java b/android/app/src/main/java/org/openbot/env/MobileNetworkConnection.java new file mode 100644 index 000000000..bc5d3a778 --- /dev/null +++ b/android/app/src/main/java/org/openbot/env/MobileNetworkConnection.java @@ -0,0 +1,236 @@ +package org.openbot.env; + +import static timber.log.Timber.e; +import static timber.log.Timber.i; + +import android.annotation.SuppressLint; +import android.app.Activity; +import android.content.Context; +import java.io.BufferedInputStream; +import java.io.DataInputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.net.Socket; +import java.nio.charset.StandardCharsets; +import java.util.Scanner; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import org.openbot.utils.ConnectionUtils; +import timber.log.Timber; + +public class MobileNetworkConnection implements ILocalConnection { + + private static final String TAG = "MobileNetworkConn"; + private Context context; + + private String HOST; + private int PORT; + private IDataReceived dataReceivedCallback; + private SocketHandler socketHandler; + private BlockingQueue messageQueue = new ArrayBlockingQueue<>(25); + private boolean stopped = true; + + @Override + public void init(Context context) { + socketHandler = new SocketHandler(messageQueue); + } + + @Override + public void setDataCallback(IDataReceived dataCallback) { + 
this.dataReceivedCallback = dataCallback; + } + + @Override + public void connect(Context context) { + this.context = context; + start(); + runConnection(); + } + + @Override + public void disconnect(Context context) { + stop(); + + if (socketHandler == null) { + return; + } + socketHandler.close(); + } + + @Override + public void stop() { + stopped = true; + BotToControllerEventBus.emitEvent(ConnectionUtils.createStatus("CONNECTION_ACTIVE", false)); + } + + @Override + public void start() { + + stopped = false; + BotToControllerEventBus.emitEvent(ConnectionUtils.createStatus("CONNECTION_ACTIVE", true)); + } + + @Override + public void setServerAddress(String ip, String port) { + this.HOST = ip; + this.PORT = Integer.parseInt(port); + } + + @Override + public boolean isVideoCapable() { + return true; + } + + @Override + public boolean isConnected() { + return socketHandler != null && socketHandler.isConnected(); + } + + @Override + public void sendMessage(String message) { + if (socketHandler != null) { + socketHandler.put(message); + } + } + // end of interface + + private void runConnection() { + Timber.d("PORT: " + PORT + ", address: " + HOST); + + ((Activity) context) + .runOnUiThread( + () -> { + ControllerToBotEventBus.emitEvent("{command: \"CONNECTED\"}"); + }); + + new Thread("Receiver Thread") { + public void run() { + SocketHandler.ClientInfo clientInfo = socketHandler.connect(HOST, PORT); + if (clientInfo == null) { + Timber.d("Could not get a connection"); + return; + } + startReceiver(socketHandler, clientInfo.reader); + startSender(socketHandler, clientInfo.writer); + } + }.start(); + } + + private void startReceiver(SocketHandler socketHandler, Scanner reader) { + new Thread("startReceiver Thread") { + public void run() { + socketHandler.runReceiver(reader); + } + }.start(); + } + + private void startSender(SocketHandler socketHandler, OutputStream writer) { + new Thread("startSender Thread") { + public void run() { + try { + 
socketHandler.runSender(writer);
+        } catch (Exception e) {
+          e.printStackTrace();
+        }
+      }
+    }.start();
+  }
+
+  class SocketHandler {
+    private BlockingQueue messageQueue;
+    private Socket client;
+
+    boolean isConnected() {
+      return client != null && !client.isClosed();
+    }
+
+    SocketHandler(BlockingQueue messageQueue) {
+      this.messageQueue = messageQueue;
+    }
+
+    class ClientInfo {
+      Scanner reader;
+      OutputStream writer;
+
+      ClientInfo(Scanner reader, OutputStream writer) {
+        this.reader = reader;
+        this.writer = writer;
+      }
+    }
+
+    ClientInfo connect(String host, int port) {
+      ClientInfo clientInfo;
+
+      try {
+        client = new Socket(host, port);
+        clientInfo =
+            new ClientInfo(
+                new Scanner(new DataInputStream(new BufferedInputStream(client.getInputStream()))),
+                client.getOutputStream());
+      } catch (Exception e) {
+        return null;
+      }
+
+      return clientInfo;
+    }
+
+    void runReceiver(Scanner reader) {
+      try {
+        while (true) {
+          String msg = reader.nextLine().trim();
+
+          if (!stopped) {
+            ((Activity) context).runOnUiThread(() -> dataReceivedCallback.dataReceived(msg));
+          }
+        }
+      } catch (Exception e) {
+        close();
+      }
+    }
+
+    void put(String message) {
+      try {
+        this.messageQueue.put(message);
+      } catch (InterruptedException e) {
+        e.printStackTrace();
+      }
+    }
+
+    @SuppressLint("TimberArgCount")
+    void runSender(OutputStream writer) {
+      while (true) {
+        try {
+          String message = messageQueue.take();
+          i(TAG, "queue capacity: " + messageQueue.remainingCapacity());
+          writer.write((message + "\n").getBytes(StandardCharsets.UTF_8));
+        } catch (InterruptedException | IOException e) {
+          i(TAG, "runSender got exception: " + e);
+          close();
+
+          // reconnect unless deliberately stopped (isConnected() is always false after close())
+          if (!stopped) {
+            runConnection();
+          }
+          break;
+        }
+      }
+    }
+
+    void close() {
+      try {
+        if (client == null || client.isClosed()) {
+          return;
+        }
+        client.close();
+
+        ((Activity) context)
+            .runOnUiThread(
+                () -> {
+                  ControllerToBotEventBus.emitEvent("{command: \"DISCONNECTED\"}");
+                });
+      } catch (IOException
e) { + e.printStackTrace(); + } + } + } +} diff --git a/android/app/src/main/java/org/openbot/env/NearbyConnection.java b/android/app/src/main/java/org/openbot/env/NearbyConnection.java index 96ae8b323..825700e3b 100755 --- a/android/app/src/main/java/org/openbot/env/NearbyConnection.java +++ b/android/app/src/main/java/org/openbot/env/NearbyConnection.java @@ -262,6 +262,9 @@ public void start() { BotToControllerEventBus.emitEvent(ConnectionUtils.createStatus("CONNECTION_ACTIVE", true)); } + @Override + public void setServerAddress(String ip, String port) {} + @Override public boolean isVideoCapable() { return false; diff --git a/android/app/src/main/java/org/openbot/env/NetworkServiceConnection.java b/android/app/src/main/java/org/openbot/env/NetworkServiceConnection.java index 400de6edb..5401a4c16 100644 --- a/android/app/src/main/java/org/openbot/env/NetworkServiceConnection.java +++ b/android/app/src/main/java/org/openbot/env/NetworkServiceConnection.java @@ -88,6 +88,9 @@ public void start() { BotToControllerEventBus.emitEvent(ConnectionUtils.createStatus("CONNECTION_ACTIVE", true)); } + @Override + public void setServerAddress(String ip, String port) {} + @Override public boolean isVideoCapable() { return true; diff --git a/android/app/src/main/java/org/openbot/env/PhoneController.java b/android/app/src/main/java/org/openbot/env/PhoneController.java index e749ef31b..95691b7c6 100644 --- a/android/app/src/main/java/org/openbot/env/PhoneController.java +++ b/android/app/src/main/java/org/openbot/env/PhoneController.java @@ -2,7 +2,10 @@ import android.app.Activity; import android.content.Context; +import android.os.Build; import android.util.Log; +import android.util.Size; +import android.view.SurfaceView; import android.view.View; import android.view.ViewGroup; import android.widget.LinearLayout; @@ -42,21 +45,39 @@ public void dataReceived(String commandStr) { } private void init(Context context) { + Log.d(TAG, "initialized"); 
ControllerConfig.getInstance().init(context);
-    videoServer =
-        "RTSP".equals(ControllerConfig.getInstance().getVideoServerType())
-            ? new RtspServer()
-            : new WebRtcServer();
+    switch (ControllerConfig.getInstance().getVideoServerType()) {
+      case "RTP":
+        // RtpServer needs API >= R; fall back to WebRTC below R so videoServer is never null
+        videoServer =
+            (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) ? new RtpServer() : new WebRtcServer();
+        break;
+      case "RTSP":
+        videoServer = new RtspServer();
+        break;
+      default: // "WEBRTC" (stored values are upper-case) and any unrecognized type
+        videoServer = new WebRtcServer();
+    }
     videoServer.init(context);
     videoServer.setCanStart(true);
+    // Video Stream port
+    String[] videoServerAddress = ControllerConfig.getInstance().getVideoServerAddress();
+    videoServer.setServerAddress(videoServerAddress[0], videoServerAddress[1]);
+
+    // Controller port
     this.connectionSelector = ConnectionSelector.getInstance(context);
+    connectionSelector
+        .getConnection()
+        .setServerAddress(videoServerAddress[0], videoServerAddress[2]);
     connectionSelector.getConnection().setDataCallback(new DataReceived());
-    android.util.Size resolution =
-        CameraUtils.getClosestCameraResolution(context, new android.util.Size(640, 360));
+    // 1280 x 960 is the best resolution that is desirable for both video streaming quality and
+    // efficiency
+    Size resolution = CameraUtils.getClosestCameraResolution(context, new Size(1280, 960));
     videoServer.setResolution(resolution.getWidth(), resolution.getHeight());
 
     handleBotEvents();
@@ -69,6 +90,8 @@ private void createAndSetView(Context context) {
       view = new org.openbot.customview.WebRTCSurfaceView(context);
     } else if (videoServer instanceof RtspServer) {
       view = new org.openbot.customview.AutoFitSurfaceGlView(context);
+    } else if (videoServer instanceof RtpServer) {
+      view = new org.openbot.customview.AutoFitSurfaceView(context);
     }
     if (view != null) {
       addVideoView(view, context);
@@ -90,6 +113,8 @@ private void addVideoView(View videoView, Context context) {
       videoServer.setView((WebRTCSurfaceView) videoView);
     } else if (videoView instanceof AutoFitSurfaceGlView) {
videoServer.setView((AutoFitSurfaceGlView) videoView); + } else if (videoView instanceof SurfaceView) { + videoServer.setView((SurfaceView) videoView); } } diff --git a/android/app/src/main/java/org/openbot/env/RtpClientCamera2.java b/android/app/src/main/java/org/openbot/env/RtpClientCamera2.java new file mode 100644 index 000000000..225370d0c --- /dev/null +++ b/android/app/src/main/java/org/openbot/env/RtpClientCamera2.java @@ -0,0 +1,421 @@ +package org.openbot.env; + +import android.annotation.SuppressLint; +import android.content.Context; +import android.content.SharedPreferences; +import android.content.res.Configuration; +import android.graphics.ImageFormat; +import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CaptureFailure; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.hardware.camera2.params.StreamConfigurationMap; +import android.media.ImageReader; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.os.Build; +import android.os.Handler; +import android.os.HandlerThread; +import android.util.Base64; +import android.util.Log; +import android.util.Range; +import android.util.Size; +import android.view.Surface; +import android.view.SurfaceHolder; +import androidx.annotation.RequiresApi; +import androidx.preference.PreferenceManager; +import java.io.IOException; +import java.net.InetAddress; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; +import net.majorkernelpanic.streaming.hw.EncoderDebugger; +import 
net.majorkernelpanic.streaming.mp4.MP4Config; +import net.majorkernelpanic.streaming.rtp.H264Packetizer; +import net.majorkernelpanic.streaming.rtp.MediaCodecInputStream; +import org.openbot.customview.AutoFitSurfaceView; +import org.openbot.utils.CameraUtils; + +@RequiresApi(Build.VERSION_CODES.R) +public class RtpClientCamera2 { + private static final String TAG = "camera2"; + private static final Logger LOGGER = new Logger(); + private AutoFitSurfaceView mSurfaceView; + private SurfaceHolder mSurfaceHolder; + private Surface mediaCodecSurface; + + // Preview Resolution + private int previewWidth = 1280; + private int previewHeight = 960; + private int inferenceInputWidth = 640; + private int inferenceInputHeight = 360; + private static final int MINIMUM_PREVIEW_SIZE = 320; + + // Camera2 setup + private final Semaphore mCameraOpenCloseLock = new Semaphore(1); + private String mCameraId; + private int mCameraFacing = CameraCharacteristics.LENS_FACING_BACK; + private CameraManager mCameraManager; + private CameraDevice mCameraDevice; + private CameraCaptureSession mCaptureSession; + private CaptureRequest.Builder mPreviewRequestBuilder; + /** An additional thread for running tasks that shouldn't block the UI. */ + private HandlerThread backgroundThread; + /** A {@link Handler} for running tasks in the background. 
*/ + private Handler backgroundHandler; + + private ImageReader inferenceImageReader; + private final ImageReader.OnImageAvailableListener imageListener; + + private final int mOrientation = Configuration.ORIENTATION_LANDSCAPE; + private Range range; + private MediaCodec mediaCodec; + private H264Packetizer mPacketizer; + private byte mChannelIdentifier = 0; + private int mTTL = 64; + private InetAddress IP = null; + + private int PORT = 8046; + private final Context context; + private SharedPreferences settings = null; + private boolean streaming = false; + + public RtpClientCamera2( + AutoFitSurfaceView surfaceView, ImageReader.OnImageAvailableListener imageListener) { + Log.d(TAG, "RtpClientCamera2 constructed"); + mSurfaceView = surfaceView; + this.imageListener = imageListener; + context = surfaceView.getContext(); + assert context != null; + prepareCamera(); // initialize camera manager and choose back camera as default + createBackgroundThread(); + settings = PreferenceManager.getDefaultSharedPreferences(context); + createBackgroundThread(); + } + + public void setSurfaceView(AutoFitSurfaceView surfaceView) { + mSurfaceView = surfaceView; + mSurfaceHolder = mSurfaceView.getHolder(); + } + + private void createBackgroundThread() { + if (backgroundThread == null) { + backgroundThread = new HandlerThread("cameraBackground"); + backgroundThread.start(); + backgroundHandler = new Handler(backgroundThread.getLooper()); + } + } + + private void destroyBackgroundThread() { + backgroundThread.quitSafely(); + try { + backgroundThread.join(); + backgroundThread = null; + backgroundHandler = null; + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + + public void chooseCamera(int selectedFacing) { + Log.d(TAG, "chooseCamera"); + try { + for (final String cameraId : mCameraManager.getCameraIdList()) { + final CameraCharacteristics characteristics = + mCameraManager.getCameraCharacteristics(cameraId); + + LOGGER.i( + "CAMERA ID: " + + cameraId + + " 
FACING: " + + characteristics.get(CameraCharacteristics.LENS_FACING)); + // We don't use a front facing camera in this sample. + final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING); + if (facing != null && facing != selectedFacing) { + continue; + } + + final StreamConfigurationMap map = + characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + + if (map == null) { + continue; + } + mCameraId = cameraId; + return; + } + } catch (CameraAccessException e) { + e.printStackTrace(); + } + } + + public void prepareCamera() { + Log.d(TAG, "prepareCamera"); + if (mCameraManager == null) { + mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE); + } + chooseCamera(mCameraFacing); + } + + public void setResolution(int width, int height) { + Log.d(TAG, "setResolution"); + previewWidth = width; + previewHeight = height; + } + + public void setInferenceInputSize(int width, int height) { + Log.d(TAG, "setInferenceInputSize"); + inferenceInputWidth = width; + inferenceInputHeight = height; + } + + public void setDestination(InetAddress inetAddress, int port) { + Log.d(TAG, "setDestination"); + IP = inetAddress; + PORT = port; + } + + public boolean isStreaming() { + return streaming; + } + + private void setUpCameraOutputs(int width, int height) { + Log.d(TAG, "setUpCameraOutputs"); + try { + CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(mCameraId); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { + range = characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE); + } + StreamConfigurationMap map = + characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + if (map == null) { + return; + } + int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); + Size mPreviewSize = + CameraUtils.chooseOptimalSize( + map.getOutputSizes(SurfaceTexture.class), + new Size(previewWidth, previewHeight), + new 
Size(MINIMUM_PREVIEW_SIZE, MINIMUM_PREVIEW_SIZE)); + if (sensorOrientation == mOrientation) { + mSurfaceView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight()); + } else { + mSurfaceView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth()); + } + } catch (CameraAccessException | NullPointerException e) { + e.printStackTrace(); + } + } + + public void closeCamera() { + Log.d(TAG, "closeCamera"); + try { + mCameraOpenCloseLock.acquire(); + if (null != mCaptureSession) { + mCaptureSession.abortCaptures(); + mCaptureSession.close(); + mCaptureSession = null; + } + if (null != mCameraDevice) { + mCameraDevice.close(); + mCameraDevice = null; + } + } catch (InterruptedException e) { + throw new RuntimeException("Interrupted while trying to lock camera closing.", e); + } catch (CameraAccessException e) { + e.printStackTrace(); + } finally { + mCameraOpenCloseLock.release(); + } + destroyBackgroundThread(); + } + + @SuppressLint("MissingPermission") + public void openCamera(int width, int height) { + Log.d(TAG, "OpenCamera"); + setUpCameraOutputs(width, height); + try { + if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { + throw new RuntimeException("Time out waiting to lock camera opening."); + } + Log.d(TAG, "[openCamera]"); + Log.d(TAG, "[openCamera] cameraId:" + mCameraId); + mCameraManager.openCamera(mCameraId, mStateCallback, backgroundHandler); + Log.d(TAG, "OpenCamera successfully"); + } catch (CameraAccessException e) { + e.printStackTrace(); + } catch (InterruptedException e) { + throw new RuntimeException("Interrupted while trying to lock camera opening.", e); + } + } + + public void startPreview() { + Log.d(TAG, "[startPreview]"); + try { + setUpCaptureRequestBuilder(mPreviewRequestBuilder); + mCaptureSession.setRepeatingRequest( + mPreviewRequestBuilder.build(), mSessionCaptureCallback, backgroundHandler); + } catch (CameraAccessException e) { + e.printStackTrace(); + } + } + + private void 
setUpCaptureRequestBuilder(CaptureRequest.Builder builder) { + Log.d(TAG, "setUpCaptureRequestBuilder"); + builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); + builder.set( + CaptureRequest.CONTROL_ZOOM_RATIO, + range.getLower()); // zoom out to get the largest field of view + } + + public void start() { + Log.d(TAG, "started"); + openCamera(mSurfaceView.getWidth(), mSurfaceView.getHeight()); + } + + public void startStream() { + Log.d(TAG, "startStream"); + mPacketizer.setInputStream(new MediaCodecInputStream(mediaCodec)); + mPacketizer.start(); + streaming = true; + } + + public void stopStream() { + Log.d(TAG, "stopStream"); + if (streaming) { + streaming = false; + } + mPacketizer.stop(); + if (mediaCodec != null) { + mediaCodec.stop(); + mediaCodec.release(); + mediaCodec = null; + } + } + + public void configureEncoder() throws IOException { + Log.d(TAG, "configureEncoder"); + EncoderDebugger debugger = EncoderDebugger.debug(settings, previewWidth, previewHeight); + mPacketizer = new H264Packetizer(); + mPacketizer.setDestination(IP, PORT, 19401); + mPacketizer.getRtpSocket().setOutputStream(null, mChannelIdentifier); + mPacketizer.setTimeToLive(mTTL); + MP4Config mConfig = new MP4Config(debugger.getB64SPS(), debugger.getB64PPS()); + byte[] pps = Base64.decode(mConfig.getB64PPS(), Base64.NO_WRAP); + byte[] sps = Base64.decode(mConfig.getB64SPS(), Base64.NO_WRAP); + mPacketizer.setStreamParameters(pps, sps); + + if (IP == null) + throw new IllegalStateException("No destination ip address set for the stream !"); + + if (PORT <= 0) throw new IllegalStateException("No destination ports set for the stream !"); + Log.d("Stream", "Started"); + + mediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName()); + MediaFormat mediaFormat = + MediaFormat.createVideoFormat("video/avc", previewWidth, previewHeight); + mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2000000); + 
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30); + mediaFormat.setInteger( + MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); + mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mediaCodecSurface = mediaCodec.createInputSurface(); + mediaCodec.start(); + } + + private final CameraDevice.StateCallback mStateCallback = + new CameraDevice.StateCallback() { + + @Override + public void onOpened(CameraDevice cameraDevice) { + Log.d(TAG, "[open camera onOpened]"); + mCameraOpenCloseLock.release(); + mCameraDevice = cameraDevice; + try { + try { + configureEncoder(); + } catch (IOException e) { + e.printStackTrace(); + } + mPreviewRequestBuilder = + mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); + mPreviewRequestBuilder.addTarget(mediaCodecSurface); + + inferenceImageReader = + ImageReader.newInstance( + inferenceInputWidth, inferenceInputHeight, ImageFormat.YUV_420_888, 1); + inferenceImageReader.setOnImageAvailableListener(imageListener, backgroundHandler); + mPreviewRequestBuilder.addTarget(inferenceImageReader.getSurface()); + List mSurfaces = + Arrays.asList(mediaCodecSurface, inferenceImageReader.getSurface()); + cameraDevice.createCaptureSession(mSurfaces, mSessionStateCallback, backgroundHandler); + } catch (CameraAccessException e) { + e.printStackTrace(); + } + } + + @Override + public void onDisconnected(CameraDevice cameraDevice) { + Log.d(TAG, "[open camera onDisconnected]"); + mCameraOpenCloseLock.release(); + } + + @Override + public void onError(CameraDevice cameraDevice, int error) { + Log.d(TAG, "[open camera onError]"); + mCameraOpenCloseLock.release(); + } + }; + + private final CameraCaptureSession.StateCallback mSessionStateCallback = + new CameraCaptureSession.StateCallback() { + @Override + public void onConfigured(CameraCaptureSession session) { + Log.d(TAG, "[captureSession onConfigured]"); 
+ mCaptureSession = session; + startPreview(); + } + + @Override + public void onConfigureFailed(CameraCaptureSession session) {} + }; + + private final CameraCaptureSession.CaptureCallback mSessionCaptureCallback = + new CameraCaptureSession.CaptureCallback() { + @Override + public void onCaptureStarted( + CameraCaptureSession session, + CaptureRequest request, + long timestamp, + long frameNumber) { + super.onCaptureStarted(session, request, timestamp, frameNumber); + } + + @Override + public void onCaptureProgressed( + CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult) { + super.onCaptureProgressed(session, request, partialResult); + } + + @Override + public void onCaptureCompleted( + CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) { + super.onCaptureCompleted(session, request, result); + } + + @Override + public void onCaptureFailed( + CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) { + super.onCaptureFailed(session, request, failure); + } + }; +} diff --git a/android/app/src/main/java/org/openbot/env/RtpServer.java b/android/app/src/main/java/org/openbot/env/RtpServer.java new file mode 100644 index 000000000..b08838f76 --- /dev/null +++ b/android/app/src/main/java/org/openbot/env/RtpServer.java @@ -0,0 +1,202 @@ +package org.openbot.env; + +import android.content.Context; +import android.content.pm.PackageManager; +import android.media.ImageReader; +import android.os.Build; +import android.util.Log; +import android.util.Size; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.TextureView; +import android.view.View; +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; +import androidx.core.content.ContextCompat; +import com.pedro.rtplibrary.view.OpenGlView; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.concurrent.Executors; +import 
java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import org.openbot.customview.AutoFitSurfaceView; +import org.openbot.utils.AndGate; +import org.openbot.utils.ConnectionUtils; +import org.webrtc.SurfaceViewRenderer; +import timber.log.Timber; + +@RequiresApi(Build.VERSION_CODES.R) +public class RtpServer implements IVideoServer, SurfaceHolder.Callback { + + private static final String TAG = "RtpServer"; + private RtpClientCamera2 rtpClientCamera2; + private View view; + private ImageReader.OnImageAvailableListener imageListener; + private AndGate andGate; + private Context context; + private Size resolution = new Size(1280, 960); + private Size inputSizeForInference = + new Size(1280, 720); // This has to be the same as the DESIRED_PREVIEW_SIZE in DefaultActivity + private InetAddress IP; + private int PORT; + + public RtpServer() {} + + @Override + public void init(Context context) { + Log.d(TAG, "initialized"); + this.context = context; + try { + imageListener = + (ImageReader.OnImageAvailableListener) + context; // image listener for camera2 in DefaultActivity + } catch (Exception e) { + imageListener = imageReader -> {}; // dummy imageListener + } + // imageListener = (ImageReader.OnImageAvailableListener) context; + /* + AndGate will run 'startServer()' if all its input conditions are met. + This is useful if we do not know the order of the updates to the conditions. 
+ */ + AndGate.Action startAction = () -> startServer(); + AndGate.Action stopAction = () -> stopServer(); + andGate = new AndGate(startAction, stopAction); + andGate.addCondition("connected"); + andGate.addCondition("server address set"); + andGate.addCondition("surfaceCreated"); + andGate.addCondition("view set"); + andGate.addCondition("camera permission"); + andGate.addCondition("resolution set"); + andGate.addCondition("can start"); + + int camera = ContextCompat.checkSelfPermission(context, android.Manifest.permission.CAMERA); + andGate.set("camera permission", camera == PackageManager.PERMISSION_GRANTED); + } + + private void stopServer() { + // stop streaming and close camera + try { + if (rtpClientCamera2 != null) { + if (rtpClientCamera2.isStreaming()) { + rtpClientCamera2.stopStream(); + } + rtpClientCamera2.closeCamera(); + rtpClientCamera2 = null; + } + } catch (Exception e) { + Log.d(TAG, "Got error stopping server: " + e); + } + } + + private void startServer() { + // create camera and start streaming here + Log.d(TAG, "startServer"); + if (rtpClientCamera2 == null) { + Timber.d("Resolution %dx%d", resolution.getWidth(), resolution.getHeight()); + rtpClientCamera2 = new RtpClientCamera2((AutoFitSurfaceView) view, imageListener); + rtpClientCamera2.setResolution(resolution.getWidth(), resolution.getHeight()); + rtpClientCamera2.setInferenceInputSize( + inputSizeForInference.getWidth(), inputSizeForInference.getHeight()); + rtpClientCamera2.setDestination(IP, PORT); + rtpClientCamera2.start(); + } + + // Wait for the rtpClientCamera2 to finish configuration + ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); + Runnable action = + () -> { + rtpClientCamera2.startStream(); + startClient(); + }; + executorService.schedule(action, 500, TimeUnit.MILLISECONDS); + } + + public void setServerAddress(String ip, String port) { + Log.d(TAG, "setServerAddress: " + ip + " : " + port); + try { + this.IP = 
InetAddress.getByName(ip); + this.PORT = Integer.parseInt(port); + andGate.set("server address set", true); + } catch (UnknownHostException e) { + andGate.set("server address set", false); + e.printStackTrace(); + } + } + + @Override + public void setResolution(int w, int h) { + resolution = new Size(w, h); + andGate.set("resolution set", true); + } + + public void setInputSizeForInference(int w, int h) { + inputSizeForInference = new Size(w, h); + } + + @Override + public void setConnected(boolean connected) { + int camera = ContextCompat.checkSelfPermission(context, android.Manifest.permission.CAMERA); + andGate.set("camera permission", camera == PackageManager.PERMISSION_GRANTED); + + andGate.set("connected", connected); + } + + @Override + public boolean isRunning() { + return rtpClientCamera2 != null && rtpClientCamera2.isStreaming(); + } + + @Override + public void startClient() { + BotToControllerEventBus.emitEvent(ConnectionUtils.createStatus("VIDEO_PROTOCOL", "RTP")); + BotToControllerEventBus.emitEvent(ConnectionUtils.createStatus("VIDEO_COMMAND", "START")); + } + + @Override + public void sendServerUrl() {} + + @Override + public void sendVideoStoppedStatus() { + BotToControllerEventBus.emitEvent(ConnectionUtils.createStatus("VIDEO_COMMAND", "STOP")); + } + + @Override + public void setView(SurfaceView view) { + this.view = view; + ((AutoFitSurfaceView) this.view).getHolder().addCallback(this); + andGate.set("view set", true); + } + + @Override + public void setView(TextureView view) {} + + @Override + public void setView(SurfaceViewRenderer view) {} + + @Override + public void setView(OpenGlView view) {} + + @Override + public void setCanStart(boolean canStart) { + andGate.set("can start", canStart); + } + + @Override + public void surfaceCreated(@NonNull SurfaceHolder holder) { + Log.d(TAG, "Surface created..."); + andGate.set("surfaceCreated", true); + } + + @Override + public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, 
int height) { + Log.d(TAG, "Surface changed..."); + } + + @Override + public void surfaceDestroyed(@NonNull SurfaceHolder holder) { + andGate.set("surfaceCreated", false); + sendVideoStoppedStatus(); + // TODO: stop camera2 streaming + andGate.set("surfaceCreated", false); + } +} diff --git a/android/app/src/main/java/org/openbot/env/RtspServer.java b/android/app/src/main/java/org/openbot/env/RtspServer.java index 0098a1209..b94047951 100644 --- a/android/app/src/main/java/org/openbot/env/RtspServer.java +++ b/android/app/src/main/java/org/openbot/env/RtspServer.java @@ -124,6 +124,9 @@ public void setCanStart(boolean canStart) { andGate.set("can start", canStart); } + @Override + public void setServerAddress(String ip, String port) {} + @Override public void setConnected(boolean connected) { int camera = ContextCompat.checkSelfPermission(context, android.Manifest.permission.CAMERA); diff --git a/android/app/src/main/java/org/openbot/env/WebRtcServer.java b/android/app/src/main/java/org/openbot/env/WebRtcServer.java index fd49e2d57..6b344ec33 100644 --- a/android/app/src/main/java/org/openbot/env/WebRtcServer.java +++ b/android/app/src/main/java/org/openbot/env/WebRtcServer.java @@ -117,6 +117,9 @@ public void setCanStart(boolean canStart) { andGate.set("can start", canStart); } + @Override + public void setServerAddress(String ip, String port) {} + @Override public void startClient() { BotToControllerEventBus.emitEvent(ConnectionUtils.createStatus("VIDEO_PROTOCOL", "WEBRTC")); diff --git a/android/app/src/main/java/org/openbot/original/CameraActivity.java b/android/app/src/main/java/org/openbot/original/CameraActivity.java index e35551b83..b862ed65a 100755 --- a/android/app/src/main/java/org/openbot/original/CameraActivity.java +++ b/android/app/src/main/java/org/openbot/original/CameraActivity.java @@ -1323,6 +1323,14 @@ private void handleControllerEvents() { controllerHandler.handleIndicatorStop(); break; + case "SPEED_UP": + controllerHandler.handleSpeedUp(); 
+ break; + + case "SPEED_DOWN": + controllerHandler.handleSpeedDown(); + break; + + case "NETWORK": controllerHandler.handleNetwork(); break; diff --git a/android/app/src/main/res/values/strings.xml b/android/app/src/main/res/values/strings.xml index 6d7877646..5ba044057 100755 --- a/android/app/src/main/res/values/strings.xml +++ b/android/app/src/main/res/values/strings.xml @@ -142,6 +142,7 @@ WebRTC RTSP + RTP diff --git a/android/app/src/main/res/xml/root_preferences.xml b/android/app/src/main/res/xml/root_preferences.xml index f4e1e168d..e0c788290 100644 --- a/android/app/src/main/res/xml/root_preferences.xml +++ b/android/app/src/main/res/xml/root_preferences.xml @@ -1,4 +1,5 @@ - + @@ -38,12 +39,33 @@ + + + diff --git a/controller/python/README.md b/controller/python/README.md index cfacd03a1..3775a260d 100644 --- a/controller/python/README.md +++ b/controller/python/README.md @@ -86,3 +86,56 @@ Here is the usage: SPACE: Toggle logging ESC: Quit ``` + +# Python Controller with Remote RTP Video Stream and TCP/IP control +This python program allows you to control the robot from a [joystick controller](joystick_controller.py) (you can also make this work with the keyboard controller), and receive [video stream](camera.py) from the camera via both mobile network connection and wifi connection. This program also allows you to toggle data logging and autopilot on the robot's phone remotely. The program can run on computers that are not necessarily in the same network as the robot's phone. It was developed and tested on Arduino Uno and a Linux computer. + +## Dependencies +In addition to the dependencies above, you need to first build OpenCV with GStreamer support, which will be used to decode RTP packets for video stream. Then you need to pip install the wheel file generated by the opencv build process. + +### GStreamer + +You can find instructions for installing GStreamer for your OS [here](https://gstreamer.freedesktop.org/documentation/installing/index.html).
+ +If you are using Mac, you can also install GStreamer using homebrew: +``` +brew install gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-plugins-ugly gst-libav +``` + +### OpenCV + +You can compile and install OpenCV with GStreamer support by executing the `install_opencv.sh` script from your terminal. If you are using Windows you can probably use [WSL](https://ubuntu.com/tutorials/install-ubuntu-on-wsl2-on-windows-10) (not tested). + +``` +sh install_opencv.sh +``` + +## Network Connection +1. Go to `Settings` and select `RTP` as the `Stream Mode`. +2. Then type in your IP and port numbers for control and video stream. (Remote control uses TCP/IP, and video stream uses RTP) +3. Restart the app to load the new settings + +**If your Android device uses mobile network, and connects to the PC via Internet, then you need to do port forwarding on your router for both ports of remote control and remote video stream**. **!!!This must be done before you connect the Android device with the PC!!!** + +## Using this remote controller + +First connect your joystick with the computer. + +Then, run the controller +``` +python joystick_controller.py +``` + +Once the robot app successfully connects to the Python script, then run the video stream: +``` +python camera.py +``` + +Controller mapping +``` +▲: autodrive +■: data logging +●: turn off the program +L3: speed down (press the left joystick) +R3: speed up (press the right joystick) +``` \ No newline at end of file diff --git a/controller/python/camera.py b/controller/python/camera.py new file mode 100644 index 000000000..b15603f22 --- /dev/null +++ b/controller/python/camera.py @@ -0,0 +1,45 @@ +import cv2 + + +def setup_camera(): + PORT = 8046 + return cv2.VideoCapture( + f"udpsrc port={PORT} ! application/x-rtp, media=video, encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! queue ! 
appsink", + cv2.CAP_GSTREAMER, + ) + + +def video_play(): + camera = setup_camera() + if not camera.isOpened(): + print("Failed to open camera") + exit + # Get some basic debug facts for the camera to check initialization + w = camera.get(cv2.CAP_PROP_FRAME_WIDTH) + h = camera.get(cv2.CAP_PROP_FRAME_HEIGHT) + fps = camera.get(cv2.CAP_PROP_FPS) + + # Show vision of 16x9 aspect ratio images stored on the phone + start_point = (0, 120) + end_point = (1280, 840) + color = (0, 255, 0) + thickness = 2 + # resize image window to make it larger, while keeping the same aspect ratio + cv2.namedWindow("OpenBot Camera stream", cv2.WINDOW_NORMAL) + cv2.resizeWindow("OpenBot Camera stream", 1536, 1152) + print("Src opened, %dx%d @ %d fps" % (w, h, fps)) + try: + while True: + ret, img = camera.read() + if ret: + img = cv2.rectangle(img, start_point, end_point, color, thickness) + cv2.imshow("OpenBot Camera stream", img) + cv2.waitKey(1) + finally: + camera.release() + print("camera release") + cv2.destroyAllWindows() + + +if __name__ == "__main__": + video_play() diff --git a/controller/python/common.py b/controller/python/common.py index 149023fe2..0491b14bc 100644 --- a/controller/python/common.py +++ b/controller/python/common.py @@ -13,11 +13,12 @@ def get_ip(): class ServerSocket: MSGLEN = 512 - def __init__(self, sock=None): + def __init__(self, sock=None, PORT=19400): + self.PORT = PORT if sock is None: self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - self.sock.bind(("0.0.0.0", 19400)) + self.sock.bind(("0.0.0.0", self.PORT)) self.sock.listen() else: diff --git a/controller/python/install_opencv.sh b/controller/python/install_opencv.sh new file mode 100644 index 000000000..3693624b1 --- /dev/null +++ b/controller/python/install_opencv.sh @@ -0,0 +1,16 @@ +#!/bin/bash +# Reference: https://discuss.bluerobotics.com/t/opencv-python-with-gstreamer-backend/8842 +BASEDIR="$( cd -- "$(dirname "$0")" 
>/dev/null 2>&1 ; pwd -P )" +echo "Script location: ${BASEDIR}" +cd $BASEDIR/../../.. +echo "Cloning opencv-python at $PWD" +git clone --recursive https://github.com/skvark/opencv-python.git +echo "Building and installing opencv_python with GStreamer support..." +echo "This can take from 5 mins to > 2 hrs depending on your computer hardware." +cd opencv-python +export CMAKE_ARGS="-DWITH_GSTREAMER=ON" +pip install --upgrade pip wheel +pip wheel . --verbose +pip install opencv_python*.whl +# note, wheel may be generated in dist/ directory, so check there if the install fails +cd $BASEDIR \ No newline at end of file diff --git a/controller/python/joystick_controller.py b/controller/python/joystick_controller.py new file mode 100644 index 000000000..5a8190f2c --- /dev/null +++ b/controller/python/joystick_controller.py @@ -0,0 +1,185 @@ +from approxeng.input.selectbinder import ControllerResource +import sys +import threading +import json +import time +import traitlets +from common import * +from racer import OpenBotRacer +import numpy as np + +s_socket = ServerSocket(PORT=8040) # port for TCP/IP control data + +# Create publishers with the ServerSocket +racer = OpenBotRacer(s_socket) + + +class CommandHandler: + def __init__(self): + # Variable initialization + self.gear = 1 + self.autodrive = False + self.running = True + self.record = False + self.throttle = traitlets.Float() + self.steering = traitlets.Float() + + def send_command(self, command): + s_socket.send("{{command: {command} }}\n".format(command=command)) + + def reset(self): + self.send_drive_command(self.left.reset(), self.right.reset()) + + def set_steering(self, value, racer): + self.steering = value + racer.steering = self.steering + + def set_throttle(self, value, racer): + self.throttle = value + racer.throttle = self.throttle + + def stop_service(self): + if self.autodrive: + self.autodrive = not self.autodrive + self.send_command("NETWORK") + print("AUTODRIVE:" + str(self.autodrive)) + if 
self.record: + self.record = not self.record + self.send_command("LOGS") + print("RECORD = " + str(self.record)) + + def handle_keys(self): + while self.running: + try: + with ControllerResource() as joystick: + # t2 = int(time.time()*10**9) + print("Found a joystick 🎮 and connected") + while joystick.connected and self.running: + t1 = int(time.time() * 10**9) + # Get a corrected value for the left stick y-axis + # set the value to throttle + left_y = np.round(joystick.l[1], 2) + self.set_throttle(left_y, racer) + + # Get a corrected value for the right stick x-axis + # set the value to steering + right_x = np.round(joystick.r[0], 2) + self.set_steering(right_x, racer) + + joystick.check_presses() + # Print out any buttons that were pressed, if we had any + if joystick.has_presses: + # If home was pressed, raise a RobotStopException to bail out of the loop + # Home is generally the PS button for playstation controllers, XBox for XBox etc + if "triangle" in joystick.presses: + self.autodrive = not self.autodrive + # video_player.set_autodrive(self.autodrive) + self.send_command("NETWORK") + print("AUTODRIVE:" + str(self.autodrive)) + if "square" in joystick.presses: + self.record = not self.record + self.send_command("LOGS") + print("RECORD = " + str(self.record)) + if "ls" in joystick.presses: + self.send_command("SPEED_DOWN") + elif "rs" in joystick.presses: + self.send_command("SPEED_UP") + if "circle" in joystick.presses: + self.running = False + self.stop_service() + time.sleep(0.1) + print("Shutting Down\n") + break + except IOError: + # No joystick found, wait for a bit before trying again + self.stop_service() + print("Unable to find any joysticks") + time.sleep(10.0) + + +(zc, info) = register("OPEN_BOT_CONTROLLER", 8040) + + +def run_receiver(): + while True: + try: + data = s_socket.receive() + print(f"Received: {data}\r") + if data in ["", None]: + break + handle_status(data) + except Exception as e: + print(f"run_receiver: Got exception: {e}\r") + break 
+ + +def handle_status(data): + json_object = json.dumps(data, indent=4) + # Writing to sample.json + with open("sample.json", "w") as outfile: + outfile.write(json_object) + + parsed_data = json.loads(data) + if not "status" in parsed_data: + return + + status = parsed_data["status"] + + try: + if "VIDEO_COMMAND" in status: + if status["VIDEO_COMMAND"] == "START": + print(f"Starting video...") + if status["VIDEO_COMMAND"] == "STOP": + print(f"Stopping video...") + + if "VIDEO_PROTOCOL" in status: + if ( + status["VIDEO_PROTOCOL"] == "WEBRTC" + or status["VIDEO_PROTOCOL"] == "RTSP" + ): + print( + "WebRTC/RTSP video not supported here. Please set your Android app to use RTP." + ) + + except Exception as e: + print(f"handle_status exception: {e}") + + +def print_usage(): + usageStr = """ + Make sure to keep the terminal window in focus!\r + + Use the following keys to drive the robot:\r + + \t▲: autodrive\r + \t■: data logging\r + \t●: turn off the program\r + \tL3: speed down (press the left joystick)\r + \tR3: speed up (press the right joystick)\r + """ + print(usageStr) + + +def run(): + print_usage() + + print("Waiting for connection...\r\n") + s_socket.accept() + print("Connected! 
😃\n\r") + + t = threading.Thread(target=run_receiver, daemon=True) + t.start() + + cmd_handler = CommandHandler() + cmd_handler.handle_keys() + + s_socket.close() + zc.unregister_service(info) + zc.close() + print("Exiting...\r\n") + sys.exit() + + +if __name__ == "__main__": + # cli + run() diff --git a/controller/python/racer.py b/controller/python/racer.py new file mode 100644 index 000000000..bf61afd7c --- /dev/null +++ b/controller/python/racer.py @@ -0,0 +1,111 @@ +# ========================================== +# Name : racer.py +# Desc : Abstraction of the physical car +# setting the traits of the car corresponds +# to setting them on the physical car +# ========================================== +import traitlets + + +class Racecar(traitlets.HasTraits): + """Racecar class -> abstracted class for validating steering and throttle""" + + steering = traitlets.Float() + throttle = traitlets.Float() + # thr_scale = traitlets.Float() + + @traitlets.observe("steering") + def _clip_steering(self, proposal): + """clips the steering if it exceeds the range [-1.0,1.0] + Parameters: + ---------- + proposal : traitlets + Proposed value for the steering + Returns: + -------- + traitlets + Validated value for steering + """ + if proposal["new"] > 1.0: + return 1.0 + elif proposal["new"] < -1.0: + return -1.0 + else: + return proposal["new"] + + @traitlets.observe("throttle") + def _clip_throttle(self, proposal): + """clips the throttle if it exceeds the range [-1.0,1.0] + Parameters: + ---------- + proposal : traitlets + Proposed value for the throttle + Returns: + -------- + traitlets + Validated value for throttle + """ + if proposal["new"] > 1.0: + return 1.0 + elif proposal["new"] < -1.0: + return -1.0 + else: + return proposal["new"] + + # @traitlets.validate('thr_scale') + # def _clip_throttle(self, proposal): + # """clips the thr_scale if it exceeds the range [0.0,3.0] + # Parameters: + # ---------- + # proposal : traitlets + # Proposed value for the thr_scale + # 
Returns: + # -------- + # traitlets + # Validated value for thr_scale + # """ + # if proposal['value'] > 3.0: + # return 3.0 + # elif proposal['value'] < -0.1: + # return 0.0 + # else: + # return proposal['value'] + + +class OpenBotRacer(Racecar): + """OpenBotRacer Class -> inherits from Racecar and implements the publishing for steering and throttle, + provides abstraction so every function that needs to control the car can use socket to publish control commands + Attributes: + ----------- + autodrive_agent : bool + Shows where publishing is coming from (joystick or autodrive) + """ + + CONTROL_CMD = ["switch_on", "record", "autodrive", "turn", "reverse"] + + def __init__(self, socket, autodrive_agent=False, *args, **kwargs): + """ + Default init class + Args: + autodrive_agent (bool): Indicate whether the agent is in autodrive mode + """ + super(OpenBotRacer, self).__init__(*args, **kwargs) + self.autodrive_agent = autodrive_agent + self.socket = socket + + def publish_control(self, left, right): + self.socket.send("{{driveCmd: {{l:{l}, r:{r} }} }}\n".format(l=left, r=right)) + + @traitlets.observe("steering") + def _on_steering(self, change): + l = (self.throttle + change["new"]) / 2 + r = (self.throttle - change["new"]) / 2 + self.publish_control(l, r) + print("Steering:", change["new"]) + + @traitlets.observe("throttle") + def _on_throttle(self, change): + l = (change["new"] + self.steering) / 2 + r = (change["new"] - self.steering) / 2 + self.publish_control(l, r) + print("Throttle:", change["new"]) diff --git a/controller/python/requirements.txt b/controller/python/requirements.txt index bb316c3c8..66ae371d4 100644 --- a/controller/python/requirements.txt +++ b/controller/python/requirements.txt @@ -3,3 +3,5 @@ opencv-python zeroconf click black +traitlets +approxeng.input \ No newline at end of file