haixuantao committed
Commit: 037f860
Parent(s): 3eab118

Adding S1 hack into the readme and improving navigation points

Files changed:
- README.md +37 -0
- graphs/dataflow_robot_vlm.yml +2 -0
- operators/constants.py +20 -0
- operators/planning_op.py +12 -13
- operators/policy.py +3 -10
README.md CHANGED
@@ -16,6 +16,43 @@ pip install -r requirements.txt
 pip install -e <PATH TO LATEST TRANSFOMERS VERSION>
 ```
 
+## Robomaster Jailbreak
+
+### Installation of the Robomaster S1 Hack
+
+This guide is an updated version of the original [Robomaster S1 SDK Hack Guide](https://www.bug-br.org.br/s1_sdk_hack.zip) and is intended for use on a Windows 11 system.
+
+#### Prerequisites
+
+Before you get started, you'll need the following:
+
+- Robomaster S1 (do not update it to the latest version, as it may block the hack).
+- [Robomaster App](https://www.dji.com/fr/robomaster-s1/downloads).
+- [Android SDK Platform-Tools](https://developer.android.com/tools/releases/platform-tools). Simply unzip it and keep the path handy.
+- A micro USB cable. If this guide doesn't work, there might be an issue with the cable, and you may need to replace it with one that supports data transfer.
+
+#### Instructions
+
+1. Start the Robomaster App and connect the Robomaster S1 using one of the two options provided (via router or via Wi-Fi).
+2. While connected, use a micro USB cable to connect the robot to the computer's USB port. You should hear a beep sound, similar to when you connect any device. (Please note that no other Android device should be connected via USB during this process.)
+3. In the Lab section of the app, create a new Python application and paste the following code:
+
+```python
+def root_me(module):
+    __import__ = rm_define.__dict__['__builtins__']['__import__']
+    return __import__(module, globals(), locals(), [], 0)
+
+builtins = root_me('builtins')
+subprocess = root_me('subprocess')
+proc = subprocess.Popen('/system/bin/adb_en.sh', shell=True, executable='/system/bin/sh', stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+```
+
+4. Run the code; there should be no errors, and the console should display **Execution Complete**.
+5. Without closing the app, navigate to the folder containing the Android SDK Platform-Tools and open a terminal inside it.
+6. Run the ADB command `.\adb.exe devices`. If everything is working correctly, you should see output similar to this: ![image](https://github.com/Felixhuangsiling/Dora-Robomaster/assets/77993249/dc6368ec-052c-4b18-8fdc-0ec314adb073)
+7. Execute the upload.sh script located in the folder `s1_SDK`.
+8. Once everything has been executed, restart the S1 by turning it off and then back on. While it's booting up, you should hear two chimes instead of the usual single chime, indicating that the hack has been successful.
+
 ## Robomaster Connection
 
 Make sure to be connected using the wifi hotspot of the robomaster which is the most stable one.
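For context on step 6 of the new instructions, here is a minimal Python sketch (not part of this commit) of the same device check. It assumes `adb.exe` from the unzipped Platform-Tools folder is reachable at the path you pass in, and it only parses the output of `adb devices` before you go on to run `upload.sh`:

```python
import subprocess

# Hypothetical helper, not in the repo: wraps step 6 of the instructions above.
ADB = r".\adb.exe"  # assumed path; run from (or point to) the Platform-Tools folder

def s1_is_visible() -> bool:
    # `adb devices` prints a header line followed by one line per attached device.
    result = subprocess.run([ADB, "devices"], capture_output=True, text=True, check=True)
    devices = [line for line in result.stdout.splitlines()[1:] if line.strip()]
    return any(line.endswith("device") for line in devices)

if __name__ == "__main__":
    print("S1 detected over ADB" if s1_is_visible() else "No device found, check the USB cable")
```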
graphs/dataflow_robot_vlm.yml CHANGED
@@ -52,6 +52,7 @@ nodes:
       init: llm/init
       reached_kitchen: planning/reached_kitchen
       reached_living_room: planning/reached_living_room
+      reached_office: planning/reached_office
     outputs:
       - go_to
       - reloaded
@@ -70,6 +71,7 @@ nodes:
       - control
       - reached_kitchen
       - reached_living_room
+      - reached_office
operators/constants.py ADDED
@@ -0,0 +1,20 @@
+import numpy as np
+
+# LOCATION = {
+#     "LIVING_ROOM": np.array([[0.0, 0.3], [-1.5, 0.7]]),
+#     "KITCHEN": np.array([[0.0, 0.5], [1.5, -1.0]]),
+# }
+
+LOCATION = {
+    "HOME": {
+        "OFFICE": np.array([[-1.3, 0.0], [-1.3, 0.5]]),
+        "KITCHEN": np.array([[-1.0, -0.5], [1.5, 0.0], [1.5, 0.5]]),
+        "LIVING_ROOM": np.array([[0.0, 0.3], [-1.5, 0.7]]),
+    },
+    "OFFICE": {
+        "KITCHEN": np.array([[-1.0, -0.5], [2.5, 0.0], [2.5, 0.5]]),
+    },
+    "KITCHEN": {
+        "OFFICE": np.array([[2.5, 0.0], [-1.5, -0.5], [-1.5, 0.5]]),
+    },
+}
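Not part of the commit, but a short sketch of how this table is meant to be read: the outer key is the robot's current location, the inner key is the goal, and the value is an ordered array of [x, y] waypoints (this mirrors the lookup added to planning_op.py below):

```python
from constants import LOCATION  # the table added in this commit

# Outer key: current room; inner key: goal; value: ordered [x, y] waypoints.
current_location = "HOME"
goal = "KITCHEN"
for x, y in LOCATION[current_location][goal]:
    print(f"waypoint: ({x:.1f}, {y:.1f})")
```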
operators/planning_op.py CHANGED
@@ -2,7 +2,7 @@ import time
 import numpy as np
 import pyarrow as pa
 from dora import DoraStatus
-from constants import
+from constants import LOCATION
 
 CAMERA_WIDTH = 960
 CAMERA_HEIGHT = 540
@@ -79,6 +79,8 @@ class Operator:
         self.count = 0
         self.completed = True
         self.image = None
+        self.goal = ""
+        self.current_location = "HOME"
 
     def on_event(
         self,
@@ -96,10 +98,12 @@ class Operator:
                 if value == self.count:
                     self.completed = True
             elif id == "set_goal":
-
+                self.goal = dora_event["value"][0].as_py()
+                print("got goal:", self.goal, flush=True)
 
                 if len(dora_event["value"]) > 0:
-                    self.
+                    if self.goal != "":
+                        self.waypoints = LOCATION[self.current_location][self.goal]
 
             elif id == "position":
                 print("got position:", dora_event["value"], flush=True)
@@ -124,15 +128,10 @@ class Operator:
                 and np.linalg.norm(self.waypoints[0] - np.array([x, y])) < 0.2
             ):
                 print("goal reached", flush=True)
-
-
-
-
-                send_output("reached_living_room", pa.array(self.image.ravel()))
-            else:
-                raise ValueError(
-                    "Could not find goal reached: ", goal, "pos:", self.position
-                )
+                self.current_location = self.goal
+                send_output(
+                    f"reached_{self.goal.lower()}", pa.array(self.image.ravel())
+                )
                 self.waypoints = None
                 return DoraStatus.CONTINUE
             elif (
@@ -172,7 +171,7 @@ class Operator:
                     [
                         {
                             "action": "gimbal",
-                            "value": [10.0, goal_angle],
+                            "value": [10.0, float(int(goal_angle))],
                             "count": self.count,
                         },
                         {
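This is also why the dataflow graph gains a `reached_office` input: instead of the hard-coded `reached_living_room`, the planner now derives the output id from the goal name, so every destination in `constants.LOCATION` needs a matching entry in `graphs/dataflow_robot_vlm.yml`. A small illustration, not part of the commit:

```python
# Illustration only: the dynamic output id emitted by planning_op.py once a
# goal is reached. Each id must be wired as an input in the dataflow graph.
for goal in ("KITCHEN", "LIVING_ROOM", "OFFICE"):
    print(f"reached_{goal.lower()}")  # reached_kitchen, reached_living_room, reached_office
```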
operators/policy.py CHANGED
@@ -1,28 +1,21 @@
-import numpy as np
 import pyarrow as pa
 from dora import DoraStatus
 from utils import ask_vlm, speak
 from time import sleep
 
-LOCATION = ["LIVING_ROOM", "KITCHEN"]
 
-
-## Policy Operator
 class Operator:
     def __init__(self):
+        self.location = ["KITCHEN", "LIVING_ROOM"]
         pass
 
-    def
-        text = ask_vlm(image, text)
+    def ask_model(self, image, text: str) -> str:
+        text = ask_vlm(image, text).lower()
         return text
 
     def speak(self, text: str):
         speak(text)
 
-    def check(self, image, text: str) -> bool:
-        text = ask_vlm(image, text)
-        return "Yes, " in text
-
     def on_event(self, event: dict, send_output) -> DoraStatus:
         if event["type"] == "INPUT":
             id = event["id"]
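Since `ask_model()` now lowercases the `ask_vlm` reply, an affirmative test downstream no longer needs the exact `"Yes, "` prefix the removed `check()` relied on. A hypothetical sketch, not in this commit, of how such a check could be rebuilt:

```python
# Hypothetical helper, not part of this commit: a yes/no gate on top of
# ask_model(), which already returns the VLM reply in lowercase.
def check(op, image, question: str) -> bool:
    answer = op.ask_model(image, question)  # e.g. "yes, the cup is on the table."
    return answer.startswith("yes")

# Usage sketch (op: policy Operator instance, frame: latest camera image):
#     if check(op, frame, "Is the person holding a coffee cup?"):
#         op.speak("I can see the coffee cup.")
```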