xfys committed on
Commit
47af768
1 Parent(s): c1dde4c

Upload 645 files

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +5 -0
  2. .github/ISSUE_TEMPLATE/bug.yml +65 -0
  3. .github/ISSUE_TEMPLATE/enhancement.yml +32 -0
  4. .github/ISSUE_TEMPLATE/question.yml +24 -0
  5. .github/workflows/ci-testing.yml +71 -0
  6. .github/workflows/stale.yml +20 -0
  7. .gitignore +11 -0
  8. .gitmodules +6 -0
  9. .idea/.gitignore +3 -0
  10. .idea/inspectionProfiles/Project_Default.xml +26 -0
  11. .idea/inspectionProfiles/profiles_settings.xml +6 -0
  12. .idea/misc.xml +4 -0
  13. .idea/modules.xml +8 -0
  14. .idea/vcs.xml +6 -0
  15. .idea/workspace.xml +181 -0
  16. .idea/yolov5_tracking-8.0.iml +12 -0
  17. LICENSE +674 -0
  18. __pycache__/track.cpython-38.pyc +0 -0
  19. demo.py +98 -0
  20. flagged/log.csv +2 -0
  21. reid_export.py +314 -0
  22. requirements.txt +0 -0
  23. test.py +17 -0
  24. test_image/FLIR.yaml +29 -0
  25. test_image/video-2SReBn5LtAkL5HMj2-frame-000317-HTgPBFgZyPdwQnNvE.jpg +0 -0
  26. test_image/video-2SReBn5LtAkL5HMj2-frame-005072-MA7NCLQGoqq9aHaiL.jpg +0 -0
  27. test_image/video-2rsjnZFyGQGeynfbv-frame-003708-6fPQbB7jtibwaYAE7.jpg +0 -0
  28. test_image/video-jNQtRj6NGycZDEXpe-frame-002515-J3YntG8ntvZheKK3P.jpg +0 -0
  29. test_image/video-kDDWXrnLSoSdHCZ7S-frame-003063-eaKjPvPskDPjenZ8S.jpg +0 -0
  30. test_image/video-r68Yr9RPWEp5fW2ZF-frame-000333-X6K5iopqbmjKEsSqN.jpg +0 -0
  31. track.py +308 -0
  32. trackers/__init__.py +0 -0
  33. trackers/__pycache__/__init__.cpython-38.pyc +0 -0
  34. trackers/__pycache__/__init__.cpython-39.pyc +0 -0
  35. trackers/__pycache__/multi_tracker_zoo.cpython-38.pyc +0 -0
  36. trackers/__pycache__/multi_tracker_zoo.cpython-39.pyc +0 -0
  37. trackers/bytetrack/__pycache__/basetrack.cpython-38.pyc +0 -0
  38. trackers/bytetrack/__pycache__/basetrack.cpython-39.pyc +0 -0
  39. trackers/bytetrack/__pycache__/byte_tracker.cpython-38.pyc +0 -0
  40. trackers/bytetrack/__pycache__/byte_tracker.cpython-39.pyc +0 -0
  41. trackers/bytetrack/__pycache__/kalman_filter.cpython-38.pyc +0 -0
  42. trackers/bytetrack/__pycache__/kalman_filter.cpython-39.pyc +0 -0
  43. trackers/bytetrack/__pycache__/matching.cpython-38.pyc +0 -0
  44. trackers/bytetrack/__pycache__/matching.cpython-39.pyc +0 -0
  45. trackers/bytetrack/basetrack.py +52 -0
  46. trackers/bytetrack/byte_tracker.py +353 -0
  47. trackers/bytetrack/kalman_filter.py +270 -0
  48. trackers/bytetrack/matching.py +180 -0
  49. trackers/multi_tracker_zoo.py +44 -0
  50. trackers/ocsort/__pycache__/association.cpython-38.pyc +0 -0
.gitattributes CHANGED
@@ -32,3 +32,8 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
32
  *.zip filter=lfs diff=lfs merge=lfs -text
33
  *.zst filter=lfs diff=lfs merge=lfs -text
34
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
32
  *.zip filter=lfs diff=lfs merge=lfs -text
33
  *.zst filter=lfs diff=lfs merge=lfs -text
34
  *tfevents* filter=lfs diff=lfs merge=lfs -text
35
+ video/4.mp4 filter=lfs diff=lfs merge=lfs -text
36
+ video/5.mp4 filter=lfs diff=lfs merge=lfs -text
37
+ video/9.mp4 filter=lfs diff=lfs merge=lfs -text
38
+ video/bicyclecity.mp4 filter=lfs diff=lfs merge=lfs -text
39
+ video/car.mp4 filter=lfs diff=lfs merge=lfs -text
.github/ISSUE_TEMPLATE/bug.yml ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Bug
2
+ # title: " "
3
+ description: Report a Yolov5_StrongSORT_OSNet bug
4
+ labels: [bug]
5
+ body:
6
+
7
+ - type: checkboxes
8
+ attributes:
9
+ label: Search before asking
10
+ description: >
11
+ Please search the [issues](https://github.com/mikel-brostrom/Yolov5_StrongSORT_OSNet/issues) and [discussions](https://github.com/mikel-brostrom/Yolov5_StrongSORT_OSNet/discussions) to see if a similar question already exists.
12
+ options:
13
+ - label: >
14
+ I have searched the Yolov5_StrongSORT_OSNet [issues](https://github.com/mikel-brostrom/Yolov5_StrongSORT_OSNet/issues) and [discussions](https://github.com/mikel-brostrom/Yolov5_StrongSORT_OSNet/discussions) and found no similar questions.
15
+ required: true
16
+
17
+ - type: dropdown
18
+ attributes:
19
+ label: Yolov5_StrongSORT_OSNet Component
20
+ description: |
21
+ Please select the part of Yolov5_StrongSORT_OSNet where you found the bug.
22
+ multiple: true
23
+ options:
24
+ - "Tracking"
25
+ - "Evaluation"
26
+ - "Integrations"
27
+ - "Other"
28
+ validations:
29
+ required: false
30
+
31
+ - type: textarea
32
+ attributes:
33
+ label: Bug
34
+ description: Provide console output with error messages and/or screenshots of the bug.
35
+ placeholder: |
36
+ 💡 ProTip! Include as much information as possible (screenshots, logs, tracebacks etc.) to receive the most helpful response.
37
+ validations:
38
+ required: true
39
+
40
+ - type: textarea
41
+ attributes:
42
+ label: Environment
43
+ description: Please specify the software and hardware you used to produce the bug.
44
+ placeholder: |
45
+ - YOLO: YOLOv5 🚀 v6.0-67-g60e42e1 torch 1.9.0+cu111 CUDA:0 (A100-SXM4-40GB, 40536MiB)
46
+ - StrongSORT: osnet_x0_25
47
+ - OS: Ubuntu 20.04
48
+ - Python: 3.9.0
49
+ - ...
50
+ validations:
51
+ required: false
52
+
53
+ - type: textarea
54
+ attributes:
55
+ label: Minimal Reproducible Example
56
+ description: >
57
+ When asking a question, people will be better able to provide help if you provide code that they can easily understand and use to **reproduce** the problem.
58
+ This is referred to by community members as creating a [minimal reproducible example](https://stackoverflow.com/help/minimal-reproducible-example).
59
+ placeholder: |
60
+ ```
61
+ # Code to reproduce your issue here
62
+ ```
63
+ validations:
64
+ required: false
65
+
.github/ISSUE_TEMPLATE/enhancement.yml ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Enhancement
2
+ description: Suggest a Yolov5_StrongSORT_OSNet enhancement
3
+ # title: " "
4
+ labels: [enhancement]
5
+ body:
6
+
7
+ - type: checkboxes
8
+ attributes:
9
+ label: Search before asking
10
+ description: >
11
+ Please search the [issues](https://github.com/mikel-brostrom/Yolov5_StrongSORT_OSNet/issues) to see if a similar enhancement request already exists.
12
+ options:
13
+ - label: >
14
+ I have searched the YOLOv5 [issues](https://github.com/mikel-brostrom/Yolov5_StrongSORT_OSNet/issues) and found no similar enhancement requests.
15
+ required: true
16
+
17
+ - type: textarea
18
+ attributes:
19
+ label: Description
20
+ description: A short description of your enhancement.
21
+ placeholder: |
22
+ What new enhancement would you like to see in Yolov5_StrongSORT_OSNet?
23
+ validations:
24
+ required: true
25
+
26
+ - type: textarea
27
+ attributes:
28
+ label: Use case
29
+ description: |
30
+ Describe the use case of your feature request. It will help us understand and prioritize the feature request.
31
+ placeholder: |
32
+ How would this feature be used, and who would use it?
.github/ISSUE_TEMPLATE/question.yml ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Question
2
+ description: Ask a Yolov5_StrongSORT_OSNet question
3
+ # title: " "
4
+ labels: [question]
5
+ body:
6
+
7
+ - type: checkboxes
8
+ attributes:
9
+ label: Search before asking
10
+ description: >
11
+ Please search the [issues](https://github.com/mikel-brostrom/Yolov5_StrongSORT_OSNet/issues) to see if a similar question already exists.
12
+ options:
13
+ - label: >
14
+ I have searched the Yolov5_StrongSORT_OSNet [issues](https://github.com/mikel-brostrom/Yolov5_StrongSORT_OSNet/issues) and found no similar bug report.
15
+ required: true
16
+
17
+ - type: textarea
18
+ attributes:
19
+ label: Question
20
+ description: What is your question?
21
+ placeholder: |
22
+ 💡 ProTip! Include as much information as possible (screenshots, logs, tracebacks etc.) to receive the most helpful response.
23
+ validations:
24
+ required: true
.github/workflows/ci-testing.yml ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: CI CPU testing
2
+
3
+ on: # https://help.github.com/en/actions/reference/events-that-trigger-workflows
4
+ push:
5
+ branches: [master, CIdebug]
6
+ pull_request:
7
+ # The branches below must be a subset of the branches above
8
+ branches: [master, CIdebug]
9
+
10
+ jobs:
11
+ cpu-tests:
12
+
13
+ runs-on: ${{ matrix.os }}
14
+ strategy:
15
+ fail-fast: false
16
+ matrix:
17
+ os: [ubuntu-latest, windows-latest] # Error: Container action is only supported on Linux
18
+ python-version: [3.9]
19
+ model: ['yolov5s'] # models to test
20
+
21
+ # Timeout: https://stackoverflow.com/a/59076067/4521646
22
+ timeout-minutes: 50
23
+ steps:
24
+ - name: Set up Repository
25
+ uses: actions/checkout@v2
26
+ with:
27
+ submodules: recursive
28
+ - name: Set up Python ${{ matrix.python-version }}
29
+ uses: actions/setup-python@v2
30
+ with:
31
+ python-version: ${{ matrix.python-version }}
32
+
33
+ # Note: This uses an internal pip API and may not always work
34
+ # https://github.com/actions/cache/blob/master/examples.md#multiple-oss-in-a-workflow
35
+ - name: Get pip cache
36
+ id: pip-cache
37
+ run: |
38
+ python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"
39
+ - name: Cache pip
40
+ uses: actions/cache@v1
41
+ with:
42
+ path: ${{ steps.pip-cache.outputs.dir }}
43
+ key: ${{ runner.os }}-${{ matrix.python-version }}-pip-${{ hashFiles('requirements.txt') }}
44
+ restore-keys: |
45
+ ${{ runner.os }}-${{ matrix.python-version }}-pip-
46
+ - name: Install dependencies
47
+ run: |
48
+ python -m pip install --upgrade pip setuptools wheel
49
+ # ImportError: lap requires numpy, please "pip install numpy".
50
+ # ImportError: lap requires Cython, please "pip install Cython".
51
+ pip install numpy Cython
52
+ pip install -qr requirements.txt -f https://download.pytorch.org/whl/cpu/torch_stable.html
53
+ pip install requests
54
+ python --version
55
+ pip --version
56
+ pip list
57
+ cd
58
+ shell: bash
59
+ - name: Tests workflow
60
+ run: |
61
+ # STRONGSORT
62
+ python track.py --tracking-method strongsort --yolo-weights ./weights/yolov5n.pt --source yolov5/data/images/bus.jpg
63
+ # OCSORT
64
+ python track.py --tracking-method ocsort --yolo-weights ./weights/yolov5n.pt --source yolov5/data/images/bus.jpg
65
+ # BYTETRACK
66
+ python track.py --tracking-method bytetrack --yolo-weights ./weights/yolov5n.pt --source yolov5/data/images/bus.jpg
67
+ # EXPORT
68
+ python reid_export.py --weights ./weights/osnet_x0_25_msmt17.pt # export defaults to torchscript
69
+ # STRONGSORT w. EXPORTED REID MODEL
70
+ python track.py --reid-weights ./weights/osnet_x0_25_msmt17.torchscript --source yolov5/data/images/bus.jpg
71
+ shell: bash
.github/workflows/stale.yml ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Close stale issues
2
+ on:
3
+ schedule:
4
+ - cron: "0 0 * * *" # At the end of every day
5
+
6
+ jobs:
7
+ stale:
8
+ runs-on: ubuntu-latest
9
+ steps:
10
+ - uses: actions/stale@v3
11
+ with:
12
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
13
+ stale-issue-message: |
14
+ 👋 Hello, this issue has been automatically marked as stale because it has not had recent activity. Please note it will be closed if no further activity occurs.
15
+ Feel free to inform us of any other **issues** you discover or **feature requests** that come to mind in the future. Pull Requests (PRs) are also always welcomed!
16
+ stale-pr-message: 'This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs.'
17
+ days-before-stale: 30
18
+ days-before-close: 5
19
+ exempt-issue-labels: 'documentation,tutorial'
20
+ operations-per-run: 100 # The maximum number of operations per run, used to control rate limiting.
.gitignore ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ .vscode/
2
+
3
+ # interpreter bytecode
4
+ __pycache__/
5
+
6
+ # exports
7
+ *_openvino_model
8
+ *.torchscript
9
+ *.pt
10
+ *.onnx
11
+ *.engine
.gitmodules ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ [submodule "yolov5"]
2
+ path = yolov5
3
+ url = https://github.com/ultralytics/yolov5.git
4
+ [submodule "trackers/strong_sort/deep/reid"]
5
+ path = trackers/strong_sort/deep/reid
6
+ url = https://github.com/KaiyangZhou/deep-person-reid
.idea/.gitignore ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ # 默认忽略的文件
2
+ /shelf/
3
+ /workspace.xml
.idea/inspectionProfiles/Project_Default.xml ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <component name="InspectionProjectProfileManager">
2
+ <profile version="1.0">
3
+ <option name="myName" value="Project Default" />
4
+ <inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
5
+ <option name="ignoredPackages">
6
+ <value>
7
+ <list size="6">
8
+ <item index="0" class="java.lang.String" itemvalue="filterpy" />
9
+ <item index="1" class="java.lang.String" itemvalue="motmetrics" />
10
+ <item index="2" class="java.lang.String" itemvalue="lap" />
11
+ <item index="3" class="java.lang.String" itemvalue="yolov5" />
12
+ <item index="4" class="java.lang.String" itemvalue="torchreid" />
13
+ <item index="5" class="java.lang.String" itemvalue="strongsort" />
14
+ </list>
15
+ </value>
16
+ </option>
17
+ </inspection_tool>
18
+ <inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
19
+ <option name="ignoredErrors">
20
+ <list>
21
+ <option value="N806" />
22
+ </list>
23
+ </option>
24
+ </inspection_tool>
25
+ </profile>
26
+ </component>
.idea/inspectionProfiles/profiles_settings.xml ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <component name="InspectionProjectProfileManager">
2
+ <settings>
3
+ <option name="USE_PROJECT_PROFILE" value="false" />
4
+ <version value="1.0" />
5
+ </settings>
6
+ </component>
.idea/misc.xml ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <project version="4">
3
+ <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (yolov5deepsort)" project-jdk-type="Python SDK" />
4
+ </project>
.idea/modules.xml ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <project version="4">
3
+ <component name="ProjectModuleManager">
4
+ <modules>
5
+ <module fileurl="file://$PROJECT_DIR$/.idea/yolov5_tracking-8.0.iml" filepath="$PROJECT_DIR$/.idea/yolov5_tracking-8.0.iml" />
6
+ </modules>
7
+ </component>
8
+ </project>
.idea/vcs.xml ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <project version="4">
3
+ <component name="VcsDirectoryMappings">
4
+ <mapping directory="$PROJECT_DIR$/val_utils" vcs="Git" />
5
+ </component>
6
+ </project>
.idea/workspace.xml ADDED
@@ -0,0 +1,181 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <project version="4">
3
+ <component name="ChangeListManager">
4
+ <list default="true" id="22cd1d58-ba55-443f-bf91-c1fd073300cb" name="变更" comment="" />
5
+ <option name="SHOW_DIALOG" value="false" />
6
+ <option name="HIGHLIGHT_CONFLICTS" value="true" />
7
+ <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
8
+ <option name="LAST_RESOLUTION" value="IGNORE" />
9
+ </component>
10
+ <component name="FileTemplateManagerImpl">
11
+ <option name="RECENT_TEMPLATES">
12
+ <list>
13
+ <option value="Python Script" />
14
+ </list>
15
+ </option>
16
+ </component>
17
+ <component name="Git.Settings">
18
+ <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$/val_utils" />
19
+ </component>
20
+ <component name="GitSEFilterConfiguration">
21
+ <file-type-list>
22
+ <filtered-out-file-type name="LOCAL_BRANCH" />
23
+ <filtered-out-file-type name="REMOTE_BRANCH" />
24
+ <filtered-out-file-type name="TAG" />
25
+ <filtered-out-file-type name="COMMIT_BY_MESSAGE" />
26
+ </file-type-list>
27
+ </component>
28
+ <component name="MarkdownSettingsMigration">
29
+ <option name="stateVersion" value="1" />
30
+ </component>
31
+ <component name="ProjectId" id="2Q0TDjaIWsZllNAxRhm630GOlwY" />
32
+ <component name="ProjectLevelVcsManager" settingsEditedManually="true" />
33
+ <component name="ProjectViewState">
34
+ <option name="hideEmptyMiddlePackages" value="true" />
35
+ <option name="showLibraryContents" value="true" />
36
+ </component>
37
+ <component name="PropertiesComponent">
38
+ <property name="RunOnceActivity.OpenProjectViewOnStart" value="true" />
39
+ <property name="RunOnceActivity.ShowReadmeOnStart" value="true" />
40
+ <property name="last_opened_file_path" value="$PROJECT_DIR$/weights" />
41
+ <property name="settings.editor.selected.configurable" value="com.jetbrains.python.configuration.PyActiveSdkModuleConfigurable" />
42
+ </component>
43
+ <component name="RecentsManager">
44
+ <key name="CopyFile.RECENT_KEYS">
45
+ <recent name="E:\github\tracker\yolov5_tracking-8.0\weights" />
46
+ <recent name="E:\github\tracker\yolov5_tracking-8.0\yolov5" />
47
+ <recent name="E:\github\tracker\yolov5_tracking-8.0\video" />
48
+ <recent name="E:\github\tracker\yolov5_tracking-8.0" />
49
+ </key>
50
+ <key name="MoveFile.RECENT_KEYS">
51
+ <recent name="E:\github\tracker\yolov5_tracking-8.0" />
52
+ <recent name="E:\github\tracker\yolov5_tracking-8.0\wieghts" />
53
+ </key>
54
+ </component>
55
+ <component name="RunManager" selected="Python.demo">
56
+ <configuration name="demo" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
57
+ <module name="yolov5_tracking-8.0" />
58
+ <option name="INTERPRETER_OPTIONS" value="" />
59
+ <option name="PARENT_ENVS" value="true" />
60
+ <envs>
61
+ <env name="PYTHONUNBUFFERED" value="1" />
62
+ </envs>
63
+ <option name="SDK_HOME" value="" />
64
+ <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
65
+ <option name="IS_MODULE_SDK" value="true" />
66
+ <option name="ADD_CONTENT_ROOTS" value="true" />
67
+ <option name="ADD_SOURCE_ROOTS" value="true" />
68
+ <option name="SCRIPT_NAME" value="$PROJECT_DIR$/demo.py" />
69
+ <option name="PARAMETERS" value="" />
70
+ <option name="SHOW_COMMAND_LINE" value="false" />
71
+ <option name="EMULATE_TERMINAL" value="false" />
72
+ <option name="MODULE_MODE" value="false" />
73
+ <option name="REDIRECT_INPUT" value="false" />
74
+ <option name="INPUT_FILE" value="" />
75
+ <method v="2" />
76
+ </configuration>
77
+ <configuration name="demo2" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
78
+ <module name="yolov5_tracking-8.0" />
79
+ <option name="INTERPRETER_OPTIONS" value="" />
80
+ <option name="PARENT_ENVS" value="true" />
81
+ <envs>
82
+ <env name="PYTHONUNBUFFERED" value="1" />
83
+ </envs>
84
+ <option name="SDK_HOME" value="" />
85
+ <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
86
+ <option name="IS_MODULE_SDK" value="true" />
87
+ <option name="ADD_CONTENT_ROOTS" value="true" />
88
+ <option name="ADD_SOURCE_ROOTS" value="true" />
89
+ <option name="SCRIPT_NAME" value="$PROJECT_DIR$/demo2.py" />
90
+ <option name="PARAMETERS" value="" />
91
+ <option name="SHOW_COMMAND_LINE" value="false" />
92
+ <option name="EMULATE_TERMINAL" value="false" />
93
+ <option name="MODULE_MODE" value="false" />
94
+ <option name="REDIRECT_INPUT" value="false" />
95
+ <option name="INPUT_FILE" value="" />
96
+ <method v="2" />
97
+ </configuration>
98
+ <configuration name="demo" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
99
+ <module name="yolov5_tracking-8.0" />
100
+ <option name="INTERPRETER_OPTIONS" value="" />
101
+ <option name="PARENT_ENVS" value="true" />
102
+ <envs>
103
+ <env name="PYTHONUNBUFFERED" value="1" />
104
+ </envs>
105
+ <option name="SDK_HOME" value="" />
106
+ <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
107
+ <option name="IS_MODULE_SDK" value="true" />
108
+ <option name="ADD_CONTENT_ROOTS" value="true" />
109
+ <option name="ADD_SOURCE_ROOTS" value="true" />
110
+ <option name="SCRIPT_NAME" value="E:\github\tracker\yolov5_tracking-8.0\demo.py" />
111
+ <option name="PARAMETERS" value="" />
112
+ <option name="SHOW_COMMAND_LINE" value="false" />
113
+ <option name="EMULATE_TERMINAL" value="false" />
114
+ <option name="MODULE_MODE" value="false" />
115
+ <option name="REDIRECT_INPUT" value="false" />
116
+ <option name="INPUT_FILE" value="" />
117
+ <method v="2" />
118
+ </configuration>
119
+ <configuration name="test" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
120
+ <module name="yolov5_tracking-8.0" />
121
+ <option name="INTERPRETER_OPTIONS" value="" />
122
+ <option name="PARENT_ENVS" value="true" />
123
+ <envs>
124
+ <env name="PYTHONUNBUFFERED" value="1" />
125
+ </envs>
126
+ <option name="SDK_HOME" value="" />
127
+ <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
128
+ <option name="IS_MODULE_SDK" value="true" />
129
+ <option name="ADD_CONTENT_ROOTS" value="true" />
130
+ <option name="ADD_SOURCE_ROOTS" value="true" />
131
+ <option name="SCRIPT_NAME" value="$PROJECT_DIR$/test.py" />
132
+ <option name="PARAMETERS" value="" />
133
+ <option name="SHOW_COMMAND_LINE" value="false" />
134
+ <option name="EMULATE_TERMINAL" value="false" />
135
+ <option name="MODULE_MODE" value="false" />
136
+ <option name="REDIRECT_INPUT" value="false" />
137
+ <option name="INPUT_FILE" value="" />
138
+ <method v="2" />
139
+ </configuration>
140
+ <configuration name="test1" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
141
+ <module name="yolov5_tracking-8.0" />
142
+ <option name="INTERPRETER_OPTIONS" value="" />
143
+ <option name="PARENT_ENVS" value="true" />
144
+ <envs>
145
+ <env name="PYTHONUNBUFFERED" value="1" />
146
+ </envs>
147
+ <option name="SDK_HOME" value="" />
148
+ <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
149
+ <option name="IS_MODULE_SDK" value="true" />
150
+ <option name="ADD_CONTENT_ROOTS" value="true" />
151
+ <option name="ADD_SOURCE_ROOTS" value="true" />
152
+ <option name="SCRIPT_NAME" value="$PROJECT_DIR$/test1.py" />
153
+ <option name="PARAMETERS" value="" />
154
+ <option name="SHOW_COMMAND_LINE" value="false" />
155
+ <option name="EMULATE_TERMINAL" value="false" />
156
+ <option name="MODULE_MODE" value="false" />
157
+ <option name="REDIRECT_INPUT" value="false" />
158
+ <option name="INPUT_FILE" value="" />
159
+ <method v="2" />
160
+ </configuration>
161
+ <recent_temporary>
162
+ <list>
163
+ <item itemvalue="Python.demo2" />
164
+ <item itemvalue="Python.demo" />
165
+ <item itemvalue="Python.test1" />
166
+ <item itemvalue="Python.test" />
167
+ </list>
168
+ </recent_temporary>
169
+ </component>
170
+ <component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="应用程序级" UseSingleDictionary="true" transferred="true" />
171
+ <component name="TaskManager">
172
+ <task active="true" id="Default" summary="默认任务">
173
+ <changelist id="22cd1d58-ba55-443f-bf91-c1fd073300cb" name="变更" comment="" />
174
+ <created>1684489383896</created>
175
+ <option name="number" value="Default" />
176
+ <option name="presentableId" value="Default" />
177
+ <updated>1684489383896</updated>
178
+ </task>
179
+ <servers />
180
+ </component>
181
+ </project>
.idea/yolov5_tracking-8.0.iml ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <module type="PYTHON_MODULE" version="4">
3
+ <component name="NewModuleRootManager">
4
+ <content url="file://$MODULE_DIR$" />
5
+ <orderEntry type="jdk" jdkName="Python 3.8 (yolov5deepsort)" jdkType="Python SDK" />
6
+ <orderEntry type="sourceFolder" forTests="false" />
7
+ </component>
8
+ <component name="PyDocumentationSettings">
9
+ <option name="format" value="PLAIN" />
10
+ <option name="myDocStringFormat" value="Plain" />
11
+ </component>
12
+ </module>
LICENSE ADDED
@@ -0,0 +1,674 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ GNU GENERAL PUBLIC LICENSE
2
+ Version 3, 29 June 2007
3
+
4
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
5
+ Everyone is permitted to copy and distribute verbatim copies
6
+ of this license document, but changing it is not allowed.
7
+
8
+ Preamble
9
+
10
+ The GNU General Public License is a free, copyleft license for
11
+ software and other kinds of works.
12
+
13
+ The licenses for most software and other practical works are designed
14
+ to take away your freedom to share and change the works. By contrast,
15
+ the GNU General Public License is intended to guarantee your freedom to
16
+ share and change all versions of a program--to make sure it remains free
17
+ software for all its users. We, the Free Software Foundation, use the
18
+ GNU General Public License for most of our software; it applies also to
19
+ any other work released this way by its authors. You can apply it to
20
+ your programs, too.
21
+
22
+ When we speak of free software, we are referring to freedom, not
23
+ price. Our General Public Licenses are designed to make sure that you
24
+ have the freedom to distribute copies of free software (and charge for
25
+ them if you wish), that you receive source code or can get it if you
26
+ want it, that you can change the software or use pieces of it in new
27
+ free programs, and that you know you can do these things.
28
+
29
+ To protect your rights, we need to prevent others from denying you
30
+ these rights or asking you to surrender the rights. Therefore, you have
31
+ certain responsibilities if you distribute copies of the software, or if
32
+ you modify it: responsibilities to respect the freedom of others.
33
+
34
+ For example, if you distribute copies of such a program, whether
35
+ gratis or for a fee, you must pass on to the recipients the same
36
+ freedoms that you received. You must make sure that they, too, receive
37
+ or can get the source code. And you must show them these terms so they
38
+ know their rights.
39
+
40
+ Developers that use the GNU GPL protect your rights with two steps:
41
+ (1) assert copyright on the software, and (2) offer you this License
42
+ giving you legal permission to copy, distribute and/or modify it.
43
+
44
+ For the developers' and authors' protection, the GPL clearly explains
45
+ that there is no warranty for this free software. For both users' and
46
+ authors' sake, the GPL requires that modified versions be marked as
47
+ changed, so that their problems will not be attributed erroneously to
48
+ authors of previous versions.
49
+
50
+ Some devices are designed to deny users access to install or run
51
+ modified versions of the software inside them, although the manufacturer
52
+ can do so. This is fundamentally incompatible with the aim of
53
+ protecting users' freedom to change the software. The systematic
54
+ pattern of such abuse occurs in the area of products for individuals to
55
+ use, which is precisely where it is most unacceptable. Therefore, we
56
+ have designed this version of the GPL to prohibit the practice for those
57
+ products. If such problems arise substantially in other domains, we
58
+ stand ready to extend this provision to those domains in future versions
59
+ of the GPL, as needed to protect the freedom of users.
60
+
61
+ Finally, every program is threatened constantly by software patents.
62
+ States should not allow patents to restrict development and use of
63
+ software on general-purpose computers, but in those that do, we wish to
64
+ avoid the special danger that patents applied to a free program could
65
+ make it effectively proprietary. To prevent this, the GPL assures that
66
+ patents cannot be used to render the program non-free.
67
+
68
+ The precise terms and conditions for copying, distribution and
69
+ modification follow.
70
+
71
+ TERMS AND CONDITIONS
72
+
73
+ 0. Definitions.
74
+
75
+ "This License" refers to version 3 of the GNU General Public License.
76
+
77
+ "Copyright" also means copyright-like laws that apply to other kinds of
78
+ works, such as semiconductor masks.
79
+
80
+ "The Program" refers to any copyrightable work licensed under this
81
+ License. Each licensee is addressed as "you". "Licensees" and
82
+ "recipients" may be individuals or organizations.
83
+
84
+ To "modify" a work means to copy from or adapt all or part of the work
85
+ in a fashion requiring copyright permission, other than the making of an
86
+ exact copy. The resulting work is called a "modified version" of the
87
+ earlier work or a work "based on" the earlier work.
88
+
89
+ A "covered work" means either the unmodified Program or a work based
90
+ on the Program.
91
+
92
+ To "propagate" a work means to do anything with it that, without
93
+ permission, would make you directly or secondarily liable for
94
+ infringement under applicable copyright law, except executing it on a
95
+ computer or modifying a private copy. Propagation includes copying,
96
+ distribution (with or without modification), making available to the
97
+ public, and in some countries other activities as well.
98
+
99
+ To "convey" a work means any kind of propagation that enables other
100
+ parties to make or receive copies. Mere interaction with a user through
101
+ a computer network, with no transfer of a copy, is not conveying.
102
+
103
+ An interactive user interface displays "Appropriate Legal Notices"
104
+ to the extent that it includes a convenient and prominently visible
105
+ feature that (1) displays an appropriate copyright notice, and (2)
106
+ tells the user that there is no warranty for the work (except to the
107
+ extent that warranties are provided), that licensees may convey the
108
+ work under this License, and how to view a copy of this License. If
109
+ the interface presents a list of user commands or options, such as a
110
+ menu, a prominent item in the list meets this criterion.
111
+
112
+ 1. Source Code.
113
+
114
+ The "source code" for a work means the preferred form of the work
115
+ for making modifications to it. "Object code" means any non-source
116
+ form of a work.
117
+
118
+ A "Standard Interface" means an interface that either is an official
119
+ standard defined by a recognized standards body, or, in the case of
120
+ interfaces specified for a particular programming language, one that
121
+ is widely used among developers working in that language.
122
+
123
+ The "System Libraries" of an executable work include anything, other
124
+ than the work as a whole, that (a) is included in the normal form of
125
+ packaging a Major Component, but which is not part of that Major
126
+ Component, and (b) serves only to enable use of the work with that
127
+ Major Component, or to implement a Standard Interface for which an
128
+ implementation is available to the public in source code form. A
129
+ "Major Component", in this context, means a major essential component
130
+ (kernel, window system, and so on) of the specific operating system
131
+ (if any) on which the executable work runs, or a compiler used to
132
+ produce the work, or an object code interpreter used to run it.
133
+
134
+ The "Corresponding Source" for a work in object code form means all
135
+ the source code needed to generate, install, and (for an executable
136
+ work) run the object code and to modify the work, including scripts to
137
+ control those activities. However, it does not include the work's
138
+ System Libraries, or general-purpose tools or generally available free
139
+ programs which are used unmodified in performing those activities but
140
+ which are not part of the work. For example, Corresponding Source
141
+ includes interface definition files associated with source files for
142
+ the work, and the source code for shared libraries and dynamically
143
+ linked subprograms that the work is specifically designed to require,
144
+ such as by intimate data communication or control flow between those
145
+ subprograms and other parts of the work.
146
+
147
+ The Corresponding Source need not include anything that users
148
+ can regenerate automatically from other parts of the Corresponding
149
+ Source.
150
+
151
+ The Corresponding Source for a work in source code form is that
152
+ same work.
153
+
154
+ 2. Basic Permissions.
155
+
156
+ All rights granted under this License are granted for the term of
157
+ copyright on the Program, and are irrevocable provided the stated
158
+ conditions are met. This License explicitly affirms your unlimited
159
+ permission to run the unmodified Program. The output from running a
160
+ covered work is covered by this License only if the output, given its
161
+ content, constitutes a covered work. This License acknowledges your
162
+ rights of fair use or other equivalent, as provided by copyright law.
163
+
164
+ You may make, run and propagate covered works that you do not
165
+ convey, without conditions so long as your license otherwise remains
166
+ in force. You may convey covered works to others for the sole purpose
167
+ of having them make modifications exclusively for you, or provide you
168
+ with facilities for running those works, provided that you comply with
169
+ the terms of this License in conveying all material for which you do
170
+ not control copyright. Those thus making or running the covered works
171
+ for you must do so exclusively on your behalf, under your direction
172
+ and control, on terms that prohibit them from making any copies of
173
+ your copyrighted material outside their relationship with you.
174
+
175
+ Conveying under any other circumstances is permitted solely under
176
+ the conditions stated below. Sublicensing is not allowed; section 10
177
+ makes it unnecessary.
178
+
179
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180
+
181
+ No covered work shall be deemed part of an effective technological
182
+ measure under any applicable law fulfilling obligations under article
183
+ 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184
+ similar laws prohibiting or restricting circumvention of such
185
+ measures.
186
+
187
+ When you convey a covered work, you waive any legal power to forbid
188
+ circumvention of technological measures to the extent such circumvention
189
+ is effected by exercising rights under this License with respect to
190
+ the covered work, and you disclaim any intention to limit operation or
191
+ modification of the work as a means of enforcing, against the work's
192
+ users, your or third parties' legal rights to forbid circumvention of
193
+ technological measures.
194
+
195
+ 4. Conveying Verbatim Copies.
196
+
197
+ You may convey verbatim copies of the Program's source code as you
198
+ receive it, in any medium, provided that you conspicuously and
199
+ appropriately publish on each copy an appropriate copyright notice;
200
+ keep intact all notices stating that this License and any
201
+ non-permissive terms added in accord with section 7 apply to the code;
202
+ keep intact all notices of the absence of any warranty; and give all
203
+ recipients a copy of this License along with the Program.
204
+
205
+ You may charge any price or no price for each copy that you convey,
206
+ and you may offer support or warranty protection for a fee.
207
+
208
+ 5. Conveying Modified Source Versions.
209
+
210
+ You may convey a work based on the Program, or the modifications to
211
+ produce it from the Program, in the form of source code under the
212
+ terms of section 4, provided that you also meet all of these conditions:
213
+
214
+ a) The work must carry prominent notices stating that you modified
215
+ it, and giving a relevant date.
216
+
217
+ b) The work must carry prominent notices stating that it is
218
+ released under this License and any conditions added under section
219
+ 7. This requirement modifies the requirement in section 4 to
220
+ "keep intact all notices".
221
+
222
+ c) You must license the entire work, as a whole, under this
223
+ License to anyone who comes into possession of a copy. This
224
+ License will therefore apply, along with any applicable section 7
225
+ additional terms, to the whole of the work, and all its parts,
226
+ regardless of how they are packaged. This License gives no
227
+ permission to license the work in any other way, but it does not
228
+ invalidate such permission if you have separately received it.
229
+
230
+ d) If the work has interactive user interfaces, each must display
231
+ Appropriate Legal Notices; however, if the Program has interactive
232
+ interfaces that do not display Appropriate Legal Notices, your
233
+ work need not make them do so.
234
+
235
+ A compilation of a covered work with other separate and independent
236
+ works, which are not by their nature extensions of the covered work,
237
+ and which are not combined with it such as to form a larger program,
238
+ in or on a volume of a storage or distribution medium, is called an
239
+ "aggregate" if the compilation and its resulting copyright are not
240
+ used to limit the access or legal rights of the compilation's users
241
+ beyond what the individual works permit. Inclusion of a covered work
242
+ in an aggregate does not cause this License to apply to the other
243
+ parts of the aggregate.
244
+
245
+ 6. Conveying Non-Source Forms.
246
+
247
+ You may convey a covered work in object code form under the terms
248
+ of sections 4 and 5, provided that you also convey the
249
+ machine-readable Corresponding Source under the terms of this License,
250
+ in one of these ways:
251
+
252
+ a) Convey the object code in, or embodied in, a physical product
253
+ (including a physical distribution medium), accompanied by the
254
+ Corresponding Source fixed on a durable physical medium
255
+ customarily used for software interchange.
256
+
257
+ b) Convey the object code in, or embodied in, a physical product
258
+ (including a physical distribution medium), accompanied by a
259
+ written offer, valid for at least three years and valid for as
260
+ long as you offer spare parts or customer support for that product
261
+ model, to give anyone who possesses the object code either (1) a
262
+ copy of the Corresponding Source for all the software in the
263
+ product that is covered by this License, on a durable physical
264
+ medium customarily used for software interchange, for a price no
265
+ more than your reasonable cost of physically performing this
266
+ conveying of source, or (2) access to copy the
267
+ Corresponding Source from a network server at no charge.
268
+
269
+ c) Convey individual copies of the object code with a copy of the
270
+ written offer to provide the Corresponding Source. This
271
+ alternative is allowed only occasionally and noncommercially, and
272
+ only if you received the object code with such an offer, in accord
273
+ with subsection 6b.
274
+
275
+ d) Convey the object code by offering access from a designated
276
+ place (gratis or for a charge), and offer equivalent access to the
277
+ Corresponding Source in the same way through the same place at no
278
+ further charge. You need not require recipients to copy the
279
+ Corresponding Source along with the object code. If the place to
280
+ copy the object code is a network server, the Corresponding Source
281
+ may be on a different server (operated by you or a third party)
282
+ that supports equivalent copying facilities, provided you maintain
283
+ clear directions next to the object code saying where to find the
284
+ Corresponding Source. Regardless of what server hosts the
285
+ Corresponding Source, you remain obligated to ensure that it is
286
+ available for as long as needed to satisfy these requirements.
287
+
288
+ e) Convey the object code using peer-to-peer transmission, provided
289
+ you inform other peers where the object code and Corresponding
290
+ Source of the work are being offered to the general public at no
291
+ charge under subsection 6d.
292
+
293
+ A separable portion of the object code, whose source code is excluded
294
+ from the Corresponding Source as a System Library, need not be
295
+ included in conveying the object code work.
296
+
297
+ A "User Product" is either (1) a "consumer product", which means any
298
+ tangible personal property which is normally used for personal, family,
299
+ or household purposes, or (2) anything designed or sold for incorporation
300
+ into a dwelling. In determining whether a product is a consumer product,
301
+ doubtful cases shall be resolved in favor of coverage. For a particular
302
+ product received by a particular user, "normally used" refers to a
303
+ typical or common use of that class of product, regardless of the status
304
+ of the particular user or of the way in which the particular user
305
+ actually uses, or expects or is expected to use, the product. A product
306
+ is a consumer product regardless of whether the product has substantial
307
+ commercial, industrial or non-consumer uses, unless such uses represent
308
+ the only significant mode of use of the product.
309
+
310
+ "Installation Information" for a User Product means any methods,
311
+ procedures, authorization keys, or other information required to install
312
+ and execute modified versions of a covered work in that User Product from
313
+ a modified version of its Corresponding Source. The information must
314
+ suffice to ensure that the continued functioning of the modified object
315
+ code is in no case prevented or interfered with solely because
316
+ modification has been made.
317
+
318
+ If you convey an object code work under this section in, or with, or
319
+ specifically for use in, a User Product, and the conveying occurs as
320
+ part of a transaction in which the right of possession and use of the
321
+ User Product is transferred to the recipient in perpetuity or for a
322
+ fixed term (regardless of how the transaction is characterized), the
323
+ Corresponding Source conveyed under this section must be accompanied
324
+ by the Installation Information. But this requirement does not apply
325
+ if neither you nor any third party retains the ability to install
326
+ modified object code on the User Product (for example, the work has
327
+ been installed in ROM).
328
+
329
+ The requirement to provide Installation Information does not include a
330
+ requirement to continue to provide support service, warranty, or updates
331
+ for a work that has been modified or installed by the recipient, or for
332
+ the User Product in which it has been modified or installed. Access to a
333
+ network may be denied when the modification itself materially and
334
+ adversely affects the operation of the network or violates the rules and
335
+ protocols for communication across the network.
336
+
337
+ Corresponding Source conveyed, and Installation Information provided,
338
+ in accord with this section must be in a format that is publicly
339
+ documented (and with an implementation available to the public in
340
+ source code form), and must require no special password or key for
341
+ unpacking, reading or copying.
342
+
343
+ 7. Additional Terms.
344
+
345
+ "Additional permissions" are terms that supplement the terms of this
346
+ License by making exceptions from one or more of its conditions.
347
+ Additional permissions that are applicable to the entire Program shall
348
+ be treated as though they were included in this License, to the extent
349
+ that they are valid under applicable law. If additional permissions
350
+ apply only to part of the Program, that part may be used separately
351
+ under those permissions, but the entire Program remains governed by
352
+ this License without regard to the additional permissions.
353
+
354
+ When you convey a copy of a covered work, you may at your option
355
+ remove any additional permissions from that copy, or from any part of
356
+ it. (Additional permissions may be written to require their own
357
+ removal in certain cases when you modify the work.) You may place
358
+ additional permissions on material, added by you to a covered work,
359
+ for which you have or can give appropriate copyright permission.
360
+
361
+ Notwithstanding any other provision of this License, for material you
362
+ add to a covered work, you may (if authorized by the copyright holders of
363
+ that material) supplement the terms of this License with terms:
364
+
365
+ a) Disclaiming warranty or limiting liability differently from the
366
+ terms of sections 15 and 16 of this License; or
367
+
368
+ b) Requiring preservation of specified reasonable legal notices or
369
+ author attributions in that material or in the Appropriate Legal
370
+ Notices displayed by works containing it; or
371
+
372
+ c) Prohibiting misrepresentation of the origin of that material, or
373
+ requiring that modified versions of such material be marked in
374
+ reasonable ways as different from the original version; or
375
+
376
+ d) Limiting the use for publicity purposes of names of licensors or
377
+ authors of the material; or
378
+
379
+ e) Declining to grant rights under trademark law for use of some
380
+ trade names, trademarks, or service marks; or
381
+
382
+ f) Requiring indemnification of licensors and authors of that
383
+ material by anyone who conveys the material (or modified versions of
384
+ it) with contractual assumptions of liability to the recipient, for
385
+ any liability that these contractual assumptions directly impose on
386
+ those licensors and authors.
387
+
388
+ All other non-permissive additional terms are considered "further
389
+ restrictions" within the meaning of section 10. If the Program as you
390
+ received it, or any part of it, contains a notice stating that it is
391
+ governed by this License along with a term that is a further
392
+ restriction, you may remove that term. If a license document contains
393
+ a further restriction but permits relicensing or conveying under this
394
+ License, you may add to a covered work material governed by the terms
395
+ of that license document, provided that the further restriction does
396
+ not survive such relicensing or conveying.
397
+
398
+ If you add terms to a covered work in accord with this section, you
399
+ must place, in the relevant source files, a statement of the
400
+ additional terms that apply to those files, or a notice indicating
401
+ where to find the applicable terms.
402
+
403
+ Additional terms, permissive or non-permissive, may be stated in the
404
+ form of a separately written license, or stated as exceptions;
405
+ the above requirements apply either way.
406
+
407
+ 8. Termination.
408
+
409
+ You may not propagate or modify a covered work except as expressly
410
+ provided under this License. Any attempt otherwise to propagate or
411
+ modify it is void, and will automatically terminate your rights under
412
+ this License (including any patent licenses granted under the third
413
+ paragraph of section 11).
414
+
415
+ However, if you cease all violation of this License, then your
416
+ license from a particular copyright holder is reinstated (a)
417
+ provisionally, unless and until the copyright holder explicitly and
418
+ finally terminates your license, and (b) permanently, if the copyright
419
+ holder fails to notify you of the violation by some reasonable means
420
+ prior to 60 days after the cessation.
421
+
422
+ Moreover, your license from a particular copyright holder is
423
+ reinstated permanently if the copyright holder notifies you of the
424
+ violation by some reasonable means, this is the first time you have
425
+ received notice of violation of this License (for any work) from that
426
+ copyright holder, and you cure the violation prior to 30 days after
427
+ your receipt of the notice.
428
+
429
+ Termination of your rights under this section does not terminate the
430
+ licenses of parties who have received copies or rights from you under
431
+ this License. If your rights have been terminated and not permanently
432
+ reinstated, you do not qualify to receive new licenses for the same
433
+ material under section 10.
434
+
435
+ 9. Acceptance Not Required for Having Copies.
436
+
437
+ You are not required to accept this License in order to receive or
438
+ run a copy of the Program. Ancillary propagation of a covered work
439
+ occurring solely as a consequence of using peer-to-peer transmission
440
+ to receive a copy likewise does not require acceptance. However,
441
+ nothing other than this License grants you permission to propagate or
442
+ modify any covered work. These actions infringe copyright if you do
443
+ not accept this License. Therefore, by modifying or propagating a
444
+ covered work, you indicate your acceptance of this License to do so.
445
+
446
+ 10. Automatic Licensing of Downstream Recipients.
447
+
448
+ Each time you convey a covered work, the recipient automatically
449
+ receives a license from the original licensors, to run, modify and
450
+ propagate that work, subject to this License. You are not responsible
451
+ for enforcing compliance by third parties with this License.
452
+
453
+ An "entity transaction" is a transaction transferring control of an
454
+ organization, or substantially all assets of one, or subdividing an
455
+ organization, or merging organizations. If propagation of a covered
456
+ work results from an entity transaction, each party to that
457
+ transaction who receives a copy of the work also receives whatever
458
+ licenses to the work the party's predecessor in interest had or could
459
+ give under the previous paragraph, plus a right to possession of the
460
+ Corresponding Source of the work from the predecessor in interest, if
461
+ the predecessor has it or can get it with reasonable efforts.
462
+
463
+ You may not impose any further restrictions on the exercise of the
464
+ rights granted or affirmed under this License. For example, you may
465
+ not impose a license fee, royalty, or other charge for exercise of
466
+ rights granted under this License, and you may not initiate litigation
467
+ (including a cross-claim or counterclaim in a lawsuit) alleging that
468
+ any patent claim is infringed by making, using, selling, offering for
469
+ sale, or importing the Program or any portion of it.
470
+
471
+ 11. Patents.
472
+
473
+ A "contributor" is a copyright holder who authorizes use under this
474
+ License of the Program or a work on which the Program is based. The
475
+ work thus licensed is called the contributor's "contributor version".
476
+
477
+ A contributor's "essential patent claims" are all patent claims
478
+ owned or controlled by the contributor, whether already acquired or
479
+ hereafter acquired, that would be infringed by some manner, permitted
480
+ by this License, of making, using, or selling its contributor version,
481
+ but do not include claims that would be infringed only as a
482
+ consequence of further modification of the contributor version. For
483
+ purposes of this definition, "control" includes the right to grant
484
+ patent sublicenses in a manner consistent with the requirements of
485
+ this License.
486
+
487
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
488
+ patent license under the contributor's essential patent claims, to
489
+ make, use, sell, offer for sale, import and otherwise run, modify and
490
+ propagate the contents of its contributor version.
491
+
492
+ In the following three paragraphs, a "patent license" is any express
493
+ agreement or commitment, however denominated, not to enforce a patent
494
+ (such as an express permission to practice a patent or covenant not to
495
+ sue for patent infringement). To "grant" such a patent license to a
496
+ party means to make such an agreement or commitment not to enforce a
497
+ patent against the party.
498
+
499
+ If you convey a covered work, knowingly relying on a patent license,
500
+ and the Corresponding Source of the work is not available for anyone
501
+ to copy, free of charge and under the terms of this License, through a
502
+ publicly available network server or other readily accessible means,
503
+ then you must either (1) cause the Corresponding Source to be so
504
+ available, or (2) arrange to deprive yourself of the benefit of the
505
+ patent license for this particular work, or (3) arrange, in a manner
506
+ consistent with the requirements of this License, to extend the patent
507
+ license to downstream recipients. "Knowingly relying" means you have
508
+ actual knowledge that, but for the patent license, your conveying the
509
+ covered work in a country, or your recipient's use of the covered work
510
+ in a country, would infringe one or more identifiable patents in that
511
+ country that you have reason to believe are valid.
512
+
513
+ If, pursuant to or in connection with a single transaction or
514
+ arrangement, you convey, or propagate by procuring conveyance of, a
515
+ covered work, and grant a patent license to some of the parties
516
+ receiving the covered work authorizing them to use, propagate, modify
517
+ or convey a specific copy of the covered work, then the patent license
518
+ you grant is automatically extended to all recipients of the covered
519
+ work and works based on it.
520
+
521
+ A patent license is "discriminatory" if it does not include within
522
+ the scope of its coverage, prohibits the exercise of, or is
523
+ conditioned on the non-exercise of one or more of the rights that are
524
+ specifically granted under this License. You may not convey a covered
525
+ work if you are a party to an arrangement with a third party that is
526
+ in the business of distributing software, under which you make payment
527
+ to the third party based on the extent of your activity of conveying
528
+ the work, and under which the third party grants, to any of the
529
+ parties who would receive the covered work from you, a discriminatory
530
+ patent license (a) in connection with copies of the covered work
531
+ conveyed by you (or copies made from those copies), or (b) primarily
532
+ for and in connection with specific products or compilations that
533
+ contain the covered work, unless you entered into that arrangement,
534
+ or that patent license was granted, prior to 28 March 2007.
535
+
536
+ Nothing in this License shall be construed as excluding or limiting
537
+ any implied license or other defenses to infringement that may
538
+ otherwise be available to you under applicable patent law.
539
+
540
+ 12. No Surrender of Others' Freedom.
541
+
542
+ If conditions are imposed on you (whether by court order, agreement or
543
+ otherwise) that contradict the conditions of this License, they do not
544
+ excuse you from the conditions of this License. If you cannot convey a
545
+ covered work so as to satisfy simultaneously your obligations under this
546
+ License and any other pertinent obligations, then as a consequence you may
547
+ not convey it at all. For example, if you agree to terms that obligate you
548
+ to collect a royalty for further conveying from those to whom you convey
549
+ the Program, the only way you could satisfy both those terms and this
550
+ License would be to refrain entirely from conveying the Program.
551
+
552
+ 13. Use with the GNU Affero General Public License.
553
+
554
+ Notwithstanding any other provision of this License, you have
555
+ permission to link or combine any covered work with a work licensed
556
+ under version 3 of the GNU Affero General Public License into a single
557
+ combined work, and to convey the resulting work. The terms of this
558
+ License will continue to apply to the part which is the covered work,
559
+ but the special requirements of the GNU Affero General Public License,
560
+ section 13, concerning interaction through a network will apply to the
561
+ combination as such.
562
+
563
+ 14. Revised Versions of this License.
564
+
565
+ The Free Software Foundation may publish revised and/or new versions of
566
+ the GNU General Public License from time to time. Such new versions will
567
+ be similar in spirit to the present version, but may differ in detail to
568
+ address new problems or concerns.
569
+
570
+ Each version is given a distinguishing version number. If the
571
+ Program specifies that a certain numbered version of the GNU General
572
+ Public License "or any later version" applies to it, you have the
573
+ option of following the terms and conditions either of that numbered
574
+ version or of any later version published by the Free Software
575
+ Foundation. If the Program does not specify a version number of the
576
+ GNU General Public License, you may choose any version ever published
577
+ by the Free Software Foundation.
578
+
579
+ If the Program specifies that a proxy can decide which future
580
+ versions of the GNU General Public License can be used, that proxy's
581
+ public statement of acceptance of a version permanently authorizes you
582
+ to choose that version for the Program.
583
+
584
+ Later license versions may give you additional or different
585
+ permissions. However, no additional obligations are imposed on any
586
+ author or copyright holder as a result of your choosing to follow a
587
+ later version.
588
+
589
+ 15. Disclaimer of Warranty.
590
+
591
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592
+ APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593
+ HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594
+ OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595
+ THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596
+ PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597
+ IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598
+ ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599
+
600
+ 16. Limitation of Liability.
601
+
602
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603
+ WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604
+ THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605
+ GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606
+ USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607
+ DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608
+ PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609
+ EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610
+ SUCH DAMAGES.
611
+
612
+ 17. Interpretation of Sections 15 and 16.
613
+
614
+ If the disclaimer of warranty and limitation of liability provided
615
+ above cannot be given local legal effect according to their terms,
616
+ reviewing courts shall apply local law that most closely approximates
617
+ an absolute waiver of all civil liability in connection with the
618
+ Program, unless a warranty or assumption of liability accompanies a
619
+ copy of the Program in return for a fee.
620
+
621
+ END OF TERMS AND CONDITIONS
622
+
623
+ How to Apply These Terms to Your New Programs
624
+
625
+ If you develop a new program, and you want it to be of the greatest
626
+ possible use to the public, the best way to achieve this is to make it
627
+ free software which everyone can redistribute and change under these terms.
628
+
629
+ To do so, attach the following notices to the program. It is safest
630
+ to attach them to the start of each source file to most effectively
631
+ state the exclusion of warranty; and each file should have at least
632
+ the "copyright" line and a pointer to where the full notice is found.
633
+
634
+ <one line to give the program's name and a brief idea of what it does.>
635
+ Copyright (C) <year> <name of author>
636
+
637
+ This program is free software: you can redistribute it and/or modify
638
+ it under the terms of the GNU General Public License as published by
639
+ the Free Software Foundation, either version 3 of the License, or
640
+ (at your option) any later version.
641
+
642
+ This program is distributed in the hope that it will be useful,
643
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
644
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645
+ GNU General Public License for more details.
646
+
647
+ You should have received a copy of the GNU General Public License
648
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
649
+
650
+ Also add information on how to contact you by electronic and paper mail.
651
+
652
+ If the program does terminal interaction, make it output a short
653
+ notice like this when it starts in an interactive mode:
654
+
655
+ <program> Copyright (C) <year> <name of author>
656
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657
+ This is free software, and you are welcome to redistribute it
658
+ under certain conditions; type `show c' for details.
659
+
660
+ The hypothetical commands `show w' and `show c' should show the appropriate
661
+ parts of the General Public License. Of course, your program's commands
662
+ might be different; for a GUI interface, you would use an "about box".
663
+
664
+ You should also get your employer (if you work as a programmer) or school,
665
+ if any, to sign a "copyright disclaimer" for the program, if necessary.
666
+ For more information on this, and how to apply and follow the GNU GPL, see
667
+ <http://www.gnu.org/licenses/>.
668
+
669
+ The GNU General Public License does not permit incorporating your program
670
+ into proprietary programs. If your program is a subroutine library, you
671
+ may consider it more useful to permit linking proprietary applications with
672
+ the library. If this is what you want to do, use the GNU Lesser General
673
+ Public License instead of this License. But first, please read
674
+ <http://www.gnu.org/philosophy/why-not-lgpl.html>.
__pycache__/track.cpython-38.pyc ADDED
Binary file (9.11 kB). View file
 
demo.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import tempfile
3
+ import os
4
+ import track
5
+ import shutil
6
+ from pathlib import Path
7
+ from yolov5 import detect
8
+ from PIL import Image
9
+
10
# Object detection on a single uploaded infrared image
def Detect(image):
    """Run YOLOv5 detection on one image and return the annotated result.

    Parameters
    ----------
    image : numpy.ndarray
        RGB array handed over by the gradio ``Image`` input component.

    Returns
    -------
    PIL.Image.Image
        Annotated detection result rendered by ``yolov5.detect.run``.

    Raises
    ------
    RuntimeError
        If detection produced no output image.
    """
    # Scratch directory so concurrent/repeated calls never collide.
    temp_path = tempfile.TemporaryDirectory(dir="./")
    temp_dir = temp_path.name
    try:
        # Persist the uploaded array as a jpg for detect.run to consume.
        temp_image_path = os.path.join(temp_dir, "temp.jpg")
        Image.fromarray(image).save(temp_image_path)
        # detect.run writes its annotated copy under <project>/<name>.
        # (was project=f'./{temp_dir}' — temp_dir is already a usable path)
        detect.run(source=temp_image_path, data="test_image/FLIR.yaml",
                   weights="weights/best.pt", project=temp_dir,
                   name='tempresult', hide_conf=False, conf_thres=0.35)
        temp_result_dir = os.path.join(temp_dir, "tempresult")
        results = os.listdir(temp_result_dir)
        if not results:  # guard: previously an opaque IndexError
            raise RuntimeError("detection produced no output image")
        temp_result_path = os.path.join(temp_result_dir, results[0])
        # Copy the pixels out before the temp directory is removed.
        result_image = Image.open(temp_result_path).copy()
    finally:
        # Always remove the scratch directory, even if detection failed.
        temp_path.cleanup()
    return result_image
31
+
32
# Example images offered in the gradio UI (candidate infrared test frames)
example_image= [
    "./test_image/video-2SReBn5LtAkL5HMj2-frame-005072-MA7NCLQGoqq9aHaiL.jpg",
    "./test_image/video-2rsjnZFyGQGeynfbv-frame-003708-6fPQbB7jtibwaYAE7.jpg",
    "./test_image/video-2SReBn5LtAkL5HMj2-frame-000317-HTgPBFgZyPdwQnNvE.jpg",
    "./test_image/video-jNQtRj6NGycZDEXpe-frame-002515-J3YntG8ntvZheKK3P.jpg",
    "./test_image/video-kDDWXrnLSoSdHCZ7S-frame-003063-eaKjPvPskDPjenZ8S.jpg",
    "./test_image/video-r68Yr9RPWEp5fW2ZF-frame-000333-X6K5iopqbmjKEsSqN.jpg"
]
41
+
42
# Object tracking on an uploaded video
def Track(video, tracking_method):
    """Run multi-object tracking on a video and return the result video path.

    Parameters
    ----------
    video : str
        Filesystem path of the uploaded video (gradio ``Video`` component).
    tracking_method : str
        Tracker name passed through to ``track.run`` ("bytetrack" or
        "strongsort").

    Returns
    -------
    str
        Path of the rendered tracking-result video.
    """
    # Scratch directory for the result video.
    temp_dir = "./temp"
    # Recreate it from scratch. ignore_errors covers the very first run,
    # when ./temp does not exist yet (bare rmtree would raise).
    shutil.rmtree(temp_dir, ignore_errors=True)
    os.makedirs(temp_dir, exist_ok=True)
    # The tracker keeps the input file's name for its output.
    video_name = os.path.basename(video)
    # Run detection + tracking; results land under <project>/<name>.
    track.run(source=video, yolo_weights=Path("weights/best2.pt"),
              reid_weights=Path("weights/osnet_x0_25_msmt17.pt"),
              project=Path(temp_dir), name='tempresult',
              tracking_method=tracking_method)
    temp_result_path = os.path.join(temp_dir, "tempresult", video_name)
    return temp_result_path
57
+
58
# Example videos offered in the gradio UI (second element is the tracker
# radio value, left unset).
example_video= [
    ["./video/5.mp4", None],
    ["./video/bicyclecity.mp4", None],
    ["./video/9.mp4", None],
    ["./video/8.mp4", None],
    ["./video/4.mp4", None],
    ["./video/car.mp4", None],
]

# Image-detection tab. UI label strings are user-facing Chinese text and
# are kept verbatim.
iface_Image = gr.Interface(fn=Detect,
                           inputs=gr.Image(label="上传一张红外图像,仅支持jpg格式"),
                           outputs=gr.Image(label="检测结果"),
                           examples=example_image)

# Video-tracking tab: video upload plus a tracker selector radio.
iface_video = gr.Interface(fn=Track,
                           inputs=[gr.Video(label="上传段红外视频,仅支持mp4格式"), gr.Radio(["bytetrack", "strongsort"], label="track methond", info="选择追踪器", value="bytetrack")],
                           outputs=gr.Video(label="追踪结果"),
                           examples=example_video)

# Combine both tabs into one app; tracking tab is shown first.
demo = gr.TabbedInterface([iface_video, iface_Image], tab_names=["目标追踪", "目标检测"], title="红外目标检测追踪")

# share=True also exposes a public gradio tunnel URL.
demo.launch(share=True)
81
+
82
+
83
+
84
+
85
+
86
+
87
+
88
+
89
+
90
+
91
+
92
+
93
+
94
+
95
+
96
+
97
+
98
+
flagged/log.csv ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ text,output,flag,username,timestamp
2
+ sfsda 东风东,SFSDA 东风东,,,2023-05-21 21:03:22.616101
reid_export.py ADDED
@@ -0,0 +1,314 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+
3
+ import os
4
+ # limit the number of cpus used by high performance libraries
5
+ os.environ["OMP_NUM_THREADS"] = "1"
6
+ os.environ["OPENBLAS_NUM_THREADS"] = "1"
7
+ os.environ["MKL_NUM_THREADS"] = "1"
8
+ os.environ["VECLIB_MAXIMUM_THREADS"] = "1"
9
+ os.environ["NUMEXPR_NUM_THREADS"] = "1"
10
+
11
+ import sys
12
+ import numpy as np
13
+ from pathlib import Path
14
+ import torch
15
+ import time
16
+ import platform
17
+ import pandas as pd
18
+ import subprocess
19
+ import torch.backends.cudnn as cudnn
20
+ from torch.utils.mobile_optimizer import optimize_for_mobile
21
+
22
+ FILE = Path(__file__).resolve()
23
+ ROOT = FILE.parents[0] # yolov5 strongsort root directory
24
+ WEIGHTS = ROOT / 'weights'
25
+
26
+ print(ROOT)
27
+
28
+ if str(ROOT) not in sys.path:
29
+ sys.path.append(str(ROOT)) # add ROOT to PATH
30
+ if str(ROOT / 'yolov5') not in sys.path:
31
+ sys.path.append(str(ROOT / 'yolov5')) # add yolov5 ROOT to PATH
32
+
33
+ ROOT = Path(os.path.relpath(ROOT, Path.cwd())) # relative
34
+
35
+ import logging
36
+ from yolov5.utils.torch_utils import select_device
37
+ from yolov5.models.common import DetectMultiBackend
38
+ from yolov5.utils.general import LOGGER, colorstr, check_requirements, check_version
39
+ from trackers.strong_sort.deep.models import build_model
40
+ from trackers.strong_sort.deep.reid_model_factory import get_model_name, load_pretrained_weights
41
+
42
+
43
def file_size(path):
    """Return the size of *path* in megabytes.

    Files report their own size; directories report the recursive total of
    all contained files; anything else (e.g. a missing path) reports 0.0.
    """
    target = Path(path)
    if target.is_dir():
        # Recursive walk; skip sub-directories themselves, count only files.
        total_bytes = sum(entry.stat().st_size for entry in target.glob('**/*') if entry.is_file())
        return total_bytes / 1E6
    if target.is_file():
        return target.stat().st_size / 1E6
    return 0.0
52
+
53
+
54
def export_formats():
    """Return the table of supported ReID export formats.

    Columns: human-readable name, the --include argument string, the output
    file suffix, and whether CPU / GPU inference is supported.
    """
    columns = ['Format', 'Argument', 'Suffix', 'CPU', 'GPU']
    rows = [
        ['PyTorch', '-', '.pt', True, True],
        ['TorchScript', 'torchscript', '.torchscript', True, True],
        ['ONNX', 'onnx', '.onnx', True, True],
        ['OpenVINO', 'openvino', '_openvino_model', True, False],
        ['TensorRT', 'engine', '.engine', False, True],
        ['TensorFlow Lite', 'tflite', '.tflite', True, False],
    ]
    return pd.DataFrame(rows, columns=columns)
65
+
66
+
67
def export_torchscript(model, im, file, optimize, prefix=colorstr('TorchScript:')):
    """Trace `model` with example input `im` and save a TorchScript copy.

    file: Path of the source weights; output gets a .torchscript suffix.
    optimize: run the mobile-interpreter optimization pass when True.
    Returns the output Path on success, None on failure (errors are logged,
    never raised).
    """
    # YOLOv5 TorchScript model export
    try:
        LOGGER.info(f'\n{prefix} starting export with torch {torch.__version__}...')
        f = file.with_suffix('.torchscript')

        # strict=False tolerates models whose outputs include non-tensor values
        ts = torch.jit.trace(model, im, strict=False)
        if optimize:  # https://pytorch.org/tutorials/recipes/mobile_interpreter.html
            optimize_for_mobile(ts)._save_for_lite_interpreter(str(f))
        else:
            ts.save(str(f))

        LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
        return f
    except Exception as e:
        LOGGER.info(f'{prefix} export failure: {e}')
83
+
84
+
85
def export_onnx(model, im, file, opset, dynamic, simplify, prefix=colorstr('ONNX:')):
    """Export `model` to ONNX at <file>.onnx.

    opset: ONNX opset version. dynamic: export with a dynamic batch axis
    (forces CPU export). simplify: additionally run onnx-simplifier.
    Returns the output Path on success, None on failure (errors are logged,
    never raised).
    """
    # ONNX export
    try:
        check_requirements(('onnx',))
        import onnx

        f = file.with_suffix('.onnx')
        LOGGER.info(f'\n{prefix} starting export with onnx {onnx.__version__}...')

        if dynamic:
            # Rebind the flag to the dynamic_axes mapping consumed below.
            dynamic = {'images': {0: 'batch'}}  # shape(1,3,640,640)
            dynamic['output'] = {0: 'batch'}  # shape(1,25200,85)

        torch.onnx.export(
            model.cpu() if dynamic else model,  # --dynamic only compatible with cpu
            im.cpu() if dynamic else im,
            f,
            verbose=False,
            opset_version=opset,
            do_constant_folding=True,
            input_names=['images'],
            output_names=['output'],
            dynamic_axes=dynamic or None
        )
        # Checks: reload, validate and re-save the exported graph.
        model_onnx = onnx.load(f)  # load onnx model
        onnx.checker.check_model(model_onnx)  # check onnx model
        onnx.save(model_onnx, f)

        # Simplify (best-effort: a simplifier failure keeps the plain export)
        if simplify:
            try:
                cuda = torch.cuda.is_available()
                check_requirements(('onnxruntime-gpu' if cuda else 'onnxruntime', 'onnx-simplifier>=0.4.1'))
                import onnxsim

                LOGGER.info(f'simplifying with onnx-simplifier {onnxsim.__version__}...')
                model_onnx, check = onnxsim.simplify(model_onnx)
                assert check, 'assert check failed'
                onnx.save(model_onnx, f)
            except Exception as e:
                LOGGER.info(f'simplifier failure: {e}')
        LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
        return f
    except Exception as e:
        LOGGER.info(f'export failure: {e}')
131
+
132
+
133
+
134
def export_openvino(file, half, prefix=colorstr('OpenVINO:')):
    """Convert the sibling <file>.onnx into an OpenVINO IR model directory.

    half: export FP16 weights instead of FP32.
    Returns the output directory path on success, None on failure.
    """
    # YOLOv5 OpenVINO export
    check_requirements(('openvino-dev',))  # requires openvino-dev: https://pypi.org/project/openvino-dev/
    import openvino.inference_engine as ie
    try:
        LOGGER.info(f'\n{prefix} starting export with openvino {ie.__version__}...')
        f = str(file).replace('.pt', f'_openvino_model{os.sep}')

        cmd = f"mo --input_model {file.with_suffix('.onnx')} --output_dir {f} --data_type {'FP16' if half else 'FP32'}"
        subprocess.check_output(cmd.split())  # export
        # BUG FIX: the success log and return previously sat OUTSIDE the
        # try/except, so they ran even after a failure — and could raise
        # NameError if the exception fired before `f` was bound.
        LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
        return f
    except Exception as e:
        LOGGER.info(f'export failure: {e}')
        return None
148
+
149
+
150
def export_tflite(file, half, prefix=colorstr('TFLite:')):
    """Convert an OpenVINO IR model directory into TensorFlow Lite models.

    file: path of the *_openvino_model directory holding the IR .xml.
    half: accepted for signature symmetry; quantization modes are fixed by
    the openvino2tensorflow flags below.
    Returns the output directory Path on success, None on failure.
    """
    # YOLOv5 TFLite export via openvino2tensorflow
    try:
        check_requirements(('openvino2tensorflow', 'tensorflow', 'tensorflow_datasets'))  # requires openvino-dev: https://pypi.org/project/openvino-dev/
        import openvino.inference_engine as ie
        LOGGER.info(f'\n{prefix} starting export with openvino {ie.__version__}...')
        output = Path(str(file).replace(f'_openvino_model{os.sep}', f'_tflite_model{os.sep}'))
        modelxml = list(Path(file).glob('*.xml'))[0]
        cmd = f"openvino2tensorflow \
            --model_path {modelxml} \
            --model_output_path {output} \
            --output_pb \
            --output_saved_model \
            --output_no_quant_float32_tflite \
            --output_dynamic_range_quant_tflite"
        subprocess.check_output(cmd.split())  # export

        # BUG FIX: the original logged file_size(f) and returned `f`,
        # which was never defined in this function (NameError on success).
        LOGGER.info(f'{prefix} export success, results saved in {output} ({file_size(output):.1f} MB)')
        return output
    except Exception as e:
        LOGGER.info(f'\n{prefix} export failure: {e}')
171
+
172
+
173
def export_engine(model, im, file, half, dynamic, simplify, workspace=4, verbose=False, prefix=colorstr('TensorRT:')):
    """Build a TensorRT engine from `model` (via an intermediate ONNX export).

    im must live on a CUDA device. workspace: builder workspace budget in GB.
    Returns the .engine Path on success, None on failure (errors are logged,
    never raised).
    """
    # YOLOv5 TensorRT export https://developer.nvidia.com/tensorrt
    try:
        assert im.device.type != 'cpu', 'export running on CPU but must be on GPU, i.e. `python export.py --device 0`'
        try:
            import tensorrt as trt
        except Exception:
            # Best-effort install on Linux only, then retry the import.
            if platform.system() == 'Linux':
                check_requirements(('nvidia-tensorrt',), cmds=('-U --index-url https://pypi.ngc.nvidia.com',))
            import tensorrt as trt

        if trt.__version__[0] == '7':  # TensorRT 7 handling https://github.com/ultralytics/yolov5/issues/6012
            # TRT7 can't parse the full anchor grid; shrink it for the ONNX
            # export and restore it afterwards.
            grid = model.model[-1].anchor_grid
            model.model[-1].anchor_grid = [a[..., :1, :1, :] for a in grid]
            export_onnx(model, im, file, 12, dynamic, simplify)  # opset 12
            model.model[-1].anchor_grid = grid
        else:  # TensorRT >= 8
            check_version(trt.__version__, '8.0.0', hard=True)  # require tensorrt>=8.0.0
            export_onnx(model, im, file, 12, dynamic, simplify)  # opset 13
        onnx = file.with_suffix('.onnx')

        LOGGER.info(f'\n{prefix} starting export with TensorRT {trt.__version__}...')
        assert onnx.exists(), f'failed to export ONNX file: {onnx}'
        f = file.with_suffix('.engine')  # TensorRT engine file
        logger = trt.Logger(trt.Logger.INFO)
        if verbose:
            logger.min_severity = trt.Logger.Severity.VERBOSE

        builder = trt.Builder(logger)
        config = builder.create_builder_config()
        config.max_workspace_size = workspace * 1 << 30
        # config.set_memory_pool_limit(trt.MemoryPoolType.WORKSPACE, workspace << 30)  # fix TRT 8.4 deprecation notice

        # Explicit-batch network, populated by parsing the ONNX file.
        flag = (1 << int(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH))
        network = builder.create_network(flag)
        parser = trt.OnnxParser(network, logger)
        if not parser.parse_from_file(str(onnx)):
            raise RuntimeError(f'failed to load ONNX file: {onnx}')

        # Log the parsed network's I/O bindings for diagnostics.
        inputs = [network.get_input(i) for i in range(network.num_inputs)]
        outputs = [network.get_output(i) for i in range(network.num_outputs)]
        LOGGER.info(f'{prefix} Network Description:')
        for inp in inputs:
            LOGGER.info(f'{prefix}\tinput "{inp.name}" with shape {inp.shape} and dtype {inp.dtype}')
        for out in outputs:
            LOGGER.info(f'{prefix}\toutput "{out.name}" with shape {out.shape} and dtype {out.dtype}')

        if dynamic:
            if im.shape[0] <= 1:
                LOGGER.warning(f"{prefix}WARNING: --dynamic model requires maximum --batch-size argument")
            # min batch 1, optimal ~half, max = the provided batch size.
            profile = builder.create_optimization_profile()
            for inp in inputs:
                profile.set_shape(inp.name, (1, *im.shape[1:]), (max(1, im.shape[0] // 2), *im.shape[1:]), im.shape)
            config.add_optimization_profile(profile)

        LOGGER.info(f'{prefix} building FP{16 if builder.platform_has_fast_fp16 and half else 32} engine in {f}')
        if builder.platform_has_fast_fp16 and half:
            config.set_flag(trt.BuilderFlag.FP16)
        with builder.build_engine(network, config) as engine, open(f, 'wb') as t:
            t.write(engine.serialize())
        LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
        return f
    except Exception as e:
        LOGGER.info(f'\n{prefix} export failure: {e}')
237
+
238
+
239
if __name__ == "__main__":
    # CLI entry point: load a ReID model and export it to the requested
    # formats (torchscript / onnx / openvino / engine / tflite).

    parser = argparse.ArgumentParser(description="ReID export")
    parser.add_argument('--batch-size', type=int, default=1, help='batch size')
    parser.add_argument('--imgsz', '--img', '--img-size', nargs='+', type=int, default=[256, 128], help='image (h, w)')
    parser.add_argument('--device', default='cpu', help='cuda device, i.e. 0 or 0,1,2,3 or cpu')
    parser.add_argument('--optimize', action='store_true', help='TorchScript: optimize for mobile')
    parser.add_argument('--dynamic', action='store_true', help='ONNX/TF/TensorRT: dynamic axes')
    parser.add_argument('--simplify', action='store_true', help='ONNX: simplify model')
    parser.add_argument('--opset', type=int, default=12, help='ONNX: opset version')
    parser.add_argument('--workspace', type=int, default=4, help='TensorRT: workspace size (GB)')
    parser.add_argument('--verbose', action='store_true', help='TensorRT: verbose log')
    parser.add_argument('--weights', nargs='+', type=str, default=WEIGHTS / 'osnet_x0_25_msmt17.pt', help='model.pt path(s)')
    parser.add_argument('--half', action='store_true', help='FP16 half-precision export')
    parser.add_argument('--include',
                        nargs='+',
                        default=['torchscript'],
                        help='torchscript, onnx, openvino, engine')
    args = parser.parse_args()

    t = time.time()

    # Validate the requested formats against the known export table.
    include = [x.lower() for x in args.include]  # to lowercase
    fmts = tuple(export_formats()['Argument'][1:])  # --include arguments
    flags = [x in include for x in fmts]
    assert sum(flags) == len(include), f'ERROR: Invalid --include {include}, valid --include arguments are {fmts}'
    jit, onnx, openvino, engine, tflite = flags  # export booleans

    args.device = select_device(args.device)
    if args.half:
        assert args.device.type != 'cpu', '--half only compatible with GPU export, i.e. use --device 0'
        assert not args.dynamic, '--half not compatible with --dynamic, i.e. use either --half or --dynamic but not both'

    # nargs='+' yields a list when --weights is passed; keep only the first.
    if isinstance(args.weights, list):
        args.weights = Path(args.weights[0])

    # Build the ReID backbone; download pretrained weights only when no
    # local .pt checkpoint was supplied.
    model = build_model(
        get_model_name(args.weights),
        num_classes=1,
        pretrained=not (args.weights and args.weights.is_file() and args.weights.suffix == '.pt'),
        use_gpu=args.device
    ).to(args.device)
    load_pretrained_weights(model, args.weights)
    model.eval()

    if args.optimize:
        # BUG FIX: was `device.type` — no bare `device` exists in this
        # scope (NameError); the parsed device lives in args.device.
        assert args.device.type == 'cpu', '--optimize not compatible with cuda devices, i.e. use --device cpu'

    im = torch.zeros(args.batch_size, 3, args.imgsz[0], args.imgsz[1]).to(args.device)  # image size(1,3,640,480) BCHW iDetection
    for _ in range(2):
        y = model(im)  # dry runs
    if args.half:
        im, model = im.half(), model.half()  # to FP16
    shape = tuple((y[0] if isinstance(y, tuple) else y).shape)  # model output shape
    LOGGER.info(f"\n{colorstr('PyTorch:')} starting from {args.weights} with output shape {shape} ({file_size(args.weights):.1f} MB)")

    # Exports
    f = [''] * len(fmts)  # exported filenames
    if jit:
        f[0] = export_torchscript(model, im, args.weights, args.optimize)  # opset 12
    if engine:  # TensorRT required before ONNX
        f[1] = export_engine(model, im, args.weights, args.half, args.dynamic, args.simplify, args.workspace, args.verbose)
    if onnx:  # OpenVINO requires ONNX
        f[2] = export_onnx(model, im, args.weights, args.opset, args.dynamic, args.simplify)  # opset 12
    if openvino:
        f[3] = export_openvino(args.weights, args.half)
    if tflite:
        export_tflite(f, False)

    # Finish
    f = [str(x) for x in f if x]  # filter out '' and None
    if any(f):
        LOGGER.info(f'\nExport complete ({time.time() - t:.1f}s)'
                    f"\nResults saved to {colorstr('bold', args.weights.parent.resolve())}"
                    f"\nVisualize: https://netron.app")
314
+
requirements.txt ADDED
Binary file (14.7 kB). View file
 
test.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Driver script: run the tracker over every .mp4 in ./video and save the
# results under runs/<tracker>3/video<stem>.
import track
import os
from pathlib import Path

FILE = Path(__file__).resolve()
ROOT = FILE.parents[0]  # yolov5 strongsort root directory
WEIGHTS = ROOT / 'weights'
trackers = "strongsort"  # one of ["strongsort", "bytetrack"]
path = "./video"
# Collect every mp4 file in the video folder.
v = [x for x in os.listdir(path) if x.endswith(".mp4")]

for i in v:
    # , classes=[0, 1, 2, 3, 5, 7, 9, 11, 10]
    track.run(source=ROOT / f"video/{i}", tracking_method=trackers, yolo_weights=WEIGHTS / "best2.pt", project=ROOT / f"runs/{trackers}3", name=f"video{i.split('.')[0]}", iou_thres=0.45, conf_thres=0.35, hide_conf=True)
    # Runtime string kept verbatim; prints "<name> detection finished".
    print(f"{i}检测完毕")
+ print(f"{i}检测完毕")
16
+
17
+
test_image/FLIR.yaml ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ train: VOCdevkit\images\train # 8493 images 训练集
2
+ val: VOCdevkit\images\val # 2249 images 验证集
3
+
4
+ # number of classes 类别数
5
+ nc: 15
6
+
7
+
8
+ # Classes
9
+ names:
10
+ 0: person
11
+ 1: bike
12
+ 2: car
13
+ 3: motor
14
+ 4: bus
15
+ 5: truck #6-5
16
+ 6: light #7-6
17
+ 7: hydrant #8-7
18
+ 8: sign # 9-8
19
+
20
+
21
+ # 5: train
22
+ # 10: dog
23
+ # 11: skateboard
24
+ # 12: stroller
25
+ # 13: scooter
26
+ # 14: other vehicle
27
+
28
+
29
+
test_image/video-2SReBn5LtAkL5HMj2-frame-000317-HTgPBFgZyPdwQnNvE.jpg ADDED
test_image/video-2SReBn5LtAkL5HMj2-frame-005072-MA7NCLQGoqq9aHaiL.jpg ADDED
test_image/video-2rsjnZFyGQGeynfbv-frame-003708-6fPQbB7jtibwaYAE7.jpg ADDED
test_image/video-jNQtRj6NGycZDEXpe-frame-002515-J3YntG8ntvZheKK3P.jpg ADDED
test_image/video-kDDWXrnLSoSdHCZ7S-frame-003063-eaKjPvPskDPjenZ8S.jpg ADDED
test_image/video-r68Yr9RPWEp5fW2ZF-frame-000333-X6K5iopqbmjKEsSqN.jpg ADDED
track.py ADDED
@@ -0,0 +1,308 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+
3
+ import os
4
+ # limit the number of cpus used by high performance libraries
5
+ os.environ["OMP_NUM_THREADS"] = "1"
6
+ os.environ["OPENBLAS_NUM_THREADS"] = "1"
7
+ os.environ["MKL_NUM_THREADS"] = "1"
8
+ os.environ["VECLIB_MAXIMUM_THREADS"] = "1"
9
+ os.environ["NUMEXPR_NUM_THREADS"] = "1"
10
+
11
+ import sys
12
+ import numpy as np
13
+ from pathlib import Path
14
+ import torch
15
+ import torch.backends.cudnn as cudnn
16
+
17
+ FILE = Path(__file__).resolve()
18
+ ROOT = FILE.parents[0] # yolov5 strongsort root directory
19
+ WEIGHTS = ROOT / 'weights'
20
+
21
+ if str(ROOT) not in sys.path:
22
+ sys.path.append(str(ROOT)) # add ROOT to PATH
23
+ if str(ROOT / 'yolov5') not in sys.path:
24
+ sys.path.append(str(ROOT / 'yolov5')) # add yolov5 ROOT to PATH
25
+ if str(ROOT / 'trackers' / 'strong_sort') not in sys.path:
26
+ sys.path.append(str(ROOT / 'trackers' / 'strong_sort')) # add strong_sort ROOT to PATH
27
+
28
+ ROOT = Path(os.path.relpath(ROOT, Path.cwd())) # relative
29
+
30
+ import logging
31
+ from yolov5.models.common import DetectMultiBackend
32
+ from yolov5.utils.dataloaders import VID_FORMATS, LoadImages, LoadStreams
33
+ from yolov5.utils.general import (LOGGER, check_img_size, non_max_suppression, scale_boxes, check_requirements, cv2,
34
+ check_imshow, xyxy2xywh, increment_path, strip_optimizer, colorstr, print_args, check_file)
35
+ from yolov5.utils.torch_utils import select_device, time_sync
36
+ from yolov5.utils.plots import Annotator, colors, save_one_box
37
+ from trackers.multi_tracker_zoo import create_tracker
38
+
39
+ # remove duplicated stream handler to avoid duplicated logging
40
+ #logging.getLogger().removeHandler(logging.getLogger().handlers[0])
41
+
42
@torch.no_grad()
def run(
        source='0',
        yolo_weights=WEIGHTS / 'yolov5m.pt',  # model.pt path(s),
        reid_weights=WEIGHTS / 'osnet_x0_25_msmt17.pt',  # model.pt path,
        tracking_method='strongsort',
        imgsz=(640, 640),  # inference size (height, width)
        conf_thres=0.25,  # confidence threshold
        iou_thres=0.45,  # NMS IOU threshold
        max_det=1000,  # maximum detections per image
        device='',  # cuda device, i.e. 0 or 0,1,2,3 or cpu
        show_vid=False,  # show results
        save_txt=False,  # save results to *.txt
        save_conf=False,  # save confidences in --save-txt labels
        save_crop=False,  # save cropped prediction boxes
        save_vid=True,  # save annotated result video
        nosave=False,  # do not save images/videos
        classes=None,  # filter by class: --class 0, or --class 0 2 3
        agnostic_nms=False,  # class-agnostic NMS
        augment=False,  # augmented inference
        visualize=False,  # visualize features
        update=False,  # update all models
        project=ROOT / 'runs/track',  # save results to project/name
        name='exp',  # save results to project/name
        exist_ok=False,  # existing project/name ok, do not increment
        line_thickness=1,  # bounding box thickness (pixels)
        hide_labels=False,  # hide labels
        hide_conf=False,  # hide confidences
        hide_class=False,  # hide IDs
        half=False,  # use FP16 half-precision inference
        dnn=False,  # use OpenCV DNN for ONNX inference
        vid_stride=1,  # video frame-rate stride
):
    """Detect objects with YOLOv5 and track them with the chosen tracker.

    Reads frames from `source` (file, directory, URL or webcam index), runs
    detection + NMS, feeds detections to one tracker per video source, and
    writes annotated videos and MOT-format .txt tracks under project/name.
    """
    # NOTE(review): this unconditionally overrides the save_txt parameter —
    # every run writes MOT .txt tracks regardless of the caller's choice.
    save_txt = True
    source = str(source)
    save_img = not nosave and not source.endswith('.txt')  # save inference images
    is_file = Path(source).suffix[1:] in (VID_FORMATS)
    is_url = source.lower().startswith(('rtsp://', 'rtmp://', 'http://', 'https://'))
    webcam = source.isnumeric() or source.endswith('.txt') or (is_url and not is_file)
    if is_url and is_file:
        source = check_file(source)  # download

    # Directories: derive experiment name from the weights unless a name was given
    if not isinstance(yolo_weights, list):  # single yolo model
        exp_name = yolo_weights.stem
    elif type(yolo_weights) is list and len(yolo_weights) == 1:  # single models after --yolo_weights
        exp_name = Path(yolo_weights[0]).stem
    else:  # multiple models after --yolo_weights
        exp_name = 'ensemble'
    exp_name = name if name else exp_name + "_" + reid_weights.stem
    save_dir = increment_path(Path(project) / exp_name, exist_ok=exist_ok)  # increment run
    (save_dir / 'tracks' if save_txt else save_dir).mkdir(parents=True, exist_ok=True)  # make dir

    # Load model
    device = select_device(device)
    model = DetectMultiBackend(yolo_weights, device=device, dnn=dnn, data=None, fp16=half)
    stride, names, pt = model.stride, model.names, model.pt
    imgsz = check_img_size(imgsz, s=stride)  # check image size

    # Dataloader: streams for webcam/URL lists, otherwise single file/folder
    if webcam:
        show_vid = check_imshow()
        dataset = LoadStreams(source, img_size=imgsz, stride=stride, auto=pt, vid_stride=vid_stride)
        nr_sources = len(dataset)
    else:
        dataset = LoadImages(source, img_size=imgsz, stride=stride, auto=pt)
        nr_sources = 1
    vid_path, vid_writer, txt_path = [None] * nr_sources, [None] * nr_sources, [None] * nr_sources

    # Create as many strong sort instances as there are video sources
    tracker_list = []
    for i in range(nr_sources):
        tracker = create_tracker(tracking_method, reid_weights, device, half)
        tracker_list.append(tracker, )
        # Warm up the ReID model if this tracker type carries one.
        if hasattr(tracker_list[i], 'model'):
            if hasattr(tracker_list[i].model, 'warmup'):
                tracker_list[i].model.warmup()
    outputs = [None] * nr_sources

    # Run tracking
    #model.warmup(imgsz=(1 if pt else nr_sources, 3, *imgsz))  # warmup
    # dt accumulates [pre-process, inference, NMS, tracker-update] seconds.
    dt, seen = [0.0, 0.0, 0.0, 0.0], 0
    curr_frames, prev_frames = [None] * nr_sources, [None] * nr_sources
    for frame_idx, (path, im, im0s, vid_cap, s) in enumerate(dataset):
        t1 = time_sync()
        im = torch.from_numpy(im).to(device)
        im = im.half() if half else im.float()  # uint8 to fp16/32
        im /= 255.0  # 0 - 255 to 0.0 - 1.0
        if len(im.shape) == 3:
            im = im[None]  # expand for batch dim
        t2 = time_sync()
        dt[0] += t2 - t1

        # Inference
        visualize = increment_path(save_dir / Path(path[0]).stem, mkdir=True) if visualize else False
        pred = model(im, augment=augment, visualize=visualize)
        t3 = time_sync()
        dt[1] += t3 - t2

        # Apply NMS
        pred = non_max_suppression(pred, conf_thres, iou_thres, classes, agnostic_nms, max_det=max_det)
        dt[2] += time_sync() - t3

        # Process detections
        for i, det in enumerate(pred):  # detections per image
            seen += 1
            if webcam:  # nr_sources >= 1
                p, im0, _ = path[i], im0s[i].copy(), dataset.count
                p = Path(p)  # to Path
                s += f'{i}: '
                txt_file_name = p.name
                save_path = str(save_dir / p.name)  # im.jpg, vid.mp4, ...
            else:
                p, im0, _ = path, im0s.copy(), getattr(dataset, 'frame', 0)
                p = Path(p)  # to Path
                # video file
                if source.endswith(VID_FORMATS):
                    txt_file_name = p.stem
                    save_path = str(save_dir / p.name)  # im.jpg, vid.mp4, ...
                # folder with imgs
                else:
                    txt_file_name = p.parent.name  # get folder name containing current img
                    save_path = str(save_dir / p.parent.name)  # im.jpg, vid.mp4, ...
            curr_frames[i] = im0

            txt_path = str(save_dir / 'tracks' / txt_file_name)  # im.txt
            s += '%gx%g ' % im.shape[2:]  # print string
            imc = im0.copy() if save_crop else im0  # for save_crop

            annotator = Annotator(im0, line_width=line_thickness, example=str(names))

            # Camera motion compensation (strongsort exposes camera_update).
            if hasattr(tracker_list[i], 'tracker') and hasattr(tracker_list[i].tracker, 'camera_update'):
                if prev_frames[i] is not None and curr_frames[i] is not None:  # camera motion compensation
                    tracker_list[i].tracker.camera_update(prev_frames[i], curr_frames[i])

            if det is not None and len(det):
                # Rescale boxes from img_size to im0 size
                det[:, :4] = scale_boxes(im.shape[2:], det[:, :4], im0.shape).round()  # xyxy

                # Print results
                for c in det[:, -1].unique():
                    n = (det[:, -1] == c).sum()  # detections per class
                    s += f"{n} {names[int(c)]}{'s' * (n > 1)}, "  # add to string

                # pass detections to strongsort
                t4 = time_sync()
                outputs[i] = tracker_list[i].update(det.cpu(), im0)
                t5 = time_sync()
                dt[3] += t5 - t4

                # draw boxes for visualization
                if len(outputs[i]) > 0:
                    for j, (output, conf) in enumerate(zip(outputs[i], det[:, 4])):

                        # output layout: [x1, y1, x2, y2, track_id, class]
                        bboxes = output[0:4]
                        id = output[4]
                        cls = output[5]

                        if save_txt:
                            # to MOT format (top-left x/y plus width/height)
                            bbox_left = output[0]
                            bbox_top = output[1]
                            bbox_w = output[2] - output[0]
                            bbox_h = output[3] - output[1]
                            # Write MOT compliant results to file
                            with open(txt_path + '.txt', 'a') as f:
                                f.write(('%g ' * 10 + '\n') % (frame_idx + 1, id, bbox_left,  # MOT format
                                                               bbox_top, bbox_w, bbox_h, -1, -1, -1, i))
                        # NOTE(review): overrides the save_vid parameter —
                        # videos are always written once a track exists.
                        save_vid=True

                        if save_vid or save_crop or show_vid:  # Add bbox to image
                            c = int(cls)  # integer class
                            id = int(id)  # integer id
                            label = None if hide_labels else (f'{id} {names[c]}' if hide_conf else \
                                (f'{id} {conf:.2f}' if hide_class else f'{id} {names[c]} {conf:.2f}'))
                            annotator.box_label(bboxes, label, color=colors(c, True))
                            if save_crop:
                                txt_file_name = txt_file_name if (isinstance(path, list) and len(path) > 1) else ''
                                save_one_box(bboxes, imc, file=save_dir / 'crops' / txt_file_name / names[c] / f'{id}' / f'{p.stem}.jpg', BGR=True)

                LOGGER.info(f'{s}Done. yolo:({t3 - t2:.3f}s), {tracking_method}:({t5 - t4:.3f}s)')

            else:
                #strongsort_list[i].increment_ages()
                LOGGER.info('No detections')

            # Stream results
            im0 = annotator.result()
            if show_vid:
                cv2.imshow(str(p), im0)
                cv2.waitKey(1)  # 1 millisecond

            # Save results (image with detections)
            if save_vid:
                if vid_path[i] != save_path:  # new video
                    vid_path[i] = save_path
                    if isinstance(vid_writer[i], cv2.VideoWriter):
                        vid_writer[i].release()  # release previous video writer
                    if vid_cap:  # video
                        fps = vid_cap.get(cv2.CAP_PROP_FPS)
                        w = int(vid_cap.get(cv2.CAP_PROP_FRAME_WIDTH))
                        h = int(vid_cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
                    else:  # stream
                        fps, w, h = 30, im0.shape[1], im0.shape[0]
                    save_path = str(Path(save_path).with_suffix('.mp4'))  # force *.mp4 suffix on results videos
                    vid_writer[i] = cv2.VideoWriter(save_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (w, h))
                vid_writer[i].write(im0)

            prev_frames[i] = curr_frames[i]

    # Print results
    t = tuple(x / seen * 1E3 for x in dt)  # speeds per image
    LOGGER.info(f'Speed: %.1fms pre-process, %.1fms inference, %.1fms NMS, %.1fms {tracking_method} update per image at shape {(1, 3, *imgsz)}' % t)
    if save_txt or save_vid:
        s = f"\n{len(list(save_dir.glob('tracks/*.txt')))} tracks saved to {save_dir / 'tracks'}" if save_txt else ''
        LOGGER.info(f"Results saved to {colorstr('bold', save_dir)}{s}")
    if update:
        strip_optimizer(yolo_weights)  # update model (to fix SourceChangeWarning)
260
+
261
+
262
def parse_opt():
    """Build and parse the command-line options for the tracking script.

    Returns:
        argparse.Namespace: parsed options, with ``imgsz`` expanded from a
        single value to ``(h, w)``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--yolo-weights', nargs='+', type=Path, default=WEIGHTS / 'best2.pt', help='model.pt path(s)')
    parser.add_argument('--reid-weights', type=Path, default=WEIGHTS / 'osnet_x0_25_msmt17.pt')
    parser.add_argument('--tracking-method', type=str, default='bytetrack', help='strongsort, ocsort, bytetrack')
    parser.add_argument('--source', type=str, default=r'video', help='file/dir/URL/glob, 0 for webcam')
    parser.add_argument('--imgsz', '--img', '--img-size', nargs='+', type=int, default=[640], help='inference size h,w')
    parser.add_argument('--conf-thres', type=float, default=0.5, help='confidence threshold')
    parser.add_argument('--iou-thres', type=float, default=0.5, help='NMS IoU threshold') #0.5
    parser.add_argument('--max-det', type=int, default=1000, help='maximum detections per image')
    parser.add_argument('--device', default='', help='cuda device, i.e. 0 or 0,1,2,3 or cpu')
    parser.add_argument('--show-vid', action='store_true', help='display tracking video results')
    parser.add_argument('--save-txt', action='store_true', help='save results to *.txt')
    parser.add_argument('--save-conf', action='store_true', help='save confidences in --save-txt labels')
    parser.add_argument('--save-crop', action='store_true', help='save cropped prediction boxes')
    parser.add_argument('--save-vid', action='store_true', help='save video tracking results')
    # NOTE(review): store_false means the value defaults to True and passing
    # --nosave sets it to False — confirm this matches how run() consumes it.
    parser.add_argument('--nosave', action='store_false', help='do not save images/videos')
    # class 0 is person, 1 is bycicle, 2 is car... 79 is oven 0 1 2 3 5 7 9 11 10
    parser.add_argument('--classes', nargs='+', type=int, help='filter by class: --classes 0, or --classes 0 2 3')
    parser.add_argument('--agnostic-nms', action='store_true', help='class-agnostic NMS')
    parser.add_argument('--augment', action='store_true', help='augmented inference')
    parser.add_argument('--visualize', action='store_true', help='visualize features')
    parser.add_argument('--update', action='store_true', help='update all models')
    parser.add_argument('--project', default=ROOT / 'runs/track', help='save results to project/name')
    parser.add_argument('--name', default='exp', help='save results to project/name')
    parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment')
    parser.add_argument('--line-thickness', default=1, type=int, help='bounding box thickness (pixels)')
    parser.add_argument('--hide-labels', default=False, action='store_true', help='hide labels')
    # NOTE(review): default=True with store_true makes --hide-conf always True
    # regardless of the flag — presumably intentional here; confirm.
    parser.add_argument('--hide-conf', default=True, action='store_true', help='hide confidences')
    parser.add_argument('--hide-class', default=False, action='store_true', help='hide IDs')
    parser.add_argument('--half', action='store_true', help='use FP16 half-precision inference')
    parser.add_argument('--dnn', action='store_true', help='use OpenCV DNN for ONNX inference')
    parser.add_argument('--vid-stride', type=int, default=1, help='video frame-rate stride')
    opt = parser.parse_args()
    # a single --imgsz value is duplicated into (h, w)
    opt.imgsz *= 2 if len(opt.imgsz) == 1 else 1  # expand
    print_args(vars(opt))
    return opt
299
+
300
+
301
def main(opt):
    """Check runtime requirements, then launch tracking with the parsed options."""
    check_requirements(requirements=ROOT / 'requirements.txt', exclude=('tensorboard', 'thop'))
    run(**vars(opt))
304
+
305
+
306
+ if __name__ == "__main__":
307
+ opt = parse_opt()
308
+ main(opt)
trackers/__init__.py ADDED
File without changes
trackers/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (149 Bytes). View file
 
trackers/__pycache__/__init__.cpython-39.pyc ADDED
Binary file (149 Bytes). View file
 
trackers/__pycache__/multi_tracker_zoo.cpython-38.pyc ADDED
Binary file (1.27 kB). View file
 
trackers/__pycache__/multi_tracker_zoo.cpython-39.pyc ADDED
Binary file (1.27 kB). View file
 
trackers/bytetrack/__pycache__/basetrack.cpython-38.pyc ADDED
Binary file (1.85 kB). View file
 
trackers/bytetrack/__pycache__/basetrack.cpython-39.pyc ADDED
Binary file (1.85 kB). View file
 
trackers/bytetrack/__pycache__/byte_tracker.cpython-38.pyc ADDED
Binary file (9.91 kB). View file
 
trackers/bytetrack/__pycache__/byte_tracker.cpython-39.pyc ADDED
Binary file (9.85 kB). View file
 
trackers/bytetrack/__pycache__/kalman_filter.cpython-38.pyc ADDED
Binary file (8.11 kB). View file
 
trackers/bytetrack/__pycache__/kalman_filter.cpython-39.pyc ADDED
Binary file (8.08 kB). View file
 
trackers/bytetrack/__pycache__/matching.cpython-38.pyc ADDED
Binary file (6.48 kB). View file
 
trackers/bytetrack/__pycache__/matching.cpython-39.pyc ADDED
Binary file (6.42 kB). View file
 
trackers/bytetrack/basetrack.py ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ from collections import OrderedDict
3
+
4
+
5
class TrackState(object):
    """Integer codes for the lifecycle of a track."""
    New = 0      # created but not yet confirmed
    Tracked = 1  # actively matched in the current frame
    Lost = 2     # recently unmatched; may still be re-activated
    Removed = 3  # permanently dropped
10
+
11
+
12
class BaseTrack(object):
    """Abstract base for a single tracked object; concrete trackers subclass it."""

    _count = 0  # global id counter backing next_id()

    track_id = 0
    is_activated = False
    state = TrackState.New

    # NOTE(review): these class-level mutable defaults (history, features)
    # are shared across instances unless subclasses rebind them per instance.
    history = OrderedDict()
    features = []
    curr_feature = None
    score = 0
    start_frame = 0
    frame_id = 0
    time_since_update = 0

    # multi-camera
    location = (np.inf, np.inf)

    @property
    def end_frame(self):
        # Last frame this track was updated on.
        return self.frame_id

    @staticmethod
    def next_id():
        """Return the next globally unique track id."""
        BaseTrack._count += 1
        return BaseTrack._count

    def activate(self, *args):
        # Subclasses must implement track initialisation.
        raise NotImplementedError

    def predict(self):
        # Subclasses must implement state prediction.
        raise NotImplementedError

    def update(self, *args, **kwargs):
        # Subclasses must implement the measurement update.
        raise NotImplementedError

    def mark_lost(self):
        """Flag the track as temporarily lost."""
        self.state = TrackState.Lost

    def mark_removed(self):
        """Flag the track as permanently removed."""
        self.state = TrackState.Removed
trackers/bytetrack/byte_tracker.py ADDED
@@ -0,0 +1,353 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ from collections import deque
3
+ import os
4
+ import os.path as osp
5
+ import copy
6
+ import torch
7
+ import torch.nn.functional as F
8
+
9
+ from yolov5.utils.general import xywh2xyxy, xyxy2xywh
10
+
11
+
12
+ from trackers.bytetrack.kalman_filter import KalmanFilter
13
+ from trackers.bytetrack import matching
14
+ from trackers.bytetrack.basetrack import BaseTrack, TrackState
15
+
16
class STrack(BaseTrack):
    """A single tracked object whose motion state is estimated by a Kalman filter."""

    # One Kalman filter shared by all tracks, used for vectorised multi_predict.
    shared_kalman = KalmanFilter()

    def __init__(self, tlwh, score, cls):
        """Create an inactive track from a detection.

        tlwh: bounding box, score: detection confidence, cls: class label.
        """
        # wait activate
        # BUGFIX: dtype=np.float — the np.float alias was removed in NumPy 1.24;
        # use the explicit np.float64 (same dtype).
        self._tlwh = np.asarray(tlwh, dtype=np.float64)
        self.kalman_filter = None
        self.mean, self.covariance = None, None
        self.is_activated = False

        self.score = score
        self.tracklet_len = 0
        self.cls = cls

    def predict(self):
        """Advance the state one frame; non-tracked states get zero height velocity."""
        mean_state = self.mean.copy()
        if self.state != TrackState.Tracked:
            mean_state[7] = 0
        self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance)

    @staticmethod
    def multi_predict(stracks):
        """Vectorised Kalman prediction for a list of tracks (updates them in place)."""
        if len(stracks) > 0:
            multi_mean = np.asarray([st.mean.copy() for st in stracks])
            multi_covariance = np.asarray([st.covariance for st in stracks])
            for i, st in enumerate(stracks):
                if st.state != TrackState.Tracked:
                    multi_mean[i][7] = 0
            multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance)
            for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)):
                stracks[i].mean = mean
                stracks[i].covariance = cov

    def activate(self, kalman_filter, frame_id):
        """Start a new tracklet"""
        self.kalman_filter = kalman_filter
        self.track_id = self.next_id()
        self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh))

        self.tracklet_len = 0
        self.state = TrackState.Tracked
        # Only tracks born on the very first frame are immediately confirmed;
        # later ones must survive an association step first.
        if frame_id == 1:
            self.is_activated = True
        # self.is_activated = True
        self.frame_id = frame_id
        self.start_frame = frame_id

    def re_activate(self, new_track, frame_id, new_id=False):
        """Revive a lost track with a fresh detection; optionally assign a new id."""
        self.mean, self.covariance = self.kalman_filter.update(
            self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh)
        )
        self.tracklet_len = 0
        self.state = TrackState.Tracked
        self.is_activated = True
        self.frame_id = frame_id
        if new_id:
            self.track_id = self.next_id()
        self.score = new_track.score
        self.cls = new_track.cls

    def update(self, new_track, frame_id):
        """
        Update a matched track
        :type new_track: STrack
        :type frame_id: int
        :type update_feature: bool
        :return:
        """
        self.frame_id = frame_id
        self.tracklet_len += 1
        # self.cls = cls

        new_tlwh = new_track.tlwh
        self.mean, self.covariance = self.kalman_filter.update(
            self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh))
        self.state = TrackState.Tracked
        self.is_activated = True

        self.score = new_track.score

    @property
    # @jit(nopython=True)
    def tlwh(self):
        """Get current position in bounding box format `(top left x, top left y,
        width, height)`.
        """
        if self.mean is None:
            return self._tlwh.copy()
        # mean[:4] is (cx, cy, aspect, h): recover width, then shift to top-left
        ret = self.mean[:4].copy()
        ret[2] *= ret[3]
        ret[:2] -= ret[2:] / 2
        return ret

    @property
    # @jit(nopython=True)
    def tlbr(self):
        """Convert bounding box to format `(min x, min y, max x, max y)`, i.e.,
        `(top left, bottom right)`.
        """
        ret = self.tlwh.copy()
        ret[2:] += ret[:2]
        return ret

    @staticmethod
    # @jit(nopython=True)
    def tlwh_to_xyah(tlwh):
        """Convert bounding box to format `(center x, center y, aspect ratio,
        height)`, where the aspect ratio is `width / height`.
        """
        ret = np.asarray(tlwh).copy()
        ret[:2] += ret[2:] / 2
        ret[2] /= ret[3]
        return ret

    def to_xyah(self):
        """Current box as (center x, center y, aspect ratio, height)."""
        return self.tlwh_to_xyah(self.tlwh)

    @staticmethod
    # @jit(nopython=True)
    def tlbr_to_tlwh(tlbr):
        """(x1, y1, x2, y2) -> (x, y, w, h)."""
        ret = np.asarray(tlbr).copy()
        ret[2:] -= ret[:2]
        return ret

    @staticmethod
    # @jit(nopython=True)
    def tlwh_to_tlbr(tlwh):
        """(x, y, w, h) -> (x1, y1, x2, y2)."""
        ret = np.asarray(tlwh).copy()
        ret[2:] += ret[:2]
        return ret

    def __repr__(self):
        return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame)
149
+
150
+
151
class BYTETracker(object):
    """BYTE association tracker.

    High-score detections are matched to existing tracks first; unmatched
    tracks then get a second chance against low-score detections.
    """

    def __init__(self, track_thresh=0.45, track_buffer=25, match_thresh=0.8, frame_rate=30):
        """track_thresh: high/low score split; track_buffer: frames a lost
        track is kept; match_thresh: IoU assignment cutoff."""
        self.tracked_stracks = []  # type: list[STrack]
        self.lost_stracks = []  # type: list[STrack]
        self.removed_stracks = []  # type: list[STrack]

        self.frame_id = 0
        self.track_buffer = track_buffer

        self.track_thresh = track_thresh
        self.match_thresh = match_thresh
        # self.det_thresh = track_thresh
        # new tracks must clear a slightly higher bar than association
        self.det_thresh = track_thresh + 0.1
        self.buffer_size = int(frame_rate / 30.0 * track_buffer)
        self.max_time_lost = self.buffer_size
        self.kalman_filter = KalmanFilter()

    def update(self, dets, _):
        """Run one association step.

        dets: (N, 6) tensor with columns (x1, y1, x2, y2, conf, cls).
        Returns a list of [x1, y1, x2, y2, track_id, cls] for active tracks.
        """
        self.frame_id += 1
        activated_starcks = []
        refind_stracks = []
        lost_stracks = []
        removed_stracks = []

        xyxys = dets[:, 0:4]
        xywh = xyxy2xywh(xyxys)
        confs = dets[:, 4]
        clss = dets[:, 5]

        classes = clss.numpy()
        xyxys = xyxys.numpy()
        confs = confs.numpy()

        # split detections into high-score and low-score (0.1 < conf < thresh)
        remain_inds = confs > self.track_thresh
        inds_low = confs > 0.1
        inds_high = confs < self.track_thresh

        inds_second = np.logical_and(inds_low, inds_high)

        dets_second = xywh[inds_second]
        dets = xywh[remain_inds]

        scores_keep = confs[remain_inds]
        scores_second = confs[inds_second]

        clss_keep = classes[remain_inds]
        # BUGFIX: was classes[remain_inds], which paired the low-score boxes
        # with the wrong class labels (and silently truncated via zip).
        clss_second = classes[inds_second]

        if len(dets) > 0:
            '''Detections'''
            detections = [STrack(box, s, c) for (box, s, c) in zip(dets, scores_keep, clss_keep)]
        else:
            detections = []

        ''' Add newly detected tracklets to tracked_stracks'''
        unconfirmed = []
        tracked_stracks = []  # type: list[STrack]
        for track in self.tracked_stracks:
            if not track.is_activated:
                unconfirmed.append(track)
            else:
                tracked_stracks.append(track)

        ''' Step 2: First association, with high score detection boxes'''
        strack_pool = joint_stracks(tracked_stracks, self.lost_stracks)
        # Predict the current location with KF
        STrack.multi_predict(strack_pool)
        dists = matching.iou_distance(strack_pool, detections)
        #if not self.args.mot20:
        dists = matching.fuse_score(dists, detections)
        matches, u_track, u_detection = matching.linear_assignment(dists, thresh=self.match_thresh)

        for itracked, idet in matches:
            track = strack_pool[itracked]
            det = detections[idet]
            if track.state == TrackState.Tracked:
                track.update(detections[idet], self.frame_id)
                activated_starcks.append(track)
            else:
                track.re_activate(det, self.frame_id, new_id=False)
                refind_stracks.append(track)

        ''' Step 3: Second association, with low score detection boxes'''
        # association the untrack to the low score detections
        if len(dets_second) > 0:
            '''Detections'''
            detections_second = [STrack(box, s, c) for (box, s, c) in zip(dets_second, scores_second, clss_second)]
        else:
            detections_second = []
        # only tracks that were Tracked last frame take part in the rescue round
        r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked]
        dists = matching.iou_distance(r_tracked_stracks, detections_second)
        matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.5)
        for itracked, idet in matches:
            track = r_tracked_stracks[itracked]
            det = detections_second[idet]
            if track.state == TrackState.Tracked:
                track.update(det, self.frame_id)
                activated_starcks.append(track)
            else:
                track.re_activate(det, self.frame_id, new_id=False)
                refind_stracks.append(track)

        for it in u_track:
            track = r_tracked_stracks[it]
            if not track.state == TrackState.Lost:
                track.mark_lost()
                lost_stracks.append(track)

        '''Deal with unconfirmed tracks, usually tracks with only one beginning frame'''
        detections = [detections[i] for i in u_detection]
        dists = matching.iou_distance(unconfirmed, detections)
        #if not self.args.mot20:
        dists = matching.fuse_score(dists, detections)
        matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7)
        for itracked, idet in matches:
            unconfirmed[itracked].update(detections[idet], self.frame_id)
            activated_starcks.append(unconfirmed[itracked])
        for it in u_unconfirmed:
            track = unconfirmed[it]
            track.mark_removed()
            removed_stracks.append(track)

        """ Step 4: Init new stracks"""
        for inew in u_detection:
            track = detections[inew]
            if track.score < self.det_thresh:
                continue
            track.activate(self.kalman_filter, self.frame_id)
            activated_starcks.append(track)
        """ Step 5: Update state"""
        for track in self.lost_stracks:
            if self.frame_id - track.end_frame > self.max_time_lost:
                track.mark_removed()
                removed_stracks.append(track)

        # print('Ramained match {} s'.format(t4-t3))

        self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked]
        self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks)
        self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks)
        self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks)
        self.lost_stracks.extend(lost_stracks)
        self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks)
        self.removed_stracks.extend(removed_stracks)
        self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks)
        # get scores of lost tracks
        output_stracks = [track for track in self.tracked_stracks if track.is_activated]
        outputs = []
        for t in output_stracks:
            output = []
            tlwh = t.tlwh
            tid = t.track_id
            tlwh = np.expand_dims(tlwh, axis=0)
            xyxy = xywh2xyxy(tlwh)
            xyxy = np.squeeze(xyxy, axis=0)
            output.extend(xyxy)
            output.append(tid)
            output.append(t.cls)
            outputs.append(output)

        return outputs
313
+
314
+
315
def joint_stracks(tlista, tlistb):
    """Union of two track lists, preserving order and de-duplicating by track_id."""
    seen = {}
    merged = []
    for track in tlista:
        seen[track.track_id] = 1
        merged.append(track)
    for track in tlistb:
        if not seen.get(track.track_id, 0):
            seen[track.track_id] = 1
            merged.append(track)
    return merged
327
+
328
+
329
def sub_stracks(tlista, tlistb):
    """Tracks of tlista whose track_id does not appear in tlistb (order kept)."""
    keep = {t.track_id: t for t in tlista}
    for t in tlistb:
        keep.pop(t.track_id, None)
    return list(keep.values())
338
+
339
+
340
def remove_duplicate_stracks(stracksa, stracksb):
    """Resolve near-duplicates between two track lists.

    Pairs whose IoU distance is below 0.15 are treated as the same object;
    the track with the shorter history is dropped from its list.
    """
    pdist = matching.iou_distance(stracksa, stracksb)
    pairs = np.where(pdist < 0.15)
    dupa, dupb = [], []
    for p, q in zip(*pairs):
        age_a = stracksa[p].frame_id - stracksa[p].start_frame
        age_b = stracksb[q].frame_id - stracksb[q].start_frame
        if age_a > age_b:
            dupb.append(q)
        else:
            dupa.append(p)
    resa = [t for i, t in enumerate(stracksa) if i not in dupa]
    resb = [t for i, t in enumerate(stracksb) if i not in dupb]
    return resa, resb
+ return resa, resb
trackers/bytetrack/kalman_filter.py ADDED
@@ -0,0 +1,270 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # vim: expandtab:ts=4:sw=4
2
+ import numpy as np
3
+ import scipy.linalg
4
+
5
+
6
+ """
7
+ Table for the 0.95 quantile of the chi-square distribution with N degrees of
8
+ freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv
9
+ function and used as Mahalanobis gating threshold.
10
+ """
11
# Maps degrees of freedom (1..9) to the 0.95 chi-square quantile, used as
# the Mahalanobis gating threshold in gating_distance().
chi2inv95 = {
    1: 3.8415,
    2: 5.9915,
    3: 7.8147,
    4: 9.4877,
    5: 11.070,
    6: 12.592,
    7: 14.067,
    8: 15.507,
    9: 16.919}
21
+
22
+
23
class KalmanFilter(object):
    """Constant-velocity Kalman filter for bounding boxes in image space.

    The 8-dimensional state

        x, y, a, h, vx, vy, va, vh

    holds the box centre (x, y), aspect ratio a, height h and their
    velocities. The box (x, y, a, h) is observed directly (linear
    observation model). Motion/observation noise scales with the current
    height estimate.
    """

    def __init__(self):
        ndim, dt = 4, 1.

        # F (motion): identity plus dt coupling position -> velocity.
        # H (update): selects the observed (x, y, a, h) components.
        self._motion_mat = np.eye(2 * ndim, 2 * ndim)
        for i in range(ndim):
            self._motion_mat[i, ndim + i] = dt
        self._update_mat = np.eye(ndim, 2 * ndim)

        # Relative uncertainty weights (hand-tuned).
        self._std_weight_position = 1. / 20
        self._std_weight_velocity = 1. / 160

    def initiate(self, measurement):
        """Create a track state from an unassociated (x, y, a, h) measurement.

        Returns the 8-d mean and 8x8 covariance; velocities start at zero
        mean with large uncertainty.
        """
        mean = np.r_[measurement, np.zeros_like(measurement)]

        h = measurement[3]
        pos = 2 * self._std_weight_position * h
        vel = 10 * self._std_weight_velocity * h
        std = np.array([pos, pos, 1e-2, pos, vel, vel, 1e-5, vel])
        covariance = np.diag(np.square(std))
        return mean, covariance

    def predict(self, mean, covariance):
        """Run one Kalman prediction step.

        mean: 8-d state vector, covariance: 8x8 matrix from the previous
        step. Returns the predicted (mean, covariance).
        """
        h = mean[3]
        pos = self._std_weight_position * h
        vel = self._std_weight_velocity * h
        std = np.array([pos, pos, 1e-2, pos, vel, vel, 1e-5, vel])
        motion_cov = np.diag(np.square(std))

        predicted_mean = np.dot(self._motion_mat, mean)
        predicted_cov = np.linalg.multi_dot(
            (self._motion_mat, covariance, self._motion_mat.T)) + motion_cov
        return predicted_mean, predicted_cov

    def project(self, mean, covariance):
        """Project the 8-d state distribution onto the 4-d measurement space.

        Returns the projected mean and covariance (observation noise added).
        """
        h = mean[3]
        pos = self._std_weight_position * h
        std = np.array([pos, pos, 1e-1, pos])
        innovation_cov = np.diag(np.square(std))

        projected_mean = np.dot(self._update_mat, mean)
        projected_cov = np.linalg.multi_dot(
            (self._update_mat, covariance, self._update_mat.T))
        return projected_mean, projected_cov + innovation_cov

    def multi_predict(self, mean, covariance):
        """Vectorised prediction over N states.

        mean: (N, 8) matrix, covariance: (N, 8, 8) array. Returns the
        predicted (mean, covariance) with the same shapes.
        """
        h = mean[:, 3]
        pos = self._std_weight_position * h
        vel = self._std_weight_velocity * h
        std = [pos, pos, 1e-2 * np.ones_like(h), pos,
               vel, vel, 1e-5 * np.ones_like(h), vel]
        sqr = np.square(np.stack(std)).T  # (N, 8) per-state variances

        motion_cov = np.asarray([np.diag(row) for row in sqr])

        mean = np.dot(mean, self._motion_mat.T)
        left = np.dot(self._motion_mat, covariance).transpose((1, 0, 2))
        covariance = np.dot(left, self._motion_mat.T) + motion_cov
        return mean, covariance

    def update(self, mean, covariance, measurement):
        """Run the Kalman correction step with one (x, y, a, h) measurement.

        Returns the measurement-corrected (mean, covariance).
        """
        projected_mean, projected_cov = self.project(mean, covariance)

        # Solve for the Kalman gain via Cholesky instead of inverting.
        chol, lower = scipy.linalg.cho_factor(
            projected_cov, lower=True, check_finite=False)
        kalman_gain = scipy.linalg.cho_solve(
            (chol, lower), np.dot(covariance, self._update_mat.T).T,
            check_finite=False).T
        innovation = measurement - projected_mean

        new_mean = mean + np.dot(innovation, kalman_gain.T)
        new_covariance = covariance - np.linalg.multi_dot(
            (kalman_gain, projected_cov, kalman_gain.T))
        return new_mean, new_covariance

    def gating_distance(self, mean, covariance, measurements,
                        only_position=False, metric='maha'):
        """Squared distance between the state distribution and N measurements.

        measurements: (N, 4) matrix of (x, y, a, h). With only_position the
        comparison uses the box centre only (2 degrees of freedom instead
        of 4 — gate with the matching chi2inv95 entry). metric is
        'gaussian' (squared Euclidean) or 'maha' (squared Mahalanobis).
        Returns an array of length N.
        """
        mean, covariance = self.project(mean, covariance)
        if only_position:
            mean, covariance = mean[:2], covariance[:2, :2]
            measurements = measurements[:, :2]

        d = measurements - mean
        if metric == 'gaussian':
            return np.sum(d * d, axis=1)
        elif metric == 'maha':
            cholesky_factor = np.linalg.cholesky(covariance)
            z = scipy.linalg.solve_triangular(
                cholesky_factor, d.T, lower=True, check_finite=False,
                overwrite_b=True)
            return np.sum(z * z, axis=0)
        else:
            raise ValueError('invalid distance metric')
trackers/bytetrack/matching.py ADDED
@@ -0,0 +1,180 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import cv2
2
+ import numpy as np
3
+ import scipy
4
+ import lap
5
+ from scipy.spatial.distance import cdist
6
+ from cython_bbox import bbox_overlaps as bbox_ious
7
+ from trackers.bytetrack import kalman_filter
8
+ import time
9
+
10
def merge_matches(m1, m2, shape):
    """Compose two sets of index matches.

    m1 holds (i, j) pairs mapping O -> P, m2 holds (j, k) pairs mapping
    P -> Q. Returns the composed (i, k) matches plus the indices of O and Q
    left unmatched.
    """
    # BUGFIX: the module only does `import scipy`, which does not guarantee
    # scipy.sparse is loaded; import the submodule explicitly.
    from scipy import sparse

    O, P, Q = shape
    m1 = np.asarray(m1)
    m2 = np.asarray(m2)

    M1 = sparse.coo_matrix((np.ones(len(m1)), (m1[:, 0], m1[:, 1])), shape=(O, P))
    M2 = sparse.coo_matrix((np.ones(len(m2)), (m2[:, 0], m2[:, 1])), shape=(P, Q))

    # Matrix product of the two incidence matrices yields the composition.
    mask = M1 * M2
    match = mask.nonzero()
    match = list(zip(match[0], match[1]))
    unmatched_O = tuple(set(range(O)) - set([i for i, j in match]))
    unmatched_Q = tuple(set(range(Q)) - set([j for i, j in match]))

    return match, unmatched_O, unmatched_Q
25
+
26
+
27
+ def _indices_to_matches(cost_matrix, indices, thresh):
28
+ matched_cost = cost_matrix[tuple(zip(*indices))]
29
+ matched_mask = (matched_cost <= thresh)
30
+
31
+ matches = indices[matched_mask]
32
+ unmatched_a = tuple(set(range(cost_matrix.shape[0])) - set(matches[:, 0]))
33
+ unmatched_b = tuple(set(range(cost_matrix.shape[1])) - set(matches[:, 1]))
34
+
35
+ return matches, unmatched_a, unmatched_b
36
+
37
+
38
def linear_assignment(cost_matrix, thresh):
    """Solve the assignment problem with a cost cutoff via lap.lapjv.

    Returns (matches, unmatched_rows, unmatched_cols); an empty matrix
    yields no matches and everything unmatched.
    """
    if cost_matrix.size == 0:
        return (np.empty((0, 2), dtype=int),
                tuple(range(cost_matrix.shape[0])),
                tuple(range(cost_matrix.shape[1])))
    _, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh)
    matches = np.asarray([[row, col] for row, col in enumerate(x) if col >= 0])
    unmatched_a = np.where(x < 0)[0]
    unmatched_b = np.where(y < 0)[0]
    return matches, unmatched_a, unmatched_b
50
+
51
+
52
def ious(atlbrs, btlbrs):
    """
    Compute cost based on IoU
    :type atlbrs: list[tlbr] | np.ndarray
    :type atlbrs: list[tlbr] | np.ndarray

    :rtype ious np.ndarray
    """
    result = np.zeros((len(atlbrs), len(btlbrs)), dtype=np.float64)
    if result.size == 0:
        return result

    # heavy lifting delegated to the cython_bbox extension
    return bbox_ious(
        np.ascontiguousarray(atlbrs, dtype=np.float64),
        np.ascontiguousarray(btlbrs, dtype=np.float64),
    )
70
+
71
+
72
def iou_distance(atracks, btracks):
    """
    Compute cost based on IoU
    :type atracks: list[STrack]
    :type btracks: list[STrack]

    :rtype cost_matrix np.ndarray
    """
    boxes_given = ((len(atracks) > 0 and isinstance(atracks[0], np.ndarray))
                   or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)))
    if boxes_given:
        # raw tlbr arrays were passed directly
        atlbrs, btlbrs = atracks, btracks
    else:
        atlbrs = [track.tlbr for track in atracks]
        btlbrs = [track.tlbr for track in btracks]
    return 1 - ious(atlbrs, btlbrs)
91
+
92
def v_iou_distance(atracks, btracks):
    """
    Compute cost based on IoU
    :type atracks: list[STrack]
    :type btracks: list[STrack]

    :rtype cost_matrix np.ndarray
    """
    boxes_given = ((len(atracks) > 0 and isinstance(atracks[0], np.ndarray))
                   or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)))
    if boxes_given:
        atlbrs, btlbrs = atracks, btracks
    else:
        # use each track's predicted box rather than its last observed one
        atlbrs = [track.tlwh_to_tlbr(track.pred_bbox) for track in atracks]
        btlbrs = [track.tlwh_to_tlbr(track.pred_bbox) for track in btracks]
    return 1 - ious(atlbrs, btlbrs)
111
+
112
def embedding_distance(tracks, detections, metric='cosine'):
    """Pairwise appearance-feature distance between tracks and detections.

    :param tracks: list[STrack]
    :param detections: list[BaseTrack]
    :param metric: any metric accepted by scipy.spatial.distance.cdist
    :return: cost_matrix np.ndarray of shape (len(tracks), len(detections))
    """
    cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float64)
    if cost_matrix.size == 0:
        return cost_matrix
    det_features = np.asarray([det.curr_feat for det in detections], dtype=np.float64)
    track_features = np.asarray([trk.smooth_feat for trk in tracks], dtype=np.float64)
    # distances are clipped at zero; features assumed normalised
    return np.maximum(0.0, cdist(track_features, det_features, metric))
129
+
130
+
131
def gate_cost_matrix(kf, cost_matrix, tracks, detections, only_position=False):
    """Invalidate assignments whose Mahalanobis distance exceeds the
    chi-square gate (entries are set to np.inf in place)."""
    if cost_matrix.size == 0:
        return cost_matrix
    gating_dim = 2 if only_position else 4
    threshold = kalman_filter.chi2inv95[gating_dim]
    measurements = np.asarray([det.to_xyah() for det in detections])
    for row, track in enumerate(tracks):
        distances = kf.gating_distance(
            track.mean, track.covariance, measurements, only_position)
        cost_matrix[row, distances > threshold] = np.inf
    return cost_matrix
142
+
143
+
144
def fuse_motion(kf, cost_matrix, tracks, detections, only_position=False, lambda_=0.98):
    """Gate the cost matrix by Mahalanobis distance and blend that distance
    into the remaining costs with weight (1 - lambda_), in place."""
    if cost_matrix.size == 0:
        return cost_matrix
    gating_dim = 2 if only_position else 4
    threshold = kalman_filter.chi2inv95[gating_dim]
    measurements = np.asarray([det.to_xyah() for det in detections])
    for row, track in enumerate(tracks):
        distances = kf.gating_distance(
            track.mean, track.covariance, measurements, only_position, metric='maha')
        cost_matrix[row, distances > threshold] = np.inf
        cost_matrix[row] = lambda_ * cost_matrix[row] + (1 - lambda_) * distances
    return cost_matrix
156
+
157
+
158
def fuse_iou(cost_matrix, tracks, detections):
    """Blend appearance (reid) similarity with IoU similarity:
    cost = 1 - reid_sim * (1 + iou_sim) / 2."""
    if cost_matrix.size == 0:
        return cost_matrix
    reid_sim = 1 - cost_matrix
    iou_sim = 1 - iou_distance(tracks, detections)
    fused_sim = reid_sim * (1 + iou_sim) / 2
    det_scores = np.array([det.score for det in detections])
    det_scores = np.expand_dims(det_scores, axis=0).repeat(cost_matrix.shape[0], axis=0)
    # fused_sim = fused_sim * (1 + det_scores) / 2
    return 1 - fused_sim
170
+
171
+
172
def fuse_score(cost_matrix, detections):
    """Blend IoU similarity with detection confidences:
    cost = 1 - iou_sim * score."""
    if cost_matrix.size == 0:
        return cost_matrix
    iou_sim = 1 - cost_matrix
    scores = np.array([det.score for det in detections])
    scores = np.expand_dims(scores, axis=0).repeat(cost_matrix.shape[0], axis=0)
    return 1 - iou_sim * scores
trackers/multi_tracker_zoo.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from trackers.strong_sort.utils.parser import get_config
2
+ from trackers.strong_sort.strong_sort import StrongSORT
3
+ from trackers.ocsort.ocsort import OCSort
4
+ from trackers.bytetrack.byte_tracker import BYTETracker
5
+
6
+
7
def create_tracker(tracker_type, appearance_descriptor_weights, device, half):
    """Factory returning an initialised tracker instance.

    Args:
        tracker_type: one of 'strongsort', 'ocsort', 'bytetrack'.
        appearance_descriptor_weights: ReID model weights (StrongSORT only).
        device: torch device for the appearance model (StrongSORT only).
        half: use FP16 for the appearance model (StrongSORT only).

    Raises:
        ValueError: if tracker_type is not recognised.
    """
    if tracker_type == 'strongsort':
        # initialize StrongSORT from its YAML config
        cfg = get_config()
        cfg.merge_from_file('trackers/strong_sort/configs/strong_sort.yaml')

        return StrongSORT(
            appearance_descriptor_weights,
            device,
            half,
            max_dist=cfg.STRONGSORT.MAX_DIST,
            max_iou_distance=cfg.STRONGSORT.MAX_IOU_DISTANCE,
            max_age=cfg.STRONGSORT.MAX_AGE,
            n_init=cfg.STRONGSORT.N_INIT,
            nn_budget=cfg.STRONGSORT.NN_BUDGET,
            mc_lambda=cfg.STRONGSORT.MC_LAMBDA,
            ema_alpha=cfg.STRONGSORT.EMA_ALPHA,
        )
    elif tracker_type == 'ocsort':
        return OCSort(
            det_thresh=0.45,
            iou_threshold=0.2,
            use_byte=False
        )
    elif tracker_type == 'bytetrack':
        return BYTETracker(
            track_thresh=0.6,
            track_buffer=30,
            match_thresh=0.8,
            frame_rate=30
        )
    else:
        # BUGFIX: previously printed and called exit(), killing the whole
        # process from library code; raise so callers can handle it.
        raise ValueError(f'No such tracker: {tracker_type!r}')
trackers/ocsort/__pycache__/association.cpython-38.pyc ADDED
Binary file (9.06 kB). View file