milselarch
commited on
Commit
•
3a3c68a
1
Parent(s):
b4d2b5d
Upload folder using huggingface_hub
Browse files- .gitignore +9 -0
- .idea/.gitignore +8 -0
- .idea/inspectionProfiles/Project_Default.xml +120 -0
- .idea/inspectionProfiles/profiles_settings.xml +6 -0
- .idea/lipread-project.iml +10 -0
- .idea/misc.xml +7 -0
- .idea/modules.xml +8 -0
- .idea/vcs.xml +6 -0
- .idea/workspace.xml +81 -0
- .ipynb_checkpoints/LipNet-checkpoint.ipynb +1220 -0
- .ipynb_checkpoints/train-checkpoint.py +44 -0
- LipNet.ipynb +0 -0
- Loader.py +74 -0
- README.md +5 -0
- __pycache__/Loader.cpython-39.pyc +0 -0
- __pycache__/helpers.cpython-39.pyc +0 -0
- __pycache__/model.cpython-39.pyc +0 -0
- app/__pycache__/modelutil.cpython-39.pyc +0 -0
- app/__pycache__/utils.cpython-39.pyc +0 -0
- app/animation.gif +0 -0
- app/modelutil.py +32 -0
- app/streamlitapp.py +57 -0
- app/test_video.mp4 +0 -0
- app/utils.py +48 -0
- check_videos.py +21 -0
- config.example.yml +4 -0
- corrupted.txt +107 -0
- data.zip +3 -0
- data/s1/Thumbs.db +0 -0
- helpers.py +84 -0
- model.py +96 -0
- move_videos.sh +19 -0
- requirements.txt +21 -0
- train.py +72 -0
- upload.py +11 -0
.gitignore
ADDED
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
**/*.zip
|
2 |
+
**/*.mpg
|
3 |
+
**/*.align
|
4 |
+
**/__MACOSX
|
5 |
+
GRID-dataset.zip
|
6 |
+
GRID-dataset/**
|
7 |
+
models/**
|
8 |
+
venv/**
|
9 |
+
config.yml
|
.idea/.gitignore
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Default ignored files
|
2 |
+
/shelf/
|
3 |
+
/workspace.xml
|
4 |
+
# Editor-based HTTP Client requests
|
5 |
+
/httpRequests/
|
6 |
+
# Datasource local storage ignored files
|
7 |
+
/dataSources/
|
8 |
+
/dataSources.local.xml
|
.idea/inspectionProfiles/Project_Default.xml
ADDED
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<component name="InspectionProjectProfileManager">
|
2 |
+
<profile version="1.0">
|
3 |
+
<option name="myName" value="Project Default" />
|
4 |
+
<inspection_tool class="DuplicatedCode" enabled="true" level="WEAK WARNING" enabled_by_default="true">
|
5 |
+
<Languages>
|
6 |
+
<language minSize="821" name="Python" />
|
7 |
+
</Languages>
|
8 |
+
</inspection_tool>
|
9 |
+
<inspection_tool class="Eslint" enabled="true" level="WARNING" enabled_by_default="true" />
|
10 |
+
<inspection_tool class="PyCompatibilityInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
11 |
+
<option name="ourVersions">
|
12 |
+
<value>
|
13 |
+
<list size="6">
|
14 |
+
<item index="0" class="java.lang.String" itemvalue="3.6" />
|
15 |
+
<item index="1" class="java.lang.String" itemvalue="3.7" />
|
16 |
+
<item index="2" class="java.lang.String" itemvalue="3.8" />
|
17 |
+
<item index="3" class="java.lang.String" itemvalue="3.9" />
|
18 |
+
<item index="4" class="java.lang.String" itemvalue="3.10" />
|
19 |
+
<item index="5" class="java.lang.String" itemvalue="3.11" />
|
20 |
+
</list>
|
21 |
+
</value>
|
22 |
+
</option>
|
23 |
+
</inspection_tool>
|
24 |
+
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
25 |
+
<option name="ignoredPackages">
|
26 |
+
<value>
|
27 |
+
<list size="41">
|
28 |
+
<item index="0" class="java.lang.String" itemvalue="torch" />
|
29 |
+
<item index="1" class="java.lang.String" itemvalue="pip" />
|
30 |
+
<item index="2" class="java.lang.String" itemvalue="telegram" />
|
31 |
+
<item index="3" class="java.lang.String" itemvalue="tulipy" />
|
32 |
+
<item index="4" class="java.lang.String" itemvalue="ipywidgets" />
|
33 |
+
<item index="5" class="java.lang.String" itemvalue="binance" />
|
34 |
+
<item index="6" class="java.lang.String" itemvalue="numpy" />
|
35 |
+
<item index="7" class="java.lang.String" itemvalue="tzlocal" />
|
36 |
+
<item index="8" class="java.lang.String" itemvalue="pygame" />
|
37 |
+
<item index="9" class="java.lang.String" itemvalue="joblib" />
|
38 |
+
<item index="10" class="java.lang.String" itemvalue="scikit-learn" />
|
39 |
+
<item index="11" class="java.lang.String" itemvalue="cython" />
|
40 |
+
<item index="12" class="java.lang.String" itemvalue="sklearn" />
|
41 |
+
<item index="13" class="java.lang.String" itemvalue="setuptools" />
|
42 |
+
<item index="14" class="java.lang.String" itemvalue="requests" />
|
43 |
+
<item index="15" class="java.lang.String" itemvalue="tsflex" />
|
44 |
+
<item index="16" class="java.lang.String" itemvalue="tensorflow" />
|
45 |
+
<item index="17" class="java.lang.String" itemvalue="jupyter" />
|
46 |
+
<item index="18" class="java.lang.String" itemvalue="seaborn" />
|
47 |
+
<item index="19" class="java.lang.String" itemvalue="pymysql" />
|
48 |
+
<item index="20" class="java.lang.String" itemvalue="polars" />
|
49 |
+
<item index="21" class="java.lang.String" itemvalue="python-binance" />
|
50 |
+
<item index="22" class="java.lang.String" itemvalue="websockets" />
|
51 |
+
<item index="23" class="java.lang.String" itemvalue="pyarrow" />
|
52 |
+
<item index="24" class="java.lang.String" itemvalue="scipy" />
|
53 |
+
<item index="25" class="java.lang.String" itemvalue="six" />
|
54 |
+
<item index="26" class="java.lang.String" itemvalue="cryptography" />
|
55 |
+
<item index="27" class="java.lang.String" itemvalue="ipython" />
|
56 |
+
<item index="28" class="java.lang.String" itemvalue="dill" />
|
57 |
+
<item index="29" class="java.lang.String" itemvalue="overrides" />
|
58 |
+
<item index="30" class="java.lang.String" itemvalue="toml" />
|
59 |
+
<item index="31" class="java.lang.String" itemvalue="python-telegram-bot" />
|
60 |
+
<item index="32" class="java.lang.String" itemvalue="click" />
|
61 |
+
<item index="33" class="java.lang.String" itemvalue="sparselinear" />
|
62 |
+
<item index="34" class="java.lang.String" itemvalue="seglearn" />
|
63 |
+
<item index="35" class="java.lang.String" itemvalue="pandas" />
|
64 |
+
<item index="36" class="java.lang.String" itemvalue="tqdm" />
|
65 |
+
<item index="37" class="java.lang.String" itemvalue="matplotlib" />
|
66 |
+
<item index="38" class="java.lang.String" itemvalue="frozendict" />
|
67 |
+
<item index="39" class="java.lang.String" itemvalue="lightgbm" />
|
68 |
+
<item index="40" class="java.lang.String" itemvalue="simpleeval" />
|
69 |
+
</list>
|
70 |
+
</value>
|
71 |
+
</option>
|
72 |
+
</inspection_tool>
|
73 |
+
<inspection_tool class="PyPep8Inspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
|
74 |
+
<option name="ignoredErrors">
|
75 |
+
<list>
|
76 |
+
<option value="E701" />
|
77 |
+
<option value="W292" />
|
78 |
+
<option value="E731" />
|
79 |
+
<option value="E402" />
|
80 |
+
<option value="E712" />
|
81 |
+
<option value="E128" />
|
82 |
+
<option value="E262" />
|
83 |
+
<option value="W605" />
|
84 |
+
</list>
|
85 |
+
</option>
|
86 |
+
</inspection_tool>
|
87 |
+
<inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
|
88 |
+
<option name="ignoredErrors">
|
89 |
+
<list>
|
90 |
+
<option value="N803" />
|
91 |
+
<option value="N806" />
|
92 |
+
<option value="N801" />
|
93 |
+
<option value="N802" />
|
94 |
+
<option value="N812" />
|
95 |
+
</list>
|
96 |
+
</option>
|
97 |
+
</inspection_tool>
|
98 |
+
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
99 |
+
<option name="ignoredIdentifiers">
|
100 |
+
<list>
|
101 |
+
<option value="Cython.Includes.libcpp.vector.vector.*" />
|
102 |
+
<option value="History.PriceHistory.*" />
|
103 |
+
<option value="Indicators.SMA.*" />
|
104 |
+
<option value="Datasets.*" />
|
105 |
+
<option value="deployments.binance.talib_test.ALGTR" />
|
106 |
+
<option value="deployments.binance.AlertsBot.*" />
|
107 |
+
<option value="History.ALGTR" />
|
108 |
+
<option value="deployments.binance.trainer.*" />
|
109 |
+
<option value="deployments.binance.Analyzer.ALGTR" />
|
110 |
+
<option value="deployments.binance.loader.*" />
|
111 |
+
<option value="deployments.binance.strided_loader.*" />
|
112 |
+
<option value="deployments.binance.BinancePuller.*" />
|
113 |
+
<option value="pullers.database_v2.*" />
|
114 |
+
<option value="pullers.Ncache.*" />
|
115 |
+
<option value="deployments.binance.tests.test_positions.Position" />
|
116 |
+
</list>
|
117 |
+
</option>
|
118 |
+
</inspection_tool>
|
119 |
+
</profile>
|
120 |
+
</component>
|
.idea/inspectionProfiles/profiles_settings.xml
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<component name="InspectionProjectProfileManager">
|
2 |
+
<settings>
|
3 |
+
<option name="USE_PROJECT_PROFILE" value="false" />
|
4 |
+
<version value="1.0" />
|
5 |
+
</settings>
|
6 |
+
</component>
|
.idea/lipread-project.iml
ADDED
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
2 |
+
<module type="PYTHON_MODULE" version="4">
|
3 |
+
<component name="NewModuleRootManager">
|
4 |
+
<content url="file://$MODULE_DIR$">
|
5 |
+
<excludeFolder url="file://$MODULE_DIR$/venv" />
|
6 |
+
</content>
|
7 |
+
<orderEntry type="inheritedJdk" />
|
8 |
+
<orderEntry type="sourceFolder" forTests="false" />
|
9 |
+
</component>
|
10 |
+
</module>
|
.idea/misc.xml
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
2 |
+
<project version="4">
|
3 |
+
<component name="Black">
|
4 |
+
<option name="sdkName" value="Python 3.9 (lipread-project)" />
|
5 |
+
</component>
|
6 |
+
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (lipread-project)" project-jdk-type="Python SDK" />
|
7 |
+
</project>
|
.idea/modules.xml
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
2 |
+
<project version="4">
|
3 |
+
<component name="ProjectModuleManager">
|
4 |
+
<modules>
|
5 |
+
<module fileurl="file://$PROJECT_DIR$/.idea/lipread-project.iml" filepath="$PROJECT_DIR$/.idea/lipread-project.iml" />
|
6 |
+
</modules>
|
7 |
+
</component>
|
8 |
+
</project>
|
.idea/vcs.xml
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
2 |
+
<project version="4">
|
3 |
+
<component name="VcsDirectoryMappings">
|
4 |
+
<mapping directory="$PROJECT_DIR$" vcs="Git" />
|
5 |
+
</component>
|
6 |
+
</project>
|
.idea/workspace.xml
ADDED
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
2 |
+
<project version="4">
|
3 |
+
<component name="AutoImportSettings">
|
4 |
+
<option name="autoReloadType" value="SELECTIVE" />
|
5 |
+
</component>
|
6 |
+
<component name="ChangeListManager">
|
7 |
+
<list default="true" id="98e215cf-e237-4c1b-a854-08c5c8eeb9db" name="Changes" comment="">
|
8 |
+
<change afterPath="$PROJECT_DIR$/check_videos.py" afterDir="false" />
|
9 |
+
<change afterPath="$PROJECT_DIR$/model.py" afterDir="false" />
|
10 |
+
<change afterPath="$PROJECT_DIR$/upload.py" afterDir="false" />
|
11 |
+
<change beforePath="$PROJECT_DIR$/.gitignore" beforeDir="false" afterPath="$PROJECT_DIR$/.gitignore" afterDir="false" />
|
12 |
+
<change beforePath="$PROJECT_DIR$/Loader.py" beforeDir="false" afterPath="$PROJECT_DIR$/Loader.py" afterDir="false" />
|
13 |
+
<change beforePath="$PROJECT_DIR$/helpers.py" beforeDir="false" afterPath="$PROJECT_DIR$/helpers.py" afterDir="false" />
|
14 |
+
<change beforePath="$PROJECT_DIR$/train.py" beforeDir="false" afterPath="$PROJECT_DIR$/train.py" afterDir="false" />
|
15 |
+
</list>
|
16 |
+
<option name="SHOW_DIALOG" value="false" />
|
17 |
+
<option name="HIGHLIGHT_CONFLICTS" value="true" />
|
18 |
+
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
|
19 |
+
<option name="LAST_RESOLUTION" value="IGNORE" />
|
20 |
+
</component>
|
21 |
+
<component name="FileTemplateManagerImpl">
|
22 |
+
<option name="RECENT_TEMPLATES">
|
23 |
+
<list>
|
24 |
+
<option value="Python Script" />
|
25 |
+
</list>
|
26 |
+
</option>
|
27 |
+
</component>
|
28 |
+
<component name="Git.Settings">
|
29 |
+
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
|
30 |
+
</component>
|
31 |
+
<component name="MacroExpansionManager">
|
32 |
+
<option name="directoryName" value="gkmeikw4" />
|
33 |
+
</component>
|
34 |
+
<component name="MarkdownSettingsMigration">
|
35 |
+
<option name="stateVersion" value="1" />
|
36 |
+
</component>
|
37 |
+
<component name="ProjectColorInfo">{
|
38 |
+
"customColor": "",
|
39 |
+
"associatedIndex": 5
|
40 |
+
}</component>
|
41 |
+
<component name="ProjectId" id="2XNeQDPHCdxMAOazMnZlDrhn0pM" />
|
42 |
+
<component name="ProjectViewState">
|
43 |
+
<option name="hideEmptyMiddlePackages" value="true" />
|
44 |
+
<option name="showLibraryContents" value="true" />
|
45 |
+
</component>
|
46 |
+
<component name="PropertiesComponent">{
|
47 |
+
"keyToString": {
|
48 |
+
"RunOnceActivity.OpenProjectViewOnStart": "true",
|
49 |
+
"RunOnceActivity.ShowReadmeOnStart": "true",
|
50 |
+
"WebServerToolWindowFactoryState": "false",
|
51 |
+
"last_opened_file_path": "/home/milselarch/projects/SUTD/50-035/lipread-project",
|
52 |
+
"node.js.detected.package.eslint": "true",
|
53 |
+
"node.js.detected.package.tslint": "true",
|
54 |
+
"node.js.selected.package.eslint": "(autodetect)",
|
55 |
+
"node.js.selected.package.tslint": "(autodetect)",
|
56 |
+
"vue.rearranger.settings.migration": "true"
|
57 |
+
}
|
58 |
+
}</component>
|
59 |
+
<component name="RecentsManager">
|
60 |
+
<key name="CopyFile.RECENT_KEYS">
|
61 |
+
<recent name="$PROJECT_DIR$" />
|
62 |
+
</key>
|
63 |
+
</component>
|
64 |
+
<component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
|
65 |
+
<component name="TaskManager">
|
66 |
+
<task active="true" id="Default" summary="Default task">
|
67 |
+
<changelist id="98e215cf-e237-4c1b-a854-08c5c8eeb9db" name="Changes" comment="" />
|
68 |
+
<created>1698473948341</created>
|
69 |
+
<option name="number" value="Default" />
|
70 |
+
<option name="presentableId" value="Default" />
|
71 |
+
<updated>1698473948341</updated>
|
72 |
+
<workItem from="1698473950406" duration="11154000" />
|
73 |
+
<workItem from="1698545246834" duration="6111000" />
|
74 |
+
<workItem from="1698579018554" duration="11954000" />
|
75 |
+
</task>
|
76 |
+
<servers />
|
77 |
+
</component>
|
78 |
+
<component name="TypeScriptGeneratedFilesManager">
|
79 |
+
<option name="version" value="3" />
|
80 |
+
</component>
|
81 |
+
</project>
|
.ipynb_checkpoints/LipNet-checkpoint.ipynb
ADDED
@@ -0,0 +1,1220 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "markdown",
|
5 |
+
"id": "a3573a47-3689-4668-b62f-5c8451b2b4e9",
|
6 |
+
"metadata": {
|
7 |
+
"tags": []
|
8 |
+
},
|
9 |
+
"source": [
|
10 |
+
"# 0. Install and Import Dependencies"
|
11 |
+
]
|
12 |
+
},
|
13 |
+
{
|
14 |
+
"cell_type": "code",
|
15 |
+
"execution_count": null,
|
16 |
+
"id": "ddfbccbe-41ae-4c23-98b1-a13868e2b499",
|
17 |
+
"metadata": {
|
18 |
+
"scrolled": true,
|
19 |
+
"tags": []
|
20 |
+
},
|
21 |
+
"outputs": [],
|
22 |
+
"source": [
|
23 |
+
"!pip list"
|
24 |
+
]
|
25 |
+
},
|
26 |
+
{
|
27 |
+
"cell_type": "code",
|
28 |
+
"execution_count": null,
|
29 |
+
"id": "02f907ea-f669-46c7-adcf-7f257e663448",
|
30 |
+
"metadata": {
|
31 |
+
"tags": []
|
32 |
+
},
|
33 |
+
"outputs": [],
|
34 |
+
"source": [
|
35 |
+
"!pip install opencv-python matplotlib imageio gdown tensorflow"
|
36 |
+
]
|
37 |
+
},
|
38 |
+
{
|
39 |
+
"cell_type": "code",
|
40 |
+
"execution_count": null,
|
41 |
+
"id": "b24af50c-20b8-409d-ad78-30a933fdd669",
|
42 |
+
"metadata": {
|
43 |
+
"tags": []
|
44 |
+
},
|
45 |
+
"outputs": [],
|
46 |
+
"source": [
|
47 |
+
"import os\n",
|
48 |
+
"import cv2\n",
|
49 |
+
"import tensorflow as tf\n",
|
50 |
+
"import numpy as np\n",
|
51 |
+
"from typing import List\n",
|
52 |
+
"from matplotlib import pyplot as plt\n",
|
53 |
+
"import imageio"
|
54 |
+
]
|
55 |
+
},
|
56 |
+
{
|
57 |
+
"cell_type": "code",
|
58 |
+
"execution_count": null,
|
59 |
+
"id": "1e3db0b0-e559-4ad6-91fd-e7414b7d75e6",
|
60 |
+
"metadata": {},
|
61 |
+
"outputs": [],
|
62 |
+
"source": [
|
63 |
+
"tf.config.list_physical_devices('GPU')"
|
64 |
+
]
|
65 |
+
},
|
66 |
+
{
|
67 |
+
"cell_type": "code",
|
68 |
+
"execution_count": null,
|
69 |
+
"id": "378d045a-3003-4f93-b7d2-a25a97774a68",
|
70 |
+
"metadata": {
|
71 |
+
"tags": []
|
72 |
+
},
|
73 |
+
"outputs": [],
|
74 |
+
"source": [
|
75 |
+
"physical_devices = tf.config.list_physical_devices('GPU')\n",
|
76 |
+
"try:\n",
|
77 |
+
" tf.config.experimental.set_memory_growth(physical_devices[0], True)\n",
|
78 |
+
"except:\n",
|
79 |
+
" pass"
|
80 |
+
]
|
81 |
+
},
|
82 |
+
{
|
83 |
+
"cell_type": "markdown",
|
84 |
+
"id": "7a19e88e-c7b9-45c1-ae1e-f2109329c71b",
|
85 |
+
"metadata": {
|
86 |
+
"tags": []
|
87 |
+
},
|
88 |
+
"source": [
|
89 |
+
"# 1. Build Data Loading Functions"
|
90 |
+
]
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"cell_type": "code",
|
94 |
+
"execution_count": null,
|
95 |
+
"id": "8fb99c90-e05a-437f-839d-6e772f8c1dd5",
|
96 |
+
"metadata": {
|
97 |
+
"tags": []
|
98 |
+
},
|
99 |
+
"outputs": [],
|
100 |
+
"source": [
|
101 |
+
"import gdown"
|
102 |
+
]
|
103 |
+
},
|
104 |
+
{
|
105 |
+
"cell_type": "code",
|
106 |
+
"execution_count": null,
|
107 |
+
"id": "c019e4c6-2af3-4160-99ea-5c8cb009f1a7",
|
108 |
+
"metadata": {
|
109 |
+
"tags": []
|
110 |
+
},
|
111 |
+
"outputs": [],
|
112 |
+
"source": [
|
113 |
+
"url = 'https://drive.google.com/uc?id=1YlvpDLix3S-U8fd-gqRwPcWXAXm8JwjL'\n",
|
114 |
+
"output = 'data.zip'\n",
|
115 |
+
"gdown.download(url, output, quiet=False)\n",
|
116 |
+
"gdown.extractall('data.zip')"
|
117 |
+
]
|
118 |
+
},
|
119 |
+
{
|
120 |
+
"cell_type": "code",
|
121 |
+
"execution_count": null,
|
122 |
+
"id": "8548cc59-6dfc-4acc-abc3-3e65212db02e",
|
123 |
+
"metadata": {
|
124 |
+
"tags": []
|
125 |
+
},
|
126 |
+
"outputs": [],
|
127 |
+
"source": [
|
128 |
+
"def load_video(path:str) -> List[float]: \n",
|
129 |
+
"\n",
|
130 |
+
" cap = cv2.VideoCapture(path)\n",
|
131 |
+
" frames = []\n",
|
132 |
+
" for _ in range(int(cap.get(cv2.CAP_PROP_FRAME_COUNT))): \n",
|
133 |
+
" ret, frame = cap.read()\n",
|
134 |
+
" frame = tf.image.rgb_to_grayscale(frame)\n",
|
135 |
+
" frames.append(frame[190:236,80:220,:])\n",
|
136 |
+
" cap.release()\n",
|
137 |
+
" \n",
|
138 |
+
" mean = tf.math.reduce_mean(frames)\n",
|
139 |
+
" std = tf.math.reduce_std(tf.cast(frames, tf.float32))\n",
|
140 |
+
" return tf.cast((frames - mean), tf.float32) / std"
|
141 |
+
]
|
142 |
+
},
|
143 |
+
{
|
144 |
+
"cell_type": "code",
|
145 |
+
"execution_count": null,
|
146 |
+
"id": "ec735e0b-ec98-4eb0-8f49-c35527d6670a",
|
147 |
+
"metadata": {
|
148 |
+
"tags": []
|
149 |
+
},
|
150 |
+
"outputs": [],
|
151 |
+
"source": [
|
152 |
+
"vocab = [x for x in \"abcdefghijklmnopqrstuvwxyz'?!123456789 \"]"
|
153 |
+
]
|
154 |
+
},
|
155 |
+
{
|
156 |
+
"cell_type": "code",
|
157 |
+
"execution_count": null,
|
158 |
+
"id": "be04e972-d7a5-4a72-82d8-a6bdde1f3ce6",
|
159 |
+
"metadata": {
|
160 |
+
"tags": []
|
161 |
+
},
|
162 |
+
"outputs": [],
|
163 |
+
"source": [
|
164 |
+
"char_to_num = tf.keras.layers.StringLookup(vocabulary=vocab, oov_token=\"\")\n",
|
165 |
+
"num_to_char = tf.keras.layers.StringLookup(\n",
|
166 |
+
" vocabulary=char_to_num.get_vocabulary(), oov_token=\"\", invert=True\n",
|
167 |
+
")\n",
|
168 |
+
"\n",
|
169 |
+
"print(\n",
|
170 |
+
" f\"The vocabulary is: {char_to_num.get_vocabulary()} \"\n",
|
171 |
+
" f\"(size ={char_to_num.vocabulary_size()})\"\n",
|
172 |
+
")"
|
173 |
+
]
|
174 |
+
},
|
175 |
+
{
|
176 |
+
"cell_type": "code",
|
177 |
+
"execution_count": null,
|
178 |
+
"id": "559f7420-6802-45fa-9ca0-b1ff209b461c",
|
179 |
+
"metadata": {
|
180 |
+
"tags": []
|
181 |
+
},
|
182 |
+
"outputs": [],
|
183 |
+
"source": [
|
184 |
+
"char_to_num.get_vocabulary()"
|
185 |
+
]
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"cell_type": "code",
|
189 |
+
"execution_count": null,
|
190 |
+
"id": "797ff78b-b48f-4e14-bb62-8cd0ebf9501a",
|
191 |
+
"metadata": {
|
192 |
+
"tags": []
|
193 |
+
},
|
194 |
+
"outputs": [],
|
195 |
+
"source": [
|
196 |
+
"char_to_num(['n','i','c','k'])"
|
197 |
+
]
|
198 |
+
},
|
199 |
+
{
|
200 |
+
"cell_type": "code",
|
201 |
+
"execution_count": null,
|
202 |
+
"id": "8cd7f4f4-ae77-4509-a4f4-c723787ebad1",
|
203 |
+
"metadata": {},
|
204 |
+
"outputs": [],
|
205 |
+
"source": [
|
206 |
+
"num_to_char([14, 9, 3, 11])"
|
207 |
+
]
|
208 |
+
},
|
209 |
+
{
|
210 |
+
"cell_type": "code",
|
211 |
+
"execution_count": null,
|
212 |
+
"id": "9491bab5-6a3c-4f79-879a-8f9fbe73ae2e",
|
213 |
+
"metadata": {
|
214 |
+
"tags": []
|
215 |
+
},
|
216 |
+
"outputs": [],
|
217 |
+
"source": [
|
218 |
+
"def load_alignments(path:str) -> List[str]: \n",
|
219 |
+
" with open(path, 'r') as f: \n",
|
220 |
+
" lines = f.readlines() \n",
|
221 |
+
" tokens = []\n",
|
222 |
+
" for line in lines:\n",
|
223 |
+
" line = line.split()\n",
|
224 |
+
" if line[2] != 'sil': \n",
|
225 |
+
" tokens = [*tokens,' ',line[2]]\n",
|
226 |
+
" return char_to_num(tf.reshape(tf.strings.unicode_split(tokens, input_encoding='UTF-8'), (-1)))[1:]"
|
227 |
+
]
|
228 |
+
},
|
229 |
+
{
|
230 |
+
"cell_type": "code",
|
231 |
+
"execution_count": null,
|
232 |
+
"id": "dd01ca9f-77fb-4643-a2aa-47dd82c5d66b",
|
233 |
+
"metadata": {
|
234 |
+
"tags": []
|
235 |
+
},
|
236 |
+
"outputs": [],
|
237 |
+
"source": [
|
238 |
+
"def load_data(path: str): \n",
|
239 |
+
" path = bytes.decode(path.numpy())\n",
|
240 |
+
" #file_name = path.split('/')[-1].split('.')[0]\n",
|
241 |
+
" # File name splitting for windows\n",
|
242 |
+
" file_name = path.split('\\\\')[-1].split('.')[0]\n",
|
243 |
+
" video_path = os.path.join('data','s1',f'{file_name}.mpg')\n",
|
244 |
+
" alignment_path = os.path.join('data','alignments','s1',f'{file_name}.align')\n",
|
245 |
+
" frames = load_video(video_path) \n",
|
246 |
+
" alignments = load_alignments(alignment_path)\n",
|
247 |
+
" \n",
|
248 |
+
" return frames, alignments"
|
249 |
+
]
|
250 |
+
},
|
251 |
+
{
|
252 |
+
"cell_type": "code",
|
253 |
+
"execution_count": null,
|
254 |
+
"id": "8cb7cc58-31ae-4904-a805-1177a82717d2",
|
255 |
+
"metadata": {
|
256 |
+
"tags": []
|
257 |
+
},
|
258 |
+
"outputs": [],
|
259 |
+
"source": [
|
260 |
+
"test_path = '.\\\\data\\\\s1\\\\bbal6n.mpg'"
|
261 |
+
]
|
262 |
+
},
|
263 |
+
{
|
264 |
+
"cell_type": "code",
|
265 |
+
"execution_count": null,
|
266 |
+
"id": "76aa964f-0c84-490d-897a-d00e3966e2c9",
|
267 |
+
"metadata": {},
|
268 |
+
"outputs": [],
|
269 |
+
"source": [
|
270 |
+
"tf.convert_to_tensor(test_path).numpy().decode('utf-8').split('\\\\')[-1].split('.')[0]"
|
271 |
+
]
|
272 |
+
},
|
273 |
+
{
|
274 |
+
"cell_type": "code",
|
275 |
+
"execution_count": null,
|
276 |
+
"id": "eb602c71-8560-4f9e-b26b-08202febb937",
|
277 |
+
"metadata": {
|
278 |
+
"scrolled": true,
|
279 |
+
"tags": []
|
280 |
+
},
|
281 |
+
"outputs": [],
|
282 |
+
"source": [
|
283 |
+
"frames, alignments = load_data(tf.convert_to_tensor(test_path))"
|
284 |
+
]
|
285 |
+
},
|
286 |
+
{
|
287 |
+
"cell_type": "code",
|
288 |
+
"execution_count": null,
|
289 |
+
"id": "0e3184a1-6b02-4b4f-84a8-a0a65f951ea2",
|
290 |
+
"metadata": {},
|
291 |
+
"outputs": [],
|
292 |
+
"source": [
|
293 |
+
"plt.imshow(frames[40])"
|
294 |
+
]
|
295 |
+
},
|
296 |
+
{
|
297 |
+
"cell_type": "code",
|
298 |
+
"execution_count": null,
|
299 |
+
"id": "d7ec0833-d54b-4073-84cf-92d011c60ec1",
|
300 |
+
"metadata": {},
|
301 |
+
"outputs": [],
|
302 |
+
"source": [
|
303 |
+
"alignments"
|
304 |
+
]
|
305 |
+
},
|
306 |
+
{
|
307 |
+
"cell_type": "code",
|
308 |
+
"execution_count": null,
|
309 |
+
"id": "fe1ad370-b287-4b46-85a2-7c45b0bd9b10",
|
310 |
+
"metadata": {},
|
311 |
+
"outputs": [],
|
312 |
+
"source": [
|
313 |
+
"tf.strings.reduce_join([bytes.decode(x) for x in num_to_char(alignments.numpy()).numpy()])"
|
314 |
+
]
|
315 |
+
},
|
316 |
+
{
|
317 |
+
"cell_type": "code",
|
318 |
+
"execution_count": null,
|
319 |
+
"id": "6871031a-b0ba-4c76-a852-f6329b0f2606",
|
320 |
+
"metadata": {
|
321 |
+
"tags": []
|
322 |
+
},
|
323 |
+
"outputs": [],
|
324 |
+
"source": [
|
325 |
+
"def mappable_function(path:str) ->List[str]:\n",
|
326 |
+
" result = tf.py_function(load_data, [path], (tf.float32, tf.int64))\n",
|
327 |
+
" return result"
|
328 |
+
]
|
329 |
+
},
|
330 |
+
{
|
331 |
+
"cell_type": "markdown",
|
332 |
+
"id": "c40a7eb4-0c3e-4eab-9291-5611cb68ce08",
|
333 |
+
"metadata": {
|
334 |
+
"tags": []
|
335 |
+
},
|
336 |
+
"source": [
|
337 |
+
"# 2. Create Data Pipeline"
|
338 |
+
]
|
339 |
+
},
|
340 |
+
{
|
341 |
+
"cell_type": "code",
|
342 |
+
"execution_count": null,
|
343 |
+
"id": "7686355d-45aa-4c85-ad9c-053e6a9b4d81",
|
344 |
+
"metadata": {
|
345 |
+
"tags": []
|
346 |
+
},
|
347 |
+
"outputs": [],
|
348 |
+
"source": [
|
349 |
+
"from matplotlib import pyplot as plt"
|
350 |
+
]
|
351 |
+
},
|
352 |
+
{
|
353 |
+
"cell_type": "code",
|
354 |
+
"execution_count": null,
|
355 |
+
"id": "f066fea2-91b1-42ed-a67d-00566a1a53ff",
|
356 |
+
"metadata": {
|
357 |
+
"tags": []
|
358 |
+
},
|
359 |
+
"outputs": [],
|
360 |
+
"source": [
|
361 |
+
"data = tf.data.Dataset.list_files('./data/s1/*.mpg')\n",
|
362 |
+
"data = data.shuffle(500, reshuffle_each_iteration=False)\n",
|
363 |
+
"data = data.map(mappable_function)\n",
|
364 |
+
"data = data.padded_batch(2, padded_shapes=([75,None,None,None],[40]))\n",
|
365 |
+
"data = data.prefetch(tf.data.AUTOTUNE)\n",
|
366 |
+
"# Added for split \n",
|
367 |
+
"train = data.take(450)\n",
|
368 |
+
"test = data.skip(450)"
|
369 |
+
]
|
370 |
+
},
|
371 |
+
{
|
372 |
+
"cell_type": "code",
|
373 |
+
"execution_count": null,
|
374 |
+
"id": "6b1365bd-7742-41d1-95d4-247021751c3a",
|
375 |
+
"metadata": {},
|
376 |
+
"outputs": [],
|
377 |
+
"source": [
|
378 |
+
"len(test)"
|
379 |
+
]
|
380 |
+
},
|
381 |
+
{
|
382 |
+
"cell_type": "code",
|
383 |
+
"execution_count": null,
|
384 |
+
"id": "5281bde8-fdc8-4da1-bd55-5a7929a9e80c",
|
385 |
+
"metadata": {},
|
386 |
+
"outputs": [],
|
387 |
+
"source": [
|
388 |
+
"frames, alignments = data.as_numpy_iterator().next()"
|
389 |
+
]
|
390 |
+
},
|
391 |
+
{
|
392 |
+
"cell_type": "code",
|
393 |
+
"execution_count": null,
|
394 |
+
"id": "cbebe683-6afd-47fd-bba4-c83b4b13bb32",
|
395 |
+
"metadata": {},
|
396 |
+
"outputs": [],
|
397 |
+
"source": [
|
398 |
+
"len(frames)"
|
399 |
+
]
|
400 |
+
},
|
401 |
+
{
|
402 |
+
"cell_type": "code",
|
403 |
+
"execution_count": null,
|
404 |
+
"id": "5cf2d676-93a9-434c-b3c7-bdcc2577b2e7",
|
405 |
+
"metadata": {
|
406 |
+
"tags": []
|
407 |
+
},
|
408 |
+
"outputs": [],
|
409 |
+
"source": [
|
410 |
+
"sample = data.as_numpy_iterator()"
|
411 |
+
]
|
412 |
+
},
|
413 |
+
{
|
414 |
+
"cell_type": "code",
|
415 |
+
"execution_count": null,
|
416 |
+
"id": "efa6cd46-7079-46c0-b45b-832f339f6cb0",
|
417 |
+
"metadata": {
|
418 |
+
"scrolled": true,
|
419 |
+
"tags": []
|
420 |
+
},
|
421 |
+
"outputs": [],
|
422 |
+
"source": [
|
423 |
+
"val = sample.next(); val[0]"
|
424 |
+
]
|
425 |
+
},
|
426 |
+
{
|
427 |
+
"cell_type": "code",
|
428 |
+
"execution_count": null,
|
429 |
+
"id": "acf5eb4f-a0da-4a9a-bf24-af13e9cc2fbe",
|
430 |
+
"metadata": {
|
431 |
+
"tags": []
|
432 |
+
},
|
433 |
+
"outputs": [],
|
434 |
+
"source": [
|
435 |
+
"imageio.mimsave('./animation.gif', val[0][0], fps=10)"
|
436 |
+
]
|
437 |
+
},
|
438 |
+
{
|
439 |
+
"cell_type": "code",
|
440 |
+
"execution_count": 34,
|
441 |
+
"id": "c33a87a2-d5e0-4ec9-b174-73ebf41bf03a",
|
442 |
+
"metadata": {
|
443 |
+
"tags": []
|
444 |
+
},
|
445 |
+
"outputs": [
|
446 |
+
{
|
447 |
+
"data": {
|
448 |
+
"text/plain": [
|
449 |
+
"<matplotlib.image.AxesImage at 0x10c2ead8d30>"
|
450 |
+
]
|
451 |
+
},
|
452 |
+
"execution_count": 34,
|
453 |
+
"metadata": {},
|
454 |
+
"output_type": "execute_result"
|
455 |
+
},
|
456 |
+
{
|
457 |
+
"data": {
|
458 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAh8AAADSCAYAAADqtKKSAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABP9klEQVR4nO29e5Ad1XXvv7rPc97DCDTDSBoQNolwABskJAZ8HQJKxMM8jPwsYsuEisuOsAFVxVhxcMqOHVFxVfzIT+DYhXHlxhgb28JAAF0isDA3ehthMEaGi0AvZgSS5q3z7P37g/jstb6t3nPO0Zkzo5n1qZqq7tndvXfv3t2zZ6/1XcszxhhSFEVRFEWpE/5kN0BRFEVRlJmFTj4URVEURakrOvlQFEVRFKWu6ORDURRFUZS6opMPRVEURVHqik4+FEVRFEWpKzr5UBRFURSlrujkQ1EURVGUuqKTD0VRFEVR6opOPhRFURRFqSsTNvlYu3YtnX766ZROp2nJkiW0devWiapKURRFUZQTCG8icrv8+Mc/pk984hP0ne98h5YsWULf/OY36YEHHqBdu3bR7NmznecGQUAHDhyglpYW8jyv1k1TFEVRFGUCMMbQ8PAwdXd3k++Ps7ZhJoDFixeblStXlvaLxaLp7u42a9asGffcvXv3GiLSH/3RH/3RH/3RnxPwZ+/eveP+rY9TjcnlcrRjxw5avXp16Xe+79PSpUtp06ZNoeOz2Sxls9nSvvmfhZj3nfU5isdStW7esQmii7wJSvprHKs6E1InXpPX7yoLgugyF/mC3Oez4GIxuoyIKMGGJVzHy+cjzzNHBiKbE7xjrq2+QQ57/6itI0jGZH3F6GcRy8A9sr7yCtBvvI8LcP+iQi9y3yTgdY3b+3eNJyIiL2/r9HJ5WcafB5TxdpujGVEUjB2Nrs8xvCiQvzAFW+e6Xb8RZSOBrfOokf3Gn36rnxZlHz5roW2LL/vGFOxz+/9elN+kl3Lt8lhWy5z4oChr8Gx7CiTrGA4StszIMcXJwCd4zCRL26OB/P6NBPYeMyYhytpjY2K/wx8tq91pGDZpz95vC/Rpgex5+dCzsOc1+klR9oE/OoeiWPf758U+v+5okBVlA+z9aoFnmvJsHzdDu131e/E47NvreEl5HxRjzzEG7yJrD67YG/7NCmS/BUfZ3z78Zhr2DfH86DIiip3Uztoix1swPFzavmP7f4uyfzxvib1kAb61Cbj/iPoNe58LJk/PmIeppaUl+tz/oeaTj7feeouKxSJ1dnaK33d2dtJLL70UOn7NmjX05S9/OdywWKp+kw/Hd3vGTz68KicfAXxwxURhnMkHf7EDnAywY3Hy4UW/LEHMfpDwg+PH2eQjDvV5jslHDP5Qs77yjGPyYaqcfOAHL1bB5IN99LyY7DePtwefBZ98wFgIPPhY8mvydhOOL5h8sKa3tsj6/cDux2GcysmHPC/u2T/OoT8GbL8F6mvKwYeb1dIcl8c2svvIw0fEsHbnTfTycwwnJmzfwNgvFu3z92DS0gBjo5H9oXS12zX5wD4tsOeYDz0LPvmIfhYIPm9+Xf7siYiKbPjh5IO3u7mC+j0vHrnv4feETXDIr2DywdsK737A3qnwO+yYfMB/zDE+4cPJB7v/5pbovsH6PUe/8foNfiNNuA+OxaSrXVavXk2Dg4Oln7179052kxRFURRFmUBqvvJx8sknUywWo/7+fvH7/v5+6urqCh2fSqUolapghcNhInFOpSbBtDKlwKV+l2nFYfagpGs2zMA+RVMLB+vn58JKRKg9UdXDcf6QNRHkW9tEGV8J8MAkEBobQZljJXT/rgHoHXubSN5/LPo/+PFwjXH+n1n4flm7cTWn3LrxP0FYCXl0347SNv5HXWTH5qAsza6LZgDZVjmG1h/YWdrenGkQZWjOSDITRQJWfvh/3zFY+Wj37aqYT3KFTF5FmrIy7B4PF2Vb+vzW0jaaZFp8aQLrit
ul9s6YrLGR/Uf/gbmLRZnHvsX43IKMbSvvQyKiK864sLT92Kub5TVh+d7kc6XtIoypMWPL8jBOEqw5GYNldh+vydu6bO5CUWbwfebXhW+Wx80uPq4SsJU+fIf4SgR+X/gqbMgcae/j0b3bRNmV8y6QdYjvuWz3f75qTYu/zeNKtuOPpuN9F/1mqvtG1HzlI5lM0sKFC2nDhg2l3wVBQBs2bKDe3t5aV6coiqIoyglGzVc+iIhWrVpFK1asoEWLFtHixYvpm9/8Jo2OjtKNN944EdUpiqIoinICMSGTj4985CP05ptv0pe+9CXq6+uj97znPfT444+HnFCrot6mleMxyThUDR4qF3iVnmMZXDguVtA2NF+I68DyPTdZoANimWaP0HlO+QPg6DfDvdHxOLYsiku93qBdhjbzwOzC2oNqF2wpV8aEYPdskrAsW3A4lrnGNF/OTcoxY5gZJqTKAbWNOBZfe65wScES+RDrNzAdiWVoR7tDRTA2rjrDroj+4tVnRBk3Z8yNN4uyy3sW2WuC6dZvZDuwfM5NNC1+TpTlSKpGWj2rRuiG+/3QH19m6wdlBFcY/PQ1eU8ZUb88LxHYZ5H3pbnG96xqJYB3tgWOnRu3/ZFHSxr7UP5kn1T7fHiufRZeY6Mo4318xTsvkhdljxRNGxTIPuZmEBy2CWYiC0KOynYzBeaCBEWPRWGGAZNE6DvBzSD4DePjKIB30fUOcxVJJaZLdo9XnQbmsQTcLx+bWakSOhJYk1yHX93fs7B5qjoTLGdCJh9ERDfffDPdfPPNE3V5RVEURVFOUCZd7aIoiqIoysxCJx+KoiiKotSVCTO7TBjTRTLraKtTQDmV7rHatlQSNRXqEFLYCuS83AbvF+ZAob2OV5DXDBIwP2cSu1BQHpecNea4R25bdgV8c4BuOx7Wx4zSGBTIK/Dos+DXkWByT4h+GggbOFyTB0CDe0IZ9HomtS1C0C0utV3W/R5R5jdaez36n5ic9TMIwAaeNfY+0p4cM+cnpfT1g+/4U1tfg4ycSew6fHwREf1o99Ol7QPg2DDG/AVSHspw+XOT99TEgrrhsBwAH4Tf5Xg0UNnfh5hMNwZ+Fd/bY/1TOiCQVgOT6F591iWyAcyHzYdnEYxKPxruc8Ofxdtltt1FaBu/5WHw3eCxs1D2HOf96KPvG/hG8bGCPk3Mr8eDaKA84F/oHeayXAicJsZtzBGOwB/nO8DfP3gXh9l+Aj8LBR4x2uHDhURKdP2wo1z0kYqiKIqiKPVDJx+KoiiKotSVqWt2CWgc+8Pb1MLUUvc8K5VQj/orqaPcnDDHU4cDEY0T586OREh86T82BkvdTF7rSiSHVCKD5ondUDIrD4RooA5zjZBkj7PWKY5FsxeTLxsoE9JDlIfzhJAFSFbH8zKNI9MbY1LMhCeXfkWSMsf4CkWV5G0BGeyH3nmJ3QE5pQemFb+V9U1GmmT+964nStuvF2Q00i3Zk0rbOTAljTGzB0ZUzbGcLUnIndMRGyltxyDa6nBRRmodCux+HuqPMdt1oy9NUrNZZNQ0mIQSrD33vPCoKLvxHZeWttE8FWuVEulrFlxS2n7kpY2ibMxYWegojBv+KuB/zdzUEop2y87jJj4iKdd++9jo/8dFxFc0g5hos4cTfh1HXqVQlGCXiddE91sylDuKRXeG91uYR1Fa64qMWia68qEoiqIoSl3RyYeiKIqiKHVFJx+KoiiKotSVqevzEUElPhjOMOWTTb3b4whZXjUh+2EFIdRrUGfIH0JI2kDeyfwT4gcHRVnutFn28hCWHPdNlVJbeZEKfGV4yHbHYd7xdLfIXOuwJWekf4DLts1DsaM/yEP7top9Hm48G0g/h0bf+kSs3/+sKLuchZxGPwOviYUGT0PWbN6nmHbgzcNi90fPPVTafiUvj/3vzCml7YGiDEX+VsFmoB0uyraNFG17MEy6zx6kDw+1LcYyM4f8SKRfS4FJOo9CmU+8Djm+G2L2WXXER0
VZioVwfx78Qb7w0vbS9v9Ky2d49blLZf1NTbYMJLuPvmQlym+YEVE2xroDJaNjbAyFMt6Sbc/JsXH+32bvt4/Z1svOvg7fWv6eoK8IO1T4VwEGwwi4/ErQp4vhvHtX5vEJQFc+FEVRFEWpKzr5UBRFURSlrpwQZpdyl7Ndktmql8groVhlpr/JMF+US5URN2tVp4fmE4f0VETjhOVVnpHTDMvlXAqs2aXYIF8JL+8wu2CYSbYSHRpfPJskyua8aA2hMzJqBY+Cy2tDcmI2bjFTMI8UaiCqI5pTosrCZhZ5nYD1VQYkfQm2nB6HiJ9cGogZWE0jRCPl5zHzUbD/DVH2z7uk9POnw/NL27uzp4iyLIsqmoUIo8MFW/9wXi7X54Loz25j3PZ3HEwib/nNeHiJAAZDthhdx1jBmmEKkI6Vm4GSvhwLJ6VspNKmmMxUOxzY+82ZPaLsp889LvY/eM4yuwPyzivOuLC0ff8rT4myDDML4BB+rRgts5/F5MQBhMh+fM92sc9NeWgiEVF789FjPwS/jkPqagrSrMmlrh58zwxK63kdYD5Jszox+qvHQxeA2cfkKrjHKtCVD0VRFEVR6opOPhRFURRFqSs6+VAURVEUpa5MWZ8Pz5hJk8dWVG+5fh7V+krgebXqk3Kv42o3lnG7JPZLvcPEu2SgkGUzcchKCjPdLaLMB5u4z31Axss0GYXD/8iQwx/ERSjDL2auZe2GrLLCzyMrbfnE/Dy4/wcR0bq9WyKbE2PtHg7kefjGpJjdu81LQpn9RF057wJZRxvzpUAfnzEbCj04JOWz333p/5S2/zsjMxw/MvRusX8g217aHsjLEOYjzJcD/TryReufkitCeHM/+ptxmKzvSlNC9ltD3NrgUSKLZIrW/+kohH4/mrf7SUcmVZT68uvwthBJn5MRkBYfLh4Q+/c893Bpm4dl/59KS5tpD/882bYOBvKeDhRsOHsfRlgsPlDa7ghku+OQyZVLWkM+DzwDLbxDAvwu8FDk+O8+83EKyWlZWZCTZZjF2WPy8SK027nCIHxOHPc0AejKh6IoiqIodUUnH4qiKIqi1JUpa3aphlC2znKzrDqkjzUz/biW6E8kOa3LtFJuHRWYoEwl/YZZIXkRi4Bp0OwwaKW3wfw2eR5KT7lqDkw7rrHCJbNlm1LGQdSPppwAlnD5PeN7wpZp0bQizoP7GzMOKR47dBD6aRiWzBPMhNDiy/obWSTNn+35v6JMZKeFJesfvbi+tP1M5iRR9u8DC0vbPNooEVF/tlXsDxdsOZeoEkkzxFBGmhoKxeixmIhHmzrScdvfLUmZRZdLX1EiyyOaInkw+4xm7X0ESchcy8wwGH01xS6Dppx9Y+22rCjLRgqyjw8VrWT4K7+Xz/QMJjV+NS/rfzHXXdruK8j3lEd8PYVl5iUiyrBIqXmS91vAaKSMkBmER/jFbMjOKMUOqa0Dw94bNLOE/p4wmSy+JxkRGRbazaXO+F3g33cYC6aSzL0R6MqHoiiKoih1RScfiqIoiqLUFZ18KIqiKIpSV04Mnw+XT0SVfh3iEq7rVxsy/XiuU+49VUKtznPdBy9zZFacMBxZbfk+SsqKbx1ix0npJWa1FWVYR7l9jFN+V1e5TMRcTjuO1JY/Dw9lglxeeFT6GYjzIPwytx+PgY8JtzO/WZQS1b6itNcnPNuegWKTKMsw/5AmX4af/ocXrW074cn6HxmdW9reOdojyt7KWZ+DfCBt6RkIS84lq2+NybYNH7W+DPmcPC8Wt/3W0SKzw3LQH4P7XKDUlbdtMCf7FEOxc7IF2bajR5nPRxA9wIrgt1JotvstKfksuM/Joazsp8Gc9IcZyFs5McpyX0zYjNP7ch3yOgV7z+iPclLCyucbwW8oYxJsW8rsm+G9eXTvttL2lXMXijIZ7hx8MJgvnIdZlMVFIF2E+GbiO8vGNNbn+PYMQGbodJl+Jga+7SKcPLi/eBFycc947u8Zo+KVj6effpquvv
pq6u7uJs/z6MEHHxTlxhj60pe+RKeeeio1NDTQ0qVL6eWXX660GkVRFEVRpikVTz5GR0fp3e9+N61du/aY5f/8z/9M3/72t+k73/kObdmyhZqammjZsmWUyWSOebyiKIqiKDOLis0uV1xxBV1xxRXHLDPG0De/+U36+7//e7r22muJiOjf//3fqbOzkx588EH66Ec/WnY9xvNKksSaCBNdMqI4LGlNJSZKhltJ5NKJvn4lZrVy+wOfadIuvfqQSdMwCV0sB1ls4xDhNBstzeO4zDUGhxuX4WImTZekjR8L2XedsnOUlnM5MZ7n86VXee9ZFoHxTcjUmmE3eSiINqW8XYW9DppWRgO7hH0gLyWzz46dVtp+IyMlsjz6KJovCszUkoZInSinHcxas8BIRi6nFwrR341kwi59N0OkUlfG2ZaEvX/MKsvvCaW9rWn5z12CnRuHKKYtTfbYppRs21jOPpss/HlwRWbl5iuU4cbhPBF9FWS5Zzbb/p+dGBJlc5M2Ui0fFwiOIdFOeJ1ccnHMxnzNHBthN/RWiqyy8C6KHfh/n79vBt89Nr7GiabMs+ziU0p70eOUS+sxw+8yMDuVg6ng71VNHU53795NfX19tHTp0tLv2traaMmSJbRp06ZaVqUoiqIoyglKTR1O+/r6iIios7NT/L6zs7NUhmSzWcpm7Ux1aGjomMcpiqIoijI9mHSp7Zo1a6itra30M2/evMlukqIoiqIoE0hNVz66urqIiKi/v59OPfXU0u/7+/vpPe95zzHPWb16Na1ataq0PzQ0RPPmzYvOauuyKdVKMlsreW21VHuP1dZRo3Df4poTlH3X6QNRwXVEEZPeJoakDbzQJG3SYkyinJX3o0NqjOM6cISF9/JMBguZW5124IpC3/NMvdE26fX7nxVFewr2PoYD6SsxHFhZZBFCgbfEjor9U2I2HPYpYK8PknbFtAjeXy/nZ5W2t/rvEGUHMu2lbQxFzv0TsAz9LE5K27Y2JqR/wEjO3jMPWU4k/UzC9dt9lIxyWeoohnNn2Whz4G+SATltwPyIUuDzUWR+TAkoa02zsZCWz6IjHS0ZbmS+M2Pg89EIfjXcH+XMxoOi7NLmF0vbp8A44c9/DHyMDgVWvovjjcuwc1AWgPdGrBbfQvCNEr5SIX8rljUavm3r9+0obV/es0jW4ZDeHgY/mg7fluXBr4TXuWzOefKa5Wpm5QXLPrSmKx/z58+nrq4u2rBhQ+l3Q0NDtGXLFurt7T3mOalUilpbW8WPoiiKoijTl4pXPkZGRuiVV14p7e/evZt27txJHR0d1NPTQ7feeit99atfpTPPPJPmz59Pd9xxB3V3d9N1111Xy3YriqIoinKCUvHkY/v27fRnf/Znpf0/mExWrFhBP/jBD+jzn/88jY6O0qc+9SkaGBig9773vfT4449TOp2OuqSiKIqiKDOIiicfl1xyiVPL63kefeUrX6GvfOUrx9UwCgIiR8jgY59TZRj2yfbxQGrlgzHRuEKoH08ckVr4i2Ad8eihztNhe4My/LLXKM2AQcLaWmMYyyPHbLuuMeXyY8F7Z5fBEAHE/EFCcT3QH4T7FmAdjv4OctZen3XERIiB7Tzt5Y+5TUTU4suYFO0sHHZnTPo5cBIQr6AzdqS0PS++Q5S91mhDc485YkK8mp0t9vdlZSwRjEPByTfY9vAYHEThWBectlR0wEXhKxLIB97E4oVg7IxGiCWC5VHHoo9LMmZ9ENIx8NVwfI9PTtm09Wc1HBBlHbERsc/9gU5PvCnKuJ9HJhQQJ7ot7b497xCE6OfPf8yT/ZQM5D6R3W+E8fbo/l+Xtq+cd4EoE397XGGjQu832w+iYwhhaoMQ7H1H3yieBsEHTwsRJh78j9AHpTz8YwRBiTxSURRFURSlfujkQ1EURVGUujJ1s9oWAzqm1AflhtUyEVLT6UgFS/RTCjBDcNMKmg15Nkd/SC4Reyc3y2MTDlksk956heglVOMabzjkeX
V4T9zsgiYgNPvwe8blVJ7VFmSCfCm46Hj2LZBJtMOzpoUErMMm4PaTrD9c4a5jRmbr9Nl5p8VlHS2elXAOm+jPXNqX9cVgOf+tPMuAC2YALpNtT0hZaJZJQdF005qwfROHbLwBk4LmTfRYC1BO6kePt6Qv+42Hl8+CZLUhZp8jZgrmtMal6ejU5EBpe0HyDVGW9mT9ObKZa1vAJMdNLSjfbvS5KQ+zKDNzKDxD3ldokkDpbZFJRfPwMqaYSYxnvyU6hhmG45LEs/rWH9gZfRwCUlszxs1VON5tXwVwT6Zg+zSUqXeC0ZUPRVEURVHqik4+FEVRFEWpKzr5UBRFURSlrkxdn4+Yb/07ypXCVpKafby6o6hEluuU9zpSs7vOK9PnAsOQG0wxz3GFAq82nLlDbxUKE15Jna77d4Qpp4S1u6Ms1UtbKZ6B5+tnwV6eZJJKDK/Or4vPl7WN+2oQEZm4o91cauvw+UA/Di8v2y36rSDLDD8WrsPlhUcCaZ/ntx+WPtrtZk/a7tHuzEM+ZyA8M68RfU7Qd0SeZwsxhPmQsc/Qh7a0xaTUmvt5DBYaRNlR5pOA/hkp5mfhQ9+0MH+JGNTPfTBQIMxluHybKCzLLbL/K9F3g4dUj4OvCG9PCnxF2uLWr+CkuAy1PovJaQ8WW+Q18f79aKkxl2w3gj9OE/MdQRnuqEmyMulj08TktclxQobzGlE+PhLYcPPNvnw6IhT6aYujK8DQ6+xdXNb9nsjT/EaHRJaIiiP2eeDzjjPtbxG/y9X68EWFUZ+s8OqKoiiKoijjoZMPRVEURVHqytQ1u3BqJa+drOvXiepNJFVSKzPXRMh3XRJhbBs3Z2RlJs/YW0PyMgkbAdPPYHREB7wOV2RYMMEI00rIzMMz7KJEFqOv2gVlkwc5K+sbLr0jIsoyeatrQRWXqH1m9sDIpD58dvgycREiTooz4bHlWLsHA1iGpuiIjyiZ5bTEpEmAmzcaQU48XLQpI8aC6MisCJelYlsGCzY7Ky6fc1lwCkwSKL11tcd3mEQTzNSCz7Qjbk0rjZB9mJs6QmYPOJbfM7abg+aZHPtfeRTkpDyKKbY7wcw1jWBKQvLs1FA/efw4h5Qe3iFuIglJ2RncxElEFGORSa9450Wyjpwciz/Z80xpe1ceMhyz+whl7eUhCLBtoZDKtWV6/NVVFEVRFOWEQScfiqIoiqLUFZ18KIqiKIpSV04Mn49yqTZMOspnuQ/I8WS8rZVPRLnVsTC+Ffl/oETV5ZNQRVvChY6srrXC5fMB9+slmX0c7J7FvoNi33TbbKkmEe074FWSRZmHZUepmmsMsTIPr5mVNmEu6QtJbdk9r9//rCh7q+gId85uER9hhrUtFkibf6ufpihw5I2yCzf56Ltht9GvYyyIzirrChuOJJm/AMpw+XVajPRP4H4N3DeESPoktMRGI8uwnVyyi5mC0c+CtxX9SvixWIfLH4aDvhqZINrnIwHh1QdY1lmU4Yr7Cn2WrdQX28nrCPnKsLKiGee7y4qz8O7xfR98blrY+47vkEtCy/08YuBjwc+Ltco/1ZgigsvV28E36f1zLrZtwxDuzr9R0UXR/iCa1VZRFEVRlCmKTj4URVEURakr08vsUivJ5vGYWqYqKCeu9h6nclZbV9vKNCUZMEn8xytPiv2PXzW/tO2h9JXXD2YPSiaOfRxBdlpop8u0I8DzQqYdu28w+ikzu2Qhq2ze0aeu/1y4SSSD6+dBdITLnCOKKZbxKKYhqS9bzg9no+WZY+UnsFyzA1HY1MPhstxKTBvNTOrrkqgWwewRznLLTA34pERkWtk2Hql0KJARXXmdQ0aWvZW3UU0TIGflMli8zqy4zCLN4VFLiYh8ZloYKDaKsp74EXscRH/FDLiyLWjK4xlw5XVi7LoZGIsxFv3XB7Ouy7SCnxAON5FccebFoiw4Kt+hQWaejDY4hvHidvw/vme7KBPmopBE9/jXLXTlQ1EURVGUuqKTD0VRFE
VR6opOPhRFURRFqStT1+ejGJA7oHOF1EA+ekKDdv4yCUtmy5MIh+SzNfIVcWXE9VzhznkWyFDGWSZRjrtfCX/IShhNI0hGuZ9FEiyv3A6MPh98H8sK7JrwDL2CI6st1C98WaAO9HPhhMIxixOZ9BCKuD9GJihfVp6HT1KLV964xVDYPAPqMPhDvBlYfwGUeiKjzF8hC/Jd7vOBodc5KaiD+1m4/D9GwVeCZ+dFOStKb7kvC2b15cdiGPYxkalXto3ff0i+y8ra6KgoQ/8M3lc5uA5vG94Tpx1kzzz0eprQxyb6m4HZmF0yUX6dMfjbxLMBF8GnqcW3fXpl9/mi7JH9NhtuFlMLsHdPhAMgop/t2iD2x8aTEP+hbY6ss5f3LILfRH8z1rN2C98QR9h5RFc+FEVRFEWpKzr5UBRFURSlrkxds0sQEP1hSYwvWbvMJxipc4bhihSKZc4IpLXAFWEUmYBor6H6nBFH7VIhZnaMgZkpePOQveRpcyLrNAn5avEIpKFnITLeYrvLlA+j1BZNKSb6WPLt0rcP/480e3Y5nWe4JSJKsfctAcu5XIo4CMvemJGUL+Gj9JXY8nqTwwQTwHPiJpEcRZs20CSAElZuIglAXphnJooRiGLKz/O96CV6NJ/wvnBFYkVpLZqr+HXHimC+YX2D5pN8EIssO1q094v9nWTy2iAmy9AkNTtuM0W3+NJEIyO8OsyBFC2nRWltI3uH0DyB1+Fy2uEAI7Xa54bn8RpR6pph783P9m0WZVnH6x1j49ZAtu3XC/LEU8pUiKPUV3zvKpDPuqK2lsvM/mutKIqiKErdqWjysWbNGrrggguopaWFZs+eTddddx3t2rVLHJPJZGjlypU0a9Ysam5upuXLl1N/f39NG60oiqIoyolLRZOPjRs30sqVK2nz5s30xBNPUD6fp7/4i7+g0VGbHOm2226jhx9+mB544AHauHEjHThwgK6//vqaN1xRFEVRlBOTinw+Hn/8cbH/gx/8gGbPnk07duyg973vfTQ4OEj33HMP3XfffXTppZcSEdG9995LZ511Fm3evJkuvPDC8ivzPGunL1emOcXktC4fjFr4XFSUDRYlnDyub7U+F2VmXA0xnm+O49xQ9tZyQXmtuKhtjxeTxlNXBtYQTAbs5UAmyGWy0N98D31FhHwXZca8H/GasC96FO7RS9jSq09bIsr+8/Wtpe2EJ8/zecthCBWYFTxG0l5dBOkt97sYM9I/gcstm2LSB0DY3UOSSSa9BMlmo2fbM+rJ+nyQUI6xDKzoA8H9QwaLMty4y1/D6cvBOjIFPg9Z9IeJaAvWcSjfJMow/DinIHw+/MiyFDwL7tfRFpN+HO2QubeDhXBv9eTY4JLZJDwL3m6UDyfYsS2O+0tDGX6JhgP7mxQ8J+4PEpKWs8um4V0I+PcMyng22kYvOjC6yTnCCBBRG5PzFh3fz2VzzhP7PANvyI+jTJl9tRyXz8fg4CAREXV0vJ1mfMeOHZTP52np0qWlYxYsWEA9PT20adOmY14jm83S0NCQ+FEURVEUZfpS9eQjCAK69dZb6eKLL6azzz6biIj6+voomUxSe3u7OLazs5P6+vqOeZ01a9ZQW1tb6WfevHnVNklRFEVRlBOAqqW2K1eupBdeeIGeeeaZ42rA6tWradWqVaX9oaGhCZmAVGKiqNYkUpEZpEompI6JyFSLS3aV1FELWW61pqRxnr1h5hTfEVHVQNZJ4tJXiKLqxe1ydshcw68ZykxcPPY2HSOrrcO0JM6FSIpXzl3ILuowh8E9GTZOH9or5YVjRi61p317z4fzzaIsh21lpNijymKWUS51RTklMwME8P8XSl8HmdklA9JLjA7KGWHyVm6uQFCGy0E5LTfJZIvxyDIEj+XmC6yf14ll7QlrTpmTGhBlc5OH7XFgZmn3ZTTSFp7xF0wrLcxEloBb4r2BEtVGdmyLL++XZ6cdC4pQJuGmFcx4y818GXg2acdz9B3fojE2bt
Ecl2Xv0I9f+5Uo252vjVBVRDydiJAHDqqafNx88830yCOP0NNPP01z584t/b6rq4tyuRwNDAyI1Y/+/n7q6uo65rVSqRSlUqljlimKoiiKMv2oaPpkjKGbb76Z1q1bR08++STNnz9flC9cuJASiQRt2GDjzu/atYv27NlDvb29tWmxoiiKoignNBWtfKxcuZLuu+8++sUvfkEtLS0lP462tjZqaGigtrY2uummm2jVqlXU0dFBra2t9NnPfpZ6e3srU7ooiqIoijJtqWjycffddxMR0SWXXCJ+f++999InP/lJIiL6xje+Qb7v0/LlyymbzdKyZcvorrvuqklja4qQP4FMsUZ+FXWX0zovVF97npPjkUS7/EEcGW+dcH8FCEt+1ekwaWbPNPR8ufQVwiGLsOkYXp1tu7Lqhp4gDwufB18RHDeOEPKujL8im6YrfD/0PfeNiUN48zbot1Fj/WMGfNlvPBQ5Pt00G9OucNconw1YaVdMqusOBTLcekd8hKIQPhgF8MFg/gKjRekbwkOYF8B3AP1TOPzYTFH6n7jOa01K6Wtr3PZxypfjJsXCpDfGpE/TyfHh0va85CFRdkrMlmEY/JDPDfOrwOfG/TxQeMqzvKKcNe3xsPQgCWffjEYYRFlIC5B1yGJ5yP48jEYuC86HvlG2LIEh+oVEV46hYcP6Hy7ZEZPP7Zo57y1t80y5RCTSJ3jw7gmfrgrCq1eSvTaKiiYfpgynwXQ6TWvXrqW1a9dW3ShFURRFUaYvmttFURRFUZS6MnWz2taTSjKg1oh6yHInhImQ5U4ElTxDvhQZMqWEwhWyMpBCtthoqH4fmFYcslgR4RTGBZfhmgIsdbrMemhaYcd6MWkGENETMfosu07ItMKz+Doiz2ImzRZf1t/BpLexhFzO50v2uNTOl7AbYal9zNhlaVzazxOPKin7KeUNi30uf0QJ6QCT4Q7HZSRcLtnFjLc8GmkMJJqDBRsp9XBORiblZp40LLvHIHInzzI7KyHbPTvJs8pKSXgTM3u1YKRSJplNg2mlke0n4Z5S8Nz4k4rBe+r6b5g/7wSY8lLMZBGETHA82q67Pr6fA6kt73+UCKMsmMMzPOfBXMEzPA8H0szVX7StafHledju9Qd2sjpkmTC1wLvIX5uQObZMMwyve2g4oJP+qKzTdOVDURRFUZT6opMPRVEURVHqik4+FEVRFEWpK1PX58OY4/YvqNqvokZ+Ddz0WQvZ7dsXqrPPRbX1uXwu0K+gVqHXXRlgHe3xWHvw6l7S4R8Rl3bnQquN1JuEsOA83HrId4LJe0NlLAOsB/VxiaxTPksk/UMcMlx8NiIUO0qkHfJdni0T8eF/niZmW26KucYCZG5ldv8UyBS53HIkyEKZrWN4HNW3yIgLQ4r7S4zForPjYsj2Nwutpe0iXJRLXzHcNs+qi2Uoi+XXQb8OLh/GjL98Pw0yXFdYdO7ngT4eace7lw5lSrb9kQ8FP4+G+3mgX0XA2jocQGZkaJuQ+noow7XbLj+WNPhKjDI5LyZPSLPniFltGz2WKdiX/YS+KyJbLWYwZ6+GB1mzgwx7N0K+jxQN6zeeDbdg8kT0quNEi658KIqiKIpSV3TyoSiKoihKXdHJh6IoiqIodWXq+nxUgdPHY5LjU5xQcT2mUiyPav1BMCYGx0e/CkdaaYzJwcshtkUxydLWpyDd+rCNHxHk4Jqs3R7W38BsvejHwdsGYeFDfh0sZgEPfU4kfU6wDle4d4N1RrCMh3A+Bg/t3VzavmbOBZH1u/xKuN2ZSMYeaPalP0Yg/BWkr0Q7tG0WKx+AOAxjgW1bzMe4D0V2nKx/2LexPAaKMpw7j0nSHIMQ/fz6vux7jLvB/TpcKe7xPO6DgGHRMX6HaA8bth0QxwXDnQfkSD/P/h9GvwYXrmsW2X3gVwHeKEqy968I3x5+jxhLI8bLoN/GDI8PEv096y/K8ZUx0XFNXPfr8m8L+YYZx7fPOOqowd8IXflQFE
VRFKWu6ORDURRFUZS6MnXNLp5nl4HKXOJBOaswdUxyVtdQ2xzhqMu/aAVLXyhvdV2n3L5yHYeyTH7s8SzZuc511VFuJl00bTjKvaxcJvWKLNx4R5so89mxxSNHoG2sTpDCCXMKPkNuEknJpX3Mqmvy0SYSbtoISYsdphVhkgF54eWnLbZFrtjTRHTtab322FS02Qevwut4dP9mUcYzi75/7iJR9ug+m/UTQ72jTDPDpJmNGN6dy2JhiVqYLEISXRu2PAbL5xnfyi0xGy8nCVJblMU2evb5N0Gm4Ha2nw6Fl7fbWDsb3sLMQETU4Uf/KfFDT852yPvnRJvkHt3/a7HPM7DyZ0hEVGAGFZRdB+zZJOGblXKEEM9DD/CROQzfl2FmWmkB08pwYMdYu4+mFXvV9hg8U9buD/T0ijIKMH1CmesIIbOLbSs3VVaLhldXFEVRFGXKopMPRVEURVHqik4+FEVRFEWpK1PX50OpLeX6PNQK9GnBcOcTAa8D6y/XJjoOItw41GHiLEx7AsJGtzbbazDZLRHRw69tKm1ffTrYdpnd12+WKda9NEvVDr4ZKKcVzx+lxszuy8PAv32sIyw7s5dj2GbXeDOh8M/RPieRbQGuBDmvCO8Odn2X7wC6J6SZvHQU/DrS7NgmkNqOsr6KedLOn4xbn59RCL0+ymS56KvBZbi+h/4Isk+5b0ERbqqddUcj+Ec0+3ZMFeF+uV9FxsjxxiWyKK1FhJ8HhA3n4/3KOefLMjZuseyh/dtK20OBHMNcaotfgaAC/7PRMsMlHA7kPR0q2vd2Xly+lxnmSNMI/YY+Jy68atN3RIRJJ6qND4gLXflQFEVRFKWu6ORDURRFUZS6cmKYXVySTseyWbmZZOsRfTRUR7XSXzN15MNO2SuWTXbU1GrNTii95fsOGSo3wRARmWYbyRLNJ1ljl2J/uvtpUcaXs6+f/14oY9dHM1Ml98vGpilg3k1eCZhLEsxE4qoPM+XCuBUmG+xv17vJrovmmst7mLwW5LPc1OI01xBRnJkFEkaaTzA7ryy0/RiKlMllsSHrpD02lLmWRSNt8aNloEThbK2cZpY9tRGkxly+jPDomDwqLZHsC4y++X6IWssltNj/ggq+b4Ms+mwO+pv3Tbhfyv8u5Zn5CiOVjjLJLJfWvl2n7Y8V77pclH3/t4+VtpfPle8376eQWbOIaXV59mnZtlBUU44wScrnxs0wE2GC0ZUPRVEURVHqik4+FEVRFEWpKzr5UBRFURSlrkxdn49yw6vXKmz3VKVW9zQRfTMZ/V1uWHpsm0tqW4l/BLOfGsic6+cdclbmA+Kd3CHKlvdcXNr+369Ln48ikzT+4rX/K8que8f/sjsJKdl0ZcB1ZrYEuA8ESi+53NEUwI8jbtvjoc8H2q8dIa5F29D/w/XcHDZwl5/BsjnnRbbtkb1bo+sDElyKiuZ55i+QAr8OLottCYVMt+c1QZ9hJlVOG/h1xEMeIhanfwADsw9zuOyVKBwmPeBtrSS1AxsLj+yXEul+JlkdBKmryNQL9aUxZD6rfwyOLbIss/hMxwI73jMgn35XYtCe1nWKKOPPBvsp5novpgEV3d3dd99N5557LrW2tlJrayv19vbSY49Zh5lMJkMrV66kWbNmUXNzMy1fvpz6+/tr3mhFURRFUU5cKpp8zJ07l+68807asWMHbd++nS699FK69tpr6be//S0REd1222308MMP0wMPPEAbN26kAwcO0PXXXz8hDVcURVEU5cTEMxhusEI6Ojro61//On3wgx+kU045he677z764Ac/SEREL730Ep111lm0adMmuvDCC8u63tDQELW1tdFlZ3yO4rHUuMdXLZOdjiYazmTfH5gkqo5wWm3230ru37HUbMaOyl84In7mzn9naTs+KpfMvaN23wNzgTc0Wtou7Nsvyr6355nSdhqWpE9i0SgxMqqXBDMM648gIyNnigyZUAdKT6NwmWQ8yLjrYwZe3qew1Cyy6u
JzYv0YiprK7iP0iWPPzYNxGZIs87J8LrIMl8yPBHbcDMM44RlhM0b2d5Htxzx5Xgvbb8TIoECaRS5FSbArAik+R44wV6FJgJ330D5pnkIzTyAijsr75+MGTSv8WIxi2seGRtZE3x+auU6JyT7mdz8AXTHMTCsYUXYgaChtd8TGRNnt5/y53Tljrih77LEflbbHgujxdf3pF4l94/q7h8+wBm4L5Upt385q+yoNDg5Sa2ur89iqjUrFYpHuv/9+Gh0dpd7eXtqxYwfl83launRp6ZgFCxZQT08Pbdq0KfI62WyWhoaGxI+iKIqiKNOXiicfzz//PDU3N1MqlaJPf/rTtG7dOnrXu95FfX19lEwmqb29XRzf2dlJfX19kddbs2YNtbW1lX7mzZtX8U0oiqIoinLiUPHk44//+I9p586dtGXLFvrMZz5DK1asoBdffLHqBqxevZoGBwdLP3v37q36WoqiKIqiTH0qltomk0l65zvftm0vXLiQtm3bRt/61rfoIx/5COVyORoYGBCrH/39/dTV1RV5vVQqRSm0AQP1CH8+IZRraztR65vK4P3WygeGhyJHP4Oiow5u6EeTbKsNtx6f0y3KOpnf0yDYhI8wuzeGZefyXSKZ9RIlo3kWfjwFWU7zJto+Hzjkna4Q2uhVILLagkTY5KLt4MLnBuzchmfcRb8OV0httJdz3waHn4WrLxIhqS2/PoReZ/uZCoZsoyfltPxZZU10GgB8pi5CGYAZ8v7d/9Py8Os++INwPw+edoCIKMPG6WH0xzDR2ZDRP8MFD82eMfJdGA6sj1XaAxk0y0D8t6dL/8bYKSz7dB6k1Q4fmzF2/04fjwpA3w3MZFtPjltIHAQBZbNZWrhwISUSCdqwYUOpbNeuXbRnzx7q7e11XEFRFEVRlJlERSsfq1evpiuuuIJ6enpoeHiY7rvvPvrlL39J69evp7a2Nrrpppto1apV1NHRQa2trfTZz36Went7y1a6KIqiKIoy/alo8nHw4EH6xCc+QW+88Qa1tbXRueeeS+vXr6c///O3pUTf+MY3yPd9Wr58OWWzWVq2bBndddddx93IcrPTupbap5zppt5S2MmW3taKKjMcV41r7MESqsdkmkFCLir6XP6HWV3ZdUxjWpRdt+DS0vZju34lyvYVRkrbOVha/9keGQ21gS3LF0KGD2Jl0bJjPMuV1ZUvwz+yb7soQ6knz6RqfDARcHktRm1lGMwwLPoYZMeMx17dHFlGJJfFXX2Di8gJZq7BLK8pXmZQssnGENQwEPA6ZFuynpSe8iNjDtMKZnl1ZaflYKZeaUopX9qL5ip+nTHIRjzKvuGYOTbP3i80syRZNuCkJ+9pDD4Z3HIaoAya9eMomHnOjNsopn6TzFrNTXleJtqMiGZVcfcO88zb5VV++7gpsQKJbpS5yGVGQiqafNxzzz3O8nQ6TWvXrqW1a9dWcllFURRFUWYQ0zt4vKIoiqIoUw6dfCiKoiiKUldO/Ky2412jxBTzeah3Nt56S22rDac+3nXK9fmoNiz7eDB7PfoZcL+iIAV2bpbx1qDPB7PzexCWnmfAvXLB+0TZut9ZZdnhogyZPhyADwRXjML/HMLOHsj6E2Vm1kS/gjyz/Rbh3cNjH37NRkB2+Qdc3rMossyVOdUlJz1SlL4Sjb70D+G+DShZ5bJkdA0KhERZMsj6uBHGQhO70Cj4qaU93qcAfEO49wBmbuX3EfbdsGC7XW8Uvw76kTTCmTgeRB2G+3XI43h4cx7OnIioyYv2pfCZfHkU5LP5QN4lyms5PKttky/ft5NZdlqUi7t8lXjmWpRW87GAaQ5qJZHlEnzjcGnC+jD0/R9wSc4RXflQFEVRFKWu6ORDURRFUZS6MnXNLsZUbo7AiJP1kNdWmzGQ709UNE5O4Fg0rbdJZrznUq60GsmxqINxGNq8T6EvTJ5lnE3IZXeTg+y0TEb3+O4touzKS2zGynxPuygTiVuh2YYvy8blEi2X71
IDyHD/6JLSdjAmM2niMi3PmInL4vxx5B0SSlyiL4rjJHl2KEb4TMDSrG9s25opOtrx43ukZHfZnPNK2zwbKhHRQ/u3sbaU/z69CeYrfh9NPspSbQ+MBBiNM9rswk0taNbiJqmEL3t1lJmykiGJrCTNrpMB+WOG9T9eh4NmD3F9xyuaw3EC5qoEqxPHDTc1HQrkWMgY+25yEwgRUV/QVtpu9+W70Ej22RwuNoqygUDu+6xFrb40yXXFh0vbaciOG2NS9pDse9hK4qlFynCvONNGIr7rxfUUxTKIEhy2ibH2YCRebgKFscDfKZdMNhYyv9o6+HmVRMzVlQ9FURRFUeqKTj4URVEURakrOvlQFEVRFKWuTF2fjyqo2scDbcK18oEo19Y8ET4elVxzuoRe5/fhuif0DWJ+HgYkuh74WXjMJ+NIUdqW6dCR0qbf3SaKDJMMe3mwrfKMtzD2DKsP20LMtuyBbwr3hyCSPiBjEMZZymIB1ldYxu8CE/rmhE0epZaSPLNX572jokz4REAd6/ZanxsMBT7Mr+nwVRmPvPCdwDFl6xyAR8p9EjCkN29PAn0H2Haigu/QWMjPwu67/DNcfjx5h/0ew8IXK7D18+c4Fsg/QVzqiv4YvI48SGITLIQ69w0hInqzaP0sDhWbRVkAviPtMftO82sSESXY827xovsb8VjWdg++L8a39eNzEi0DCbzzbxQe64BnDkYJfiX+G9WgKx+KoiiKotQVnXwoiqIoilJXdPKhKIqiKEpdmVY+HxXF0qjkOtUe64rzUe/YGshEhR//A/HokMLHFeq+Wv+UYrQdVOjy4foY5yMYHCpt/3e2Q5T96Ln/LG1/8Ia/kZWwGBEexlzhdWI4+UJ0u7mvit8kw00Ho/JYHifgob0yjXyC/Q+SdDwb9J3ImOgxHGM28fHCdGdYiOuih6X2/mMhmzgrw7bybUybzmzZRbiHsH+GbVsCvEVGWX9gWG6ejj1PGMuD+WOErmmv0+FLn4Msu48k9AX63Ah/Dcc7MxYKi2/b6rvGguP/VvT/yINfBS8fc8TyyBnXN0TC+3QokL5R3D8kE0h/EAyTzv080uDz0cTirnTEouPRhMKp85hDjvd5AEK987GI4cwxDQH38Xp0nzyWx+jAcZLyZH9EgeHVeToDHlJdw6sriqIoijJl0cmHoiiKoih1ZXqZXWpFteaaiWCizDW1yjobRQVS1xBVWoQMu67nMLOEYCYor1GaL4pHBsX+F1+xS5r/Z+gcUdYVY8udsL7psFAQ+fxZQFZbxzItpWxIZ+xTH2TnwVEbKhqXWrnEDuV2Y0aanaJA84UgZCKQv8iy5fWsY6kdl/P5UnsjmCh4WQauyc0AObjfJIYiZ0v2eViGl9JPWQdfsg/gfgOHhJG3Let4TVyhz98uj15Ol30T/SegxUdJNs/A6sj+CveH0le+nwmSVC4xntUXTDk5ZngLmXJYX6C0uRHMLmnPjvdGeN5p9u29Zt6FsnFs3MTaoW8c6SI89g4fBnMR9j8H5fKP7LNh0vHzyUcmZmZOsNL3z5Eh3LlpZf2BnaKMm2Dzhsvay/9468qHoiiKoih1RScfiqIoiqLUFZ18KIqiKIpSV054nw8RUn0ywpTz+tG2N9HtQf+PqRQmPVeer0At8fwq59LMTmnGZHjvf/p/m8T+Y8PnlrZfG5slyvY2Wemth4+i2udWpv+Rl5S2cwM+Lx67zuU9i6Krg3Tg6/ZtLW3nDdig2U3K5ONuHxCXz0PYX4D5XDj8DEbBls3t5Qmwgic8ngJc9v1YEO2fgHA/A/T58FkdMYf8MGcwvLitLwZ+Hb7H7ezusX6IhRRHOan0+QD/H9ZurIP7x4waOd7QB4MzZqQPBpfQYnhzV7+56nA9J+7n0RKT7/es2IjY7/DtSObSWiIpSSfwbRDvH/4d4M8R3i8uy30p2x1qu637VbHfDm1r8e04Gg7wXYh+b3w2BUA5r6ihAl+OctGVD0VRFEVR6opOPh
RFURRFqSsnvNllRuNarscIphMhrXVFSZ0MExDPTpsDEwE3Q3iyL7wGK6/lEUyJiP5f/hSxz5fXC7AMPFS0UjkvDxJZh9xONsaR1RbllRgplV8mDq82X+4FE43rOinPXqfRl/c0xrJnFnGJ3BFFFOHmhAxkOeWmFoxcGQiJMEYmtcvnKKd0mUHQnDDKZJtFeIbDgR03aBLgZY2erJ+3G81MPIpqHqJYuqJ4JkFCKkwbQfS7H4pGyvp/2JN1cFksmjn4M8a2FOF/XP4OYb/xCKMxsF2KMscz5HJZIqIEG7ctvjS7oJy1hZkzmuA7wd8FD6KYivfNFQ4BTMP8OthPr2VOLm3PSxwSZcnEEbHfwcZKDPo/w6SwjSCz59JblOHy+8WIqlxeyzNKY3ZpF7ryoSiKoihKXTmuycedd95JnufRrbfeWvpdJpOhlStX0qxZs6i5uZmWL19O/f39x9tORVEURVGmCVVPPrZt20b/9m//Rueee674/W233UYPP/wwPfDAA7Rx40Y6cOAAXX/99cfdUEVRFEVRpgdV+XyMjIzQDTfcQN/73vfoq1/9aun3g4ODdM8999B9991Hl156KRER3XvvvXTWWWfR5s2b6cILL4y6ZAjPmJKN24iMoFX6EhxPuO9qqXdodpcPxkRksXX1G/oc1KFOk2G2dZSGcZ+HlJT+BUcGStvf//1/ibLvHlks9vcctXLaXFHaQd8qtJa2/Yy0OxebWZ3lZkImElI848vzRHbc8aRwCebLgGOB1wlSwCtZNtx1e7fIS/Jw1yEbPJNsorS2ytctBhlnUabJ4b4bmaK0c3OfAJcfCVH5vhPDAfpH2DrR58RVH/ddaQJ/BH7/oxBCHP0cXJlj0beAw9uN8mHRTvDH4MdiCHMXGHo+JqS28jroS8IRcmoYbvy8tC/7yeU7gjT67B1Cn49kdJmQ2mLaBxZePQb+Ei8MWeltAvytXk0MiP3Tk2+VttGv5Yz4WGl7EOTyjT6Ti0Nf8KzRx5OIPIqqVj5WrlxJV111FS1dulT8fseOHZTP58XvFyxYQD09PbRp0ya8DBERZbNZGhoaEj+KoiiKokxfKv739P7776df//rXtG3btlBZX18fJZNJam9vF7/v7Oykvr6+Y15vzZo19OUvf7nSZiiKoiiKcoJS0eRj7969dMstt9ATTzxB6XR6/BPKYPXq1bRq1arS/tDQEM2bN08c4zS1TKWoni4qWWovl4kwpVSCq90T9VwcslAKuJw2OsogRgN99NXNpe2dWblEP1KUS9ZH2RI+Lufvz7bbOo6C1JebXUJtYxFWY7IMEmtK+HXwmrDvMcmuyIZLRCabY8dFfxJiocisrAyOrTa+rQ+mFW6GSDuuipJNLtHNoCSa7LdruCizGKNph9c/bOSxLrgZZrDYKMpOio+ydsqeG2Ny1jaIxsmX5dF0kqHoCJ9YPycFZghuyhouym8875vmmIxpm+XmGi/aXDMurPsTMWlq4M8CnxOXKGMZN600edGZYomIkmyMc6kpEdGyOefZtjTAPXLXAJDTGvZdMvDN5m/UOem9ouz7Ry8qbe9LnSTKBgtyLHLpc4svn03Cs9c9PS7vn2e1zVabTrxKKjK77Nixgw4ePEjnn38+xeNxisfjtHHjRvr2t79N8XicOjs7KZfL0cDAgDivv7+furq6jnnNVCpFra2t4kdRFEVRlOlLRSsfl112GT3//PPidzfeeCMtWLCAbr/9dpo3bx4lEgnasGEDLV++nIiIdu3aRXv27KHe3t7atVpRFEVRlBOWiiYfLS0tdPbZZ4vfNTU10axZs0q/v+mmm2jVqlXU0dFBra2t9NnPfpZ6e3srUrooiqIoijJ9qbke8hvf+Ab5vk/Lly+nbDZLy5Yto7vuuqvyCxlzbL+BSnwJuI16onwQahG2vJK2TbWQ5lGgpMyBgXZ7LEx6KAskByVton6QZWat3NGH0Ou/yVkb6Y7M6aLsrVyz2D+Ssfbzkbz0nXiTHW
vQr4LZhA34TvhFR2Zmh4+LCL0+jt+QCDdfpa/QNXMuEPs8C+aRQNqZuZ0d5YyYAZfLLZNod2YSUpQiinDjRvoncB+QBDjOxCi6DOG+I+hnwf0z3LJU2e4R5kuB4c25z8UI+Fy4Mr5i/a4Q5rwsA1l8XffB79flD4K+Kii95feMEmEuhcVnw2XJLv+f0DXZddpBvoxf72bP3iOGFOeYvGybF7f7Bn0Uk7aPxbeNSLyL6I9xZMR+a140naKsMSHvcV+q3VYHstx9zTY8wKImmR33otTh0nbCkW26vyjb1hnjofZtfdkKst8e9+Tjl7/8pdhPp9O0du1aWrt27fFeWlEURVGUaYjmdlEURVEUpa5Mr6y2kxHFtBZUK5mt5J5cEtVq63Qs9YdMKb5jnov34aqDlRk0yXA5LZg9fC4hzcklyzeLTaXtUDRIiMA4ykwtIxl5bK6FZb0Es5PnGppMXusVqhunBsx/HkTj5P0fOpYtyxuQbPK7x+Xk98+x0U9/tm+zKCuyDJmVyG4TYKLg1oV8SNAbDV/az8MzFVFL4f8vjBzKQZNE1hENFE0dHG6icJlEUDKacpiIuEkEr3O0GC3DxUilHDSX8GNDphTH/YYy97JzsX7MTlwuvK+wbdxElYDPCfYMl5P7hHJ1Zj6p0txu8vJt4ObSj5/2PlEW/7m9D/zWZHJgLmPRlrFPG2LMdAljikuP350cEWVpJjVOQ5+OGW4es30RryBqhK58KIqiKIpSV3TyoSiKoihKXdHJh6IoiqIodeXE9/moReZYh18BjZdFt9zp20SEQq9HeHNXFlnwIxFyT+w3bofEjJB44WJ0mPSQn0dUWRKsuUwCFoyNiaIB5vPRn5cRdscK0pY+zGyv+YK8jwL3swApnuhHHDPCXu7IOIt+NAVHOHl8bqzPPSgT/jk4Tl2+OgyUJXK7exHqS4BMlGfEDRxyv7Bk07YNM55yXw4Mr87L0MfAlZEVs+jy0OQJEy0ndZF1SF1RBjvi8EfB62QD+2nHjKi+6H8IJ8/eU/Qd4DJYH56h9OOIlkTjsUmHnBb7W2ROxuTPZaZdLcJhLTC+48yv6Mo554syv9Hx59J3PG/+PcN3j23HWqWsv73BitL3v9kuymIx2cf82+PBcysEs0vbR3IyLPuRvJXz9jXvFmWXNu4rbae96O/AMAsfP1KBb6GufCiKoiiKUld08qEoiqIoSl058cwuLlPDeEvPURyPiaJcc8pEZLV1MRHXDFXhqAOXIZ3HOubAGCnVIcP1Gu2SotcgIzCaEZtJ9D9f3STKHhxtL203QgTE4Zy8TjZnXxnfl8+UR+oMZWIu93m4ZMeVnIdwaWCxgmfDD4Nn6gpmyOV3eTBJhC0SPHOs/CRljCPLLjsPzSciyiUOL3YoRi1NUrRZL+bLOvi5rsigaD7gx2JWWS5ZDcl3Hf8rhrIBs/2kjxFebVncRxMJk6yCuUaYVtCUwupAEwjeI0aqFe3mzxQz1zITDZrZ+PPGKLmNrG0hqS2YE2J834dnyt8T/GY5zBKyLPrevWZpdllyymul7Z/uXSgPBgtcNsNkwPBdKiRt37wxKs3KBTbGuCSXiOgClmX3HYkmUTbCIxpXENWUoysfiqIoiqLUFZ18KIqiKIpSV6ac2eUPnveFIDfOkWVd7NjblTCe2qUWy+J1MJFUHeG0yn4LRTgV89wK7hfabQK2NIj9xhQXXoAmAjuehoblNcdG7RJuJiuXHgujWbFfHLPLjQaWN/Ojto5CIF+tQsGe5xchKRVzwfeKEA/UkaDPCxzJ+1zPO5D18z41Bt47ZjIxRrbNsCim2KcZthR7NEBzhSTPunEElnDxXHkd+4xHoS8MW/pHtQvfD2AZPO8Y7xjFU5pdHKqwkNnFHouqmGzRHpuFMZxzfIsCg8eyJHQx+by5aaPoMLsEYHYpOswuRX4sKlgcZh+MuMnbFqCCie3HQm2z5xXgmgG7xyJGVPWjjy3AePf5c4Rn6rlMD3wMO4
7Db1Z2xNYfHIV0jBhBOW6vi2aXom+/YaiEybP3PQtmlxF2zaEEJEdk9zTMtkdG3t7G7/+x8Ew5R9WRffv20bx58ya7GYqiKIqiVMHevXtp7ty5zmOm3OQjCAI6cOAAGWOop6eH9u7dS62treOfOIMYGhqiefPmad8cA+2baLRvotG+OTbaL9Fo34QxxtDw8DB1d3eTP06MoClndvF9n+bOnUtDQ0NERNTa2qoPNgLtm2i0b6LRvolG++bYaL9Eo30jaWtrK+s4dThVFEVRFKWu6ORDURRFUZS6MmUnH6lUiv7hH/6BUqnofAYzFe2baLRvotG+iUb75thov0SjfXN8TDmHU0VRFEVRpjdTduVDURRFUZTpiU4+FEVRFEWpKzr5UBRFURSlrujkQ1EURVGUujJlJx9r166l008/ndLpNC1ZsoS2bt062U2qK2vWrKELLriAWlpaaPbs2XTdddfRrl27xDGZTIZWrlxJs2bNoubmZlq+fDn19/dPUosnjzvvvJM8z6Nbb7219LuZ3Df79++nv/zLv6RZs2ZRQ0MDnXPOObR9+/ZSuTGGvvSlL9Gpp55KDQ0NtHTpUnr55ZcnscX1oVgs0h133EHz58+nhoYGesc73kH/+I//KPJQzJS+efrpp+nqq6+m7u5u8jyPHnzwQVFeTj8cPnyYbrjhBmptbaX29na66aabaGRkpI53MTG4+iafz9Ptt99O55xzDjU1NVF3dzd94hOfoAMHDohrTNe+qSlmCnL//febZDJpvv/975vf/va35q//+q9Ne3u76e/vn+ym1Y1ly5aZe++917zwwgtm586d5sorrzQ9PT1mZGSkdMynP/1pM2/ePLNhwwazfft2c+GFF5qLLrpoEltdf7Zu3WpOP/10c+6555pbbrml9PuZ2jeHDx82p512mvnkJz9ptmzZYl599VWzfv1688orr5SOufPOO01bW5t58MEHzXPPPWeuueYaM3/+fHP06NFJbPnE87Wvfc3MmjXLPPLII2b37t3mgQceMM3NzeZb3/pW6ZiZ0jePPvqo+eIXv2h+/vOfGyIy69atE+Xl9MPll19u3v3ud5vNmzebX/3qV+ad73yn+djHPlbnO6k9rr4ZGBgwS5cuNT/+8Y/NSy+9ZDZt2mQWL15sFi5cKK4xXfumlkzJycfixYvNypUrS/vFYtF0d3ebNWvWTGKrJpeDBw8aIjIbN240xrz9EiQSCfPAAw+Ujvnd735niMhs2rRpsppZV4aHh82ZZ55pnnjiCfOnf/qnpcnHTO6b22+/3bz3ve+NLA+CwHR1dZmvf/3rpd8NDAyYVCplfvSjH9WjiZPGVVddZf7qr/5K/O766683N9xwgzFm5vYN/oEtpx9efPFFQ0Rm27ZtpWMee+wx43me2b9/f93aPtEca2KGbN261RCRef31140xM6dvjpcpZ3bJ5XK0Y8cOWrp0ael3vu/T0qVLadOmTZPYssllcHCQiIg6OjqIiGjHjh2Uz+dFPy1YsIB6enpmTD+tXLmSrrrqKtEHRDO7bx566CFatGgRfehDH6LZs2fTeeedR9/73vdK5bt376a+vj7RN21tbbRkyZJp3zcXXXQRbdiwgX7/+98TEdFzzz1HzzzzDF1xxRVENLP7hlNOP2zatIna29tp0aJFpWOWLl1Kvu/Tli1b6t7myWRwcJA8z6P29nYi0r4plymXWO6tt96iYrFInZ2d4vednZ300ksvTVKrJpcgCOjWW2+liy++mM4++2wiIurr66NkMlka8H+gs7OT+vr6JqGV9eX++++nX//617Rt27ZQ2Uzum1dffZXuvvtuWrVqFf3d3/0dbdu2jT73uc9RMpmkFStWlO7/WO/XdO+bL3zhCzQ0NEQLFiygWCxGxWKRvva1r9ENN9xARDSj+4ZTTj/09fXR7NmzRXk8HqeOjo4Z1VeZTIZuv/12+tjHPlZKLqd9Ux5TbvKhhFm5ciW98MIL9Mwzz0x2U6YEe/fupV
tuuYWeeOIJSqfTk92cKUUQBLRo0SL6p3/6JyIiOu+88+iFF16g73znO7RixYpJbt3k8pOf/IR++MMf0n333Ud/8id/Qjt37qRbb72Vuru7Z3zfKJWTz+fpwx/+MBlj6O67757s5pxwTDmzy8knn0yxWCykTOjv76eurq5JatXkcfPNN9MjjzxCTz31FM2dO7f0+66uLsrlcjQwMCCOnwn9tGPHDjp48CCdf/75FI/HKR6P08aNG+nb3/42xeNx6uzsnLF9c+qpp9K73vUu8buzzjqL9uzZQ0RUuv+Z+H797d/+LX3hC1+gj370o3TOOefQxz/+cbrttttozZo1RDSz+4ZTTj90dXXRwYMHRXmhUKDDhw/PiL76w8Tj9ddfpyeeeKK06kGkfVMuU27ykUwmaeHChbRhw4bS74IgoA0bNlBvb+8ktqy+GGPo5ptvpnXr1tGTTz5J8+fPF+ULFy6kRCIh+mnXrl20Z8+ead9Pl112GT3//PO0c+fO0s+iRYvohhtuKG3P1L65+OKLQ5Ls3//+93TaaacREdH8+fOpq6tL9M3Q0BBt2bJl2vfN2NgY+b785MViMQqCgIhmdt9wyumH3t5eGhgYoB07dpSOefLJJykIAlqyZEnd21xP/jDxePnll+m//uu/aNasWaJ8JvdNRUy2x+uxuP/++00qlTI/+MEPzIsvvmg+9alPmfb2dtPX1zfZTasbn/nMZ0xbW5v55S9/ad54443Sz9jYWOmYT3/606anp8c8+eSTZvv27aa3t9f09vZOYqsnD652MWbm9s3WrVtNPB43X/va18zLL79sfvjDH5rGxkbzH//xH6Vj7rzzTtPe3m5+8YtfmN/85jfm2muvnZZyUmTFihVmzpw5Jantz3/+c3PyySebz3/+86VjZkrfDA8Pm2effdY8++yzhojMv/zLv5hnn322pNgopx8uv/xyc95555ktW7aYZ555xpx55pnTQk7q6ptcLmeuueYaM3fuXLNz507xbc5ms6VrTNe+qSVTcvJhjDH/+q//anp6ekwymTSLFy82mzdvnuwm1RUiOubPvffeWzrm6NGj5m/+5m/MSSedZBobG80HPvAB88Ybb0xeoycRnHzM5L55+OGHzdlnn21SqZRZsGCB+e53vyvKgyAwd9xxh+ns7DSpVMpcdtllZteuXZPU2voxNDRkbrnlFtPT02PS6bQ544wzzBe/+EXxR2Om9M1TTz11zO/LihUrjDHl9cOhQ4fMxz72MdPc3GxaW1vNjTfeaIaHhyfhbmqLq292794d+W1+6qmnSteYrn1TSzxjWHg/RVEURVGUCWbK+XwoiqIoijK90cmHoiiKoih1RScfiqIoiqLUFZ18KIqiKIpSV3TyoSiKoihKXdHJh6IoiqIodUUnH4qiKIqi1BWdfCiKoiiKUld08qEoiqIoSl3RyYeiKIqiKHVFJx+KoiiKotQVnXwoiqIoilJX/n+CmoP3JYfyDwAAAABJRU5ErkJggg==\n",
|
459 |
+
"text/plain": [
|
460 |
+
"<Figure size 640x480 with 1 Axes>"
|
461 |
+
]
|
462 |
+
},
|
463 |
+
"metadata": {},
|
464 |
+
"output_type": "display_data"
|
465 |
+
}
|
466 |
+
],
|
467 |
+
"source": [
|
468 |
+
"# 0:videos, 0: 1st video out of the batch, 0: return the first frame in the video \n",
|
469 |
+
"plt.imshow(val[0][0][35])"
|
470 |
+
]
|
471 |
+
},
|
472 |
+
{
|
473 |
+
"cell_type": "code",
|
474 |
+
"execution_count": 35,
|
475 |
+
"id": "84593332-133c-4205-b7a6-8e235d5e2b3b",
|
476 |
+
"metadata": {
|
477 |
+
"tags": []
|
478 |
+
},
|
479 |
+
"outputs": [
|
480 |
+
{
|
481 |
+
"data": {
|
482 |
+
"text/plain": [
|
483 |
+
"<tf.Tensor: shape=(), dtype=string, numpy=b'lay blue by e two please'>"
|
484 |
+
]
|
485 |
+
},
|
486 |
+
"execution_count": 35,
|
487 |
+
"metadata": {},
|
488 |
+
"output_type": "execute_result"
|
489 |
+
}
|
490 |
+
],
|
491 |
+
"source": [
|
492 |
+
"tf.strings.reduce_join([num_to_char(word) for word in val[1][0]])"
|
493 |
+
]
|
494 |
+
},
|
495 |
+
{
|
496 |
+
"cell_type": "markdown",
|
497 |
+
"id": "0f47733c-83bc-465c-b118-b198b492ad37",
|
498 |
+
"metadata": {
|
499 |
+
"tags": []
|
500 |
+
},
|
501 |
+
"source": [
|
502 |
+
"# 3. Design the Deep Neural Network"
|
503 |
+
]
|
504 |
+
},
|
505 |
+
{
|
506 |
+
"cell_type": "code",
|
507 |
+
"execution_count": 36,
|
508 |
+
"id": "d8e9a497-191b-4842-afbd-26f5e13c43ba",
|
509 |
+
"metadata": {
|
510 |
+
"tags": []
|
511 |
+
},
|
512 |
+
"outputs": [],
|
513 |
+
"source": [
|
514 |
+
"from tensorflow.keras.models import Sequential \n",
|
515 |
+
"from tensorflow.keras.layers import Conv3D, LSTM, Dense, Dropout, Bidirectional, MaxPool3D, Activation, Reshape, SpatialDropout3D, BatchNormalization, TimeDistributed, Flatten\n",
|
516 |
+
"from tensorflow.keras.optimizers import Adam\n",
|
517 |
+
"from tensorflow.keras.callbacks import ModelCheckpoint, LearningRateScheduler"
|
518 |
+
]
|
519 |
+
},
|
520 |
+
{
|
521 |
+
"cell_type": "code",
|
522 |
+
"execution_count": 37,
|
523 |
+
"id": "3f753ed2-70b9-4236-8c1c-08ca065dc8bf",
|
524 |
+
"metadata": {},
|
525 |
+
"outputs": [
|
526 |
+
{
|
527 |
+
"data": {
|
528 |
+
"text/plain": [
|
529 |
+
"(75, 46, 140, 1)"
|
530 |
+
]
|
531 |
+
},
|
532 |
+
"execution_count": 37,
|
533 |
+
"metadata": {},
|
534 |
+
"output_type": "execute_result"
|
535 |
+
}
|
536 |
+
],
|
537 |
+
"source": [
|
538 |
+
"data.as_numpy_iterator().next()[0][0].shape"
|
539 |
+
]
|
540 |
+
},
|
541 |
+
{
|
542 |
+
"cell_type": "code",
|
543 |
+
"execution_count": 38,
|
544 |
+
"id": "f9171056-a352-491a-9ed9-92b28ced268e",
|
545 |
+
"metadata": {
|
546 |
+
"tags": []
|
547 |
+
},
|
548 |
+
"outputs": [],
|
549 |
+
"source": [
|
550 |
+
"model = Sequential()\n",
|
551 |
+
"model.add(Conv3D(128, 3, input_shape=(75,46,140,1), padding='same'))\n",
|
552 |
+
"model.add(Activation('relu'))\n",
|
553 |
+
"model.add(MaxPool3D((1,2,2)))\n",
|
554 |
+
"\n",
|
555 |
+
"model.add(Conv3D(256, 3, padding='same'))\n",
|
556 |
+
"model.add(Activation('relu'))\n",
|
557 |
+
"model.add(MaxPool3D((1,2,2)))\n",
|
558 |
+
"\n",
|
559 |
+
"model.add(Conv3D(75, 3, padding='same'))\n",
|
560 |
+
"model.add(Activation('relu'))\n",
|
561 |
+
"model.add(MaxPool3D((1,2,2)))\n",
|
562 |
+
"\n",
|
563 |
+
"model.add(TimeDistributed(Flatten()))\n",
|
564 |
+
"\n",
|
565 |
+
"model.add(Bidirectional(LSTM(128, kernel_initializer='Orthogonal', return_sequences=True)))\n",
|
566 |
+
"model.add(Dropout(.5))\n",
|
567 |
+
"\n",
|
568 |
+
"model.add(Bidirectional(LSTM(128, kernel_initializer='Orthogonal', return_sequences=True)))\n",
|
569 |
+
"model.add(Dropout(.5))\n",
|
570 |
+
"\n",
|
571 |
+
"model.add(Dense(char_to_num.vocabulary_size()+1, kernel_initializer='he_normal', activation='softmax'))"
|
572 |
+
]
|
573 |
+
},
|
574 |
+
{
|
575 |
+
"cell_type": "code",
|
576 |
+
"execution_count": 39,
|
577 |
+
"id": "78851825-2bcd-42a9-b7f2-28bb5a6bf43a",
|
578 |
+
"metadata": {
|
579 |
+
"collapsed": true,
|
580 |
+
"jupyter": {
|
581 |
+
"outputs_hidden": true
|
582 |
+
},
|
583 |
+
"tags": []
|
584 |
+
},
|
585 |
+
"outputs": [
|
586 |
+
{
|
587 |
+
"name": "stdout",
|
588 |
+
"output_type": "stream",
|
589 |
+
"text": [
|
590 |
+
"Model: \"sequential\"\n",
|
591 |
+
"_________________________________________________________________\n",
|
592 |
+
" Layer (type) Output Shape Param # \n",
|
593 |
+
"=================================================================\n",
|
594 |
+
" conv3d (Conv3D) (None, 75, 46, 140, 128) 3584 \n",
|
595 |
+
" \n",
|
596 |
+
" activation (Activation) (None, 75, 46, 140, 128) 0 \n",
|
597 |
+
" \n",
|
598 |
+
" max_pooling3d (MaxPooling3D (None, 75, 23, 70, 128) 0 \n",
|
599 |
+
" ) \n",
|
600 |
+
" \n",
|
601 |
+
" conv3d_1 (Conv3D) (None, 75, 23, 70, 256) 884992 \n",
|
602 |
+
" \n",
|
603 |
+
" activation_1 (Activation) (None, 75, 23, 70, 256) 0 \n",
|
604 |
+
" \n",
|
605 |
+
" max_pooling3d_1 (MaxPooling (None, 75, 11, 35, 256) 0 \n",
|
606 |
+
" 3D) \n",
|
607 |
+
" \n",
|
608 |
+
" conv3d_2 (Conv3D) (None, 75, 11, 35, 75) 518475 \n",
|
609 |
+
" \n",
|
610 |
+
" activation_2 (Activation) (None, 75, 11, 35, 75) 0 \n",
|
611 |
+
" \n",
|
612 |
+
" max_pooling3d_2 (MaxPooling (None, 75, 5, 17, 75) 0 \n",
|
613 |
+
" 3D) \n",
|
614 |
+
" \n",
|
615 |
+
" time_distributed (TimeDistr (None, 75, 6375) 0 \n",
|
616 |
+
" ibuted) \n",
|
617 |
+
" \n",
|
618 |
+
" bidirectional (Bidirectiona (None, 75, 256) 6660096 \n",
|
619 |
+
" l) \n",
|
620 |
+
" \n",
|
621 |
+
" dropout (Dropout) (None, 75, 256) 0 \n",
|
622 |
+
" \n",
|
623 |
+
" bidirectional_1 (Bidirectio (None, 75, 256) 394240 \n",
|
624 |
+
" nal) \n",
|
625 |
+
" \n",
|
626 |
+
" dropout_1 (Dropout) (None, 75, 256) 0 \n",
|
627 |
+
" \n",
|
628 |
+
" dense (Dense) (None, 75, 41) 10537 \n",
|
629 |
+
" \n",
|
630 |
+
"=================================================================\n",
|
631 |
+
"Total params: 8,471,924\n",
|
632 |
+
"Trainable params: 8,471,924\n",
|
633 |
+
"Non-trainable params: 0\n",
|
634 |
+
"_________________________________________________________________\n"
|
635 |
+
]
|
636 |
+
}
|
637 |
+
],
|
638 |
+
"source": [
|
639 |
+
"model.summary()"
|
640 |
+
]
|
641 |
+
},
|
642 |
+
{
|
643 |
+
"cell_type": "code",
|
644 |
+
"execution_count": null,
|
645 |
+
"id": "f4b4798c-a65a-4c47-9e2a-3b09dc98d320",
|
646 |
+
"metadata": {},
|
647 |
+
"outputs": [],
|
648 |
+
"source": [
|
649 |
+
"5*17*75"
|
650 |
+
]
|
651 |
+
},
|
652 |
+
{
|
653 |
+
"cell_type": "code",
|
654 |
+
"execution_count": 40,
|
655 |
+
"id": "e5c2eae0-c359-41a4-97a0-75c44dccb7d1",
|
656 |
+
"metadata": {
|
657 |
+
"tags": []
|
658 |
+
},
|
659 |
+
"outputs": [
|
660 |
+
{
|
661 |
+
"name": "stdout",
|
662 |
+
"output_type": "stream",
|
663 |
+
"text": [
|
664 |
+
"1/1 [==============================] - 3s 3s/step\n"
|
665 |
+
]
|
666 |
+
}
|
667 |
+
],
|
668 |
+
"source": [
|
669 |
+
"yhat = model.predict(val[0])"
|
670 |
+
]
|
671 |
+
},
|
672 |
+
{
|
673 |
+
"cell_type": "code",
|
674 |
+
"execution_count": 41,
|
675 |
+
"id": "ffdc7319-0d69-4f7e-a6d4-ce72deb81c0b",
|
676 |
+
"metadata": {},
|
677 |
+
"outputs": [
|
678 |
+
{
|
679 |
+
"data": {
|
680 |
+
"text/plain": [
|
681 |
+
"<tf.Tensor: shape=(), dtype=string, numpy=b'444iiiiiiiiiiiiiimmmmmmmmmmmmmmmmmmmmmmiiiiiiiiiiimmmmmmmmiiiiiiiiiiixxxxxx'>"
|
682 |
+
]
|
683 |
+
},
|
684 |
+
"execution_count": 41,
|
685 |
+
"metadata": {},
|
686 |
+
"output_type": "execute_result"
|
687 |
+
}
|
688 |
+
],
|
689 |
+
"source": [
|
690 |
+
"tf.strings.reduce_join([num_to_char(x) for x in tf.argmax(yhat[0],axis=1)])"
|
691 |
+
]
|
692 |
+
},
|
693 |
+
{
|
694 |
+
"cell_type": "code",
|
695 |
+
"execution_count": 42,
|
696 |
+
"id": "6ed47531-8317-4255-9a12-b757642258e6",
|
697 |
+
"metadata": {
|
698 |
+
"tags": []
|
699 |
+
},
|
700 |
+
"outputs": [
|
701 |
+
{
|
702 |
+
"data": {
|
703 |
+
"text/plain": [
|
704 |
+
"<tf.Tensor: shape=(), dtype=string, numpy=b'444iiiiiiiiiiiiiimmmmmmmmmmmmmmmmmmmmmmiiiiiiiiiiimmmmmmmmiiiiiiiiiiixxxxxx'>"
|
705 |
+
]
|
706 |
+
},
|
707 |
+
"execution_count": 42,
|
708 |
+
"metadata": {},
|
709 |
+
"output_type": "execute_result"
|
710 |
+
}
|
711 |
+
],
|
712 |
+
"source": [
|
713 |
+
"tf.strings.reduce_join([num_to_char(tf.argmax(x)) for x in yhat[0]])"
|
714 |
+
]
|
715 |
+
},
|
716 |
+
{
|
717 |
+
"cell_type": "code",
|
718 |
+
"execution_count": 43,
|
719 |
+
"id": "7c37b9b9-5298-4038-9c33-5031d1b457f0",
|
720 |
+
"metadata": {
|
721 |
+
"tags": []
|
722 |
+
},
|
723 |
+
"outputs": [
|
724 |
+
{
|
725 |
+
"data": {
|
726 |
+
"text/plain": [
|
727 |
+
"(None, 75, 46, 140, 1)"
|
728 |
+
]
|
729 |
+
},
|
730 |
+
"execution_count": 43,
|
731 |
+
"metadata": {},
|
732 |
+
"output_type": "execute_result"
|
733 |
+
}
|
734 |
+
],
|
735 |
+
"source": [
|
736 |
+
"model.input_shape"
|
737 |
+
]
|
738 |
+
},
|
739 |
+
{
|
740 |
+
"cell_type": "code",
|
741 |
+
"execution_count": 44,
|
742 |
+
"id": "98b316a4-5322-4782-8e36-4b3c1a696d85",
|
743 |
+
"metadata": {
|
744 |
+
"tags": []
|
745 |
+
},
|
746 |
+
"outputs": [
|
747 |
+
{
|
748 |
+
"data": {
|
749 |
+
"text/plain": [
|
750 |
+
"(None, 75, 41)"
|
751 |
+
]
|
752 |
+
},
|
753 |
+
"execution_count": 44,
|
754 |
+
"metadata": {},
|
755 |
+
"output_type": "execute_result"
|
756 |
+
}
|
757 |
+
],
|
758 |
+
"source": [
|
759 |
+
"model.output_shape"
|
760 |
+
]
|
761 |
+
},
|
762 |
+
{
|
763 |
+
"cell_type": "markdown",
|
764 |
+
"id": "2ec02176-5c26-46c3-aff7-8352e6563c7d",
|
765 |
+
"metadata": {
|
766 |
+
"tags": []
|
767 |
+
},
|
768 |
+
"source": [
|
769 |
+
"# 4. Setup Training Options and Train"
|
770 |
+
]
|
771 |
+
},
|
772 |
+
{
|
773 |
+
"cell_type": "code",
|
774 |
+
"execution_count": 45,
|
775 |
+
"id": "ab015fd0-7fb4-4d5d-9fa2-30a05dbd515a",
|
776 |
+
"metadata": {
|
777 |
+
"tags": []
|
778 |
+
},
|
779 |
+
"outputs": [],
|
780 |
+
"source": [
|
781 |
+
"def scheduler(epoch, lr):\n",
|
782 |
+
" if epoch < 30:\n",
|
783 |
+
" return lr\n",
|
784 |
+
" else:\n",
|
785 |
+
" return lr * tf.math.exp(-0.1)"
|
786 |
+
]
|
787 |
+
},
|
788 |
+
{
|
789 |
+
"cell_type": "code",
|
790 |
+
"execution_count": 46,
|
791 |
+
"id": "c564d5c9-db54-4e88-b311-9aeab7fb3e69",
|
792 |
+
"metadata": {
|
793 |
+
"tags": []
|
794 |
+
},
|
795 |
+
"outputs": [],
|
796 |
+
"source": [
|
797 |
+
"def CTCLoss(y_true, y_pred):\n",
|
798 |
+
" batch_len = tf.cast(tf.shape(y_true)[0], dtype=\"int64\")\n",
|
799 |
+
" input_length = tf.cast(tf.shape(y_pred)[1], dtype=\"int64\")\n",
|
800 |
+
" label_length = tf.cast(tf.shape(y_true)[1], dtype=\"int64\")\n",
|
801 |
+
"\n",
|
802 |
+
" input_length = input_length * tf.ones(shape=(batch_len, 1), dtype=\"int64\")\n",
|
803 |
+
" label_length = label_length * tf.ones(shape=(batch_len, 1), dtype=\"int64\")\n",
|
804 |
+
"\n",
|
805 |
+
" loss = tf.keras.backend.ctc_batch_cost(y_true, y_pred, input_length, label_length)\n",
|
806 |
+
" return loss"
|
807 |
+
]
|
808 |
+
},
|
809 |
+
{
|
810 |
+
"cell_type": "code",
|
811 |
+
"execution_count": 47,
|
812 |
+
"id": "a26dc3fc-a19c-4378-bd8c-e2b597a1d15c",
|
813 |
+
"metadata": {
|
814 |
+
"tags": []
|
815 |
+
},
|
816 |
+
"outputs": [],
|
817 |
+
"source": [
|
818 |
+
"class ProduceExample(tf.keras.callbacks.Callback): \n",
|
819 |
+
" def __init__(self, dataset) -> None: \n",
|
820 |
+
" self.dataset = dataset.as_numpy_iterator()\n",
|
821 |
+
" \n",
|
822 |
+
" def on_epoch_end(self, epoch, logs=None) -> None:\n",
|
823 |
+
" data = self.dataset.next()\n",
|
824 |
+
" yhat = self.model.predict(data[0])\n",
|
825 |
+
" decoded = tf.keras.backend.ctc_decode(yhat, [75,75], greedy=False)[0][0].numpy()\n",
|
826 |
+
" for x in range(len(yhat)): \n",
|
827 |
+
" print('Original:', tf.strings.reduce_join(num_to_char(data[1][x])).numpy().decode('utf-8'))\n",
|
828 |
+
" print('Prediction:', tf.strings.reduce_join(num_to_char(decoded[x])).numpy().decode('utf-8'))\n",
|
829 |
+
" print('~'*100)"
|
830 |
+
]
|
831 |
+
},
|
832 |
+
{
|
833 |
+
"cell_type": "code",
|
834 |
+
"execution_count": 48,
|
835 |
+
"id": "04be90d8-2482-46f9-b513-d5f4f8001c7e",
|
836 |
+
"metadata": {
|
837 |
+
"tags": []
|
838 |
+
},
|
839 |
+
"outputs": [],
|
840 |
+
"source": [
|
841 |
+
"model.compile(optimizer=Adam(learning_rate=0.0001), loss=CTCLoss)"
|
842 |
+
]
|
843 |
+
},
|
844 |
+
{
|
845 |
+
"cell_type": "code",
|
846 |
+
"execution_count": 49,
|
847 |
+
"id": "eab49367-3f1e-4464-ae76-dbd07549d97e",
|
848 |
+
"metadata": {
|
849 |
+
"tags": []
|
850 |
+
},
|
851 |
+
"outputs": [],
|
852 |
+
"source": [
|
853 |
+
"checkpoint_callback = ModelCheckpoint(os.path.join('models','checkpoint'), monitor='loss', save_weights_only=True) "
|
854 |
+
]
|
855 |
+
},
|
856 |
+
{
|
857 |
+
"cell_type": "code",
|
858 |
+
"execution_count": 50,
|
859 |
+
"id": "e085a632-d464-46ef-8777-959cad4adb2c",
|
860 |
+
"metadata": {
|
861 |
+
"tags": []
|
862 |
+
},
|
863 |
+
"outputs": [],
|
864 |
+
"source": [
|
865 |
+
"schedule_callback = LearningRateScheduler(scheduler)"
|
866 |
+
]
|
867 |
+
},
|
868 |
+
{
|
869 |
+
"cell_type": "code",
|
870 |
+
"execution_count": 51,
|
871 |
+
"id": "48eca991-90ab-4592-8a79-b50e9ca015b6",
|
872 |
+
"metadata": {
|
873 |
+
"tags": []
|
874 |
+
},
|
875 |
+
"outputs": [],
|
876 |
+
"source": [
|
877 |
+
"example_callback = ProduceExample(test)"
|
878 |
+
]
|
879 |
+
},
|
880 |
+
{
|
881 |
+
"cell_type": "code",
|
882 |
+
"execution_count": 52,
|
883 |
+
"id": "8ffba483-aa61-4bbe-a15f-a73e1ddf097c",
|
884 |
+
"metadata": {
|
885 |
+
"tags": []
|
886 |
+
},
|
887 |
+
"outputs": [
|
888 |
+
{
|
889 |
+
"name": "stdout",
|
890 |
+
"output_type": "stream",
|
891 |
+
"text": [
|
892 |
+
"Epoch 1/100\n",
|
893 |
+
" 2/450 [..............................] - ETA: 3:03 - loss: 213.9969 "
|
894 |
+
]
|
895 |
+
},
|
896 |
+
{
|
897 |
+
"name": "stderr",
|
898 |
+
"output_type": "stream",
|
899 |
+
"text": [
|
900 |
+
"\n",
|
901 |
+
"KeyboardInterrupt\n",
|
902 |
+
"\n"
|
903 |
+
]
|
904 |
+
}
|
905 |
+
],
|
906 |
+
"source": [
|
907 |
+
"model.fit(train, validation_data=test, epochs=100, callbacks=[checkpoint_callback, schedule_callback, example_callback])"
|
908 |
+
]
|
909 |
+
},
|
910 |
+
{
|
911 |
+
"cell_type": "markdown",
|
912 |
+
"id": "fa8ee94b-89f7-4733-8a0c-a86f86ff590a",
|
913 |
+
"metadata": {
|
914 |
+
"tags": []
|
915 |
+
},
|
916 |
+
"source": [
|
917 |
+
"# 5. Make a Prediction "
|
918 |
+
]
|
919 |
+
},
|
920 |
+
{
|
921 |
+
"cell_type": "code",
|
922 |
+
"execution_count": null,
|
923 |
+
"id": "01fa7204-ce0e-49a8-8dbd-14fe5dfead40",
|
924 |
+
"metadata": {
|
925 |
+
"tags": []
|
926 |
+
},
|
927 |
+
"outputs": [],
|
928 |
+
"source": [
|
929 |
+
"url = 'https://drive.google.com/uc?id=1vWscXs4Vt0a_1IH1-ct2TCgXAZT-N3_Y'\n",
|
930 |
+
"output = 'checkpoints.zip'\n",
|
931 |
+
"gdown.download(url, output, quiet=False)\n",
|
932 |
+
"gdown.extractall('checkpoints.zip', 'models')"
|
933 |
+
]
|
934 |
+
},
|
935 |
+
{
|
936 |
+
"cell_type": "code",
|
937 |
+
"execution_count": 53,
|
938 |
+
"id": "247f664d-3c87-4e96-946e-930dad0e1c2c",
|
939 |
+
"metadata": {
|
940 |
+
"tags": []
|
941 |
+
},
|
942 |
+
"outputs": [
|
943 |
+
{
|
944 |
+
"data": {
|
945 |
+
"text/plain": [
|
946 |
+
"<tensorflow.python.checkpoint.checkpoint.CheckpointLoadStatus at 0x10cfb56c6a0>"
|
947 |
+
]
|
948 |
+
},
|
949 |
+
"execution_count": 53,
|
950 |
+
"metadata": {},
|
951 |
+
"output_type": "execute_result"
|
952 |
+
}
|
953 |
+
],
|
954 |
+
"source": [
|
955 |
+
"model.load_weights('models/checkpoint')"
|
956 |
+
]
|
957 |
+
},
|
958 |
+
{
|
959 |
+
"cell_type": "code",
|
960 |
+
"execution_count": 54,
|
961 |
+
"id": "7f8d689f-b7bb-443c-9b88-e40c1d800828",
|
962 |
+
"metadata": {
|
963 |
+
"tags": []
|
964 |
+
},
|
965 |
+
"outputs": [],
|
966 |
+
"source": [
|
967 |
+
"test_data = test.as_numpy_iterator()"
|
968 |
+
]
|
969 |
+
},
|
970 |
+
{
|
971 |
+
"cell_type": "code",
|
972 |
+
"execution_count": 56,
|
973 |
+
"id": "38546dc2-bee9-4837-864b-8a884df40ad7",
|
974 |
+
"metadata": {
|
975 |
+
"tags": []
|
976 |
+
},
|
977 |
+
"outputs": [],
|
978 |
+
"source": [
|
979 |
+
"sample = test_data.next()"
|
980 |
+
]
|
981 |
+
},
|
982 |
+
{
|
983 |
+
"cell_type": "code",
|
984 |
+
"execution_count": 57,
|
985 |
+
"id": "a43621f0-229d-4c0d-9554-9c3a3da9c61a",
|
986 |
+
"metadata": {
|
987 |
+
"tags": []
|
988 |
+
},
|
989 |
+
"outputs": [
|
990 |
+
{
|
991 |
+
"name": "stdout",
|
992 |
+
"output_type": "stream",
|
993 |
+
"text": [
|
994 |
+
"1/1 [==============================] - 1s 973ms/step\n"
|
995 |
+
]
|
996 |
+
}
|
997 |
+
],
|
998 |
+
"source": [
|
999 |
+
"yhat = model.predict(sample[0])"
|
1000 |
+
]
|
1001 |
+
},
|
1002 |
+
{
|
1003 |
+
"cell_type": "code",
|
1004 |
+
"execution_count": 58,
|
1005 |
+
"id": "ea462999-f87e-4a7e-a057-5be7b6d8f7d5",
|
1006 |
+
"metadata": {
|
1007 |
+
"tags": []
|
1008 |
+
},
|
1009 |
+
"outputs": [
|
1010 |
+
{
|
1011 |
+
"name": "stdout",
|
1012 |
+
"output_type": "stream",
|
1013 |
+
"text": [
|
1014 |
+
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ REAL TEXT\n"
|
1015 |
+
]
|
1016 |
+
},
|
1017 |
+
{
|
1018 |
+
"data": {
|
1019 |
+
"text/plain": [
|
1020 |
+
"[<tf.Tensor: shape=(), dtype=string, numpy=b'place white at x six please'>,\n",
|
1021 |
+
" <tf.Tensor: shape=(), dtype=string, numpy=b'lay blue in x four now'>]"
|
1022 |
+
]
|
1023 |
+
},
|
1024 |
+
"execution_count": 58,
|
1025 |
+
"metadata": {},
|
1026 |
+
"output_type": "execute_result"
|
1027 |
+
}
|
1028 |
+
],
|
1029 |
+
"source": [
|
1030 |
+
"print('~'*100, 'REAL TEXT')\n",
|
1031 |
+
"[tf.strings.reduce_join([num_to_char(word) for word in sentence]) for sentence in sample[1]]"
|
1032 |
+
]
|
1033 |
+
},
|
1034 |
+
{
|
1035 |
+
"cell_type": "code",
|
1036 |
+
"execution_count": 59,
|
1037 |
+
"id": "82bd4c10-dd6e-411e-834b-2a3b43fd12c5",
|
1038 |
+
"metadata": {
|
1039 |
+
"tags": []
|
1040 |
+
},
|
1041 |
+
"outputs": [],
|
1042 |
+
"source": [
|
1043 |
+
"decoded = tf.keras.backend.ctc_decode(yhat, input_length=[75,75], greedy=True)[0][0].numpy()"
|
1044 |
+
]
|
1045 |
+
},
|
1046 |
+
{
|
1047 |
+
"cell_type": "code",
|
1048 |
+
"execution_count": 60,
|
1049 |
+
"id": "5d68ac46-c90b-4eab-a709-f19aee569ff5",
|
1050 |
+
"metadata": {
|
1051 |
+
"tags": []
|
1052 |
+
},
|
1053 |
+
"outputs": [
|
1054 |
+
{
|
1055 |
+
"name": "stdout",
|
1056 |
+
"output_type": "stream",
|
1057 |
+
"text": [
|
1058 |
+
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PREDICTIONS\n"
|
1059 |
+
]
|
1060 |
+
},
|
1061 |
+
{
|
1062 |
+
"data": {
|
1063 |
+
"text/plain": [
|
1064 |
+
"[<tf.Tensor: shape=(), dtype=string, numpy=b'place white at x six please'>,\n",
|
1065 |
+
" <tf.Tensor: shape=(), dtype=string, numpy=b'lay blue in x four now'>]"
|
1066 |
+
]
|
1067 |
+
},
|
1068 |
+
"execution_count": 60,
|
1069 |
+
"metadata": {},
|
1070 |
+
"output_type": "execute_result"
|
1071 |
+
}
|
1072 |
+
],
|
1073 |
+
"source": [
|
1074 |
+
"print('~'*100, 'PREDICTIONS')\n",
|
1075 |
+
"[tf.strings.reduce_join([num_to_char(word) for word in sentence]) for sentence in decoded]"
|
1076 |
+
]
|
1077 |
+
},
|
1078 |
+
{
|
1079 |
+
"cell_type": "markdown",
|
1080 |
+
"id": "64622f98-e99b-4fed-a2cc-f0da82eb5431",
|
1081 |
+
"metadata": {},
|
1082 |
+
"source": [
|
1083 |
+
"# Test on a Video"
|
1084 |
+
]
|
1085 |
+
},
|
1086 |
+
{
|
1087 |
+
"cell_type": "code",
|
1088 |
+
"execution_count": 61,
|
1089 |
+
"id": "a8b0c4d0-2031-4331-b91d-d87b1ae6f6e2",
|
1090 |
+
"metadata": {},
|
1091 |
+
"outputs": [],
|
1092 |
+
"source": [
|
1093 |
+
"sample = load_data(tf.convert_to_tensor('.\\\\data\\\\s1\\\\bras9a.mpg'))"
|
1094 |
+
]
|
1095 |
+
},
|
1096 |
+
{
|
1097 |
+
"cell_type": "code",
|
1098 |
+
"execution_count": 62,
|
1099 |
+
"id": "0cca60e4-47a9-4683-8a75-48f4684f723d",
|
1100 |
+
"metadata": {},
|
1101 |
+
"outputs": [
|
1102 |
+
{
|
1103 |
+
"name": "stdout",
|
1104 |
+
"output_type": "stream",
|
1105 |
+
"text": [
|
1106 |
+
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ REAL TEXT\n"
|
1107 |
+
]
|
1108 |
+
},
|
1109 |
+
{
|
1110 |
+
"data": {
|
1111 |
+
"text/plain": [
|
1112 |
+
"[<tf.Tensor: shape=(), dtype=string, numpy=b'bin red at s nine again'>]"
|
1113 |
+
]
|
1114 |
+
},
|
1115 |
+
"execution_count": 62,
|
1116 |
+
"metadata": {},
|
1117 |
+
"output_type": "execute_result"
|
1118 |
+
}
|
1119 |
+
],
|
1120 |
+
"source": [
|
1121 |
+
"print('~'*100, 'REAL TEXT')\n",
|
1122 |
+
"[tf.strings.reduce_join([num_to_char(word) for word in sentence]) for sentence in [sample[1]]]"
|
1123 |
+
]
|
1124 |
+
},
|
1125 |
+
{
|
1126 |
+
"cell_type": "code",
|
1127 |
+
"execution_count": 63,
|
1128 |
+
"id": "8cc5037c-1e32-435c-b0cc-01e1fb3b863c",
|
1129 |
+
"metadata": {},
|
1130 |
+
"outputs": [
|
1131 |
+
{
|
1132 |
+
"name": "stdout",
|
1133 |
+
"output_type": "stream",
|
1134 |
+
"text": [
|
1135 |
+
"1/1 [==============================] - 1s 720ms/step\n"
|
1136 |
+
]
|
1137 |
+
}
|
1138 |
+
],
|
1139 |
+
"source": [
|
1140 |
+
"yhat = model.predict(tf.expand_dims(sample[0], axis=0))"
|
1141 |
+
]
|
1142 |
+
},
|
1143 |
+
{
|
1144 |
+
"cell_type": "code",
|
1145 |
+
"execution_count": 64,
|
1146 |
+
"id": "22c4f77d-715d-409f-bc5e-3ebe48704e8f",
|
1147 |
+
"metadata": {},
|
1148 |
+
"outputs": [],
|
1149 |
+
"source": [
|
1150 |
+
"decoded = tf.keras.backend.ctc_decode(yhat, input_length=[75], greedy=True)[0][0].numpy()"
|
1151 |
+
]
|
1152 |
+
},
|
1153 |
+
{
|
1154 |
+
"cell_type": "code",
|
1155 |
+
"execution_count": 65,
|
1156 |
+
"id": "e4d12ecc-b634-499e-a4bc-db9f010835fb",
|
1157 |
+
"metadata": {},
|
1158 |
+
"outputs": [
|
1159 |
+
{
|
1160 |
+
"name": "stdout",
|
1161 |
+
"output_type": "stream",
|
1162 |
+
"text": [
|
1163 |
+
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PREDICTIONS\n"
|
1164 |
+
]
|
1165 |
+
},
|
1166 |
+
{
|
1167 |
+
"data": {
|
1168 |
+
"text/plain": [
|
1169 |
+
"[<tf.Tensor: shape=(), dtype=string, numpy=b'bin red at s nine again'>]"
|
1170 |
+
]
|
1171 |
+
},
|
1172 |
+
"execution_count": 65,
|
1173 |
+
"metadata": {},
|
1174 |
+
"output_type": "execute_result"
|
1175 |
+
}
|
1176 |
+
],
|
1177 |
+
"source": [
|
1178 |
+
"print('~'*100, 'PREDICTIONS')\n",
|
1179 |
+
"[tf.strings.reduce_join([num_to_char(word) for word in sentence]) for sentence in decoded]"
|
1180 |
+
]
|
1181 |
+
},
|
1182 |
+
{
|
1183 |
+
"cell_type": "code",
|
1184 |
+
"execution_count": null,
|
1185 |
+
"id": "551dfea2-de6b-4400-b71a-a17631529e3f",
|
1186 |
+
"metadata": {},
|
1187 |
+
"outputs": [],
|
1188 |
+
"source": []
|
1189 |
+
},
|
1190 |
+
{
|
1191 |
+
"cell_type": "code",
|
1192 |
+
"execution_count": null,
|
1193 |
+
"id": "fa95863d-3832-47bf-8a77-ebaa38054ace",
|
1194 |
+
"metadata": {},
|
1195 |
+
"outputs": [],
|
1196 |
+
"source": []
|
1197 |
+
}
|
1198 |
+
],
|
1199 |
+
"metadata": {
|
1200 |
+
"kernelspec": {
|
1201 |
+
"display_name": "lips",
|
1202 |
+
"language": "python",
|
1203 |
+
"name": "lips"
|
1204 |
+
},
|
1205 |
+
"language_info": {
|
1206 |
+
"codemirror_mode": {
|
1207 |
+
"name": "ipython",
|
1208 |
+
"version": 3
|
1209 |
+
},
|
1210 |
+
"file_extension": ".py",
|
1211 |
+
"mimetype": "text/x-python",
|
1212 |
+
"name": "python",
|
1213 |
+
"nbconvert_exporter": "python",
|
1214 |
+
"pygments_lexer": "ipython3",
|
1215 |
+
"version": "3.9.12"
|
1216 |
+
}
|
1217 |
+
},
|
1218 |
+
"nbformat": 4,
|
1219 |
+
"nbformat_minor": 5
|
1220 |
+
}
|
.ipynb_checkpoints/train-checkpoint.py
ADDED
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
import cv2
import tensorflow as tf
import numpy as np
import imageio
import yaml

from matplotlib import pyplot as plt
from helpers import *
from typing import List
from Loader import GridLoader

# Dataset locations (video / alignment directories) come from the local,
# git-ignored config file.
with open('config.yml', 'r') as config_file_obj:
    yaml_config = yaml.safe_load(config_file_obj)

dataset_config = yaml_config['datasets']
VIDEO_DIR = dataset_config['video_dir']
ALIGNMENTS_DIR = dataset_config['alignments_dir']

# Build a dataset of usable video file paths (only videos that have a
# matching alignment transcript).
loader = GridLoader()
data = tf.data.Dataset.from_tensor_slices(loader.load_videos())

# Decode every path tensor back to a plain string (kept for debugging).
filenames = [file_path.numpy().decode('utf-8') for file_path in data]

# Shuffle once (not per iteration) so the take/skip split below is stable,
# then map each path to a (frames, alignment-tokens) pair and pad-batch
# in pairs: 75 frames per clip, alignments padded to 40 tokens.
data = data.shuffle(500, reshuffle_each_iteration=False)
data = data.map(mappable_function)
data = data.padded_batch(2, padded_shapes=(
    [75, None, None, None], [40]
))

data = data.prefetch(tf.data.AUTOTUNE)
# Train/test split: first 450 batches train, the rest test.
train = data.take(450)
test = data.skip(450)

# Pull one batch to sanity-check the pipeline end to end.
frames, alignments = data.as_numpy_iterator().next()
|
LipNet.ipynb
ADDED
The diff for this file is too large to render.
See raw diff
|
|
Loader.py
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from typing import List
|
3 |
+
|
4 |
+
import yaml
|
5 |
+
|
6 |
+
|
7 |
+
class GridLoader(object):
    """Discovers usable GRID-corpus video files.

    A video is "usable" when a matching ``.align`` transcript exists in
    the configured alignments directory and it is not blacklisted.
    """

    def __init__(self):
        # Directory layout comes from the local (git-ignored) config file.
        with open('config.yml', 'r') as config_file_obj:
            yaml_config = yaml.safe_load(config_file_obj)

        self.dataset_config = yaml_config['datasets']
        self.video_dir = self.dataset_config['video_dir']
        self.alignment_dir = self.dataset_config['alignments_dir']
        # Populated by load_videos(); None until the first scan.
        self.usable_video_filepaths = None

    def load_videos(
        self, verbose=False, blacklist=frozenset({
            'GRID-dataset/videos/s8/lgazzs.mpg',
            'GRID-dataset/videos/s8/lbwx9n.mpg'
        })
    ) -> List[str]:
        """Scan speaker directories s1..s34 and collect usable video paths.

        :param verbose: print counts of usable / unusable videos
        :param blacklist: video paths to skip (known-corrupted files)
        :return: list of .mpg paths that have a matching .align file
        """
        usable_video_filepaths = []
        videos_without_alignment = []

        for speaker_no in range(1, 35):
            speaker_dirname = f's{speaker_no}'
            speaker_dir = os.path.join(self.video_dir, speaker_dirname)

            if not os.path.exists(speaker_dir):
                # speaker does not exist (its just s21 right now)
                continue

            for video_filename in os.listdir(speaker_dir):
                if not video_filename.endswith('.mpg'):
                    continue

                # get name of file without the extension
                base_name = os.path.splitext(video_filename)[0]
                video_path = os.path.join(
                    self.video_dir, speaker_dirname, f'{base_name}.mpg'
                )

                if video_path in blacklist:
                    continue

                alignment_path = os.path.join(
                    self.alignment_dir, speaker_dirname, f'{base_name}.align'
                )

                if os.path.exists(alignment_path):
                    usable_video_filepaths.append(video_path)
                else:
                    # BUGFIX: record the video path itself; previously the
                    # (nonexistent) alignment path was appended, so the
                    # "videos without alignment" list held the wrong paths.
                    videos_without_alignment.append(video_path)

        if verbose:
            num_usable_videos = len(usable_video_filepaths)
            num_unusable_videos = len(videos_without_alignment)

            print(f'videos with alignment: {num_usable_videos}')
            print(f'videos without alignment: {num_unusable_videos}')

        self.usable_video_filepaths = usable_video_filepaths
        return usable_video_filepaths
|
70 |
+
|
71 |
+
|
72 |
+
if __name__ == '__main__':
    # Run a verbose dataset scan when executed as a script.
    GridLoader().load_videos(True)
|
README.md
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Forked from [https://github.com/nicknochnack/LipNet](https://github.com/nicknochnack/LipNet)
|
2 |
+
Using Python 3.9.17
|
3 |
+
Dataset is downloaded from [https://zenodo.org/records/3625687](https://zenodo.org/records/3625687).
|
4 |
+
Note that the videos are stored as VIDEO_DIR/s1/s1/name.mpg, so move_videos.sh was written to flatten them to VIDEO_DIR/s1/name.mpg.
|
5 |
+
|
__pycache__/Loader.cpython-39.pyc
ADDED
Binary file (1.79 kB). View file
|
|
__pycache__/helpers.cpython-39.pyc
ADDED
Binary file (2.57 kB). View file
|
|
__pycache__/model.cpython-39.pyc
ADDED
Binary file (3.64 kB). View file
|
|
app/__pycache__/modelutil.cpython-39.pyc
ADDED
Binary file (1.26 kB). View file
|
|
app/__pycache__/utils.cpython-39.pyc
ADDED
Binary file (1.94 kB). View file
|
|
app/animation.gif
ADDED
app/modelutil.py
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from tensorflow.keras.models import Sequential
|
3 |
+
from tensorflow.keras.layers import Conv3D, LSTM, Dense, Dropout, Bidirectional, MaxPool3D, Activation, Reshape, SpatialDropout3D, BatchNormalization, TimeDistributed, Flatten
|
4 |
+
|
5 |
+
def load_model() -> Sequential:
    """Reconstruct the LipNet architecture and load the trained weights.

    The layer stack must match the training script exactly: three
    Conv3D+ReLU+MaxPool3D stages, a per-frame flatten, two bidirectional
    LSTMs with dropout, and a softmax over 41 output tokens.
    """
    layers = [
        # Spatio-temporal feature extraction; pooling halves H and W only.
        Conv3D(128, 3, input_shape=(75, 46, 140, 1), padding='same'),
        Activation('relu'),
        MaxPool3D((1, 2, 2)),

        Conv3D(256, 3, padding='same'),
        Activation('relu'),
        MaxPool3D((1, 2, 2)),

        Conv3D(75, 3, padding='same'),
        Activation('relu'),
        MaxPool3D((1, 2, 2)),

        # Collapse each frame's feature map into a vector (75 timesteps).
        TimeDistributed(Flatten()),

        Bidirectional(LSTM(128, kernel_initializer='Orthogonal', return_sequences=True)),
        Dropout(.5),

        Bidirectional(LSTM(128, kernel_initializer='Orthogonal', return_sequences=True)),
        Dropout(.5),

        # 41 outputs — presumably vocabulary size + 1 CTC blank; confirm
        # against the training notebook's char_to_num.vocabulary_size().
        Dense(41, kernel_initializer='he_normal', activation='softmax'),
    ]

    model = Sequential(layers)
    model.load_weights(os.path.join('..', 'models', 'checkpoint'))
    return model
|
app/streamlitapp.py
ADDED
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Import all of the dependencies
import streamlit as st
import os
import imageio
import subprocess

import tensorflow as tf
from utils import load_data, num_to_char
from modelutil import load_model

# Set the layout to the streamlit app as wide
st.set_page_config(layout='wide')

# Setup the sidebar
with st.sidebar:
    st.image('https://www.onepointltd.com/wp-content/uploads/2020/03/inno2.png')
    st.title('LipBuddy')
    st.info('This application is originally developed from the LipNet deep learning model.')

st.title('LipNet Full Stack App')
# Generating a list of options or videos
options = os.listdir(os.path.join('..', 'data', 's1'))
selected_video = st.selectbox('Choose video', options)

# Generate two columns
col1, col2 = st.columns(2)

if options:

    # Rendering the video
    with col1:
        st.info('The video below displays the converted video in mp4 format')
        file_path = os.path.join('..', 'data', 's1', selected_video)
        # Re-encode the .mpg source into a browser-playable mp4.
        # SECURITY FIX: pass an argument list to subprocess.run instead of
        # interpolating the file name into an os.system shell string,
        # which was vulnerable to shell injection via the file name.
        subprocess.run(
            ['ffmpeg', '-i', file_path, '-vcodec', 'libx264',
             'test_video.mp4', '-y'],
            check=False,
        )

        # Rendering inside of the app; the context manager closes the
        # handle that was previously leaked.
        with open('test_video.mp4', 'rb') as video_file:
            video_bytes = video_file.read()
        st.video(video_bytes)

    with col2:
        st.info('This is all the machine learning model sees when making a prediction')
        video, annotations = load_data(tf.convert_to_tensor(file_path))
        imageio.mimsave('animation.gif', video, fps=10)
        st.image('animation.gif', width=400)

        st.info('This is the output of the machine learning model as tokens')
        model = load_model()
        yhat = model.predict(tf.expand_dims(video, axis=0))
        decoder = tf.keras.backend.ctc_decode(yhat, [75], greedy=True)[0][0].numpy()
        st.text(decoder)

        # Convert prediction to text
        st.info('Decode the raw tokens into words')
        converted_prediction = tf.strings.reduce_join(num_to_char(decoder)).numpy().decode('utf-8')
        st.text(converted_prediction)
|
57 |
+
|
app/test_video.mp4
ADDED
Binary file (111 kB). View file
|
|
app/utils.py
ADDED
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import tensorflow as tf
from typing import List
import cv2
import os

# Character inventory: letters, a few punctuation marks, digits 1-9, space.
vocab = list("abcdefghijklmnopqrstuvwxyz'?!123456789 ")

# Forward lookup: character -> integer token (empty oov_token).
char_to_num = tf.keras.layers.StringLookup(vocabulary=vocab, oov_token="")
# Mapping integers back to original characters
num_to_char = tf.keras.layers.StringLookup(
    vocabulary=char_to_num.get_vocabulary(), oov_token="", invert=True
)
|
12 |
+
|
13 |
+
def load_video(path: str) -> List[float]:
    """Read a video, crop the mouth region and return standardized frames.

    Each frame is converted to grayscale and cropped to the fixed window
    rows 190:236, cols 80:220 (46x140 pixels — assumes GRID framing;
    confirm for other datasets), then the clip is standardized to zero
    mean / unit variance.

    :param path: filesystem path to the video file
    :return: float32 tensor of shape (num_frames, 46, 140, 1)
    """
    cap = cv2.VideoCapture(path)
    frames = []
    for _ in range(int(cap.get(cv2.CAP_PROP_FRAME_COUNT))):
        ret, frame = cap.read()
        if not ret:
            # BUGFIX: the frame-count metadata can overstate the number of
            # decodable frames; previously a failed read passed frame=None
            # into rgb_to_grayscale and crashed. Stop at end of stream.
            break
        frame = tf.image.rgb_to_grayscale(frame)
        frames.append(frame[190:236, 80:220, :])
    cap.release()

    # Standardize across the whole clip.
    mean = tf.math.reduce_mean(frames)
    std = tf.math.reduce_std(tf.cast(frames, tf.float32))
    return tf.cast((frames - mean), tf.float32) / std
|
26 |
+
|
27 |
+
def load_alignments(path: str) -> List[str]:
    """Parse a GRID .align transcript into a tensor of character tokens.

    Each line is "<start> <end> <word>"; silence markers ('sil') are
    dropped and remaining words are joined with single spaces. The space
    inserted before the first word is stripped by the trailing ``[1:]``.

    :param path: filesystem path to the .align file
    :return: 1-D int tensor of character token ids
    """
    with open(path, 'r') as f:
        lines = f.readlines()
    tokens = []
    for line in lines:
        line = line.split()
        if line[2] != 'sil':
            # extend() appends in place; the original rebuilt the whole
            # list each iteration via [*tokens, ...], which is quadratic.
            tokens.extend([' ', line[2]])
    return char_to_num(tf.reshape(tf.strings.unicode_split(tokens, input_encoding='UTF-8'), (-1)))[1:]
|
37 |
+
|
38 |
+
def load_data(path: str):
    """Load the (video frames, alignment ids) pair for one sample.

    :param path: a tf.string tensor holding the sample path.
    :return: tuple (frames, alignments).
    """
    path = bytes.decode(path.numpy())
    # os.path handles the platform's separator correctly. The previous
    # code split on '/' and then unconditionally re-split on '\\', so on
    # POSIX systems file_name still contained directory components.
    file_name = os.path.splitext(os.path.basename(path))[0]
    video_path = os.path.join('..', 'data', 's1', f'{file_name}.mpg')
    alignment_path = os.path.join('..', 'data', 'alignments', 's1', f'{file_name}.align')
    frames = load_video(video_path)
    alignments = load_alignments(alignment_path)

    return frames, alignments
|
check_videos.py
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from tqdm.auto import tqdm
from Loader import GridLoader
from helpers import *

# Scan every GRID video and record the ones that fail to decode, so the
# training pipeline can blacklist them (see train.py).
loader = GridLoader()
all_videos = loader.load_videos(verbose=True)
print(all_videos[-10:])

corrupted_videos = []

for video_path in tqdm(all_videos):
    try:
        load_video(video_path)
    except (cv2.error, ValueError):
        # load_video raises ValueError for undecodable frames.
        corrupted_videos.append(video_path)
        print('corrupted video', video_path)

print(f'corrupted videos: {len(corrupted_videos)}')
# Context manager guarantees the record file is flushed and closed even
# if the write fails (previously it relied on manual close()).
with open('corrupted.txt', 'w') as record_file:
    record_file.write('\n'.join(corrupted_videos))
|
config.example.yml
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
datasets:
|
2 |
+
video_dir: 'GRID-dataset/videos'
|
3 |
+
audio_dir: 'GRID-dataset/audio_25k/audio_25k'
|
4 |
+
alignments_dir: 'GRID-dataset/alignments/alignments'
|
corrupted.txt
ADDED
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
GRID-dataset/videos/s8/lgazzs.mpg
|
2 |
+
GRID-dataset/videos/s8/lbwx9n.mpg
|
3 |
+
GRID-dataset/videos/s8/lwiyzs.mpg
|
4 |
+
GRID-dataset/videos/s8/lrwy5n.mpg
|
5 |
+
GRID-dataset/videos/s8/pbwv6a.mpg
|
6 |
+
GRID-dataset/videos/s8/bgwn6s.mpg
|
7 |
+
GRID-dataset/videos/s8/lbip3n.mpg
|
8 |
+
GRID-dataset/videos/s8/sgih5n.mpg
|
9 |
+
GRID-dataset/videos/s8/lbbx5n.mpg
|
10 |
+
GRID-dataset/videos/s8/bgim5p.mpg
|
11 |
+
GRID-dataset/videos/s8/brwf9n.mpg
|
12 |
+
GRID-dataset/videos/s8/lrak2a.mpg
|
13 |
+
GRID-dataset/videos/s8/lbbd6a.mpg
|
14 |
+
GRID-dataset/videos/s8/sgbo7n.mpg
|
15 |
+
GRID-dataset/videos/s8/pgbj9n.mpg
|
16 |
+
GRID-dataset/videos/s8/bwwa2s.mpg
|
17 |
+
GRID-dataset/videos/s8/srbh4a.mpg
|
18 |
+
GRID-dataset/videos/s8/bganza.mpg
|
19 |
+
GRID-dataset/videos/s8/bbie3p.mpg
|
20 |
+
GRID-dataset/videos/s8/pwwx7p.mpg
|
21 |
+
GRID-dataset/videos/s8/sran1n.mpg
|
22 |
+
GRID-dataset/videos/s8/pwwq2s.mpg
|
23 |
+
GRID-dataset/videos/s8/pbbb8s.mpg
|
24 |
+
GRID-dataset/videos/s8/lbwy2a.mpg
|
25 |
+
GRID-dataset/videos/s8/swia6s.mpg
|
26 |
+
GRID-dataset/videos/s8/prwc9p.mpg
|
27 |
+
GRID-dataset/videos/s8/bgag6a.mpg
|
28 |
+
GRID-dataset/videos/s8/prav1n.mpg
|
29 |
+
GRID-dataset/videos/s8/braz5p.mpg
|
30 |
+
GRID-dataset/videos/s8/sgih8a.mpg
|
31 |
+
GRID-dataset/videos/s8/bbir1p.mpg
|
32 |
+
GRID-dataset/videos/s8/pgwe1p.mpg
|
33 |
+
GRID-dataset/videos/s8/lrbd9n.mpg
|
34 |
+
GRID-dataset/videos/s8/pbioza.mpg
|
35 |
+
GRID-dataset/videos/s8/lrad8a.mpg
|
36 |
+
GRID-dataset/videos/s8/lgbf4a.mpg
|
37 |
+
GRID-dataset/videos/s8/sbwn6a.mpg
|
38 |
+
GRID-dataset/videos/s8/pwav7n.mpg
|
39 |
+
GRID-dataset/videos/s8/lrid4a.mpg
|
40 |
+
GRID-dataset/videos/s8/pwii6s.mpg
|
41 |
+
GRID-dataset/videos/s8/bwwt5p.mpg
|
42 |
+
GRID-dataset/videos/s8/swit9p.mpg
|
43 |
+
GRID-dataset/videos/s8/bwaf7n.mpg
|
44 |
+
GRID-dataset/videos/s8/sgbv2s.mpg
|
45 |
+
GRID-dataset/videos/s8/pbwc1n.mpg
|
46 |
+
GRID-dataset/videos/s8/lraq3n.mpg
|
47 |
+
GRID-dataset/videos/s8/pgwd9n.mpg
|
48 |
+
GRID-dataset/videos/s8/prii2a.mpg
|
49 |
+
GRID-dataset/videos/s8/pgbq6a.mpg
|
50 |
+
GRID-dataset/videos/s8/pwic2s.mpg
|
51 |
+
GRID-dataset/videos/s8/prwx2a.mpg
|
52 |
+
GRID-dataset/videos/s8/pwbqza.mpg
|
53 |
+
GRID-dataset/videos/s8/brie8s.mpg
|
54 |
+
GRID-dataset/videos/s8/sgbi4s.mpg
|
55 |
+
GRID-dataset/videos/s8/swwb8s.mpg
|
56 |
+
GRID-dataset/videos/s8/prio4s.mpg
|
57 |
+
GRID-dataset/videos/s8/sbaaza.mpg
|
58 |
+
GRID-dataset/videos/s8/bwbg1n.mpg
|
59 |
+
GRID-dataset/videos/s8/pwap4s.mpg
|
60 |
+
GRID-dataset/videos/s8/prwp5n.mpg
|
61 |
+
GRID-dataset/videos/s8/lbbx6s.mpg
|
62 |
+
GRID-dataset/videos/s8/lrid1n.mpg
|
63 |
+
GRID-dataset/videos/s8/lbwd8s.mpg
|
64 |
+
GRID-dataset/videos/s8/pwic1n.mpg
|
65 |
+
GRID-dataset/videos/s8/bbirzs.mpg
|
66 |
+
GRID-dataset/videos/s8/swwv2a.mpg
|
67 |
+
GRID-dataset/videos/s8/lgar7p.mpg
|
68 |
+
GRID-dataset/videos/s8/lbaj5p.mpg
|
69 |
+
GRID-dataset/videos/s8/lbbq2s.mpg
|
70 |
+
GRID-dataset/videos/s8/lgaz1p.mpg
|
71 |
+
GRID-dataset/videos/s8/bgwa8s.mpg
|
72 |
+
GRID-dataset/videos/s8/lbip6a.mpg
|
73 |
+
GRID-dataset/videos/s8/prao9p.mpg
|
74 |
+
GRID-dataset/videos/s8/pwbj4s.mpg
|
75 |
+
GRID-dataset/videos/s8/lgir2s.mpg
|
76 |
+
GRID-dataset/videos/s8/pwbj6a.mpg
|
77 |
+
GRID-dataset/videos/s8/srwb1n.mpg
|
78 |
+
GRID-dataset/videos/s8/sgau7n.mpg
|
79 |
+
GRID-dataset/videos/s8/bbar6a.mpg
|
80 |
+
GRID-dataset/videos/s8/bbay9p.mpg
|
81 |
+
GRID-dataset/videos/s8/lwid8s.mpg
|
82 |
+
GRID-dataset/videos/s8/pgbd7p.mpg
|
83 |
+
GRID-dataset/videos/s8/sgao6a.mpg
|
84 |
+
GRID-dataset/videos/s8/bbik5n.mpg
|
85 |
+
GRID-dataset/videos/s8/lgiy7p.mpg
|
86 |
+
GRID-dataset/videos/s8/lwwl6a.mpg
|
87 |
+
GRID-dataset/videos/s8/pwbp8s.mpg
|
88 |
+
GRID-dataset/videos/s8/swihzs.mpg
|
89 |
+
GRID-dataset/videos/s8/brbf7p.mpg
|
90 |
+
GRID-dataset/videos/s8/lwak7p.mpg
|
91 |
+
GRID-dataset/videos/s8/brwg2a.mpg
|
92 |
+
GRID-dataset/videos/s8/lwwz3p.mpg
|
93 |
+
GRID-dataset/videos/s8/prwj3p.mpg
|
94 |
+
GRID-dataset/videos/s8/sgai1p.mpg
|
95 |
+
GRID-dataset/videos/s8/pbau5n.mpg
|
96 |
+
GRID-dataset/videos/s8/swbh8s.mpg
|
97 |
+
GRID-dataset/videos/s8/brbz9p.mpg
|
98 |
+
GRID-dataset/videos/s8/brbm1p.mpg
|
99 |
+
GRID-dataset/videos/s8/srag8s.mpg
|
100 |
+
GRID-dataset/videos/s8/lwae1n.mpg
|
101 |
+
GRID-dataset/videos/s8/lrip9n.mpg
|
102 |
+
GRID-dataset/videos/s8/srbh1n.mpg
|
103 |
+
GRID-dataset/videos/s8/lwae4a.mpg
|
104 |
+
GRID-dataset/videos/s8/swih2a.mpg
|
105 |
+
GRID-dataset/videos/s8/pbab6a.mpg
|
106 |
+
GRID-dataset/videos/s8/sbigza.mpg
|
107 |
+
GRID-dataset/videos/s8/pwiv5p.mpg
|
data.zip
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:d22a1412335757d8fac9e4bfb8cbf9ed05067e1fec82c384c461dd7522a78fa1
|
3 |
+
size 423447196
|
data/s1/Thumbs.db
ADDED
Binary file (673 kB). View file
|
|
helpers.py
ADDED
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import cv2
|
3 |
+
import tensorflow as tf
|
4 |
+
import numpy as np
|
5 |
+
import yaml
|
6 |
+
|
7 |
+
from typing import List
|
8 |
+
|
9 |
+
# Pull dataset locations from the project-level YAML configuration
# (see config.example.yml for the expected layout).
with open('config.yml', 'r') as config_file_obj:
    yaml_config = yaml.safe_load(config_file_obj)

dataset_config = yaml_config['datasets']
VIDEO_DIR = dataset_config['video_dir']
ALIGNMENTS_DIR = dataset_config['alignments_dir']

# Character inventory for transcript tokenization.
vocab = list("abcdefghijklmnopqrstuvwxyz'?!123456789 ")
# char -> id, and the matching inverse id -> char lookup.
char_to_num = tf.keras.layers.StringLookup(vocabulary=vocab, oov_token="")
num_to_char = tf.keras.layers.StringLookup(
    vocabulary=char_to_num.get_vocabulary(), oov_token="", invert=True
)
|
21 |
+
|
22 |
+
|
23 |
+
def load_video(path: str) -> List[float]:
    """Read a video, crop the mouth region per frame and standardize.

    Frames are grayscaled and cropped to rows 190:236, cols 80:220
    (46x140 — presumably the mouth region; confirm against the dataset),
    then normalized to zero mean / unit variance.

    :param path: filesystem path to the video file.
    :return: float32 tensor of normalized grayscale frame crops.
    :raises ValueError: if a frame cannot be decoded — the same exception
        type check_videos.py already catches to flag corrupted files.
    """
    cap = cv2.VideoCapture(path)
    frames = []
    try:
        for _ in range(int(cap.get(cv2.CAP_PROP_FRAME_COUNT))):
            ret, frame = cap.read()
            if not ret:
                # Previously frame=None flowed into rgb_to_grayscale and
                # produced an opaque TF error; fail with a clear message.
                raise ValueError(f'failed to read frame from {path}')
            frame = tf.image.rgb_to_grayscale(frame)
            frames.append(frame[190:236, 80:220, :])
    finally:
        # Release the capture handle even when decoding raises.
        cap.release()

    mean = tf.math.reduce_mean(frames)
    std = tf.math.reduce_std(tf.cast(frames, tf.float32))
    return tf.cast((frames - mean), tf.float32) / std
|
35 |
+
|
36 |
+
|
37 |
+
def load_alignments(path: str) -> List[str]:
    """Parse an .align transcript file into a tensor of character ids.

    The word is the third whitespace-separated column of each line;
    'sil' (silence) segments are skipped.

    :param path: path to the alignment file.
    :return: 1-D tensor of character ids (leading separator dropped).
    """
    with open(path, 'r') as f:
        lines = f.readlines()

    tokens = []
    for line in lines:
        parts = line.split()
        if parts[2] != 'sil':
            # extend() appends in place; the previous [*tokens, ...]
            # rebuilt the list on every word (quadratic).
            tokens.extend([' ', parts[2]])

    # [1:] drops the space inserted before the first word.
    return char_to_num(tf.reshape(
        tf.strings.unicode_split(tokens, input_encoding='UTF-8'), (-1)
    ))[1:]
|
50 |
+
|
51 |
+
|
52 |
+
def load_data(tf_path):
    """Resolve a sample path into its (frames, alignments) pair.

    The sample's speaker directory and base name are extracted from the
    incoming tensor path and re-rooted under VIDEO_DIR / ALIGNMENTS_DIR.

    :param tf_path: tf.string tensor holding the sample's video path.
    :return: tuple (frames, alignments).
    """
    sample_path = tf_path.numpy().decode('utf-8')

    # speaker dir (e.g. 's8') and extension-free file name
    speaker_dir = os.path.basename(os.path.dirname(sample_path))
    stem = os.path.splitext(os.path.basename(sample_path))[0]
    relative_base = os.path.join(speaker_dir, stem)

    video_path = os.path.join(VIDEO_DIR, f'{relative_base}.mpg')
    alignment_path = os.path.join(
        ALIGNMENTS_DIR, f'{relative_base}.align'
    )

    try:
        frames = load_video(video_path)
    except Exception:
        # Surface which file failed before propagating the error.
        print('BAD_VIDEO', video_path)
        raise

    alignments = load_alignments(alignment_path)
    return frames, alignments
|
78 |
+
|
79 |
+
|
80 |
+
def mappable_function(path: str) -> List[str]:
    """Adapter so load_data (eager Python) can run inside tf.data.map.

    :param path: tf.string tensor with the sample path.
    :return: (float32 frames, int64 alignment ids) as a py_function result.
    """
    return tf.py_function(load_data, [path], (tf.float32, tf.int64))
|
model.py
ADDED
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Any
|
2 |
+
|
3 |
+
import keras
|
4 |
+
|
5 |
+
from helpers import *
|
6 |
+
|
7 |
+
from tensorflow.keras.models import Sequential
|
8 |
+
from tensorflow.keras.layers import Conv3D, LSTM, Dense, Dropout, Bidirectional, MaxPool3D, Activation, Reshape, SpatialDropout3D, BatchNormalization, TimeDistributed, Flatten
|
9 |
+
from tensorflow.keras.optimizers import Adam
|
10 |
+
from tensorflow.keras.callbacks import ModelCheckpoint, LearningRateScheduler
|
11 |
+
|
12 |
+
|
13 |
+
class Predictor(keras.Model):
    """Lip-reading model: 3D convolutions over the frame stack, then
    bidirectional LSTMs, then a per-timestep character softmax trained
    with CTC loss (see CTCLoss below).
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Wrapped Sequential network; call() delegates to it.
        self.model = self.create_model()

    @classmethod
    def create_model(cls):
        """Build the Sequential network.

        Input shape (75, 46, 140, 1): 75 frames of 46x140 single-channel
        crops — matching the [190:236, 80:220] mouth crop produced by
        helpers.load_video.
        """
        model = Sequential()
        model.add(Conv3D(128, 3, input_shape=(75, 46, 140, 1), padding='same'))
        model.add(Activation('relu'))
        # Pool only spatially (1 on the time axis) so 75 timesteps survive.
        model.add(MaxPool3D((1, 2, 2)))

        model.add(Conv3D(256, 3, padding='same'))
        model.add(Activation('relu'))
        model.add(MaxPool3D((1, 2, 2)))

        model.add(Conv3D(75, 3, padding='same'))
        model.add(Activation('relu'))
        model.add(MaxPool3D((1, 2, 2)))

        # Flatten each frame's feature map to a vector, keeping time.
        model.add(TimeDistributed(Flatten()))

        model.add(Bidirectional(LSTM(
            128, kernel_initializer='Orthogonal', return_sequences=True
        )))
        model.add(Dropout(.5))

        model.add(Bidirectional(LSTM(
            128, kernel_initializer='Orthogonal', return_sequences=True
        )))
        model.add(Dropout(.5))

        # +1 output class: the extra CTC blank token expected by
        # ctc_batch_cost in CTCLoss.
        model.add(Dense(
            char_to_num.vocabulary_size() + 1,
            kernel_initializer='he_normal',
            activation='softmax'
        ))

        return model

    def call(self, *args, **kwargs):
        # Delegate the forward pass to the wrapped Sequential model.
        return self.model.call(*args, **kwargs)

    @classmethod
    def scheduler(cls, epoch, lr):
        """LearningRateScheduler hook: hold lr for the first 30 epochs,
        then decay exponentially by exp(-0.1) per epoch."""
        if epoch < 30:
            return lr
        else:
            return lr * tf.math.exp(-0.1)

    @classmethod
    def CTCLoss(cls, y_true, y_pred):
        """CTC loss between label ids y_true and per-timestep softmax
        y_pred; every sample uses its full (padded) sequence lengths."""
        batch_len = tf.cast(tf.shape(y_true)[0], dtype="int64")
        input_length = tf.cast(tf.shape(y_pred)[1], dtype="int64")
        label_length = tf.cast(tf.shape(y_true)[1], dtype="int64")

        # Broadcast the shared lengths to per-sample (batch, 1) tensors,
        # the shape ctc_batch_cost expects.
        input_length = input_length * tf.ones(shape=(batch_len, 1), dtype="int64")
        label_length = label_length * tf.ones(shape=(batch_len, 1), dtype="int64")

        loss = tf.keras.backend.ctc_batch_cost(
            y_true, y_pred, input_length, label_length
        )
        return loss
|
76 |
+
|
77 |
+
|
78 |
+
class ProduceExample(tf.keras.callbacks.Callback):
    """Callback that decodes one batch after each epoch and prints the
    ground-truth vs predicted transcripts for a qualitative check.
    """

    def __init__(self, dataset) -> None:
        # Initialize the Callback base class so Keras-managed state
        # (e.g. the model reference) is set up correctly; the original
        # skipped this call.
        super().__init__()
        # as_numpy_iterator is consumed one batch per epoch.
        self.dataset = dataset.as_numpy_iterator()

    def on_epoch_end(self, epoch, logs=None) -> None:
        data = self.dataset.next()
        yhat = self.model.predict(data[0])
        # Beam-search decode; [75, 75] = per-item sequence lengths for a
        # batch of 2 (matches padded_batch(2, ...) in train.py).
        decoded = tf.keras.backend.ctc_decode(
            yhat, [75, 75], greedy=False
        )[0][0].numpy()

        for x in range(len(yhat)):
            print('Original:', tf.strings.reduce_join(
                num_to_char(data[1][x])
            ).numpy().decode('utf-8'))
            print('Prediction:', tf.strings.reduce_join(
                num_to_char(decoded[x])
            ).numpy().decode('utf-8'))
            print('~' * 100)
|
move_videos.sh
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/bin/bash
# Flatten doubly-nested speaker folders: BASE/sN/sN/* -> BASE/sN/

# Resolve the base directory: default when no argument, one override
# allowed, anything more is a usage error.
case "$#" in
    0) BASE_DIR="GRID-dataset" ;;
    1) BASE_DIR="$1" ;;
    *)
        echo "Usage: $0 <base_directory>"
        exit 1
        ;;
esac

# Speakers s1 through s34
for i in $(seq 1 34); do
    nested="${BASE_DIR}/s${i}/s${i}"
    # Only move when the nested directory actually exists
    if [[ -d "$nested" ]]; then
        mv "$nested"/* "${BASE_DIR}/s${i}/"
    fi
done
|
requirements.txt
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
tensorflow~=2.14.0
|
2 |
+
opencv-python~=4.8.1.78
|
3 |
+
imageio~=2.31.6
|
4 |
+
Pillow~=10.0.1
|
5 |
+
ipython~=8.16.1
|
6 |
+
beautifulsoup4~=4.12.2
|
7 |
+
numpy~=1.26.1
|
8 |
+
pip~=23.0.1
|
9 |
+
tornado~=6.3.3
|
10 |
+
requests~=2.31.0
|
11 |
+
keras~=2.14.0
|
12 |
+
ipywidgets~=8.1.1
|
13 |
+
notebook~=7.0.6
|
14 |
+
PyYAML~=6.0.1
|
15 |
+
six~=1.16.0
|
16 |
+
tqdm~=4.66.1
|
17 |
+
overrides~=7.4.0
|
18 |
+
urllib3~=2.0.7
|
19 |
+
matplotlib~=3.8.0
|
20 |
+
jupyterlab~=4.0.7
|
21 |
+
tensorboard~=2.14.1
|
train.py
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
import cv2
import numpy as np
import tensorflow as tf
import imageio
import yaml

# Public callback API; the previous `keras.src.callbacks` import reached
# into keras' private package, which can break between releases.
from tensorflow.keras.callbacks import ModelCheckpoint, LearningRateScheduler
from tensorflow.keras.optimizers import Adam
from matplotlib import pyplot as plt
from helpers import *
from typing import List
from Loader import GridLoader
from model import Predictor, ProduceExample

with open('config.yml', 'r') as config_file_obj:
    yaml_config = yaml.safe_load(config_file_obj)

dataset_config = yaml_config['datasets']
VIDEO_DIR = dataset_config['video_dir']
ALIGNMENTS_DIR = dataset_config['alignments_dir']

# Videos previously flagged as undecodable by check_videos.py; the file
# handle is now closed via the context manager.
with open('corrupted.txt') as corrupted_file:
    corrupt_video_paths = set(corrupted_file.read().strip().split('\n'))

loader = GridLoader()
video_filepaths = loader.load_videos(blacklist=corrupt_video_paths)
data = tf.data.Dataset.from_tensor_slices(video_filepaths)
# (the dead loop materializing every filename into an unused list was
# removed — it forced a full pass over the dataset before training)

# reshuffle_each_iteration=False keeps the train/test split stable.
data = data.shuffle(500, reshuffle_each_iteration=False)
data = data.map(mappable_function)
# Batch of 2; frames padded along spatial dims, labels padded to 40 ids.
data = data.padded_batch(2, padded_shapes=(
    [75, None, None, None], [40]
))

data = data.prefetch(tf.data.AUTOTUNE)
# Split by batch count: first 450 batches train, the rest test.
train = data.take(450)
test = data.skip(450)

# Smoke-test the pipeline: materialize one batch before training starts.
frames, alignments = data.as_numpy_iterator().next()

predictor = Predictor()
predictor.compile(
    optimizer=Adam(learning_rate=0.0001),
    loss=predictor.CTCLoss
)

checkpoint_callback = ModelCheckpoint(
    os.path.join('models', 'checkpoint'),
    monitor='loss', save_weights_only=True
)
schedule_callback = LearningRateScheduler(predictor.scheduler)
example_callback = ProduceExample(test)

predictor.fit(
    train, validation_data=test, epochs=100,
    callbacks=[
        checkpoint_callback, schedule_callback,
        example_callback
    ]
)
|
upload.py
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from huggingface_hub import HfApi

api = HfApi()

# Reuse .gitignore entries as upload exclusions so local-only artifacts
# (datasets, venv, checkpoints) are not pushed. splitlines() avoids the
# stray empty pattern a trailing newline produced with split('\n'), and
# the context manager closes the handle (previously leaked).
with open('.gitignore') as gitignore_file:
    ignore_patterns = gitignore_file.read().splitlines()

# Upload all the content from the local folder to your remote Space.
# By default, files are uploaded at the root of the repo
api.upload_folder(
    folder_path="./",
    repo_id="SilentSpeak/lipnet",
    repo_type="model",
    ignore_patterns=ignore_patterns
)
|