Ahmed-Salah committed
Commit d625688
1 Parent(s): be1196c

Upload 57 files

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. LLMatic-main/.gitignore +9 -0
  2. LLMatic-main/LICENSE +201 -0
  3. LLMatic-main/README.md +35 -0
  4. LLMatic-main/__pycache__/datasets.cpython-312.pyc +0 -0
  5. LLMatic-main/__pycache__/datasets.cpython-39.pyc +0 -0
  6. LLMatic-main/__pycache__/evaluations.cpython-39.pyc +0 -0
  7. LLMatic-main/__pycache__/initial_net.cpython-39.pyc +0 -0
  8. LLMatic-main/__pycache__/mutation_models.cpython-312.pyc +0 -0
  9. LLMatic-main/__pycache__/mutation_models.cpython-39.pyc +0 -0
  10. LLMatic-main/__pycache__/train.cpython-39.pyc +0 -0
  11. LLMatic-main/__pycache__/utils.cpython-312.pyc +0 -0
  12. LLMatic-main/__pycache__/utils.cpython-39.pyc +0 -0
  13. LLMatic-main/centroids_100_2.dat +100 -0
  14. LLMatic-main/codegen_tests.py +306 -0
  15. LLMatic-main/conf/__pycache__/config.cpython-39.pyc +0 -0
  16. LLMatic-main/conf/config.py +43 -0
  17. LLMatic-main/conf/config.yaml +23 -0
  18. LLMatic-main/datasets.py +29 -0
  19. LLMatic-main/environment.yaml +91 -0
  20. LLMatic-main/evaluations.py +41 -0
  21. LLMatic-main/initial_net.py +13 -0
  22. LLMatic-main/llmatic.py +566 -0
  23. LLMatic-main/logs/gen-nets_codegen-6B-mono_networks-10_temp-0.0_net-training-epochs-50_niches-100_infer-and-flops-as-bd/cvt.dat +0 -0
  24. LLMatic-main/logs/gen-nets_codegen-6B-mono_networks-10_temp-0.0_net-training-epochs-50_niches-100_infer-and-flops-as-bd/exp_results.csv +4 -0
  25. LLMatic-main/map_elites/LICENSE +518 -0
  26. LLMatic-main/map_elites/__init__.py +0 -0
  27. LLMatic-main/map_elites/__pycache__/__init__.cpython-311.pyc +0 -0
  28. LLMatic-main/map_elites/__pycache__/__init__.cpython-312.pyc +0 -0
  29. LLMatic-main/map_elites/__pycache__/__init__.cpython-39.pyc +0 -0
  30. LLMatic-main/map_elites/__pycache__/common.cpython-311.pyc +0 -0
  31. LLMatic-main/map_elites/__pycache__/common.cpython-312.pyc +0 -0
  32. LLMatic-main/map_elites/__pycache__/common.cpython-39.pyc +0 -0
  33. LLMatic-main/map_elites/common.py +231 -0
  34. LLMatic-main/map_elites/cvt.py +142 -0
  35. LLMatic-main/map_elites/multitask.py +242 -0
  36. LLMatic-main/mutation_models.py +73 -0
  37. LLMatic-main/outputs/2024-09-03/22-14-50/.hydra/config.yaml +19 -0
  38. LLMatic-main/outputs/2024-09-03/22-14-50/.hydra/hydra.yaml +182 -0
  39. LLMatic-main/outputs/2024-09-03/22-14-50/.hydra/overrides.yaml +1 -0
  40. LLMatic-main/outputs/2024-09-03/22-14-50/llmatic.log +0 -0
  41. LLMatic-main/outputs/2024-09-03/23-04-26/.hydra/config.yaml +19 -0
  42. LLMatic-main/outputs/2024-09-03/23-04-26/.hydra/hydra.yaml +182 -0
  43. LLMatic-main/outputs/2024-09-03/23-04-26/.hydra/overrides.yaml +1 -0
  44. LLMatic-main/outputs/2024-09-03/23-04-26/llmatic.log +0 -0
  45. LLMatic-main/outputs/2024-09-06/05-03-40/.hydra/config.yaml +19 -0
  46. LLMatic-main/outputs/2024-09-06/05-03-40/.hydra/hydra.yaml +182 -0
  47. LLMatic-main/outputs/2024-09-06/05-03-40/.hydra/overrides.yaml +1 -0
  48. LLMatic-main/outputs/2024-09-06/05-03-40/llmatic.log +0 -0
  49. LLMatic-main/outputs/2024-09-06/06-51-07/.hydra/config.yaml +19 -0
  50. LLMatic-main/outputs/2024-09-06/06-51-07/.hydra/hydra.yaml +182 -0
LLMatic-main/.gitignore ADDED
@@ -0,0 +1,9 @@
+ # Ignore python compile stuff
+ *.pyc
+ *.dat
+
+ data
+ outputs
+ multirun
+ logs
+ rough
LLMatic-main/LICENSE ADDED
@@ -0,0 +1,201 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright [yyyy] [name of copyright owner]
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
LLMatic-main/README.md ADDED
@@ -0,0 +1,35 @@
+ # LLMatic
+
+ ### Abstract:
+ _Large Language Models (LLMs) have emerged as powerful tools capable of accomplishing a broad spectrum of tasks. Their abilities span numerous areas, and one area where they have made a significant impact is in the domain of code generation. Here, we propose using the coding abilities of LLMs to introduce meaningful variations to code defining neural networks. Meanwhile, Quality-Diversity (QD) algorithms are known to discover diverse and robust solutions. By merging the code-generating abilities of LLMs with the diversity and robustness of QD solutions, we introduce `LLMatic`, a Neural Architecture Search (NAS) algorithm. While LLMs struggle to conduct NAS directly through prompts, `LLMatic` uses a procedural approach, leveraging QD for prompts and network architecture to create diverse and high-performing networks. We test `LLMatic` on the CIFAR-10 and NAS-bench-201 benchmarks, demonstrating that it can produce competitive networks while evaluating just 2,000 candidates, even without prior knowledge of the benchmark domain or exposure to any previous top-performing models for the benchmark._
+
+ ### To run experiments:
+
+ Clone this repository:
+
+ ```git clone https://github.com/umair-nasir14/LLMatic.git```
+
+ Install all dependencies:
+
+ ```
+ cd LLMatic
+ conda env create -f environment.yaml
+ conda activate llmatic
+ ```
+
+ Run:
+
+ ```python llmatic.py```
+
+ All configs are present in `conf/config.py`.
+
+ ### Cite:
+
+ ```
+ @article{nasir2023llmatic,
+ title={Llmatic: Neural architecture search via large language models and quality-diversity optimization},
+ author={Nasir, Muhammad U and Earle, Sam and Togelius, Julian and James, Steven and Cleghorn, Christopher},
+ journal={arXiv preprint arXiv:2306.01102},
+ year={2023}
+ }
+ ```
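
Since `llmatic.py` is a Hydra application backed by the structured config in `conf/config.py`, individual settings can also be overridden from the command line instead of editing files. A minimal sketch, assuming Hydra's standard override syntax and the field names defined in `Config` (e.g. `NUM_NETS`, `TEMPERATURE`):

```
python llmatic.py NUM_NETS=20 NET_TRAINING_EPOCHS=10 TEMPERATURE=0.3
```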
LLMatic-main/__pycache__/datasets.cpython-312.pyc ADDED
Binary file (1.34 kB).
 
LLMatic-main/__pycache__/datasets.cpython-39.pyc ADDED
Binary file (791 Bytes).
 
LLMatic-main/__pycache__/evaluations.cpython-39.pyc ADDED
Binary file (1.4 kB).
 
LLMatic-main/__pycache__/initial_net.cpython-39.pyc ADDED
Binary file (819 Bytes).
 
LLMatic-main/__pycache__/mutation_models.cpython-312.pyc ADDED
Binary file (3.1 kB).
 
LLMatic-main/__pycache__/mutation_models.cpython-39.pyc ADDED
Binary file (1.8 kB).
 
LLMatic-main/__pycache__/train.cpython-39.pyc ADDED
Binary file (5.77 kB).
 
LLMatic-main/__pycache__/utils.cpython-312.pyc ADDED
Binary file (11.3 kB).
 
LLMatic-main/__pycache__/utils.cpython-39.pyc ADDED
Binary file (6.01 kB).
 
LLMatic-main/centroids_100_2.dat ADDED
@@ -0,0 +1,100 @@
1
+ 0.05459372771370208 0.135647896224746
2
+ 0.6227804995491316 0.620973950991632
3
+ 0.21168382671045838 0.8904385735015196
4
+ 0.700275078817918 0.04356929374920682
5
+ 0.8080341614153019 0.8058439837924393
6
+ 0.3922132877052243 0.44063549660620993
7
+ 0.8132389948752509 0.28784541640638156
8
+ 0.45653356129741574 0.17266145801433014
9
+ 0.0355106346127958 0.5629750610104168
10
+ 0.4962906743786589 0.9569775031930101
11
+ 0.9468102033875045 0.5258146523863926
12
+ 0.1475730605346059 0.7122914637978367
13
+ 0.2520828797888778 0.23808718686363978
14
+ 0.4889507652592643 0.7755743489096112
15
+ 0.19174969726624036 0.4952428499828997
16
+ 0.8906526372894228 0.04193851585183922
17
+ 0.9329030540827177 0.7506930855472149
18
+ 0.59136282962791 0.41329807515041905
19
+ 0.6355953508715751 0.3148937220930016
20
+ 0.34723257319648576 0.6313767569569555
21
+ 0.8815166115208439 0.22332231114693418
22
+ 0.16358808916536455 0.295756441443098
23
+ 0.24582569523354886 0.04910604002517388
24
+ 0.5311846180954723 0.047691255520401876
25
+ 0.8835962151865034 0.3372294169283959
26
+ 0.0558813473930706 0.32087168690965023
27
+ 0.043100462234259895 0.7647606812046712
28
+ 0.7992651087241485 0.6988371538857203
29
+ 0.9335042331220507 0.958837206129763
30
+ 0.7263123112928429 0.613156974524149
31
+ 0.27566551680923307 0.9583726919066804
32
+ 0.4113011442097963 0.8474048251095265
33
+ 0.7648189300879635 0.9529300438351493
34
+ 0.5204926958535333 0.8738166487640304
35
+ 0.5173661044627706 0.605198351446326
36
+ 0.6032346523608383 0.8109905514114759
37
+ 0.15498025316141673 0.1795887949460116
38
+ 0.8845255523855198 0.6558801579256681
39
+ 0.04717355622805225 0.9565417381877419
40
+ 0.6295421858505851 0.9477822599657907
41
+ 0.049858623877461095 0.044047166614117006
42
+ 0.36395118944143035 0.1422264461018352
43
+ 0.5555212444348316 0.1386768859008406
44
+ 0.6888516615376656 0.4103856698335078
45
+ 0.358433507681592 0.245396284336028
46
+ 0.08352166196689959 0.49747277473767343
47
+ 0.8699091661840197 0.45887585887627474
48
+ 0.30549892309247895 0.5158147550419125
49
+ 0.767447776570032 0.20803603536067589
50
+ 0.4916614464244339 0.40964981895759284
51
+ 0.5649528510723348 0.7071586302130887
52
+ 0.0489799468380579 0.6648339986768609
53
+ 0.5678797326828701 0.5066914140894362
54
+ 0.13053358223511236 0.6040455367857318
55
+ 0.9523629983889952 0.8501259166437479
56
+ 0.15043319239103564 0.06641673226779093
57
+ 0.29576443400742447 0.7667357093261276
58
+ 0.4560765848185523 0.2840236668786583
59
+ 0.17850076319699248 0.39679824938101965
60
+ 0.8051521183656705 0.040711810741070276
61
+ 0.6180577706820564 0.05778578099655124
62
+ 0.8337909366205374 0.5726154841341241
63
+ 0.31219039063739523 0.8638317318810518
64
+ 0.9573217294231177 0.16440921354138838
65
+ 0.2335528921139179 0.5898913988893592
66
+ 0.2890664948176134 0.3242451831765145
67
+ 0.4088212620352536 0.5612304872490749
68
+ 0.44236261674681854 0.0590579148436543
69
+ 0.9561745236490129 0.40188271811886944
70
+ 0.060514336770431054 0.23172999841610276
71
+ 0.34429632198928545 0.044405342957909266
72
+ 0.04862366994210715 0.8683195689963357
73
+ 0.789472912718755 0.3895622749717449
74
+ 0.14897018829209263 0.955949071967015
75
+ 0.7025234749615219 0.8542659924064914
76
+ 0.9635145859127145 0.636144022104827
77
+ 0.6935794436896556 0.7316289472269284
78
+ 0.12640758469836644 0.8311643467208365
79
+ 0.37083764586429246 0.7416708401537713
80
+ 0.681214410600326 0.1315322171723074
81
+ 0.7636870490838004 0.4959350927934502
82
+ 0.29447134875984765 0.41849115719065805
83
+ 0.25263710923999294 0.6823220868523201
84
+ 0.8528831212268677 0.8985654813563939
85
+ 0.05794846541806997 0.41302290581054446
86
+ 0.7200696766632078 0.29766992366433287
87
+ 0.8679498439026898 0.12812108468225358
88
+ 0.3840673709043919 0.9525642251303081
89
+ 0.4783679209709438 0.5040654069908896
90
+ 0.9635616955068795 0.06376826838796357
91
+ 0.775946045727018 0.12293523384537769
92
+ 0.393612762751659 0.3509341673137685
93
+ 0.6621444086504229 0.5143661132500527
94
+ 0.9674465096657459 0.28511831400310084
95
+ 0.2084787407737561 0.791700190836919
96
+ 0.5516163879003518 0.23245824407980603
97
+ 0.2570247451101261 0.14089619040634005
98
+ 0.45352888525950436 0.6759020257142933
99
+ 0.5446463916179347 0.3331631711205768
100
+ 0.651900557168317 0.20902488539835712
LLMatic-main/codegen_tests.py ADDED
@@ -0,0 +1,306 @@
1
+ from mutation_models import codegen_mutate
2
+ from utils import read_python_file, extract_code_section, get_class
3
+ from evaluations import is_trainable
4
+
5
+ import torch
6
+ import torch.nn as nn
7
+ import torch.nn.functional as F
8
+ import torch.optim as optim
9
+ import torchvision
10
+ import torchvision.transforms as transforms
11
+
12
+ import numpy as np
13
+ import os
14
+ import random
15
+ import hydra
16
+ import ray
17
+
18
+ from conf.config import Config
19
+
20
+
21
+ class Net(nn.Module):
22
+ def __init__(self):
23
+ super().__init__()
24
+ self.conv1 = nn.Conv2d(3, 1, 1)
25
+ self.fc1 = nn.Linear(1024, 10)
26
+
27
+ def forward(self, x):
28
+ x = F.relu(self.conv1(x))
29
+ x = torch.flatten(x, 1)
30
+ x = F.relu(self.fc1(x))
31
+ return x
32
+
33
+
34
+ def fixer_test(cfg):
35
+ ray.init()
36
+ exp_name = "0003"
37
+ path_nets = f"codegen_tests/{exp_name}"
38
+ if not os.path.exists(path_nets):
39
+ os.makedirs(os.path.normpath(path_nets))
40
+ main_net_focus = read_python_file(os.path.normpath("/mnt/lustre/users/mnasir/NAS-LLM/best_net.py"))
41
+ print(main_net_focus)
42
+ #fixing_prompt = '"""Create another class that inherits from nn.Module to edit the above network such that it works when torch.zeros((1, 3, 32, 32)) is passed as an input"""'
43
+ #fixing_prompt = '"""Fix the above neural network bu editing the layers such that it takes torch.zeros((1,3,32,32)) as inputs and output as tensor of size torch.size((1,10))"""'
44
+ fixing_prompt = '"""The above neural network does not work in the current form. Add or delete layers to fix the above neural network such that it takes torch.zeros((1,3,32,32)) as inputs and output as tensor of size torch.size((1,10))"""'
45
+ for i in range(10):
46
+ (print(f"Test {i}"))
47
+
48
+ fixed_net_results = codegen_mutate.remote(cfg=cfg, prompt=main_net_focus + "\n" + fixing_prompt, temperature = 0.1)
49
+ extract_code_section(ray.get(fixed_net_results), fixing_prompt, file_path=os.path.normpath(f"{path_nets}/fixed_net_{i}.py"))
50
+ print(read_python_file(os.path.normpath(f"{path_nets}/fixed_net_{i}.py")))
51
+ try:
52
+ Net = get_class(os.path.normpath(f"{path_nets}/fixed_net_{i}.py"))
53
+ net = Net()
54
+ if is_trainable(net):
55
+ print("TRUE")
56
+ else:
57
+ print("FALSE")
58
+ except Exception as e:
59
+ print(f"FALSE because of {e}")
60
+
61
+
62
+ def init_net_test(cfg):
63
+ ray.init()
64
+ exp_name = "init_net_test_0007"
65
+ path_nets = f"codegen_tests/{exp_name}"
66
+ if not os.path.exists(path_nets):
67
+ os.makedirs(os.path.normpath(path_nets))
68
+ init_net_prompt = '"""Create a simple neural network class that inherits from nn.Module pytorch class. It should accept input tensor of size 32 x 32 x 3 and output 10 neurons for classification task"""'
69
+ for i in range(10):
70
+ (print(f"Test {i}"))
71
+
72
+ fixed_net_results = codegen_mutate.remote(cfg=cfg, prompt=init_net_prompt, temperature = 0.3)
73
+ extract_code_section(ray.get(fixed_net_results), init_net_prompt, file_path=os.path.normpath(f"{path_nets}/init_net_{i}.py"))
74
+ print(read_python_file(os.path.normpath(f"{path_nets}/init_net_{i}.py")))
75
+ try:
76
+ Net = get_class(os.path.normpath(f"{path_nets}/init_net_{i}.py"))
77
+ net = Net()
78
+ if is_trainable(net):
79
+ print("TRUE")
80
+ else:
81
+ print("FALSE")
82
+ except Exception as e:
83
+ print(f"FALSE because of {e}")
84
+
85
+ def mutation_test(cfg):
86
+ ray.init()
87
+ exp_name = "mutation_test_0005"
88
+
89
+ prompts = ['"""Add a layer to improve the above network"""',
90
+ '"""Delete a layer to improve the above network"""',
91
+ '"""Increase the width of the above neural network"""',
92
+ '"""Decrease the width of the above neural network"""',
93
+ '"""Increase the depth of the above neural network"""',
94
+ '"""Decrease the depth of the above neural network"""',
95
+ '"""Add fully connected layer to improve the above network"""',
96
+ '"""Add convolutional layer to improve the above network"""',
97
+ '"""Add pooling layer to improve the above network"""',
98
+ '"""Add residual connection to improve the above network"""',
99
+ '"""Add multiple residual connections to improve the above network"""',
100
+ '"""Add dropout layer to improve the above network"""',
101
+ '"""Add normalization layer to improve the above network"""',
102
+ ]
103
+ path_nets = f"codegen_tests/{exp_name}"
104
+ if not os.path.exists(path_nets):
105
+ os.makedirs(os.path.normpath(path_nets))
106
+
107
+
108
+ seed_value = 1
109
+ for prompt in prompts:
110
+ (print(f"Test on prompt: {prompt}"))
111
+
112
+ seed_value = seed_value + 10
113
+ torch.manual_seed(seed_value)
114
+
115
+ mutation_prompt = \
116
+ f'''class Net(nn.Module):
117
+ def __init__(self):
118
+ super().__init__()
119
+ self.conv1 = nn.Conv2d(3, 1, 1)
120
+ self.fc1 = nn.Linear(1024, 10)
121
+ def forward(self, x):
122
+ x = F.relu(self.conv1(x))
123
+ x = torch.flatten(x, 1)
124
+ x = F.relu(self.fc1(x))
125
+ return x
126
+
127
+ {prompt}'''
128
+
129
+ fixed_net_results = codegen_mutate.remote(cfg=cfg, prompt=mutation_prompt, temperature = 0.6)
130
+ extract_code_section(ray.get(fixed_net_results), mutation_prompt, file_path=os.path.normpath(f"{path_nets}/mu_net_{prompt}.py"))
131
+ print(read_python_file(os.path.normpath(f"{path_nets}/mu_net_{prompt}.py")))
132
+ try:
133
+ Net = get_class(os.path.normpath(f"{path_nets}/mu_net_{prompt}.py"))
134
+ net = Net()
135
+ if is_trainable(net):
136
+ print("TRUE")
137
+ else:
138
+ print("FALSE")
139
+ except Exception as e:
140
+ print(f"FALSE because of {e}")
141
+
142
+
143
+ def crossover_test(cfg):
144
+ ray.init()
145
+ exp_name = "crossover_test_0001"
146
+
147
+ prompt = '''
148
+
149
+ class Net(nn.Module):
150
+ def __init__(self):
151
+ super(Net, self).__init__()
152
+ self.conv1 = nn.Conv2d(3, 6, 5)
153
+ self.pool = nn.MaxPool2d(2, 2)
154
+ self.conv2 = nn.Conv2d(6, 16, 5)
155
+ self.fc1 = nn.Linear(16 * 5 * 5, 120)
156
+ self.fc2 = nn.Linear(120, 84)
157
+ self.fc3 = nn.Linear(84, 10)
158
+
159
+ def forward(self, x):
160
+ x = self.pool(F.relu(self.conv1(x)))
161
+ x = self.pool(F.relu(self.conv2(x)))
162
+ x = x.view(-1, 16 * 5 * 5)
163
+ x = F.relu(self.fc1(x))
164
+ x = F.relu(self.fc2(x))
165
+ x = self.fc3(x)
166
+ return x
167
+
168
+ class Net2(nn.Module):
169
+ def __init__(self):
170
+ super(Net2, self).__init__()
171
+ self.conv1 = nn.Conv2d(3, 1, 1)
172
+ self.fc1 = nn.Linear(1024, 128)
173
+ self.fc2 = nn.Linear(128, 64)
174
+ self.fc3 = nn.Linear(64, 32)
175
+ self.fc4 = nn.Linear(32, 10)
176
+
177
+ def forward(self, x):
178
+ x = self.pool(F.relu(self.conv1(x)))
179
+ x = torch.flatten(x, 1)
180
+ x = F.relu(self.fc1(x))
181
+ x = F.relu(self.fc2(x))
182
+ x = F.relu(self.fc3(x))
183
+ x = self.fc4(x)
184
+ return x
185
+
186
+ """Perform a crossover between above two neural networks and create a third neural network class that gets the best layers from above two networks"""'''
187
+ path_nets = f"codegen_tests/{exp_name}"
188
+ if not os.path.exists(path_nets):
189
+ os.makedirs(os.path.normpath(path_nets))
190
+ for i in range(10):
191
+ (print(f"Test {i}"))
192
+
193
+ fixed_net_results = codegen_mutate.remote(cfg=cfg, prompt=prompt, temperature = 0.6)
194
+ extract_code_section(ray.get(fixed_net_results), prompt, file_path=os.path.normpath(f"{path_nets}/init_net_{i}.py"))
195
+ print(read_python_file(os.path.normpath(f"{path_nets}/init_net_{i}.py")))
196
+ try:
197
+ Net = get_class(os.path.normpath(f"{path_nets}/init_net_{i}.py"))
198
+ net = Net()
199
+ if is_trainable(net):
200
+ print("TRUE")
201
+ else:
202
+ print("FALSE")
203
+ except Exception as e:
204
+ print(f"FALSE because of {e}")
205
+
206
+
207
+ def diff_mutate_test(cfg):
208
+ ray.init()
209
+ exp_name = "diff_mutate_test_0004"
210
+ path_nets = f"codegen_tests/{exp_name}"
211
+ if not os.path.exists(path_nets):
212
+ os.makedirs(os.path.normpath(path_nets))
213
+ diff_prompts = ['<NME> initial_net.py\n'
214
+ '<BFE> import torch\n'
215
+ 'import torch.nn as nn\n'
216
+ 'import torch.nn.functional as F\n'
217
+ '"""Returns a pytorch neural network class that takes an image of 3 x 32 x 32 as input and outputs 10 neurons."""\n'
218
+ 'class Net(nn.Module):\n'
219
+ ' def __init__(self):\n'
220
+ ' super().__init__()\n'
221
+ ' self.conv1 = nn.Conv2d(3, 1, 1)\n'
222
+ ' self.fc1 = nn.Linear(1024, 10)\n'
223
+ ' def forward(self, x):\n'
224
+ ' x = F.relu(self.conv1(x))\n'
225
+ ' x = torch.flatten(x, 1)\n'
226
+ ' x = F.relu(self.fc1(x))\n'
227
+ ' return x\n'
228
+ '<MSG> Added a nn.Conv2d layer to improve the neural network.\n',
229
+ '<NME> initial_net.py\n'
230
+ '<BFE> import torch\n'
231
+ 'import torch.nn as nn\n'
232
+ 'import torch.nn.functional as F\n'
233
+ '"""Returns a pytorch neural network class that takes an image of 3 x 32 x 32 as input and outputs 10 neurons."""\n'
234
+ 'class Net(nn.Module):\n'
235
+ ' def __init__(self):\n'
236
+ ' super().__init__()\n'
237
+ ' self.conv1 = nn.Conv2d(3, 1, 1)\n'
238
+ ' self.fc1 = nn.Linear(1024, 10)\n'
239
+ ' def forward(self, x):\n'
240
+ ' x = F.relu(self.conv1(x))\n'
241
+ ' x = torch.flatten(x, 1)\n'
242
+ ' x = F.relu(self.fc1(x))\n'
243
+ ' return x\n'
244
+ '<MSG> Added a nn.Linear layer to improve the neural network.\n',
245
+ '<NME> initial_net.py\n'
246
+ '<BFE> import torch\n'
247
+ 'import torch.nn as nn\n'
248
+ 'import torch.nn.functional as F\n'
249
+ '"""Returns a pytorch neural network class that takes an image of 3 x 32 x 32 as input and outputs 10 neurons."""\n'
250
+ 'class Net(nn.Module):\n'
251
+ ' def __init__(self):\n'
252
+ ' super().__init__()\n'
253
+ ' self.conv1 = nn.Conv2d(3, 1, 1)\n'
254
+ ' self.fc1 = nn.Linear(1024, 10)\n'
255
+ ' def forward(self, x):\n'
256
+ ' x = F.relu(self.conv1(x))\n'
257
+ ' x = torch.flatten(x, 1)\n'
258
+ ' x = F.relu(self.fc1(x))\n'
259
+ ' return x\n'
260
+ '<MSG> Added a nn.Conv2d and a nn.Linear layer to improve the neural network.\n',
261
+ '<NME> initial_net.py\n'
262
+ '<BFE> import torch\n'
263
+ 'import torch.nn as nn\n'
264
+ 'import torch.nn.functional as F\n'
265
+ '"""Returns a pytorch neural network class that takes an image of 3 x 32 x 32 as input and outputs 10 neurons."""\n'
266
+ 'class Net(nn.Module):\n'
267
+ ' def __init__(self):\n'
268
+ ' super().__init__()\n'
269
+ ' self.conv1 = nn.Conv2d(3, 1, 1)\n'
270
+ ' self.fc1 = nn.Linear(1024, 10)\n'
271
+ ' def forward(self, x):\n'
272
+ ' x = F.relu(self.conv1(x))\n'
273
+ ' x = torch.flatten(x, 1)\n'
274
+ ' x = F.relu(self.fc1(x))\n'
275
+ ' return x\n'
276
+ '<MSG> Added layers to improve the neural network.\n']
277
+ for i,diff_prompt in enumerate(diff_prompts):
278
+ (print(f"Test {i}"))
279
+
280
+ fixed_net_results = codegen_mutate.remote(cfg=cfg, prompt=diff_prompt, temperature = 0.8)
281
+ res = ray.get(fixed_net_results)
282
+ print(res)
283
+ extract_code_section(res, diff_prompt, file_path=os.path.normpath(f"{path_nets}/diff_prompt_{i}.py"))
284
+ print(read_python_file(os.path.normpath(f"{path_nets}/diff_prompt_{i}.py")))
285
+ try:
286
+ Net = get_class(os.path.normpath(f"{path_nets}/diff_prompt_{i}.py"))
287
+ net = Net()
288
+ if is_trainable(net):
289
+ print("TRUE")
290
+ else:
291
+ print("FALSE")
292
+ except Exception as e:
293
+ print(f"FALSE because of {e}")
294
+
295
+
296
+ @hydra.main(version_base="1.3.0", config_path="conf", config_name="config")
297
+ def main(cfg: Config):
298
+ #fixer_test(cfg)
299
+ #init_net_test(cfg)
300
+ #mutation_test(cfg)
301
+ crossover_test(cfg)
302
+ #diff_mutate_test(cfg)
303
+
304
+
305
+ if __name__ == "__main__":
306
+ main()
LLMatic-main/conf/__pycache__/config.cpython-39.pyc ADDED
Binary file (1.45 kB).
 
LLMatic-main/conf/config.py ADDED
@@ -0,0 +1,43 @@
+ from dataclasses import MISSING, dataclass
+ import typing
+ import multiprocessing as mp
+
+ from hydra.core.config_store import ConfigStore
+ from map_elites import common as cm
+
+ @dataclass
+ class Config:
+ """This class provides a "schema" for the config file, validating types."""
+
+ RANDOM_INIT_NETS: int = 10 # INIT Nets to be accepted in archive
+ INIT_NUM_NETS: int = 10 # INIT Nets created per generation
+ ROLL_OUTS: int = 4 # For GPU training. ROLL_OUTS * (INIT_NUM_NETS or NUM_NETS) = Total nets created in each generation
+ NUM_NETS: int = 10 # Mutation and crossover nets to be created
+ START_FROM_CHECKPOINT: bool = False
+ RANDOM_NETWORKS: bool = False
+
+ MUTATION: str = "codegen-6B-mono"
+ #MUTATION: str = "codex"
+
+ GENERATIONS: int = 200
+ NET_TRAINING_EPOCHS: int = 50
+ TEMPERATURE: float = 0.0000000
+
+ DEVICE: str = "cuda" # options: ["cuda", "cpu", "both"]
+
+ NUM_PROCESSES: int = mp.cpu_count() - 1
+ RAY: bool = True
+ NUM_CPUS: int = NUM_PROCESSES
+ NUM_GPUS: int = 1 # per task
+ NUM_GPUS_TOTAL: int = 4 # total available
+
+ # MAP-ELITES
+
+ DIM_MAP: int = 2
+ N_NICHES: int = 100
+
+ SAVE_DIR: str = "./"
+
+
+ cs = ConfigStore.instance()
+ cs.store(name="base_config", node=Config)
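
The `ROLL_OUTS` comment above pins down the per-generation budget: each generation performs `ROLL_OUTS` roll-outs, and each roll-out requests `INIT_NUM_NETS` networks during initialisation or `NUM_NETS` networks afterwards. An illustrative check with the defaults above (not part of the repository):

```python
# Hypothetical illustration of the generation budget implied by the defaults in Config.
ROLL_OUTS = 4        # roll-outs per generation
NUM_NETS = 10        # networks requested per roll-out after initialisation
INIT_NUM_NETS = 10   # networks requested per roll-out during initialisation

print(ROLL_OUTS * NUM_NETS)       # 40 candidate networks per normal generation
print(ROLL_OUTS * INIT_NUM_NETS)  # 40 candidate networks per initialisation generation
```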
LLMatic-main/conf/config.yaml ADDED
@@ -0,0 +1,23 @@
+ defaults:
+ - base_config
+ - _self_
+
+ - override hydra/launcher: submitit_slurm
+ # - override hydra/launcher: submitit_local
+
+
+ hydra:
+ sweeper:
+ params:
+ NUM_NETS: 10, 20
+ # GENERATIONS: 25, 50, 100
+ NET_TRAINING_EPOCHS: 10, 30
+ TEMPERATURE: 0.3, 0.5, 0.7
+
+ # SLURM-specific arguments
+ launcher:
+ tasks_per_node: 1
+ cpus_per_task: 1
+ gpus_per_node: 1
+ timeout_min: 2880
+ mem_gb: 30
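
The `sweeper.params` block above only takes effect when the app is launched in Hydra's multirun mode, which is also what activates the `submitit_slurm` launcher selected in the defaults list. A hedged sketch of the invocation, assuming the standard Hydra CLI:

```
python llmatic.py --multirun
```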
LLMatic-main/datasets.py ADDED
@@ -0,0 +1,29 @@
+ import torch
+ import torchvision
+ import torchvision.transforms as transforms
+
+
+ import numpy as np
+
+
+ def get_datasets():
+
+ transform = transforms.Compose(
+ [transforms.ToTensor(),
+ transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
+
+ batch_size = 1
+
+ trainset = torchvision.datasets.CIFAR10(root='./data', train=True,
+ download=True, transform=transform)
+ trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size,
+ shuffle=True, num_workers=1)
+
+ validset = torchvision.datasets.CIFAR10(root='./data', train=False,
+ download=True, transform=transform)
+ validloader = torch.utils.data.DataLoader(validset, batch_size=batch_size,
+ shuffle=False, num_workers=1)
+
+ return trainset, trainloader, validset, validloader
+
+
LLMatic-main/environment.yaml ADDED
@@ -0,0 +1,91 @@
1
+ name: llmatic
2
+ channels:
3
+ - defaults
4
+ dependencies:
5
+ - ca-certificates=2023.01.10=haa95532_0
6
+ - certifi=2022.12.7=py39haa95532_0
7
+ - openssl=1.1.1t=h2bbff1b_0
8
+ - pip=22.3.1=py39haa95532_0
9
+ - python=3.9.16=h6244533_0
10
+ - setuptools=65.6.3=py39haa95532_0
11
+ - sqlite=3.40.1=h2bbff1b_0
12
+ - tzdata=2022g=h04d1e81_0
13
+ - vc=14.2=h21ff451_1
14
+ - vs2015_runtime=14.27.29016=h5e58377_2
15
+ - wheel=0.38.4=py39haa95532_0
16
+ - wincertstore=0.2=py39haa95532_2
17
+ - pip:
18
+ - aiohttp==3.8.4
19
+ - aiosignal==1.3.1
20
+ - antlr4-python3-runtime==4.9.3
21
+ - async-timeout==4.0.2
22
+ - attrs==22.2.0
23
+ - charset-normalizer==3.0.1
24
+ - click==8.1.3
25
+ - cloudpickle==2.2.1
26
+ - colorama==0.4.6
27
+ - contourpy==1.0.7
28
+ - cycler==0.11.0
29
+ - filelock==3.9.0
30
+ - flopth==0.1.3
31
+ - fonttools==4.38.0
32
+ - frozenlist==1.3.3
33
+ - fvcore==0.1.5.post20221221
34
+ - huggingface-hub==0.12.1
35
+ - hydra-core==1.3.1
36
+ - hydra-submitit-launcher==1.2.0
37
+ - idna==3.4
38
+ - importlib-resources==5.12.0
39
+ - inquirerpy==0.3.4
40
+ - iopath==0.1.10
41
+ - joblib==1.2.0
42
+ - jsonschema==4.21.1
43
+ - jsonschema-specifications==2023.12.1
44
+ - kiwisolver==1.4.4
45
+ - levenshtein==0.20.9
46
+ - matplotlib==3.7.0
47
+ - msgpack==1.0.8
48
+ - multidict==6.0.4
49
+ - networkx==3.0
50
+ - numpy==1.24.2
51
+ - omegaconf==2.3.0
52
+ - openai==0.26.5
53
+ - packaging==23.0
54
+ - pandas==1.5.3
55
+ - pfzy==0.3.4
56
+ - pillow==9.4.0
57
+ - portalocker==2.7.0
58
+ - prompt-toolkit==3.0.37
59
+ - protobuf==5.26.1
60
+ - pyparsing==3.0.9
61
+ - python-dateutil==2.8.2
62
+ - pytz==2022.7.1
63
+ - pywin32==305
64
+ - pyyaml==6.0
65
+ - rapidfuzz==2.13.7
66
+ - ray==2.10.0
67
+ - referencing==0.34.0
68
+ - regex==2022.10.31
69
+ - requests==2.28.2
70
+ - rpds-py==0.18.0
71
+ - sacremoses==0.0.53
72
+ - scikit-learn==1.2.2
73
+ - scipy==1.10.1
74
+ - six==1.16.0
75
+ - submitit==1.4.5
76
+ - tabulate==0.9.0
77
+ - termcolor==2.2.0
78
+ - threadpoolctl==3.1.0
79
+ - tokenizers==0.13.2
80
+ - torch==1.13.1
81
+ - torchvision==0.14.1
82
+ - tqdm==4.64.1
83
+ - transformers==4.25.1
84
+ - typing-extensions==4.5.0
85
+ - urllib3==1.26.14
86
+ - wcwidth==0.2.6
87
+ - xmltodict==0.13.0
88
+ - yacs==0.1.8
89
+ - yarl==1.8.2
90
+ - zipp==3.14.0
91
+ prefix: C:\Users\DELL\anaconda3\envs\llmatic
LLMatic-main/evaluations.py ADDED
@@ -0,0 +1,41 @@
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+
+ class Net(nn.Module):
+ def __init__(self):
+ super().__init__()
+ self.conv1 = nn.Conv2d(3, 1, 1, bias=False)
+ self.conv2 = nn.Conv2d(3, 1, 1, bias=False)
+ self.fc1 = nn.Linear(1024, 10)
+
+ def forward(self, x):
+ x = F.relu(self.conv1(x))
+ x = F.relu(self.conv2(x))
+ x = torch.flatten(x, 1)
+ x = F.relu(self.fc1(x))
+ return x
+
+
+
+ net = Net()
+ def is_trainable(net):
+ zeros = torch.zeros((1, 3, 32, 32)) # input tensor of shape (batch_size, channels, height, width)
+
+ # Check that we can pass a dummy input through the network without errors.
+ try:
+ output = net(zeros)
+ except Exception as e:
+ return False
+
+ # Network output shape must match number of classes in CIFAR-10.
+ if output.shape != (1, 10):
+ return False
+
+ return True
+
+ def main():
+ print(is_trainable(net))
+
+ if __name__ == '__main__':
+ main()
LLMatic-main/initial_net.py ADDED
@@ -0,0 +1,13 @@
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ class Net(nn.Module):
+ def __init__(self):
+ super().__init__()
+ self.conv1 = nn.Conv2d(3, 1, 1)
+ self.fc1 = nn.Linear(1024, 10)
+ def forward(self, x):
+ x = F.relu(self.conv1(x))
+ x = torch.flatten(x, 1)
+ x = F.relu(self.fc1(x))
+ return x
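
The 1024 input features of `fc1` follow from the 1x1 convolution keeping the 32x32 spatial resolution while reducing 3 channels to 1, i.e. 1 * 32 * 32 = 1024 values after flattening. A quick standalone check (illustrative only, not part of the repository):

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

# Re-statement of the seed network above, used only to verify the 1024-feature arithmetic.
class Net(nn.Module):
    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(3, 1, 1)  # 3 -> 1 channels, 1x1 kernel keeps 32x32
        self.fc1 = nn.Linear(1024, 10)   # 1 * 32 * 32 = 1024 flattened features

    def forward(self, x):
        x = F.relu(self.conv1(x))
        x = torch.flatten(x, 1)          # shape (batch, 1024)
        return F.relu(self.fc1(x))

print(Net()(torch.zeros(1, 3, 32, 32)).shape)  # torch.Size([1, 10])
```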
LLMatic-main/llmatic.py ADDED
@@ -0,0 +1,566 @@
1
+ #| This file has a major part from the pymap_elites framework.
2
+ #| Copyright 2019, INRIA
3
+ #| Main contributor(s):
4
+ #| Jean-Baptiste Mouret, jean-baptiste.mouret@inria.fr
5
+ #| Eloise Dalin , eloise.dalin@inria.fr
6
+ #| Pierre Desreumaux , pierre.desreumaux@inria.fr
7
+ #|
8
+ #|
9
+ #| **Main paper**: Mouret JB, Clune J. Illuminating search spaces by
10
+ #| mapping elites. arXiv preprint arXiv:1504.04909. 2015 Apr 20.
11
+ #|
12
+ #| This software is governed by the CeCILL license under French law
13
+ #| and abiding by the rules of distribution of free software. You
14
+ #| can use, modify and/ or redistribute the software under the terms
15
+ #| of the CeCILL license as circulated by CEA, CNRS and INRIA at the
16
+ #| following URL "http://www.cecill.info".
17
+ #|
18
+ #| As a counterpart to the access to the source code and rights to
19
+ #| copy, modify and redistribute granted by the license, users are
20
+ #| provided only with a limited warranty and the software's author,
21
+ #| the holder of the economic rights, and the successive licensors
22
+ #| have only limited liability.
23
+ #|
24
+ #| In this respect, the user's attention is drawn to the risks
25
+ #| associated with loading, using, modifying and/or developing or
26
+ #| reproducing the software by the user in light of its specific
27
+ #| status of free software, that may mean that it is complicated to
28
+ #| manipulate, and that also therefore means that it is reserved for
29
+ #| developers and experienced professionals having in-depth computer
30
+ #| knowledge. Users are therefore encouraged to load and test the
31
+ #| software's suitability as regards their requirements in conditions
32
+ #| enabling the security of their systems and/or data to be ensured
33
+ #| and, more generally, to use and operate it in the same conditions
34
+ #| as regards security.
35
+ #|
36
+ #| The fact that you are presently reading this means that you have
37
+ #| had knowledge of the CeCILL license and that you accept its terms.
38
+
39
+ import math
40
+ import numpy as np
41
+ import multiprocessing
42
+
43
+ # from scipy.spatial import cKDTree : TODO -- faster?
44
+ from sklearn.neighbors import KDTree
45
+
46
+ from map_elites import common as cm
47
+
48
+ from flopth import flopth
49
+
50
+ from datasets import get_datasets
51
+ from utils import read_python_file, extract_code, get_net_name, extract_code_section, get_class, csv_writer, get_max_curiosity, get_max_fitness, count_unique_components, count_parameters, get_network_width_depth_ratio
52
+ from utils import Net
53
+ from mutation_models import codegen_mutate, codex_mutate, replace_word_mutation
54
+ from evaluations import is_trainable
55
+ from train import train_net_on_cpu, train_net_on_gpu, forward_pass_on_cpu,forward_pass_on_gpu, detect_layers, transfer_weights
56
+
57
+ import torch
58
+ import torch.nn as nn
59
+ import torch.nn.functional as F
60
+ import torch.optim as optim
61
+ import torchvision
62
+ import torchvision.transforms as transforms
63
+
64
+ import numpy as np
65
+ import os
66
+ import random
67
+ import hydra
68
+ import ray
69
+ from fvcore.nn import FlopCountAnalysis
70
+
71
+ from conf.config import Config
72
+
73
+
74
+
75
+
76
+ def __add_to_archive(s, centroid, archive, kdt, type_ind):
77
+ niche_index = kdt.query([centroid], k=1)[1][0][0]
78
+ niche = kdt.data[niche_index]
79
+ n = cm.make_hashable(niche)
80
+ s.centroid = n
81
+ if n in archive:
82
+ if type_ind == "network":
83
+ if s.fitness < archive[n].fitness:
84
+ archive[n] = s
85
+ return True
86
+ elif type_ind == "prompt":
87
+ if s.fitness > archive[n].fitness:
88
+ archive[n] = s
89
+ return True
90
+ return False
91
+ else:
92
+ archive[n] = s
93
+ return True
94
+
95
+
96
+
97
+ def to_specie(net,fit,desc,net_path):
98
+ return cm.Species(net, desc, fit, net_path)
99
+
100
+ # map-elites algorithm (CVT variant)
101
+
102
+ @hydra.main(version_base="1.3.0", config_path="conf", config_name="config")
103
+ def main(cfg: Config):
104
+ """CVT MAP-Elites
105
+ Vassiliades V, Chatzilygeroudis K, Mouret JB. Using centroidal voronoi tessellations to scale up the multidimensional archive of phenotypic elites algorithm. IEEE Transactions on Evolutionary Computation. 2017 Aug 3;22(4):623-30.
106
+
107
+ Format of the logfile: evals archive_size max mean median 5%_percentile, 95%_percentile
108
+
109
+ """
110
+ if cfg.DEVICE == "cuda":
111
+ ray.init(num_gpus=cfg.NUM_GPUS_TOTAL)
112
+ else:
113
+ ray.init()
114
+ mutation_fn = codex_mutate if cfg.MUTATION == "codex" else codegen_mutate
115
+
116
+ prompts = ['"""Add a layer to improve the above network"""',
117
+ '"""Delete a layer to improve the above network"""',
118
+ '"""improve the above network"""',
119
+ '"""improve the above network by reducing the size drastically"""',
120
+ '"""improve the above network by increasing the size drastically"""',
121
+ # Specific prompts below
122
+ '"""Add fully connected layer to improve the above network"""',
123
+ '"""Add convolutional layer to improve the above network"""',
124
+ '"""Add pooling layer to improve the above network"""',
125
+ '"""Add residual connection to improve the above network"""',
126
+ '"""Add multiple residual connections to improve the above network"""',
127
+ '"""Add dropout layer to improve the above network"""',
128
+ '"""Add normalization layer to improve the above network"""',
129
+ '"""Add recurrent layer to improve the above network"""',
130
+ ]
131
+ probabilities = [1.0 / len(prompts)] * len(prompts)
132
+
133
+ prompt_score = {}
134
+ for prompt in prompts:
135
+ prompt_score[prompt] = 0
136
+ prompt_score["None"] = 0
137
+
138
+ prompt_to_int = {}
139
+ int_to_prompt = {}
140
+ for i,prompt in enumerate(prompts):
141
+ prompt_to_int[prompt] = i
142
+ int_to_prompt[i] = prompt
143
+
144
+ if cfg.START_FROM_CHECKPOINT:
145
+ best_net = read_python_file("initial_net.py")
146
+ init_gen = 4
147
+ print(f"Starting from the following network:\n{best_net}")
148
+ else:
149
+ init_net = read_python_file(os.path.normpath(f"{cfg.SAVE_DIR}/initial_net.py"))
150
+ init_gen = 0
151
+ print(f"Starting from the following network:\n{init_net}")
152
+
153
+
154
+ # create the CVT
155
+ params = cm.default_params
156
+ c = cm.cvt(cfg.N_NICHES,cfg.DIM_MAP,
157
+ params['cvt_samples'], params['cvt_use_cache'])
158
+ kdt = KDTree(c, leaf_size=30, metric='euclidean')
159
+ cm.__write_centroids(c)
160
+
161
+ prompt_archive = {} # init archive (empty)
162
+ nets_archive = {}
163
+ n_evals = 0 # number of evaluations since the beginning
164
+ b_evals = 0 # number evaluation since the last dump
165
+
166
+ curios_net_path = os.path.normpath(f"{cfg.SAVE_DIR}/initial_net.py")
167
+
168
+ c_net_str = read_python_file(curios_net_path)
169
+ Net = get_class(curios_net_path)
170
+ curios_net = Net()
171
+ curios_prompt = prompts[0]
172
+ # main loop
173
+
174
+ temperature_start = 0.6
175
+ prev_best_score = np.inf
176
+ prev_best_net_path = None
177
+ prev_best_prompt = None
178
+
179
+ exp_name = f"gen-nets_{cfg.MUTATION}_networks-{cfg.NUM_NETS}_temp-{cfg.TEMPERATURE}_net-training-epochs-{cfg.NET_TRAINING_EPOCHS}_niches-{cfg.N_NICHES}_infer-and-flops-as-bd"
180
+ path_nets = f"{cfg.SAVE_DIR}/logs/{exp_name}"
181
+ os.makedirs(path_nets,exist_ok=True)
182
+ log_file = open(os.path.normpath(f"{path_nets}/cvt.dat"),"w")
183
+ out_file = os.path.normpath(f"{path_nets}/exp_results.csv")
184
+ csv_writer(["generations", "best_loss", "used_prompt"],out_file)
185
+
186
+
187
+ for gen_i in range(init_gen, cfg.GENERATIONS):
188
+
189
+ generated_nets = []
190
+
191
+ # random initialization
192
+ (print(f"[Generation: {gen_i}]"))
193
+
194
+ if (len(nets_archive.keys()) < cfg.RANDOM_INIT_NETS) and (len(prompt_archive.keys()) < (int(cfg.RANDOM_INIT_NETS*2))):#params['random_init'] * n_niches:
195
+
196
+
197
+ for _i in range(cfg.ROLL_OUTS):
198
+ selected_prompts = []
199
+ generated_net_results = []
200
+ selected_prompts = random.choices(prompts, weights=probabilities,k=cfg.INIT_NUM_NETS)
201
+ for i in range(0, cfg.INIT_NUM_NETS):
202
+ print(f"Selected prompt for generations: {selected_prompts[i]}")
203
+
204
+ for prompt in selected_prompts:
205
+ generated_net_results.append(mutation_fn.remote(cfg=cfg, prompt=init_net + "\n" + prompt, temperature=temperature_start))
206
+
207
+
208
+ for i,generated_net in enumerate(generated_net_results):
209
+ generated_nets.append((ray.get(generated_net),selected_prompts[i],temperature_start))
210
+
211
+ else: # variation/selection loop
212
+
213
+ evo_operator = random.choices(["mutation","crossover"], weights=[0.85,0.15])[0]
214
+
215
+ print(f"Performing {evo_operator}")
216
+ for _i in range(cfg.ROLL_OUTS):
217
+ generated_net_results = []
218
+ if evo_operator == "mutation":
219
+
220
+ if len(nets_archive.keys()) < 3:
221
+ n_nets = 1
222
+ selection = 0
223
+ else:
224
+ n_nets = 3
225
+ selection = random.randint(0, 2)
226
+
227
+ curios_nets = []
228
+ curios_prompts = []
229
+ curios_temps = []
230
+ curios_net_paths = []
231
+
232
+ n_curios_nets = get_max_fitness(nets_archive.values(),n_nets)
233
+ for n in n_curios_nets:
234
+ curios_nets.append(n.x)
235
+ curios_net_paths.append(n.net_path)
236
+
237
+ n_prompts_temps = get_max_curiosity(prompt_archive.values(),n_nets)
238
+ for pt in n_prompts_temps:
239
+ curios_prompts.append(pt.desc[0])
240
+ curios_temps.append(pt.desc[1])
241
+
242
+ curios_net = curios_nets[selection]
243
+ curios_temp = curios_temps[selection]
244
+ curios_net_path = curios_net_paths[selection]
245
+
246
+ curios_temp_ray = []
247
+ curios_prompt_ray = []
248
+
249
+
250
+ for i in range(cfg.NUM_NETS):
251
+ # Temperature mutation
252
+
253
+ if i == 1:
254
+ curios_temp = curios_temps[selection]
255
+ else:
256
+ curios_temp += random.uniform(-0.1,0.1)
257
+ if curios_temp > 1.0:
258
+ curios_temp = 1.0
259
+ elif curios_temp < 0.1:
260
+ curios_temp = 0.1
261
+
262
+ curios_prompt = curios_prompts[selection]
263
+
264
+ curios_temp_ray.append(curios_temp)
265
+ curios_prompt_ray.append(curios_prompt)
266
+
267
+ print(f"prompt in mutation is {curios_prompt}")
268
+
269
+ for i in range(cfg.NUM_NETS):
270
+ generated_net_results.append(mutation_fn.remote(cfg=cfg, prompt=read_python_file(curios_net_path) + "\n" + int_to_prompt[int(curios_prompt_ray[i])], temperature = curios_temp_ray[i]))
271
+
272
+ for i,generated_net in enumerate(generated_net_results):
273
+ generated_nets.append((ray.get(generated_net),int_to_prompt[int(curios_prompt_ray[i])],curios_temp_ray[i]))
274
+
275
+ elif evo_operator == "crossover":
276
+
277
+ if len(nets_archive.keys()) < 1:
278
+ print("No crossover performed")
279
+ else:
280
+ curios_nets_ray = []
281
+ curios_temps_ray = []
282
+
283
+ curios_nets = []
284
+ curios_prompts = []
285
+ curios_temps = []
286
+ curios_net_paths = []
287
+
288
+
289
+ if len(nets_archive.keys()) < 2:
290
+ selection = 0
291
+ n_nets = 1
292
+ else:
293
+ selection = random.randint(1, len(nets_archive.keys())-1)# change it to 1
294
+ n_nets = 2
295
+
296
+ n_curios_nets = get_max_fitness(nets_archive.values(),n_nets)
297
+ for n in n_curios_nets:
298
+ curios_nets.append(n.x)
299
+ curios_net_paths.append(n.net_path)
300
+
301
+ n_prompts_temps = get_max_curiosity(prompt_archive.values(),n_nets)
302
+ for pt in n_prompts_temps:
303
+ curios_prompts.append(pt.desc[0])
304
+ curios_temps.append(pt.desc[1])
305
+
306
+
307
+
308
+
309
+ for i in range(0, cfg.NUM_NETS):
310
+
311
+ curios_net_str = read_python_file(curios_net_paths[0])
312
+ curios_net = curios_nets[0]
313
+
314
+ curios_temp = curios_temps[0]
315
+ curios_prompt = curios_prompts[0]
316
+
317
+
318
+ curios_nets_ray.append(read_python_file(curios_net_paths[selection]))
319
+
320
+
321
+ crossover_prompt = '"""Combine the above two neural networks and create a third neural network class that also inherits from nn.Module"""'
322
+
323
+ for curios_2nd_net in curios_nets_ray:
324
+ generated_net_results.append(mutation_fn.remote(cfg=cfg, prompt=curios_net_str + "\n" + curios_2nd_net + "\n" + crossover_prompt, temperature = curios_temp))
325
+
326
+ for i,generated_net in enumerate(generated_net_results):
327
+ generated_nets.append((ray.get(generated_net),int_to_prompt[int(curios_prompt)],curios_temp))
328
+
329
+
330
+
331
+ net_class_name = None
332
+ net_paths=[]
333
+ training_prompts=[]
334
+ training_nets=[]
335
+
336
+ for i, k in enumerate(generated_nets):
337
+
338
+ invalid_net = False
339
+ generation, prompt, temperature = k
340
+
341
+
342
+ net_path = os.path.normpath(f"{path_nets}/network_{gen_i}_{i}_{prompt[3:-3]}_{temperature}.py")
343
+
344
+
345
+ if cfg.MUTATION == "codex":
346
+ extract_code_section(generation['choices'][0]["text"], prompt, file_path=net_path)
347
+ elif cfg.MUTATION.startswith("codegen"):
348
+ extract_code_section(generation, prompt, file_path=net_path)
349
+ main_net_focus = read_python_file(net_path)
350
+ print(f"Net in focus:\n {main_net_focus}")
351
+
352
+ if not invalid_net:
353
+ try:
354
+ Net = get_class(net_path)
355
+ net = Net()
356
+ except Exception as e:
357
+ net = curios_net
358
+ net_path = curios_net_path
359
+ if isinstance(curios_prompt,str):
360
+ prompt = curios_prompt
361
+ elif isinstance(curios_prompt,float) or isinstance(curios_prompt,int):
362
+ prompt = int_to_prompt[int(curios_prompt)]
363
+
364
+ #breakpoint()
365
+ try:
366
+ is_t = is_trainable(net)
367
+ except Exception:
368
+ is_t = False
369
+ if is_t:
370
+ net_paths.append(net_path)
371
+ training_prompts.append(prompt)
372
+ training_nets.append(net)
373
+ else:
374
+ invalid_net = True
375
+
376
+
377
+
378
+ if invalid_net:
379
+ print(f"The network at {net_path} is not trainable")
380
+
381
+
382
+ if training_nets == []:
383
+ continue
384
+
385
+ if (len(nets_archive.keys()) < cfg.RANDOM_INIT_NETS) and (len(prompt_archive.keys()) < (int(cfg.RANDOM_INIT_NETS*2))):
386
+
387
+ inter_results = []
388
+ losses = []
389
+ layers_c_net = detect_layers(curios_net)
390
+ if cfg.DEVICE == "cpu":
391
+
392
+ for net in training_nets:
393
+ for layer in layers_c_net:
394
+ try:
395
+ net = transfer_weights(curios_net,net,layer)
396
+ print(f"Weights transferred successfully")
397
+ except Exception:
398
+ print("Weights can not transfer")
399
+ for net in training_nets:
400
+ inter_results.append(forward_pass_on_gpu.remote(net))
401
+ elif cfg.DEVICE == "cuda":
402
+ for net in training_nets:
403
+ for layer in layers_c_net:
404
+ try:
405
+ net = transfer_weights(curios_net,net,layer)
406
+ print(f"Weights transferred successfully")
407
+ except Exception:
408
+ print("Weights can not transfer")
409
+ for net in training_nets:
410
+ inter_results.append(forward_pass_on_gpu.remote(net))
411
+ else:
412
+ raise ValueError(f"{cfg.DEVICE} is not a valid device")
413
+ fitness = []
414
+ for i,result in enumerate(inter_results):
415
+ try:
416
+ res = ray.get(result)
417
+ fitness.append([res[0],training_prompts[i],temperature,net_paths[i],res[1]])
418
+
419
+ if fitness[i][0] <= prev_best_score:
420
+ prev_best_net_path = net_paths[i]
421
+ prev_best_prompt = training_prompts[i]
422
+ prev_best_score = fitness[i][0]
423
+ temperature += 0.05
424
+
425
+ else:
426
+ temperature -= 0.05
427
+ if temperature > 1.0:
428
+ temperature = 1.0
429
+ elif temperature < 0.1:
430
+ temperature = 0.1
431
+
432
+ fitness[i][2] = temperature
433
+ except Exception:
434
+ print("not trainable due to fitness 1")
435
+
436
+ else:
437
+
438
+ if cfg.RAY:
439
+ inter_results = []
440
+ losses = []
441
+ layers_c_net = detect_layers(curios_net)
442
+ if cfg.DEVICE == "cpu":
443
+ for net in training_nets:
444
+ for layer in layers_c_net:
445
+ try:
446
+ net = transfer_weights(curios_net,net,layer)
447
+ print(f"Weights transferred successfully")
448
+ except Exception:
449
+ print("Weights can not transfer")
450
+ for net in training_nets:
451
+ inter_results.append(train_net_on_cpu.remote(net,cfg.NET_TRAINING_EPOCHS))
452
+
453
+ elif cfg.DEVICE == "both":
454
+ for i, net in enumerate(training_nets):
455
+ if i % 2 == 0:
456
+ inter_results.append(train_net_on_cpu.remote(net,cfg.NET_TRAINING_EPOCHS))
457
+ else:
458
+ inter_results.append(train_net_on_gpu.remote(net,cfg.NET_TRAINING_EPOCHS))
459
+
460
+ elif cfg.DEVICE == "cuda":
461
+ for net in training_nets:
462
+ for layer in layers_c_net:
463
+ try:
464
+ net = transfer_weights(curios_net,net,layer)
465
+ print(f"Weights transferred successfully")
466
+ except Exception:
467
+ print("Weights can not transfer")
468
+ for net in training_nets:
469
+ inter_results.append(train_net_on_gpu.remote(net,cfg.NET_TRAINING_EPOCHS))
470
+
471
+ else:
472
+ raise ValueError(f"{cfg.DEVICE} is not a valid device")
473
+ fitness = []
474
+ for i,result in enumerate(inter_results):
475
+ try:
476
+ fitness.append([ray.get(result),training_prompts[i],temperature,net_paths[i],0.0])
477
+
478
+ if fitness[i][0] <= prev_best_score:
479
+ prev_best_net_path = net_paths[i]
480
+ prev_best_prompt = training_prompts[i]
481
+ prev_best_score = fitness[i][0]
482
+ temperature += 0.05
483
+ else:
484
+ temperature -= 0.05
485
+
486
+ if temperature > 1.0:
487
+ temperature = 1.0
488
+ elif temperature < 0.1:
489
+ temperature = 0.1
490
+
491
+ fitness[i][2] = temperature
492
+ except Exception:
493
+ print("not trainable due to fitness 2")
494
+
495
+ try:
496
+ infer_results = []
497
+ for net in training_nets:
498
+ infer_results.append(forward_pass_on_gpu.remote(net))
499
+
500
+
501
+ for i,result in enumerate(infer_results):
502
+ try:
503
+ res = ray.get(result)
504
+ fitness[i][4] = res[1]
505
+ except Exception:
506
+ print("not trainable due to inference speed 1")
507
+
508
+
509
+ except Exception:
510
+ print("not trainable due to inference speed 2")
511
+
512
+ dummy_inputs = torch.zeros((1, 3, 32, 32))
513
+ net_beh_list = []
514
+ prompt_beh_list = []
515
+ for loss_x,prompt_x,temp_x,net_p,infer_speed in fitness:
516
+ _nn = read_python_file(net_p)  # Avoiding syntax errors
517
+ Net = get_class(net_p)
518
+ net = Net()
519
+ try:
520
+ flps = FlopCountAnalysis(net, dummy_inputs)
521
+ flops = flps.total()
522
+ depth_width_ratio = get_network_width_depth_ratio(net)
523
+
524
+ except Exception:
525
+ try:
526
+ flps = FlopCountAnalysis(curios_net, dummy_inputs)
527
+ flops = flps.total()
528
+ depth_width_ratio = get_network_width_depth_ratio(curios_net)
529
+ except Exception:
530
+ flops = 0
531
+ depth_width_ratio = 1
532
+
533
+ print(f"flops, infer_speed, depth_width_ratio: {flops}, {infer_speed}, {depth_width_ratio}")
534
+ s_net = to_specie(net,loss_x,np.array([depth_width_ratio, flops]),net_p)
535
+ net_added = __add_to_archive(s_net, s_net.desc, nets_archive, kdt, type_ind = "network")
536
+
537
+ if net_added:
538
+ prompt_fit = 1.0
539
+ else:
540
+ prompt_fit = 0.0
541
+
542
+ s_prompt = to_specie(net,prompt_fit,np.array([prompt_to_int[prompt_x], temp_x]),net_p)
543
+ prompt_added = __add_to_archive(s_prompt, s_prompt.desc, prompt_archive, kdt, type_ind = "prompt")
544
+ if not net_added:
545
+ s_prompt.curiosity = s_prompt.curiosity - 0.5
546
+ elif net_added:
547
+ s_prompt.curiosity = s_prompt.curiosity + 1.0
548
+
549
+ if gen_i % 1 == 0:
550
+
551
+ cm.__save_archive(nets_archive, gen_i,name="net")
552
+ cm.__save_archive(prompt_archive, gen_i,name="prompt")
553
+ # write log
554
+ if log_file != None:
555
+ fit_list = np.array([x.fitness for x in nets_archive.values()])
556
+ log_file.write("{} {} {} {} {} {} {}\n".format(gen_i, len(nets_archive.keys()), len(prompt_archive.keys()),
557
+ fit_list.min(), np.mean(fit_list), np.median(fit_list),
558
+ np.percentile(fit_list, 5), np.percentile(fit_list, 95)))
559
+ log_file.flush()
560
+ cm.__save_archive(nets_archive, gen_i,name="net")
561
+ cm.__save_archive(prompt_archive, gen_i,name="prompt")
562
+ return nets_archive,prompt_archive
563
+
564
+ if __name__ == "__main__":
565
+ main()
566
+
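As a hedged summary of the archive bookkeeping in the loop above (illustrative only, not part of llmatic.py): each trained network is scored into the behaviour archive, and the prompt/temperature pair that produced it is rewarded only when that network actually claims or improves a niche. The helper name update_archives below is hypothetical.

def update_archives(s_net, s_prompt, nets_archive, prompt_archive, kdt):
    # the network elite is added first; __add_to_archive returns truthy only when
    # the network fills an empty niche or beats the incumbent elite of that niche
    net_added = __add_to_archive(s_net, s_net.desc, nets_archive, kdt, type_ind="network")
    # the prompt archive is indexed by (prompt id, temperature); its fitness records
    # whether the generated network was good enough to be archived
    s_prompt.fitness = 1.0 if net_added else 0.0
    __add_to_archive(s_prompt, s_prompt.desc, prompt_archive, kdt, type_ind="prompt")
    # curiosity steers later selection: successful prompts are explored more often
    s_prompt.curiosity += 1.0 if net_added else -0.5
    return net_added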
LLMatic-main/logs/gen-nets_codegen-6B-mono_networks-10_temp-0.0_net-training-epochs-50_niches-100_infer-and-flops-as-bd/cvt.dat ADDED
File without changes
LLMatic-main/logs/gen-nets_codegen-6B-mono_networks-10_temp-0.0_net-training-epochs-50_niches-100_infer-and-flops-as-bd/exp_results.csv ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ generations,best_loss,used_prompt
2
+ generations,best_loss,used_prompt
3
+ generations,best_loss,used_prompt
4
+ generations,best_loss,used_prompt
LLMatic-main/map_elites/LICENSE ADDED
@@ -0,0 +1,518 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ CeCILL FREE SOFTWARE LICENSE AGREEMENT
3
+
4
+ Version 2.1 dated 2013-06-21
5
+
6
+
7
+ Notice
8
+
9
+ This Agreement is a Free Software license agreement that is the result
10
+ of discussions between its authors in order to ensure compliance with
11
+ the two main principles guiding its drafting:
12
+
13
+ * firstly, compliance with the principles governing the distribution
14
+ of Free Software: access to source code, broad rights granted to users,
15
+ * secondly, the election of a governing law, French law, with which it
16
+ is conformant, both as regards the law of torts and intellectual
17
+ property law, and the protection that it offers to both authors and
18
+ holders of the economic rights over software.
19
+
20
+ The authors of the CeCILL (for Ce[a] C[nrs] I[nria] L[ogiciel] L[ibre])
21
+ license are:
22
+
23
+ Commissariat à l'énergie atomique et aux énergies alternatives - CEA, a
24
+ public scientific, technical and industrial research establishment,
25
+ having its principal place of business at 25 rue Leblanc, immeuble Le
26
+ Ponant D, 75015 Paris, France.
27
+
28
+ Centre National de la Recherche Scientifique - CNRS, a public scientific
29
+ and technological establishment, having its principal place of business
30
+ at 3 rue Michel-Ange, 75794 Paris cedex 16, France.
31
+
32
+ Institut National de Recherche en Informatique et en Automatique -
33
+ Inria, a public scientific and technological establishment, having its
34
+ principal place of business at Domaine de Voluceau, Rocquencourt, BP
35
+ 105, 78153 Le Chesnay cedex, France.
36
+
37
+
38
+ Preamble
39
+
40
+ The purpose of this Free Software license agreement is to grant users
41
+ the right to modify and redistribute the software governed by this
42
+ license within the framework of an open source distribution model.
43
+
44
+ The exercising of this right is conditional upon certain obligations for
45
+ users so as to preserve this status for all subsequent redistributions.
46
+
47
+ In consideration of access to the source code and the rights to copy,
48
+ modify and redistribute granted by the license, users are provided only
49
+ with a limited warranty and the software's author, the holder of the
50
+ economic rights, and the successive licensors only have limited liability.
51
+
52
+ In this respect, the risks associated with loading, using, modifying
53
+ and/or developing or reproducing the software by the user are brought to
54
+ the user's attention, given its Free Software status, which may make it
55
+ complicated to use, with the result that its use is reserved for
56
+ developers and experienced professionals having in-depth computer
57
+ knowledge. Users are therefore encouraged to load and test the
58
+ suitability of the software as regards their requirements in conditions
59
+ enabling the security of their systems and/or data to be ensured and,
60
+ more generally, to use and operate it in the same conditions of
61
+ security. This Agreement may be freely reproduced and published,
62
+ provided it is not altered, and that no provisions are either added or
63
+ removed herefrom.
64
+
65
+ This Agreement may apply to any or all software for which the holder of
66
+ the economic rights decides to submit the use thereof to its provisions.
67
+
68
+ Frequently asked questions can be found on the official website of the
69
+ CeCILL licenses family (http://www.cecill.info/index.en.html) for any
70
+ necessary clarification.
71
+
72
+
73
+ Article 1 - DEFINITIONS
74
+
75
+ For the purpose of this Agreement, when the following expressions
76
+ commence with a capital letter, they shall have the following meaning:
77
+
78
+ Agreement: means this license agreement, and its possible subsequent
79
+ versions and annexes.
80
+
81
+ Software: means the software in its Object Code and/or Source Code form
82
+ and, where applicable, its documentation, "as is" when the Licensee
83
+ accepts the Agreement.
84
+
85
+ Initial Software: means the Software in its Source Code and possibly its
86
+ Object Code form and, where applicable, its documentation, "as is" when
87
+ it is first distributed under the terms and conditions of the Agreement.
88
+
89
+ Modified Software: means the Software modified by at least one
90
+ Contribution.
91
+
92
+ Source Code: means all the Software's instructions and program lines to
93
+ which access is required so as to modify the Software.
94
+
95
+ Object Code: means the binary files originating from the compilation of
96
+ the Source Code.
97
+
98
+ Holder: means the holder(s) of the economic rights over the Initial
99
+ Software.
100
+
101
+ Licensee: means the Software user(s) having accepted the Agreement.
102
+
103
+ Contributor: means a Licensee having made at least one Contribution.
104
+
105
+ Licensor: means the Holder, or any other individual or legal entity, who
106
+ distributes the Software under the Agreement.
107
+
108
+ Contribution: means any or all modifications, corrections, translations,
109
+ adaptations and/or new functions integrated into the Software by any or
110
+ all Contributors, as well as any or all Internal Modules.
111
+
112
+ Module: means a set of sources files including their documentation that
113
+ enables supplementary functions or services in addition to those offered
114
+ by the Software.
115
+
116
+ External Module: means any or all Modules, not derived from the
117
+ Software, so that this Module and the Software run in separate address
118
+ spaces, with one calling the other when they are run.
119
+
120
+ Internal Module: means any or all Module, connected to the Software so
121
+ that they both execute in the same address space.
122
+
123
+ GNU GPL: means the GNU General Public License version 2 or any
124
+ subsequent version, as published by the Free Software Foundation Inc.
125
+
126
+ GNU Affero GPL: means the GNU Affero General Public License version 3 or
127
+ any subsequent version, as published by the Free Software Foundation Inc.
128
+
129
+ EUPL: means the European Union Public License version 1.1 or any
130
+ subsequent version, as published by the European Commission.
131
+
132
+ Parties: mean both the Licensee and the Licensor.
133
+
134
+ These expressions may be used both in singular and plural form.
135
+
136
+
137
+ Article 2 - PURPOSE
138
+
139
+ The purpose of the Agreement is the grant by the Licensor to the
140
+ Licensee of a non-exclusive, transferable and worldwide license for the
141
+ Software as set forth in Article 5 <#scope> hereinafter for the whole
142
+ term of the protection granted by the rights over said Software.
143
+
144
+
145
+ Article 3 - ACCEPTANCE
146
+
147
+ 3.1 The Licensee shall be deemed as having accepted the terms and
148
+ conditions of this Agreement upon the occurrence of the first of the
149
+ following events:
150
+
151
+ * (i) loading the Software by any or all means, notably, by
152
+ downloading from a remote server, or by loading from a physical medium;
153
+ * (ii) the first time the Licensee exercises any of the rights granted
154
+ hereunder.
155
+
156
+ 3.2 One copy of the Agreement, containing a notice relating to the
157
+ characteristics of the Software, to the limited warranty, and to the
158
+ fact that its use is restricted to experienced users has been provided
159
+ to the Licensee prior to its acceptance as set forth in Article 3.1
160
+ <#accepting> hereinabove, and the Licensee hereby acknowledges that it
161
+ has read and understood it.
162
+
163
+
164
+ Article 4 - EFFECTIVE DATE AND TERM
165
+
166
+
167
+ 4.1 EFFECTIVE DATE
168
+
169
+ The Agreement shall become effective on the date when it is accepted by
170
+ the Licensee as set forth in Article 3.1 <#accepting>.
171
+
172
+
173
+ 4.2 TERM
174
+
175
+ The Agreement shall remain in force for the entire legal term of
176
+ protection of the economic rights over the Software.
177
+
178
+
179
+ Article 5 - SCOPE OF RIGHTS GRANTED
180
+
181
+ The Licensor hereby grants to the Licensee, who accepts, the following
182
+ rights over the Software for any or all use, and for the term of the
183
+ Agreement, on the basis of the terms and conditions set forth hereinafter.
184
+
185
+ Besides, if the Licensor owns or comes to own one or more patents
186
+ protecting all or part of the functions of the Software or of its
187
+ components, the Licensor undertakes not to enforce the rights granted by
188
+ these patents against successive Licensees using, exploiting or
189
+ modifying the Software. If these patents are transferred, the Licensor
190
+ undertakes to have the transferees subscribe to the obligations set
191
+ forth in this paragraph.
192
+
193
+
194
+ 5.1 RIGHT OF USE
195
+
196
+ The Licensee is authorized to use the Software, without any limitation
197
+ as to its fields of application, with it being hereinafter specified
198
+ that this comprises:
199
+
200
+ 1. permanent or temporary reproduction of all or part of the Software
201
+ by any or all means and in any or all form.
202
+
203
+ 2. loading, displaying, running, or storing the Software on any or all
204
+ medium.
205
+
206
+ 3. entitlement to observe, study or test its operation so as to
207
+ determine the ideas and principles behind any or all constituent
208
+ elements of said Software. This shall apply when the Licensee
209
+ carries out any or all loading, displaying, running, transmission or
210
+ storage operation as regards the Software, that it is entitled to
211
+ carry out hereunder.
212
+
213
+
214
+ 5.2 ENTITLEMENT TO MAKE CONTRIBUTIONS
215
+
216
+ The right to make Contributions includes the right to translate, adapt,
217
+ arrange, or make any or all modifications to the Software, and the right
218
+ to reproduce the resulting software.
219
+
220
+ The Licensee is authorized to make any or all Contributions to the
221
+ Software provided that it includes an explicit notice that it is the
222
+ author of said Contribution and indicates the date of the creation thereof.
223
+
224
+
225
+ 5.3 RIGHT OF DISTRIBUTION
226
+
227
+ In particular, the right of distribution includes the right to publish,
228
+ transmit and communicate the Software to the general public on any or
229
+ all medium, and by any or all means, and the right to market, either in
230
+ consideration of a fee, or free of charge, one or more copies of the
231
+ Software by any means.
232
+
233
+ The Licensee is further authorized to distribute copies of the modified
234
+ or unmodified Software to third parties according to the terms and
235
+ conditions set forth hereinafter.
236
+
237
+
238
+ 5.3.1 DISTRIBUTION OF SOFTWARE WITHOUT MODIFICATION
239
+
240
+ The Licensee is authorized to distribute true copies of the Software in
241
+ Source Code or Object Code form, provided that said distribution
242
+ complies with all the provisions of the Agreement and is accompanied by:
243
+
244
+ 1. a copy of the Agreement,
245
+
246
+ 2. a notice relating to the limitation of both the Licensor's warranty
247
+ and liability as set forth in Articles 8 and 9,
248
+
249
+ and that, in the event that only the Object Code of the Software is
250
+ redistributed, the Licensee allows effective access to the full Source
251
+ Code of the Software for a period of at least three years from the
252
+ distribution of the Software, it being understood that the additional
253
+ acquisition cost of the Source Code shall not exceed the cost of the
254
+ data transfer.
255
+
256
+
257
+ 5.3.2 DISTRIBUTION OF MODIFIED SOFTWARE
258
+
259
+ When the Licensee makes a Contribution to the Software, the terms and
260
+ conditions for the distribution of the resulting Modified Software
261
+ become subject to all the provisions of this Agreement.
262
+
263
+ The Licensee is authorized to distribute the Modified Software, in
264
+ source code or object code form, provided that said distribution
265
+ complies with all the provisions of the Agreement and is accompanied by:
266
+
267
+ 1. a copy of the Agreement,
268
+
269
+ 2. a notice relating to the limitation of both the Licensor's warranty
270
+ and liability as set forth in Articles 8 and 9,
271
+
272
+ and, in the event that only the object code of the Modified Software is
273
+ redistributed,
274
+
275
+ 3. a note stating the conditions of effective access to the full source
276
+ code of the Modified Software for a period of at least three years
277
+ from the distribution of the Modified Software, it being understood
278
+ that the additional acquisition cost of the source code shall not
279
+ exceed the cost of the data transfer.
280
+
281
+
282
+ 5.3.3 DISTRIBUTION OF EXTERNAL MODULES
283
+
284
+ When the Licensee has developed an External Module, the terms and
285
+ conditions of this Agreement do not apply to said External Module, that
286
+ may be distributed under a separate license agreement.
287
+
288
+
289
+ 5.3.4 COMPATIBILITY WITH OTHER LICENSES
290
+
291
+ The Licensee can include a code that is subject to the provisions of one
292
+ of the versions of the GNU GPL, GNU Affero GPL and/or EUPL in the
293
+ Modified or unmodified Software, and distribute that entire code under
294
+ the terms of the same version of the GNU GPL, GNU Affero GPL and/or EUPL.
295
+
296
+ The Licensee can include the Modified or unmodified Software in a code
297
+ that is subject to the provisions of one of the versions of the GNU GPL,
298
+ GNU Affero GPL and/or EUPL and distribute that entire code under the
299
+ terms of the same version of the GNU GPL, GNU Affero GPL and/or EUPL.
300
+
301
+
302
+ Article 6 - INTELLECTUAL PROPERTY
303
+
304
+
305
+ 6.1 OVER THE INITIAL SOFTWARE
306
+
307
+ The Holder owns the economic rights over the Initial Software. Any or
308
+ all use of the Initial Software is subject to compliance with the terms
309
+ and conditions under which the Holder has elected to distribute its work
310
+ and no one shall be entitled to modify the terms and conditions for the
311
+ distribution of said Initial Software.
312
+
313
+ The Holder undertakes that the Initial Software will remain ruled at
314
+ least by this Agreement, for the duration set forth in Article 4.2 <#term>.
315
+
316
+
317
+ 6.2 OVER THE CONTRIBUTIONS
318
+
319
+ The Licensee who develops a Contribution is the owner of the
320
+ intellectual property rights over this Contribution as defined by
321
+ applicable law.
322
+
323
+
324
+ 6.3 OVER THE EXTERNAL MODULES
325
+
326
+ The Licensee who develops an External Module is the owner of the
327
+ intellectual property rights over this External Module as defined by
328
+ applicable law and is free to choose the type of agreement that shall
329
+ govern its distribution.
330
+
331
+
332
+ 6.4 JOINT PROVISIONS
333
+
334
+ The Licensee expressly undertakes:
335
+
336
+ 1. not to remove, or modify, in any manner, the intellectual property
337
+ notices attached to the Software;
338
+
339
+ 2. to reproduce said notices, in an identical manner, in the copies of
340
+ the Software modified or not.
341
+
342
+ The Licensee undertakes not to directly or indirectly infringe the
343
+ intellectual property rights on the Software of the Holder and/or
344
+ Contributors, and to take, where applicable, vis-à-vis its staff, any
345
+ and all measures required to ensure respect of said intellectual
346
+ property rights of the Holder and/or Contributors.
347
+
348
+
349
+ Article 7 - RELATED SERVICES
350
+
351
+ 7.1 Under no circumstances shall the Agreement oblige the Licensor to
352
+ provide technical assistance or maintenance services for the Software.
353
+
354
+ However, the Licensor is entitled to offer this type of services. The
355
+ terms and conditions of such technical assistance, and/or such
356
+ maintenance, shall be set forth in a separate instrument. Only the
357
+ Licensor offering said maintenance and/or technical assistance services
358
+ shall incur liability therefor.
359
+
360
+ 7.2 Similarly, any Licensor is entitled to offer to its licensees, under
361
+ its sole responsibility, a warranty, that shall only be binding upon
362
+ itself, for the redistribution of the Software and/or the Modified
363
+ Software, under terms and conditions that it is free to decide. Said
364
+ warranty, and the financial terms and conditions of its application,
365
+ shall be subject of a separate instrument executed between the Licensor
366
+ and the Licensee.
367
+
368
+
369
+ Article 8 - LIABILITY
370
+
371
+ 8.1 Subject to the provisions of Article 8.2, the Licensee shall be
372
+ entitled to claim compensation for any direct loss it may have suffered
373
+ from the Software as a result of a fault on the part of the relevant
374
+ Licensor, subject to providing evidence thereof.
375
+
376
+ 8.2 The Licensor's liability is limited to the commitments made under
377
+ this Agreement and shall not be incurred as a result of in particular:
378
+ (i) loss due the Licensee's total or partial failure to fulfill its
379
+ obligations, (ii) direct or consequential loss that is suffered by the
380
+ Licensee due to the use or performance of the Software, and (iii) more
381
+ generally, any consequential loss. In particular the Parties expressly
382
+ agree that any or all pecuniary or business loss (i.e. loss of data,
383
+ loss of profits, operating loss, loss of customers or orders,
384
+ opportunity cost, any disturbance to business activities) or any or all
385
+ legal proceedings instituted against the Licensee by a third party,
386
+ shall constitute consequential loss and shall not provide entitlement to
387
+ any or all compensation from the Licensor.
388
+
389
+
390
+ Article 9 - WARRANTY
391
+
392
+ 9.1 The Licensee acknowledges that the scientific and technical
393
+ state-of-the-art when the Software was distributed did not enable all
394
+ possible uses to be tested and verified, nor for the presence of
395
+ possible defects to be detected. In this respect, the Licensee's
396
+ attention has been drawn to the risks associated with loading, using,
397
+ modifying and/or developing and reproducing the Software which are
398
+ reserved for experienced users.
399
+
400
+ The Licensee shall be responsible for verifying, by any or all means,
401
+ the suitability of the product for its requirements, its good working
402
+ order, and for ensuring that it shall not cause damage to either persons
403
+ or properties.
404
+
405
+ 9.2 The Licensor hereby represents, in good faith, that it is entitled
406
+ to grant all the rights over the Software (including in particular the
407
+ rights set forth in Article 5 <#scope>).
408
+
409
+ 9.3 The Licensee acknowledges that the Software is supplied "as is" by
410
+ the Licensor without any other express or tacit warranty, other than
411
+ that provided for in Article 9.2 <#good-faith> and, in particular,
412
+ without any warranty as to its commercial value, its secured, safe,
413
+ innovative or relevant nature.
414
+
415
+ Specifically, the Licensor does not warrant that the Software is free
416
+ from any error, that it will operate without interruption, that it will
417
+ be compatible with the Licensee's own equipment and software
418
+ configuration, nor that it will meet the Licensee's requirements.
419
+
420
+ 9.4 The Licensor does not either expressly or tacitly warrant that the
421
+ Software does not infringe any third party intellectual property right
422
+ relating to a patent, software or any other property right. Therefore,
423
+ the Licensor disclaims any and all liability towards the Licensee
424
+ arising out of any or all proceedings for infringement that may be
425
+ instituted in respect of the use, modification and redistribution of the
426
+ Software. Nevertheless, should such proceedings be instituted against
427
+ the Licensee, the Licensor shall provide it with technical and legal
428
+ expertise for its defense. Such technical and legal expertise shall be
429
+ decided on a case-by-case basis between the relevant Licensor and the
430
+ Licensee pursuant to a memorandum of understanding. The Licensor
431
+ disclaims any and all liability as regards the Licensee's use of the
432
+ name of the Software. No warranty is given as regards the existence of
433
+ prior rights over the name of the Software or as regards the existence
434
+ of a trademark.
435
+
436
+
437
+ Article 10 - TERMINATION
438
+
439
+ 10.1 In the event of a breach by the Licensee of its obligations
440
+ hereunder, the Licensor may automatically terminate this Agreement
441
+ thirty (30) days after notice has been sent to the Licensee and has
442
+ remained ineffective.
443
+
444
+ 10.2 A Licensee whose Agreement is terminated shall no longer be
445
+ authorized to use, modify or distribute the Software. However, any
446
+ licenses that it may have granted prior to termination of the Agreement
447
+ shall remain valid subject to their having been granted in compliance
448
+ with the terms and conditions hereof.
449
+
450
+
451
+ Article 11 - MISCELLANEOUS
452
+
453
+
454
+ 11.1 EXCUSABLE EVENTS
455
+
456
+ Neither Party shall be liable for any or all delay, or failure to
457
+ perform the Agreement, that may be attributable to an event of force
458
+ majeure, an act of God or an outside cause, such as defective
459
+ functioning or interruptions of the electricity or telecommunications
460
+ networks, network paralysis following a virus attack, intervention by
461
+ government authorities, natural disasters, water damage, earthquakes,
462
+ fire, explosions, strikes and labor unrest, war, etc.
463
+
464
+ 11.2 Any failure by either Party, on one or more occasions, to invoke
465
+ one or more of the provisions hereof, shall under no circumstances be
466
+ interpreted as being a waiver by the interested Party of its right to
467
+ invoke said provision(s) subsequently.
468
+
469
+ 11.3 The Agreement cancels and replaces any or all previous agreements,
470
+ whether written or oral, between the Parties and having the same
471
+ purpose, and constitutes the entirety of the agreement between said
472
+ Parties concerning said purpose. No supplement or modification to the
473
+ terms and conditions hereof shall be effective as between the Parties
474
+ unless it is made in writing and signed by their duly authorized
475
+ representatives.
476
+
477
+ 11.4 In the event that one or more of the provisions hereof were to
478
+ conflict with a current or future applicable act or legislative text,
479
+ said act or legislative text shall prevail, and the Parties shall make
480
+ the necessary amendments so as to comply with said act or legislative
481
+ text. All other provisions shall remain effective. Similarly, invalidity
482
+ of a provision of the Agreement, for any reason whatsoever, shall not
483
+ cause the Agreement as a whole to be invalid.
484
+
485
+
486
+ 11.5 LANGUAGE
487
+
488
+ The Agreement is drafted in both French and English and both versions
489
+ are deemed authentic.
490
+
491
+
492
+ Article 12 - NEW VERSIONS OF THE AGREEMENT
493
+
494
+ 12.1 Any person is authorized to duplicate and distribute copies of this
495
+ Agreement.
496
+
497
+ 12.2 So as to ensure coherence, the wording of this Agreement is
498
+ protected and may only be modified by the authors of the License, who
499
+ reserve the right to periodically publish updates or new versions of the
500
+ Agreement, each with a separate number. These subsequent versions may
501
+ address new issues encountered by Free Software.
502
+
503
+ 12.3 Any Software distributed under a given version of the Agreement may
504
+ only be subsequently distributed under the same version of the Agreement
505
+ or a subsequent version, subject to the provisions of Article 5.3.4
506
+ <#compatibility>.
507
+
508
+
509
+ Article 13 - GOVERNING LAW AND JURISDICTION
510
+
511
+ 13.1 The Agreement is governed by French law. The Parties agree to
512
+ endeavor to seek an amicable solution to any disagreements or disputes
513
+ that may arise during the performance of the Agreement.
514
+
515
+ 13.2 Failing an amicable solution within two (2) months as from their
516
+ occurrence, and unless emergency proceedings are necessary, the
517
+ disagreements or disputes shall be referred to the Paris Courts having
518
+ jurisdiction, by the more diligent Party.
LLMatic-main/map_elites/__init__.py ADDED
File without changes
LLMatic-main/map_elites/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (197 Bytes). View file
 
LLMatic-main/map_elites/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (185 Bytes). View file
 
LLMatic-main/map_elites/__pycache__/__init__.cpython-39.pyc ADDED
Binary file (179 Bytes). View file
 
LLMatic-main/map_elites/__pycache__/common.cpython-311.pyc ADDED
Binary file (10 kB). View file
 
LLMatic-main/map_elites/__pycache__/common.cpython-312.pyc ADDED
Binary file (8.91 kB). View file
 
LLMatic-main/map_elites/__pycache__/common.cpython-39.pyc ADDED
Binary file (5.21 kB). View file
 
LLMatic-main/map_elites/common.py ADDED
@@ -0,0 +1,231 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #! /usr/bin/env python
2
+ #| This file is a part of the pymap_elites framework.
3
+ #| Copyright 2019, INRIA
4
+ #| Main contributor(s):
5
+ #| Jean-Baptiste Mouret, jean-baptiste.mouret@inria.fr
6
+ #| Eloise Dalin , eloise.dalin@inria.fr
7
+ #| Pierre Desreumaux , pierre.desreumaux@inria.fr
8
+ #|
9
+ #|
10
+ #| **Main paper**: Mouret JB, Clune J. Illuminating search spaces by
11
+ #| mapping elites. arXiv preprint arXiv:1504.04909. 2015 Apr 20.
12
+ #|
13
+ #| This software is governed by the CeCILL license under French law
14
+ #| and abiding by the rules of distribution of free software. You
15
+ #| can use, modify and/ or redistribute the software under the terms
16
+ #| of the CeCILL license as circulated by CEA, CNRS and INRIA at the
17
+ #| following URL "http://www.cecill.info".
18
+ #|
19
+ #| As a counterpart to the access to the source code and rights to
20
+ #| copy, modify and redistribute granted by the license, users are
21
+ #| provided only with a limited warranty and the software's author,
22
+ #| the holder of the economic rights, and the successive licensors
23
+ #| have only limited liability.
24
+ #|
25
+ #| In this respect, the user's attention is drawn to the risks
26
+ #| associated with loading, using, modifying and/or developing or
27
+ #| reproducing the software by the user in light of its specific
28
+ #| status of free software, that may mean that it is complicated to
29
+ #| manipulate, and that also therefore means that it is reserved for
30
+ #| developers and experienced professionals having in-depth computer
31
+ #| knowledge. Users are therefore encouraged to load and test the
32
+ #| software's suitability as regards their requirements in conditions
33
+ #| enabling the security of their systems and/or data to be ensured
34
+ #| and, more generally, to use and operate it in the same conditions
35
+ #| as regards security.
36
+ #|
37
+ #| The fact that you are presently reading this means that you have
38
+ #| had knowledge of the CeCILL license and that you accept its terms.
39
+ #
40
+
41
+ import math
42
+ import numpy as np
43
+ import multiprocessing
44
+ from pathlib import Path
45
+ import sys
46
+ import random
47
+ from collections import defaultdict
48
+ from sklearn.cluster import KMeans
49
+
50
+ default_params = \
51
+ {
52
+ # more of this -> higher-quality CVT
53
+ "cvt_samples": 25000,
54
+ # we evaluate in batches to parallelize
55
+ "batch_size": 100,
56
+ # proportion of niches to be filled before starting
57
+ "random_init": 0.1,
58
+ # batch for random initialization
59
+ "random_init_batch": 100,
60
+ # when to write results (one generation = one batch)
61
+ "dump_period": 10000,
62
+ # do we use several cores?
63
+ "parallel": True,
64
+ # do we cache the result of CVT and reuse?
65
+ "cvt_use_cache": True,
66
+ # min/max of parameters
67
+ "min": 0,
68
+ "max": 1,
69
+ # only useful if you use the 'iso_dd' variation operator
70
+ "iso_sigma": 0.01,
71
+ "line_sigma": 0.2
72
+ }
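A small usage note (an assumed calling convention, not something this file enforces): runs are easiest to configure by copying the defaults and overriding individual keys rather than editing the dict in place.

my_params = dict(default_params, batch_size=200, dump_period=5000, parallel=False)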
73
+
74
+ class Species:
75
+ def __init__(self, x, desc, fitness, net_path, centroid=None):
76
+ self.x = x
77
+ self.desc = desc
78
+ self.fitness = fitness
79
+ self.centroid = centroid
80
+ self.curiosity = 0
81
+ self.net_path = net_path
82
+
83
+
84
+ def polynomial_mutation(x):
85
+ '''
86
+ Cf Deb 2001, p 124 ; param: eta_m
87
+ '''
88
+ y = x.copy()
89
+ eta_m = 5.0
90
+ r = np.random.random(size=len(x))
91
+ for i in range(0, len(x)):
92
+ if r[i] < 0.5:
93
+ delta_i = math.pow(2.0 * r[i], 1.0 / (eta_m + 1.0)) - 1.0
94
+ else:
95
+ delta_i = 1 - math.pow(2.0 * (1.0 - r[i]), 1.0 / (eta_m + 1.0))
96
+ y[i] += delta_i
97
+ return y
98
+
99
+ def sbx(x, y, params):
100
+ '''
101
+ SBX (cf Deb 2001, p 113) Simulated Binary Crossover
102
+
103
+ A large value of eta gives a higher probability of
104
+ creating 'near-parent' solutions, while a small value allows
105
+ distant solutions to be selected as offspring.
106
+ '''
107
+ eta = 10.0
108
+ xl = params['min']
109
+ xu = params['max']
110
+ z = x.copy()
111
+ r1 = np.random.random(size=len(x))
112
+ r2 = np.random.random(size=len(x))
113
+
114
+ for i in range(0, len(x)):
115
+ if abs(x[i] - y[i]) > 1e-15:
116
+ x1 = min(x[i], y[i])
117
+ x2 = max(x[i], y[i])
118
+
119
+ beta = 1.0 + (2.0 * (x1 - xl) / (x2 - x1))
120
+ alpha = 2.0 - beta ** -(eta + 1)
121
+ rand = r1[i]
122
+ if rand <= 1.0 / alpha:
123
+ beta_q = (rand * alpha) ** (1.0 / (eta + 1))
124
+ else:
125
+ beta_q = (1.0 / (2.0 - rand * alpha)) ** (1.0 / (eta + 1))
126
+
127
+ c1 = 0.5 * (x1 + x2 - beta_q * (x2 - x1))
128
+
129
+ beta = 1.0 + (2.0 * (xu - x2) / (x2 - x1))
130
+ alpha = 2.0 - beta ** -(eta + 1)
131
+ if rand <= 1.0 / alpha:
132
+ beta_q = (rand * alpha) ** (1.0 / (eta + 1))
133
+ else:
134
+ beta_q = (1.0 / (2.0 - rand * alpha)) ** (1.0 / (eta + 1))
135
+ c2 = 0.5 * (x1 + x2 + beta_q * (x2 - x1))
136
+
137
+ c1 = min(max(c1, xl), xu)
138
+ c2 = min(max(c2, xl), xu)
139
+
140
+ if r2[i] <= 0.5:
141
+ z[i] = c2
142
+ else:
143
+ z[i] = c1
144
+ return z
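A minimal usage sketch of sbx (the genome length and bounds here are placeholder assumptions): both parents must share a shape, and the offspring stays clipped to params['min']..params['max'].

import numpy as np
parent_a, parent_b = np.random.rand(10), np.random.rand(10)
child = sbx(parent_a, parent_b, default_params)  # same shape as the parents, values within [0, 1]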
145
+
146
+
147
+ def iso_dd(x, y, params):
148
+ '''
149
+ Iso+Line
150
+ Ref:
151
+ Vassiliades V, Mouret JB. Discovering the elite hypervolume by leveraging interspecies correlation.
152
+ GECCO 2018
153
+ '''
154
+ assert(x.shape == y.shape)
155
+ p_max = np.array(params["max"])
156
+ p_min = np.array(params["min"])
157
+ a = np.random.normal(0, params['iso_sigma'], size=len(x))
158
+ b = np.random.normal(0, params['line_sigma'])
159
+ norm = np.linalg.norm(x - y)
160
+ z = x.copy() + a + b * (x - y)
161
+ return np.clip(z, p_min, p_max)
162
+
163
+
164
+ def variation(x, z, params):
165
+ assert(x.shape == z.shape)
166
+ y = sbx(x, z, params)
167
+ return y
168
+
169
+ def __centroids_filename(k, dim):
170
+ return 'centroids_' + str(k) + '_' + str(dim) + '.dat'
171
+
172
+
173
+ def __write_centroids(centroids):
174
+ k = centroids.shape[0]
175
+ dim = centroids.shape[1]
176
+ filename = __centroids_filename(k, dim)
177
+ with open(filename, 'w') as f:
178
+ for p in centroids:
179
+ for item in p:
180
+ f.write(str(item) + ' ')
181
+ f.write('\n')
182
+
183
+
184
+ def cvt(k, dim, samples, cvt_use_cache=True):
185
+ # check if we have cached values
186
+ fname = __centroids_filename(k, dim)
187
+ if cvt_use_cache:
188
+ if Path(fname).is_file():
189
+ print("WARNING: using cached CVT:", fname)
190
+ return np.loadtxt(fname)
191
+ # otherwise, compute cvt
192
+ print("Computing CVT (this can take a while...):", fname)
193
+
194
+ x = np.random.rand(samples, dim)
195
+ k_means = KMeans(init='k-means++', n_clusters=k,
196
+ n_init=1, verbose=1)#,algorithm="full")
197
+ k_means.fit(x)
198
+ __write_centroids(k_means.cluster_centers_)
199
+
200
+ return k_means.cluster_centers_
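An illustrative sketch of how the CVT is typically consumed (mirroring what cvt.py and llmatic.py do further down): build the centroids once, index them with a KD-tree, then map any behaviour descriptor to its nearest niche.

from sklearn.neighbors import KDTree
centroids = cvt(k=100, dim=2, samples=25000)
kdt = KDTree(centroids, leaf_size=30, metric='euclidean')
niche_index = kdt.query([[0.3, 0.7]], k=1)[1][0][0]  # index of the closest centroid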
201
+
202
+
203
+ def make_hashable(array):
204
+ return tuple(map(float, array))
205
+
206
+
207
+ def parallel_eval(evaluate_function, to_evaluate, pool, params):
208
+ if params['parallel'] == True:
209
+ s_list = pool.map(evaluate_function, to_evaluate)
210
+ else:
211
+ s_list = map(evaluate_function, to_evaluate)
212
+ return list(s_list)
213
+
214
+ # format: net_path fitness centroid curiosity desc \n
215
+ # centroid and desc are vectors
216
+ def __save_archive(archive, gen,name="net"):
217
+ def write_array(a, f):
218
+ for i in a:
219
+ f.write(str(i) + " ")
220
+ filename = name+'archive_' + str(gen) + '.dat'
221
+ with open(filename, 'w') as f:
222
+ for k in archive.values():
223
+ f.write(k.net_path + " ")
224
+ f.write(str(k.fitness) + ' ')
225
+ write_array(k.centroid, f)
226
+ f.write(str(k.curiosity) + " ")
227
+ f.write(str(k.desc[0]) + " ")
228
+ f.write(str(k.desc[1]) + " ")
229
+
230
+ #write_array(k.x, f)
231
+ f.write("\n")
LLMatic-main/map_elites/cvt.py ADDED
@@ -0,0 +1,142 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #! /usr/bin/env python
2
+ #| This file is a part of the pymap_elites framework.
3
+ #| Copyright 2019, INRIA
4
+ #| Main contributor(s):
5
+ #| Jean-Baptiste Mouret, jean-baptiste.mouret@inria.fr
6
+ #| Eloise Dalin , eloise.dalin@inria.fr
7
+ #| Pierre Desreumaux , pierre.desreumaux@inria.fr
8
+ #|
9
+ #|
10
+ #| **Main paper**: Mouret JB, Clune J. Illuminating search spaces by
11
+ #| mapping elites. arXiv preprint arXiv:1504.04909. 2015 Apr 20.
12
+ #|
13
+ #| This software is governed by the CeCILL license under French law
14
+ #| and abiding by the rules of distribution of free software. You
15
+ #| can use, modify and/ or redistribute the software under the terms
16
+ #| of the CeCILL license as circulated by CEA, CNRS and INRIA at the
17
+ #| following URL "http://www.cecill.info".
18
+ #|
19
+ #| As a counterpart to the access to the source code and rights to
20
+ #| copy, modify and redistribute granted by the license, users are
21
+ #| provided only with a limited warranty and the software's author,
22
+ #| the holder of the economic rights, and the successive licensors
23
+ #| have only limited liability.
24
+ #|
25
+ #| In this respect, the user's attention is drawn to the risks
26
+ #| associated with loading, using, modifying and/or developing or
27
+ #| reproducing the software by the user in light of its specific
28
+ #| status of free software, that may mean that it is complicated to
29
+ #| manipulate, and that also therefore means that it is reserved for
30
+ #| developers and experienced professionals having in-depth computer
31
+ #| knowledge. Users are therefore encouraged to load and test the
32
+ #| software's suitability as regards their requirements in conditions
33
+ #| enabling the security of their systems and/or data to be ensured
34
+ #| and, more generally, to use and operate it in the same conditions
35
+ #| as regards security.
36
+ #|
37
+ #| The fact that you are presently reading this means that you have
38
+ #| had knowledge of the CeCILL license and that you accept its terms.
39
+
40
+ import math
41
+ import numpy as np
42
+ import multiprocessing
43
+
44
+ # from scipy.spatial import cKDTree : TODO -- faster?
45
+ from sklearn.neighbors import KDTree
46
+
47
+ from map_elites import common as cm
48
+
49
+
50
+
51
+ def __add_to_archive(s, centroid, archive, kdt):
52
+ niche_index = kdt.query([centroid], k=1)[1][0][0]
53
+ niche = kdt.data[niche_index]
54
+ n = cm.make_hashable(niche)
55
+ s.centroid = n
56
+ if n in archive:
57
+ if s.fitness > archive[n].fitness:
58
+ archive[n] = s
59
+ return 1
60
+ return 0
61
+ else:
62
+ archive[n] = s
63
+ return 1
64
+
65
+
66
+ # evaluate a single vector (x) with a function f and return a species
67
+ # t = vector, function
68
+ def __evaluate(t):
69
+ z, f = t # evaluate z with function f
70
+ fit, desc = f(z)
71
+ return cm.Species(z, desc, fit)
72
+
73
+ # map-elites algorithm (CVT variant)
74
+ def compute(dim_map, dim_x, f,
75
+ n_niches=1000,
76
+ max_evals=1e5,
77
+ params=cm.default_params,
78
+ log_file=None,
79
+ variation_operator=cm.variation):
80
+ """CVT MAP-Elites
81
+ Vassiliades V, Chatzilygeroudis K, Mouret JB. Using centroidal voronoi tessellations to scale up the multidimensional archive of phenotypic elites algorithm. IEEE Transactions on Evolutionary Computation. 2017 Aug 3;22(4):623-30.
82
+
83
+ Format of the logfile: evals archive_size max mean median 5%_percentile, 95%_percentile
84
+
85
+ """
86
+ # setup the parallel processing pool
87
+ num_cores = multiprocessing.cpu_count()
88
+ pool = multiprocessing.Pool(num_cores)
89
+
90
+ # create the CVT
91
+ c = cm.cvt(n_niches, dim_map,
92
+ params['cvt_samples'], params['cvt_use_cache'])
93
+ kdt = KDTree(c, leaf_size=30, metric='euclidean')
94
+ cm.__write_centroids(c)
95
+
96
+ archive = {} # init archive (empty)
97
+ n_evals = 0 # number of evaluations since the beginning
98
+ b_evals = 0 # number evaluation since the last dump
99
+
100
+ # main loop
101
+ while (n_evals < max_evals):
102
+ to_evaluate = []
103
+ # random initialization
104
+ if len(archive) <= params['random_init'] * n_niches:
105
+ for i in range(0, params['random_init_batch']):
106
+ x = np.random.uniform(low=params['min'], high=params['max'], size=dim_x)
107
+ to_evaluate += [(x, f)]
108
+ else: # variation/selection loop
109
+ keys = list(archive.keys())
110
+ # we select all the parents at the same time because randint is slow
111
+ rand1 = np.random.randint(len(keys), size=params['batch_size'])
112
+ rand2 = np.random.randint(len(keys), size=params['batch_size'])
113
+ for n in range(0, params['batch_size']):
114
+ # parent selection
115
+ x = archive[keys[rand1[n]]]
116
+ y = archive[keys[rand2[n]]]
117
+ # copy & add variation
118
+ z = variation_operator(x.x, y.x, params)
119
+ to_evaluate += [(z, f)]
120
+ # evaluation of the fitness for to_evaluate
121
+ s_list = cm.parallel_eval(__evaluate, to_evaluate, pool, params)
122
+ # natural selection
123
+ for s in s_list:
124
+ __add_to_archive(s, s.desc, archive, kdt)
125
+ # count evals
126
+ n_evals += len(to_evaluate)
127
+ b_evals += len(to_evaluate)
128
+
129
+ # write archive
130
+ if b_evals >= params['dump_period'] and params['dump_period'] != -1:
131
+ print("[{}/{}]".format(n_evals, int(max_evals)), end=" ", flush=True)
132
+ cm.__save_archive(archive, n_evals)
133
+ b_evals = 0
134
+ # write log
135
+ if log_file != None:
136
+ fit_list = np.array([x.fitness for x in archive.values()])
137
+ log_file.write("{} {} {} {} {} {} {}\n".format(n_evals, len(archive.keys()),
138
+ fit_list.max(), np.mean(fit_list), np.median(fit_list),
139
+ np.percentile(fit_list, 5), np.percentile(fit_list, 95)))
140
+ log_file.flush()
141
+ cm.__save_archive(archive, n_evals)
142
+ return archive
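A quick usage sketch (not part of cvt.py; the toy objective is an assumption): the evaluation function must return a (fitness, descriptor) pair whose descriptor lies in [0, 1]^dim_map. Kept small so it finishes quickly.

import numpy as np
def sphere(x):
    # negative squared distance to 0.5, plus the first two genes as the descriptor
    return -float(np.sum((x - 0.5) ** 2)), np.array([x[0], x[1]])
archive = compute(dim_map=2, dim_x=6, f=sphere, n_niches=100, max_evals=1e4)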
LLMatic-main/map_elites/multitask.py ADDED
@@ -0,0 +1,242 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #! /usr/bin/env python
2
+ #| This file is a part of the pymap_elites framework.
3
+ #| Copyright 2019, INRIA
4
+ #| Main contributor(s):
5
+ #| Jean-Baptiste Mouret, jean-baptiste.mouret@inria.fr
6
+ #| Eloise Dalin , eloise.dalin@inria.fr
7
+ #| Pierre Desreumaux , pierre.desreumaux@inria.fr
8
+ #|
9
+ #|
10
+ #| **Main paper**: Mouret JB, Clune J. Illuminating search spaces by
11
+ #| mapping elites. arXiv preprint arXiv:1504.04909. 2015 Apr 20.
12
+ #|
13
+ #| This software is governed by the CeCILL license under French law
14
+ #| and abiding by the rules of distribution of free software. You
15
+ #| can use, modify and/ or redistribute the software under the terms
16
+ #| of the CeCILL license as circulated by CEA, CNRS and INRIA at the
17
+ #| following URL "http://www.cecill.info".
18
+ #|
19
+ #| As a counterpart to the access to the source code and rights to
20
+ #| copy, modify and redistribute granted by the license, users are
21
+ #| provided only with a limited warranty and the software's author,
22
+ #| the holder of the economic rights, and the successive licensors
23
+ #| have only limited liability.
24
+ #|
25
+ #| In this respect, the user's attention is drawn to the risks
26
+ #| associated with loading, using, modifying and/or developing or
27
+ #| reproducing the software by the user in light of its specific
28
+ #| status of free software, that may mean that it is complicated to
29
+ #| manipulate, and that also therefore means that it is reserved for
30
+ #| developers and experienced professionals having in-depth computer
31
+ #| knowledge. Users are therefore encouraged to load and test the
32
+ #| software's suitability as regards their requirements in conditions
33
+ #| enabling the security of their systems and/or data to be ensured
34
+ #| and, more generally, to use and operate it in the same conditions
35
+ #| as regards security.
36
+ #|
37
+ #| The fact that you are presently reading this means that you have
38
+ #| had knowledge of the CeCILL license and that you accept its terms.
39
+ #
40
+ # from scipy.spatial import cKDTree : TODO
41
+
42
+ import math
43
+ import numpy as np
44
+ import multiprocessing
45
+ from pathlib import Path
46
+ import sys
47
+ import random
48
+ from collections import defaultdict
49
+ from sklearn.neighbors import KDTree
50
+ from scipy.spatial import distance
51
+
52
+ from map_elites import common as cm
53
+
54
+
55
+ def add_to_archive(s, archive):
56
+ centroid = cm.make_hashable(s.centroid)
57
+ if centroid in archive:
58
+ if s.fitness > archive[centroid].fitness:
59
+ archive[centroid] = s
60
+ return 1
61
+ return 0
62
+ else:
63
+ archive[centroid] = s
64
+ return 1
65
+
66
+ # evaluate a single vector (z) with a function f and return a species
67
+ # t = vector, function
68
+ def __evaluate(t):
69
+ z, f, task, centroid, _ = t
70
+ fit = f(z, task)
71
+ return cm.Species(z, task, fit, centroid)
72
+
73
+ # bandit opt for optimizing tournament size
74
+ # probability matching / Adaptive pursuit Thierens GECCO 2005
75
+ # UCB: schoenauer / Sebag
76
+ # TODO : params for values, and params for window
77
+ def bandit(successes, n_niches):
78
+ n = 0
79
+ for v in successes.values():
80
+ n += len(v)
81
+ v = [1, 10, 50, 100, 500]#, 1000]
82
+ if len(successes.keys()) < len(v):
83
+ return random.choice(v)
84
+ ucb = []
85
+ for k in v:
86
+ x = [i[0] for i in successes[k]]
87
+ mean = sum(x) / float(len(x)) # 100 = batch size??
88
+ n_a = len(x)
89
+ ucb += [mean + math.sqrt(2 * math.log(n) / n_a)]
90
+ a = np.argmax(ucb)
91
+ t_size = v[a]
92
+ return t_size
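A worked illustration of the UCB score computed above (toy numbers, assumed): each candidate tournament size k is scored by its mean success count plus an exploration bonus sqrt(2*ln(total pulls)/pulls of k), and the best-scoring size is played next.

import math
successes = {1: [(3, 100), (5, 200)], 10: [(7, 300)]}  # size -> [(successes, eval count), ...]
n = sum(len(v) for v in successes.values())            # total pulls = 3
ucb_1 = (8 / 2.0) + math.sqrt(2 * math.log(n) / 2)     # ~5.05
ucb_10 = (7 / 1.0) + math.sqrt(2 * math.log(n) / 1)    # ~8.48, so size 10 would be picked
# (bandit() itself falls back to a random size until every candidate in v has been tried)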
93
+
94
+ # select the niche according to
95
+ def select_niche(x, z, f, centroids, tasks, t_size, params, use_distance=False):
96
+ to_evaluate = []
97
+ if not use_distance:
98
+ # No distance: evaluate on a random niche
99
+ niche = np.random.randint(len(tasks))
100
+ to_evaluate += [(z, f, tasks[niche], centroids[niche, :], params)]
101
+ else:
102
+ # we select the parent (a single one), then we select the niche
103
+ # with a tournament based on the task distance
104
+ # the size of the tournament depends on the bandit algorithm
105
+ niches_centroids = []
106
+ niches_tasks = [] # TODO : use a kd-tree
107
+ rand = np.random.randint(centroids.shape[0], size=t_size)
108
+ for p in range(0, t_size):
109
+ n = rand[p]
110
+ niches_centroids += [centroids[n, :]]
111
+ niches_tasks += [tasks[n]]
112
+ cd = distance.cdist(niches_centroids, [x.centroid], 'euclidean')
113
+ cd_min = np.argmin(cd)
114
+ to_evaluate += [(z, f, niches_tasks[cd_min], niches_centroids[cd_min], params)]
115
+ return to_evaluate
116
+
117
+
118
+ def compute(dim_map=-1,
119
+ dim_x=-1,
120
+ f=None,
121
+ max_evals=1e5,
122
+ centroids=[],
123
+ tasks=[],
124
+ variation_operator=cm.variation,
125
+ params=cm.default_params,
126
+ log_file=None):
127
+ """Multi-task MAP-Elites
128
+ - if there is no centroid : random assignation of niches
129
+ - if there is no task: use the centroids as tasks
130
+ - if there is a centroid list: use the centroids to compute distances
131
+ when using the distance, use the bandit to select the tournament size (cf paper):
132
+
133
+ Format of the logfile: evals archive_size max mean median 5%_percentile, 95%_percentile
134
+
135
+ Reference:
136
+ Mouret and Maguire (2020). Quality Diversity for Multitask Optimization
137
+ Proceedings of ACM GECCO.
138
+ """
139
+ print(params)
140
+ assert(f != None)
141
+ assert(dim_x != -1)
142
+ # handle the arguments
143
+ use_distance = False
144
+ if tasks != [] and centroids != []:
145
+ use_distance = True
146
+ elif tasks == [] and centroids != []:
147
+ # if no task, we use the centroids as tasks
148
+ tasks = centroids
149
+ use_distance = True
150
+ elif tasks != [] and centroids == []:
151
+ # if no centroid, we create indices so that we can index the archive by centroid
152
+ centroids = np.arange(0, len(tasks)).reshape(len(tasks), 1)
153
+ use_distance = False
154
+ else:
155
+ raise ValueError('Multi-task MAP-Elites: you need to specify a list of task, a list of centroids, or both')
156
+ print("Multitask-MAP-Elites:: using distance =>", use_distance)
157
+
158
+ assert(len(tasks) == len(centroids))
159
+ n_tasks = len(tasks)
160
+
161
+ # init archive (empty)
162
+ archive = {}
163
+
164
+ init_count = 0
165
+
166
+ # init multiprocessing
167
+ num_cores = multiprocessing.cpu_count()
168
+ pool = multiprocessing.Pool(num_cores)
169
+
170
+ # main loop
171
+ n_evals = 0 # number of evaluations
172
+ b_evals = 0 # number evaluation since the last dump
173
+ t_size = 1 # size of the tournament (if using distance) [will be selected by the bandit]
174
+ successes = defaultdict(list) # count the successes
175
+ while (n_evals < max_evals):
176
+ to_evaluate = []
177
+ to_evaluate_centroid = []
178
+ if len(archive) <= params['random_init'] * n_tasks:
179
+ # initialize the map with random individuals
180
+ for i in range(0, params['random_init_batch']):
181
+ # create a random individual
182
+ x = np.random.uniform(low=params['min'], high=params['max'], size=dim_x)
183
+ # we take a random task
184
+ n = np.random.randint(0, n_tasks)
185
+ to_evaluate += [(x, f, tasks[n], centroids[n], params)]
186
+ s_list = cm.parallel_eval(__evaluate, to_evaluate, pool, params)
187
+ n_evals += len(to_evaluate)
188
+ b_evals += len(to_evaluate)
189
+ for i in range(0, len(list(s_list))):
190
+ add_to_archive(s_list[i], archive)
191
+ else:
192
+ # main variation/selection loop
193
+ keys = list(archive.keys())
194
+ # we do all the randint together because randint is slow
195
+ rand1 = np.random.randint(len(keys), size=params['batch_size'])
196
+ rand2 = np.random.randint(len(keys), size=params['batch_size'])
197
+ for n in range(0, params['batch_size']):
198
+ # parent selection
199
+ x = archive[keys[rand1[n]]]
200
+ y = archive[keys[rand2[n]]]
201
+ # copy & add variation
202
+ z = variation_operator(x.x, y.x, params)
203
+ # different modes for multi-task (to select the niche)
204
+ to_evaluate += select_niche(x, z, f, centroids, tasks, t_size, params, use_distance)
205
+ # parallel evaluation of the fitness
206
+ s_list = cm.parallel_eval(__evaluate, to_evaluate, pool, params)
207
+ n_evals += len(to_evaluate)
208
+ b_evals += len(to_evaluate)
209
+ # natural selection
210
+ suc = 0
211
+ for i in range(0, len(list(s_list))):
212
+ suc += add_to_archive(s_list[i], archive)
213
+ if use_distance:
214
+ successes[t_size] += [(suc, n_evals)]
215
+ if use_distance: # call the bandit to optimize t_size
216
+ t_size = bandit(successes, n_tasks)
217
+
218
+ # write archive
219
+ if params['dump_period'] != -1 and b_evals > params['dump_period']:
220
+ cm.__save_archive(archive, n_evals)
221
+ b_evals = 0
222
+ n_e = [len(v) for v in successes.values()]
223
+ print(n_evals, n_e)
224
+ np.savetxt('t_size.dat', np.array(n_e))
225
+ if log_file != None:
226
+ fit_list = np.array([x.fitness for x in archive.values()])
227
+ log_file.write("{} {} {} {} {} {} {}\n".format(n_evals, len(archive.keys()), fit_list.max(), np.mean(fit_list), np.median(fit_list), np.percentile(fit_list, 5), np.percentile(fit_list, 95)))
228
+ log_file.flush()
229
+ cm.__save_archive(archive, n_evals)
230
+ return archive
231
+
232
+
233
+ # a small test
234
+ if __name__ == "__main__":
235
+ def rastrigin(xx):
236
+ x = xx * 10.0 - 5.0
237
+ f = 10 * x.shape[0]
238
+ for i in range(0, x.shape[0]):
239
+ f += x[i] * x[i] - 10 * math.cos(2 * math.pi * x[i])
240
+ return -f, np.array([xx[0], xx[1]])
241
+ # CVT-based version
242
+ my_map = compute(dim_map=2, dim_x = 10, n_niches=1500, f=rastrigin)
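
The `__main__` block above already exercises `compute` on Rastrigin. As a complement, below is a minimal sketch (not part of the uploaded file) of how the returned archive could be inspected afterwards; it only assumes that archive values expose `.x` and `.fitness`, which is how the logging code above uses them, and `summarize_archive` is a hypothetical helper name.

```python
import numpy as np

def summarize_archive(archive):
    """Report coverage and fitness statistics for a MAP-Elites archive dict."""
    fitnesses = np.array([s.fitness for s in archive.values()])
    print("filled niches:", len(archive))
    print("best / mean fitness:", fitnesses.max(), fitnesses.mean())
    # return the genome of the best elite
    best = max(archive.values(), key=lambda s: s.fitness)
    return best.x

# e.g. after the test above: best_genome = summarize_archive(my_map)
```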
LLMatic-main/mutation_models.py ADDED
@@ -0,0 +1,73 @@
+ import torch
+ from transformers import AutoTokenizer, AutoModelForCausalLM, CodeGenForCausalLM
+ #import openai
+ import os
+ import ray
+ 
+ from conf.config import Config
+ 
+ cfg = Config()
+ 
+ 
+ def codex_mutate(cfg: Config, prompt, model="code-davinci-002", temperature=0.5):
+     import openai  # imported lazily so the OpenAI dependency is only needed when this helper is used
+     os.environ['OPENAI_API_KEY'] = ""  # ENTER OPENAI API KEY HERE
+     openai.api_key = os.getenv("OPENAI_API_KEY")
+     return openai.Completion.create(
+         model=model,
+         prompt=prompt,
+         max_tokens=300,
+         temperature=temperature,
+     )
+ 
+ 
+ #['codegen-350M-multi', 'codegen-2B-multi', 'codegen-6B-multi', 'codegen-16B-multi', 'codegen-350M-mono', 'codegen-2B-mono', 'codegen-6B-mono', 'codegen-16B-mono']
+ #@ray.remote(num_cpus=cfg.NUM_CPUS) # FIXME: Number of parallel processes should be configured globally.
+ @ray.remote(num_gpus=cfg.NUM_GPUS)
+ def codegen_mutate(cfg: Config, prompt, temperature):
+     diff = False
+ 
+     if diff:
+         # model = "/mnt/lustre/users/mnasir/NAS-LLM/diff-codegen-6B"
+         model = os.path.join(cfg.SAVE_DIR, "diff-codegen-6B")
+         tokenizer = AutoTokenizer.from_pretrained(model)#.to(cfg.DEVICE)
+         tokenizer.padding_side = 'left'
+         tokenizer.pad_token = tokenizer.eos_token
+         model = CodeGenForCausalLM.from_pretrained(model).to(cfg.DEVICE)
+         inputs = tokenizer(prompt, return_tensors='pt', padding=True)#.to(device)
+         #model.config.use_cache = True
+         sample = model.generate(**inputs, temperature=0.5, max_length=len(inputs[0]) + 300)
+ 
+         return tokenizer.decode(sample[0][len(inputs[0]):])
+     else:
+         model = f'Salesforce/{cfg.MUTATION}'
+         # model = "/mnt/lustre/users/mnasir/NAS-LLM/codegen-6B"
+         #model = "/mnt/lustre/users/mnasir/NAS-LLM/diff-codegen-6B"
+         #model = os.path.join(cfg.SAVE_DIR, "codegen-6B")
+ 
+         # TODO: Should be doing something like this to download the model automatically.
+         # model = os.path.join(cfg.SAVE_DIR, cfg.MUTATION)
+         # if not os.path.exists(model):
+         #     # TODO: `Salesforce` part should not be hardcoded / should be configurable so that we can download models
+         #     # from other sources.
+         #     model = f'Salesforce/{cfg.MUTATION}'
+ 
+         tokenizer = AutoTokenizer.from_pretrained(model)
+         model = AutoModelForCausalLM.from_pretrained(model).to(cfg.DEVICE)
+ 
+         # TODO: Above lines may have downloaded a fresh model if it was not already present. Now, copy the model file
+         # to the desired location if necessary.
+ 
+         inputs = tokenizer(prompt, return_tensors="pt").to(cfg.DEVICE)
+         sample = model.generate(**inputs, max_length=350 + len(inputs[0]), temperature=temperature, num_beams=1, do_sample=True)
+         return tokenizer.decode(sample[0][len(inputs[0]):], truncate_before_pattern=[r"\n\n^#", "^'''", "\n\n\n"])
+ 
+ 
+ def replace_word_mutation(sentence):
+     if "Add" in sentence:
+         return sentence.replace("Add", "Delete")
+     elif "Delete" in sentence:
+         return sentence.replace("Delete", "Add")
+     else:
+         return sentence
+ 
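
Since `codegen_mutate` is declared as a Ray remote function, a driver would launch it with `.remote()` and collect the completion with `ray.get()`. The following is a minimal usage sketch, not code from the repository; the prompt string is purely illustrative.

```python
import ray
from conf.config import Config
from mutation_models import codegen_mutate, replace_word_mutation

ray.init()
cfg = Config()

prompt = '"""Add a layer to improve the following network."""\n'
prompt = replace_word_mutation(prompt)  # may flip "Add" to "Delete"

# Launch the remote LLM mutation on a GPU worker and block until the completion is ready.
completion = ray.get(codegen_mutate.remote(cfg, prompt, temperature=0.7))
print(completion)
```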
LLMatic-main/outputs/2024-09-03/22-14-50/.hydra/config.yaml ADDED
@@ -0,0 +1,19 @@
+ RANDOM_INIT_NETS: 10
+ INIT_NUM_NETS: 10
+ ROLL_OUTS: 4
+ NUM_NETS: 10
+ START_FROM_CHECKPOINT: false
+ RANDOM_NETWORKS: false
+ MUTATION: codegen-6B-mono
+ GENERATIONS: 200
+ NET_TRAINING_EPOCHS: 50
+ TEMPERATURE: 0.0
+ DEVICE: cuda
+ NUM_PROCESSES: 15
+ RAY: true
+ NUM_CPUS: 15
+ NUM_GPUS: 1
+ NUM_GPUS_TOTAL: 4
+ DIM_MAP: 2
+ N_NICHES: 100
+ SAVE_DIR: ./
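
These keys mirror the structured `Config` in `conf/config.py`. As a hedged sketch (the actual entry point is `llmatic.py`, whose contents are not shown here), a Hydra-decorated main that consumes this config could look like the following; values can then be overridden on the command line, e.g. `TEMPERATURE=0.7 NUM_NETS=20`.

```python
import hydra
from conf.config import Config

@hydra.main(version_base="1.3", config_path="conf", config_name="config")
def main(cfg: Config) -> None:
    # Hydra composes conf/config.yaml with any command-line overrides into cfg.
    print(cfg.MUTATION, cfg.GENERATIONS, cfg.N_NICHES)

if __name__ == "__main__":
    main()
```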
LLMatic-main/outputs/2024-09-03/22-14-50/.hydra/hydra.yaml ADDED
@@ -0,0 +1,182 @@
1
+ hydra:
2
+ run:
3
+ dir: outputs/${now:%Y-%m-%d}/${now:%H-%M-%S}
4
+ sweep:
5
+ dir: multirun/${now:%Y-%m-%d}/${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ submitit_folder: ${hydra.sweep.dir}/.submitit/%j
9
+ timeout_min: 2880
10
+ cpus_per_task: 1
11
+ gpus_per_node: 1
12
+ tasks_per_node: 1
13
+ mem_gb: 30
14
+ nodes: 1
15
+ name: ${hydra.job.name}
16
+ stderr_to_stdout: false
17
+ _target_: hydra_plugins.hydra_submitit_launcher.submitit_launcher.SlurmLauncher
18
+ partition: null
19
+ qos: null
20
+ comment: null
21
+ constraint: null
22
+ exclude: null
23
+ gres: null
24
+ cpus_per_gpu: null
25
+ gpus_per_task: null
26
+ mem_per_gpu: null
27
+ mem_per_cpu: null
28
+ account: null
29
+ signal_delay_s: 120
30
+ max_num_timeout: 0
31
+ additional_parameters: {}
32
+ array_parallelism: 256
33
+ setup: null
34
+ sweeper:
35
+ _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
36
+ max_batch_size: null
37
+ params:
38
+ NUM_NETS: 10, 20
39
+ NET_TRAINING_EPOCHS: 10, 30
40
+ TEMPERATURE: 0.3, 0.5, 0.7
41
+ help:
42
+ app_name: ${hydra.job.name}
43
+ header: '${hydra.help.app_name} is powered by Hydra.
44
+
45
+ '
46
+ footer: 'Powered by Hydra (https://hydra.cc)
47
+
48
+ Use --hydra-help to view Hydra specific help
49
+
50
+ '
51
+ template: '${hydra.help.header}
52
+
53
+ == Configuration groups ==
54
+
55
+ Compose your configuration from those groups (group=option)
56
+
57
+
58
+ $APP_CONFIG_GROUPS
59
+
60
+
61
+ == Config ==
62
+
63
+ Override anything in the config (foo.bar=value)
64
+
65
+
66
+ $CONFIG
67
+
68
+
69
+ ${hydra.help.footer}
70
+
71
+ '
72
+ hydra_help:
73
+ template: 'Hydra (${hydra.runtime.version})
74
+
75
+ See https://hydra.cc for more info.
76
+
77
+
78
+ == Flags ==
79
+
80
+ $FLAGS_HELP
81
+
82
+
83
+ == Configuration groups ==
84
+
85
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
86
+ to command line)
87
+
88
+
89
+ $HYDRA_CONFIG_GROUPS
90
+
91
+
92
+ Use ''--cfg hydra'' to Show the Hydra config.
93
+
94
+ '
95
+ hydra_help: ???
96
+ hydra_logging:
97
+ version: 1
98
+ formatters:
99
+ simple:
100
+ format: '[%(asctime)s][HYDRA] %(message)s'
101
+ handlers:
102
+ console:
103
+ class: logging.StreamHandler
104
+ formatter: simple
105
+ stream: ext://sys.stdout
106
+ root:
107
+ level: INFO
108
+ handlers:
109
+ - console
110
+ loggers:
111
+ logging_example:
112
+ level: DEBUG
113
+ disable_existing_loggers: false
114
+ job_logging:
115
+ version: 1
116
+ formatters:
117
+ simple:
118
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
119
+ handlers:
120
+ console:
121
+ class: logging.StreamHandler
122
+ formatter: simple
123
+ stream: ext://sys.stdout
124
+ file:
125
+ class: logging.FileHandler
126
+ formatter: simple
127
+ filename: ${hydra.runtime.output_dir}/${hydra.job.name}.log
128
+ root:
129
+ level: INFO
130
+ handlers:
131
+ - console
132
+ - file
133
+ disable_existing_loggers: false
134
+ env: {}
135
+ mode: RUN
136
+ searchpath: []
137
+ callbacks: {}
138
+ output_subdir: .hydra
139
+ overrides:
140
+ hydra:
141
+ - hydra.mode=RUN
142
+ task: []
143
+ job:
144
+ name: llmatic
145
+ chdir: null
146
+ override_dirname: ''
147
+ id: ???
148
+ num: ???
149
+ config_name: config
150
+ env_set: {}
151
+ env_copy: []
152
+ config:
153
+ override_dirname:
154
+ kv_sep: '='
155
+ item_sep: ','
156
+ exclude_keys: []
157
+ runtime:
158
+ version: 1.3.1
159
+ version_base: '1.3'
160
+ cwd: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main
161
+ config_sources:
162
+ - path: hydra.conf
163
+ schema: pkg
164
+ provider: hydra
165
+ - path: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main\conf
166
+ schema: file
167
+ provider: main
168
+ - path: ''
169
+ schema: structured
170
+ provider: schema
171
+ output_dir: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main\outputs\2024-09-03\22-14-50
172
+ choices:
173
+ hydra/env: default
174
+ hydra/callbacks: null
175
+ hydra/job_logging: default
176
+ hydra/hydra_logging: default
177
+ hydra/hydra_help: default
178
+ hydra/help: default
179
+ hydra/sweeper: basic
180
+ hydra/launcher: submitit_slurm
181
+ hydra/output: default
182
+ verbose: false
LLMatic-main/outputs/2024-09-03/22-14-50/.hydra/overrides.yaml ADDED
@@ -0,0 +1 @@
+ []
LLMatic-main/outputs/2024-09-03/22-14-50/llmatic.log ADDED
File without changes
LLMatic-main/outputs/2024-09-03/23-04-26/.hydra/config.yaml ADDED
@@ -0,0 +1,19 @@
+ RANDOM_INIT_NETS: 10
+ INIT_NUM_NETS: 10
+ ROLL_OUTS: 4
+ NUM_NETS: 10
+ START_FROM_CHECKPOINT: false
+ RANDOM_NETWORKS: false
+ MUTATION: codegen-6B-mono
+ GENERATIONS: 200
+ NET_TRAINING_EPOCHS: 50
+ TEMPERATURE: 0.0
+ DEVICE: cuda
+ NUM_PROCESSES: 15
+ RAY: true
+ NUM_CPUS: 15
+ NUM_GPUS: 1
+ NUM_GPUS_TOTAL: 4
+ DIM_MAP: 2
+ N_NICHES: 100
+ SAVE_DIR: ./
LLMatic-main/outputs/2024-09-03/23-04-26/.hydra/hydra.yaml ADDED
@@ -0,0 +1,182 @@
1
+ hydra:
2
+ run:
3
+ dir: outputs/${now:%Y-%m-%d}/${now:%H-%M-%S}
4
+ sweep:
5
+ dir: multirun/${now:%Y-%m-%d}/${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ submitit_folder: ${hydra.sweep.dir}/.submitit/%j
9
+ timeout_min: 2880
10
+ cpus_per_task: 1
11
+ gpus_per_node: 1
12
+ tasks_per_node: 1
13
+ mem_gb: 30
14
+ nodes: 1
15
+ name: ${hydra.job.name}
16
+ stderr_to_stdout: false
17
+ _target_: hydra_plugins.hydra_submitit_launcher.submitit_launcher.SlurmLauncher
18
+ partition: null
19
+ qos: null
20
+ comment: null
21
+ constraint: null
22
+ exclude: null
23
+ gres: null
24
+ cpus_per_gpu: null
25
+ gpus_per_task: null
26
+ mem_per_gpu: null
27
+ mem_per_cpu: null
28
+ account: null
29
+ signal_delay_s: 120
30
+ max_num_timeout: 0
31
+ additional_parameters: {}
32
+ array_parallelism: 256
33
+ setup: null
34
+ sweeper:
35
+ _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
36
+ max_batch_size: null
37
+ params:
38
+ NUM_NETS: 10, 20
39
+ NET_TRAINING_EPOCHS: 10, 30
40
+ TEMPERATURE: 0.3, 0.5, 0.7
41
+ help:
42
+ app_name: ${hydra.job.name}
43
+ header: '${hydra.help.app_name} is powered by Hydra.
44
+
45
+ '
46
+ footer: 'Powered by Hydra (https://hydra.cc)
47
+
48
+ Use --hydra-help to view Hydra specific help
49
+
50
+ '
51
+ template: '${hydra.help.header}
52
+
53
+ == Configuration groups ==
54
+
55
+ Compose your configuration from those groups (group=option)
56
+
57
+
58
+ $APP_CONFIG_GROUPS
59
+
60
+
61
+ == Config ==
62
+
63
+ Override anything in the config (foo.bar=value)
64
+
65
+
66
+ $CONFIG
67
+
68
+
69
+ ${hydra.help.footer}
70
+
71
+ '
72
+ hydra_help:
73
+ template: 'Hydra (${hydra.runtime.version})
74
+
75
+ See https://hydra.cc for more info.
76
+
77
+
78
+ == Flags ==
79
+
80
+ $FLAGS_HELP
81
+
82
+
83
+ == Configuration groups ==
84
+
85
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
86
+ to command line)
87
+
88
+
89
+ $HYDRA_CONFIG_GROUPS
90
+
91
+
92
+ Use ''--cfg hydra'' to Show the Hydra config.
93
+
94
+ '
95
+ hydra_help: ???
96
+ hydra_logging:
97
+ version: 1
98
+ formatters:
99
+ simple:
100
+ format: '[%(asctime)s][HYDRA] %(message)s'
101
+ handlers:
102
+ console:
103
+ class: logging.StreamHandler
104
+ formatter: simple
105
+ stream: ext://sys.stdout
106
+ root:
107
+ level: INFO
108
+ handlers:
109
+ - console
110
+ loggers:
111
+ logging_example:
112
+ level: DEBUG
113
+ disable_existing_loggers: false
114
+ job_logging:
115
+ version: 1
116
+ formatters:
117
+ simple:
118
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
119
+ handlers:
120
+ console:
121
+ class: logging.StreamHandler
122
+ formatter: simple
123
+ stream: ext://sys.stdout
124
+ file:
125
+ class: logging.FileHandler
126
+ formatter: simple
127
+ filename: ${hydra.runtime.output_dir}/${hydra.job.name}.log
128
+ root:
129
+ level: INFO
130
+ handlers:
131
+ - console
132
+ - file
133
+ disable_existing_loggers: false
134
+ env: {}
135
+ mode: RUN
136
+ searchpath: []
137
+ callbacks: {}
138
+ output_subdir: .hydra
139
+ overrides:
140
+ hydra:
141
+ - hydra.mode=RUN
142
+ task: []
143
+ job:
144
+ name: llmatic
145
+ chdir: null
146
+ override_dirname: ''
147
+ id: ???
148
+ num: ???
149
+ config_name: config
150
+ env_set: {}
151
+ env_copy: []
152
+ config:
153
+ override_dirname:
154
+ kv_sep: '='
155
+ item_sep: ','
156
+ exclude_keys: []
157
+ runtime:
158
+ version: 1.3.1
159
+ version_base: '1.3'
160
+ cwd: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main
161
+ config_sources:
162
+ - path: hydra.conf
163
+ schema: pkg
164
+ provider: hydra
165
+ - path: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main\conf
166
+ schema: file
167
+ provider: main
168
+ - path: ''
169
+ schema: structured
170
+ provider: schema
171
+ output_dir: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main\outputs\2024-09-03\23-04-26
172
+ choices:
173
+ hydra/env: default
174
+ hydra/callbacks: null
175
+ hydra/job_logging: default
176
+ hydra/hydra_logging: default
177
+ hydra/hydra_help: default
178
+ hydra/help: default
179
+ hydra/sweeper: basic
180
+ hydra/launcher: submitit_slurm
181
+ hydra/output: default
182
+ verbose: false
LLMatic-main/outputs/2024-09-03/23-04-26/.hydra/overrides.yaml ADDED
@@ -0,0 +1 @@
+ []
LLMatic-main/outputs/2024-09-03/23-04-26/llmatic.log ADDED
File without changes
LLMatic-main/outputs/2024-09-06/05-03-40/.hydra/config.yaml ADDED
@@ -0,0 +1,19 @@
+ RANDOM_INIT_NETS: 10
+ INIT_NUM_NETS: 10
+ ROLL_OUTS: 4
+ NUM_NETS: 10
+ START_FROM_CHECKPOINT: false
+ RANDOM_NETWORKS: false
+ MUTATION: codegen-6B-mono
+ GENERATIONS: 200
+ NET_TRAINING_EPOCHS: 50
+ TEMPERATURE: 0.0
+ DEVICE: cuda
+ NUM_PROCESSES: 15
+ RAY: true
+ NUM_CPUS: 15
+ NUM_GPUS: 1
+ NUM_GPUS_TOTAL: 4
+ DIM_MAP: 2
+ N_NICHES: 100
+ SAVE_DIR: ./
LLMatic-main/outputs/2024-09-06/05-03-40/.hydra/hydra.yaml ADDED
@@ -0,0 +1,182 @@
1
+ hydra:
2
+ run:
3
+ dir: outputs/${now:%Y-%m-%d}/${now:%H-%M-%S}
4
+ sweep:
5
+ dir: multirun/${now:%Y-%m-%d}/${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ submitit_folder: ${hydra.sweep.dir}/.submitit/%j
9
+ timeout_min: 2880
10
+ cpus_per_task: 1
11
+ gpus_per_node: 1
12
+ tasks_per_node: 1
13
+ mem_gb: 30
14
+ nodes: 1
15
+ name: ${hydra.job.name}
16
+ stderr_to_stdout: false
17
+ _target_: hydra_plugins.hydra_submitit_launcher.submitit_launcher.SlurmLauncher
18
+ partition: null
19
+ qos: null
20
+ comment: null
21
+ constraint: null
22
+ exclude: null
23
+ gres: null
24
+ cpus_per_gpu: null
25
+ gpus_per_task: null
26
+ mem_per_gpu: null
27
+ mem_per_cpu: null
28
+ account: null
29
+ signal_delay_s: 120
30
+ max_num_timeout: 0
31
+ additional_parameters: {}
32
+ array_parallelism: 256
33
+ setup: null
34
+ sweeper:
35
+ _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
36
+ max_batch_size: null
37
+ params:
38
+ NUM_NETS: 10, 20
39
+ NET_TRAINING_EPOCHS: 10, 30
40
+ TEMPERATURE: 0.3, 0.5, 0.7
41
+ help:
42
+ app_name: ${hydra.job.name}
43
+ header: '${hydra.help.app_name} is powered by Hydra.
44
+
45
+ '
46
+ footer: 'Powered by Hydra (https://hydra.cc)
47
+
48
+ Use --hydra-help to view Hydra specific help
49
+
50
+ '
51
+ template: '${hydra.help.header}
52
+
53
+ == Configuration groups ==
54
+
55
+ Compose your configuration from those groups (group=option)
56
+
57
+
58
+ $APP_CONFIG_GROUPS
59
+
60
+
61
+ == Config ==
62
+
63
+ Override anything in the config (foo.bar=value)
64
+
65
+
66
+ $CONFIG
67
+
68
+
69
+ ${hydra.help.footer}
70
+
71
+ '
72
+ hydra_help:
73
+ template: 'Hydra (${hydra.runtime.version})
74
+
75
+ See https://hydra.cc for more info.
76
+
77
+
78
+ == Flags ==
79
+
80
+ $FLAGS_HELP
81
+
82
+
83
+ == Configuration groups ==
84
+
85
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
86
+ to command line)
87
+
88
+
89
+ $HYDRA_CONFIG_GROUPS
90
+
91
+
92
+ Use ''--cfg hydra'' to Show the Hydra config.
93
+
94
+ '
95
+ hydra_help: ???
96
+ hydra_logging:
97
+ version: 1
98
+ formatters:
99
+ simple:
100
+ format: '[%(asctime)s][HYDRA] %(message)s'
101
+ handlers:
102
+ console:
103
+ class: logging.StreamHandler
104
+ formatter: simple
105
+ stream: ext://sys.stdout
106
+ root:
107
+ level: INFO
108
+ handlers:
109
+ - console
110
+ loggers:
111
+ logging_example:
112
+ level: DEBUG
113
+ disable_existing_loggers: false
114
+ job_logging:
115
+ version: 1
116
+ formatters:
117
+ simple:
118
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
119
+ handlers:
120
+ console:
121
+ class: logging.StreamHandler
122
+ formatter: simple
123
+ stream: ext://sys.stdout
124
+ file:
125
+ class: logging.FileHandler
126
+ formatter: simple
127
+ filename: ${hydra.runtime.output_dir}/${hydra.job.name}.log
128
+ root:
129
+ level: INFO
130
+ handlers:
131
+ - console
132
+ - file
133
+ disable_existing_loggers: false
134
+ env: {}
135
+ mode: RUN
136
+ searchpath: []
137
+ callbacks: {}
138
+ output_subdir: .hydra
139
+ overrides:
140
+ hydra:
141
+ - hydra.mode=RUN
142
+ task: []
143
+ job:
144
+ name: llmatic
145
+ chdir: null
146
+ override_dirname: ''
147
+ id: ???
148
+ num: ???
149
+ config_name: config
150
+ env_set: {}
151
+ env_copy: []
152
+ config:
153
+ override_dirname:
154
+ kv_sep: '='
155
+ item_sep: ','
156
+ exclude_keys: []
157
+ runtime:
158
+ version: 1.3.1
159
+ version_base: '1.3'
160
+ cwd: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main
161
+ config_sources:
162
+ - path: hydra.conf
163
+ schema: pkg
164
+ provider: hydra
165
+ - path: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main\conf
166
+ schema: file
167
+ provider: main
168
+ - path: ''
169
+ schema: structured
170
+ provider: schema
171
+ output_dir: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main\outputs\2024-09-06\05-03-40
172
+ choices:
173
+ hydra/env: default
174
+ hydra/callbacks: null
175
+ hydra/job_logging: default
176
+ hydra/hydra_logging: default
177
+ hydra/hydra_help: default
178
+ hydra/help: default
179
+ hydra/sweeper: basic
180
+ hydra/launcher: submitit_slurm
181
+ hydra/output: default
182
+ verbose: false
LLMatic-main/outputs/2024-09-06/05-03-40/.hydra/overrides.yaml ADDED
@@ -0,0 +1 @@
+ []
LLMatic-main/outputs/2024-09-06/05-03-40/llmatic.log ADDED
File without changes
LLMatic-main/outputs/2024-09-06/06-51-07/.hydra/config.yaml ADDED
@@ -0,0 +1,19 @@
+ RANDOM_INIT_NETS: 10
+ INIT_NUM_NETS: 10
+ ROLL_OUTS: 4
+ NUM_NETS: 10
+ START_FROM_CHECKPOINT: false
+ RANDOM_NETWORKS: false
+ MUTATION: codegen-6B-mono
+ GENERATIONS: 200
+ NET_TRAINING_EPOCHS: 50
+ TEMPERATURE: 0.0
+ DEVICE: cuda
+ NUM_PROCESSES: 15
+ RAY: true
+ NUM_CPUS: 15
+ NUM_GPUS: 1
+ NUM_GPUS_TOTAL: 4
+ DIM_MAP: 2
+ N_NICHES: 100
+ SAVE_DIR: ./
LLMatic-main/outputs/2024-09-06/06-51-07/.hydra/hydra.yaml ADDED
@@ -0,0 +1,182 @@
1
+ hydra:
2
+ run:
3
+ dir: outputs/${now:%Y-%m-%d}/${now:%H-%M-%S}
4
+ sweep:
5
+ dir: multirun/${now:%Y-%m-%d}/${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ submitit_folder: ${hydra.sweep.dir}/.submitit/%j
9
+ timeout_min: 2880
10
+ cpus_per_task: 1
11
+ gpus_per_node: 1
12
+ tasks_per_node: 1
13
+ mem_gb: 30
14
+ nodes: 1
15
+ name: ${hydra.job.name}
16
+ stderr_to_stdout: false
17
+ _target_: hydra_plugins.hydra_submitit_launcher.submitit_launcher.SlurmLauncher
18
+ partition: null
19
+ qos: null
20
+ comment: null
21
+ constraint: null
22
+ exclude: null
23
+ gres: null
24
+ cpus_per_gpu: null
25
+ gpus_per_task: null
26
+ mem_per_gpu: null
27
+ mem_per_cpu: null
28
+ account: null
29
+ signal_delay_s: 120
30
+ max_num_timeout: 0
31
+ additional_parameters: {}
32
+ array_parallelism: 256
33
+ setup: null
34
+ sweeper:
35
+ _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
36
+ max_batch_size: null
37
+ params:
38
+ NUM_NETS: 10, 20
39
+ NET_TRAINING_EPOCHS: 10, 30
40
+ TEMPERATURE: 0.3, 0.5, 0.7
41
+ help:
42
+ app_name: ${hydra.job.name}
43
+ header: '${hydra.help.app_name} is powered by Hydra.
44
+
45
+ '
46
+ footer: 'Powered by Hydra (https://hydra.cc)
47
+
48
+ Use --hydra-help to view Hydra specific help
49
+
50
+ '
51
+ template: '${hydra.help.header}
52
+
53
+ == Configuration groups ==
54
+
55
+ Compose your configuration from those groups (group=option)
56
+
57
+
58
+ $APP_CONFIG_GROUPS
59
+
60
+
61
+ == Config ==
62
+
63
+ Override anything in the config (foo.bar=value)
64
+
65
+
66
+ $CONFIG
67
+
68
+
69
+ ${hydra.help.footer}
70
+
71
+ '
72
+ hydra_help:
73
+ template: 'Hydra (${hydra.runtime.version})
74
+
75
+ See https://hydra.cc for more info.
76
+
77
+
78
+ == Flags ==
79
+
80
+ $FLAGS_HELP
81
+
82
+
83
+ == Configuration groups ==
84
+
85
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
86
+ to command line)
87
+
88
+
89
+ $HYDRA_CONFIG_GROUPS
90
+
91
+
92
+ Use ''--cfg hydra'' to Show the Hydra config.
93
+
94
+ '
95
+ hydra_help: ???
96
+ hydra_logging:
97
+ version: 1
98
+ formatters:
99
+ simple:
100
+ format: '[%(asctime)s][HYDRA] %(message)s'
101
+ handlers:
102
+ console:
103
+ class: logging.StreamHandler
104
+ formatter: simple
105
+ stream: ext://sys.stdout
106
+ root:
107
+ level: INFO
108
+ handlers:
109
+ - console
110
+ loggers:
111
+ logging_example:
112
+ level: DEBUG
113
+ disable_existing_loggers: false
114
+ job_logging:
115
+ version: 1
116
+ formatters:
117
+ simple:
118
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
119
+ handlers:
120
+ console:
121
+ class: logging.StreamHandler
122
+ formatter: simple
123
+ stream: ext://sys.stdout
124
+ file:
125
+ class: logging.FileHandler
126
+ formatter: simple
127
+ filename: ${hydra.runtime.output_dir}/${hydra.job.name}.log
128
+ root:
129
+ level: INFO
130
+ handlers:
131
+ - console
132
+ - file
133
+ disable_existing_loggers: false
134
+ env: {}
135
+ mode: RUN
136
+ searchpath: []
137
+ callbacks: {}
138
+ output_subdir: .hydra
139
+ overrides:
140
+ hydra:
141
+ - hydra.mode=RUN
142
+ task: []
143
+ job:
144
+ name: llmatic
145
+ chdir: null
146
+ override_dirname: ''
147
+ id: ???
148
+ num: ???
149
+ config_name: config
150
+ env_set: {}
151
+ env_copy: []
152
+ config:
153
+ override_dirname:
154
+ kv_sep: '='
155
+ item_sep: ','
156
+ exclude_keys: []
157
+ runtime:
158
+ version: 1.3.1
159
+ version_base: '1.3'
160
+ cwd: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main
161
+ config_sources:
162
+ - path: hydra.conf
163
+ schema: pkg
164
+ provider: hydra
165
+ - path: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main\conf
166
+ schema: file
167
+ provider: main
168
+ - path: ''
169
+ schema: structured
170
+ provider: schema
171
+ output_dir: C:\Users\kjkj0\PycharmProjects\help-research-student\LLMatic-main\outputs\2024-09-06\06-51-07
172
+ choices:
173
+ hydra/env: default
174
+ hydra/callbacks: null
175
+ hydra/job_logging: default
176
+ hydra/hydra_logging: default
177
+ hydra/hydra_help: default
178
+ hydra/help: default
179
+ hydra/sweeper: basic
180
+ hydra/launcher: submitit_slurm
181
+ hydra/output: default
182
+ verbose: false