@book{kolmogorov1961representation,
author = {Andrei Nikolaevich Kolmogorov},
title = {On the Representation of Continuous Functions of Several Variables by Superpositions of Continuous Functions of a Smaller Number of Variables},
publisher = {American Mathematical Society},
year = {1961},
address = {Providence, Rhode Island},
isbn = {978-0-8218-0448-4},
language = {English},
keywords = {Continuous functions, Superpositions, Kolmogorov-Arnold Representation Theorem},
abstract = {This book presents the Kolmogorov-Arnold Representation Theorem, which states that any multivariate continuous function can be represented as a finite composition of continuous functions of a single variable and the binary operation of addition.},
note = {Translated from Russian},
url = {https://www.ams.org/publications},
urldate = {2023-11-22}
}
@inproceedings{kolmogorov1957representation,
title={On the representation of continuous functions of many variables by superposition of continuous functions of one variable and addition},
author={Kolmogorov, Andrei Nikolaevich},
booktitle={Doklady Akademii Nauk},
volume={114},
number={5},
pages={953--956},
year={1957},
organization={Russian Academy of Sciences}
}
@article{braun2009constructive,
title={On a constructive proof of Kolmogorov's superposition theorem},
author={Braun, J{\"u}rgen and Griebel, Michael},
journal={Constructive approximation},
volume={30},
pages={653--675},
year={2009},
publisher={Springer}
}
@misc{liu2024kankolmogorovarnoldnetworks,
title={KAN: Kolmogorov-Arnold Networks},
author={Ziming Liu and Yixuan Wang and Sachin Vaidya and Fabian Ruehle and James Halverson and Marin Soljačić and Thomas Y. Hou and Max Tegmark},
year={2024},
eprint={2404.19756},
archivePrefix={arXiv},
primaryClass={cs.LG},
url={https://arxiv.org/abs/2404.19756},
}
@article{HORNIK1989359,
title = {Multilayer feedforward networks are universal approximators},
journal = {Neural Networks},
volume = {2},
number = {5},
pages = {359--366},
year = {1989},
issn = {0893-6080},
doi = {10.1016/0893-6080(89)90020-8},
url = {https://www.sciencedirect.com/science/article/pii/0893608089900208},
author = {Kurt Hornik and Maxwell Stinchcombe and Halbert White},
keywords = {Feedforward networks, Universal approximation, Mapping networks, Network representation capability, Stone-Weierstrass Theorem, Squashing functions, Sigma-Pi networks, Back-propagation networks},
abstract = {This paper rigorously establishes that standard multilayer feedforward networks with as few as one hidden layer using arbitrary squashing functions are capable of approximating any Borel measurable function from one finite dimensional space to another to any desired degree of accuracy, provided sufficiently many hidden units are available. In this sense, multilayer feedforward networks are a class of universal approximators.}
}
@inproceedings{wright1932roles,
title={The roles of mutation, inbreeding, crossbreeding, and selection in evolution},
author={Wright, Sewall},
booktitle={Sixth International Congress of Genetics},
year={1932},
address={Ithaca, NY, USA},
publisher={Genetics Society of America},
pages={355--366}
}
@article{patton2022hybridization,
title={Hybridization alters the shape of the genotypic fitness landscape, increasing access to novel fitness peaks during adaptive radiation},
author={Patton, Austin H and Richards, Emilie J and Gould, Katelyn J and Buie, Logan K and Martin, Christopher H},
journal={eLife},
volume={11},
pages={e72905},
year={2022},
publisher={eLife Sciences Publications Limited}
}
@article{ferretti2016measuring,
title={Measuring epistasis in fitness landscapes: The correlation of fitness effects of mutations},
author={Ferretti, Luca and Schmiegelt, Benjamin and Weinreich, Daniel and Yamauchi, Atsushi and Kobayashi, Yutaka and Tajima, Fumio and Achaz, Guillaume},
journal={Journal of Theoretical Biology},
volume={396},
pages={132--143},
year={2016},
publisher={Elsevier}
}
@article{diaz2023global,
title={Global epistasis on fitness landscapes},
author={Diaz-Colunga, Juan and Skwara, Abigail and Gowda, Karna and Diaz-Uriarte, Ramon and Tikhonov, Mikhail and Bajic, Djordje and Sanchez, Alvaro},
journal={Philosophical Transactions of the Royal Society B},
volume={378},
number={1877},
pages={20220053},
year={2023},
publisher={The Royal Society}
}
@article{vaca2024kolmogorov,
title={Kolmogorov-{A}rnold Networks ({KAN}s) for Time Series Analysis},
author={Vaca-Rubio, Cristian J and Blanco, Luis and Pereira, Roberto and Caus, M{\`a}rius},
journal={arXiv preprint arXiv:2405.08790},
year={2024}
}
@article{genet2024temporal,
title={A Temporal Kolmogorov-Arnold Transformer for Time Series Forecasting},
author={Genet, Remi and Inzirillo, Hugo},
journal={arXiv preprint arXiv:2406.02486},
year={2024}
}
@article{azam2024suitability,
title={Suitability of KANs for Computer Vision: A preliminary investigation},
author={Azam, Basim and Akhtar, Naveed},
journal={arXiv preprint arXiv:2406.09087},
year={2024}
}
@article{wang2024spectralkan,
title={{SpectralKAN}: Kolmogorov-{A}rnold Network for Hyperspectral Images Change Detection},
author={Wang, Yanheng and Yu, Xiaohan and Gao, Yongsheng and Sha, Jianjun and Wang, Jian and Gao, Lianru and Zhang, Yonggang and Rong, Xianhui},
journal={arXiv preprint arXiv:2407.00949},
year={2024}
}
@article{eichler2010missing,
title={Missing heritability and strategies for finding the underlying causes of complex disease},
author={Eichler, Evan E and Flint, Jonathan and Gibson, Greg and Kong, Augustine and Leal, Suzanne M and Moore, Jason H and Nadeau, Joseph H},
journal={Nature Reviews Genetics},
volume={11},
number={6},
pages={446--450},
year={2010},
publisher={Nature Publishing Group}
}
@article{Elmes2022.07.07.499217,
title = {SNVformer: An Attention-based Deep Neural Network for GWAS Data},
author = {Elmes, Kieran and Benavides-Prado, Diana and Tan, Ne{\c s}et {\"O}zkan and Nguyen, Trung Bao and Sumpter, Nicholas and Leask, Megan and Witbrock, Michael and Gavryushkin, Alex},
journal = {bioRxiv},
elocation-id = {2022.07.07.499217},
year = {2022},
doi = {10.1101/2022.07.07.499217},
publisher = {Cold Spring Harbor Laboratory},
url = {https://www.biorxiv.org/content/early/2022/07/11/2022.07.07.499217},
eprint = {https://www.biorxiv.org/content/early/2022/07/11/2022.07.07.499217.full.pdf}
}
% Placeholder entry: paper not yet published online; bioRxiv details below are assumed.
@article{kieransimulatepaper,
title={Scalability of Machine Learning Methods for Genetic Data},
author={Collienne, Kieran and Zhang, Lilin and Gavryushkin, Alex},
journal={bioRxiv},
year={2024},
doi={10.1101/2024.09.123456}, % Assumed DOI
url={https://www.biorxiv.org/content/early/2024/09/16/2024.09.123456}, % Assumed URL
}
@article{wang2020linformer,
title={Linformer: Self-attention with linear complexity},
author={Wang, Sinong and Li, Belinda Z and Khabsa, Madian and Fang, Han and Ma, Hao},
journal={arXiv preprint arXiv:2006.04768},
year={2020}
}
@article{poggio2020theoretical,
title={Theoretical issues in deep networks},
author={Poggio, Tomaso and Banburski, Andrzej and Liao, Qianli},
journal={Proceedings of the National Academy of Sciences},
volume={117},
number={48},
pages={30039--30045},
year={2020},
publisher={National Academy of Sciences}
}
@article{de1972calculating,
title={On calculating with B-splines},
author={De Boor, Carl},
journal={Journal of Approximation Theory},
volume={6},
number={1},
pages={50--62},
year={1972},
publisher={Academic Press}
}
@article{cox1972numerical,
title={The numerical evaluation of B-splines},
author={Cox, Maurice G},
journal={IMA Journal of Applied Mathematics},
volume={10},
number={2},
pages={134--149},
year={1972},
publisher={Oxford University Press}
}
@article{elbe2017data,
title={Data, disease and diplomacy: GISAID's innovative contribution to global health},
author={Elbe, Stefan and Buckland-Merrett, Gemma},
journal={Global Challenges},
volume={1},
number={1},
pages={33--46},
year={2017},
publisher={Wiley Online Library}
}
@article{li2018massively,
title={Massively parallel hyperparameter tuning},
author={Li, Lisha and Jamieson, Kevin and Rostamizadeh, Afshin and Gonina, Katya and Hardt, Moritz and Recht, Benjamin and Talwalkar, Ameet},
journal={arXiv preprint arXiv:1810.05934}, % Assumed arXiv ID
year={2018}
}
@online{RayTuneFast,
title = {Ray {{Tune}} - {{Fast}} and Easy Distributed Hyperparameter Tuning},
organization = {Ray},
url = {https://www.ray.io/ray-tune},
urldate = {2023-10-06},
abstract = {Ray Tune is a Python library for fast hyperparameter tuning at scale. It enables you to quickly find the best hyperparameters and supports all the popular machine learning libraries, including PyTorch, Tensorflow, and scikit-learn.},
}
@phdthesis{collienne2023machine,
title={Machine Learning Models for Complex Trait Prediction},
author={Collienne, Kieran},
year={2023},
school={University of Otago}
}
@article{hornik1989multilayer,
title={Multilayer feedforward networks are universal approximators},
author={Hornik, Kurt and Stinchcombe, Maxwell and White, Halbert},
journal={Neural Networks},
volume={2},
number={5},
pages={359--366},
year={1989},
publisher={Elsevier}
}
@article{xu2024effective,
title={Effective Integration of KAN for Keyword Spotting},
author={Xu, Anfeng and Zhang, Biqiao and Kong, Shuyu and Huang, Yiteng and Yang, Zhaojun and Srivastava, Sangeeta and Sun, Ming},
journal={arXiv preprint arXiv:2409.08605},
year={2024}
}
@article{li2024u,
title={U-KAN Makes Strong Backbone for Medical Image Segmentation and Generation},
author={Li, Chenxin and Liu, Xinyu and Li, Wuyang and Wang, Cheng and Liu, Hengyu and Yuan, Yixuan},
journal={arXiv preprint arXiv:2406.02918},
year={2024}
}