@article{368dfae192724e878d7b0a29dd7b0cbd,
title = "Interfacing with the Brain: How Nanotechnology Can Contribute",
abstract = "Interfacing artificial devices with the human brain is the central goal of neurotechnology. Yet, our imaginations are often limited by currently available paradigms and technologies. Suggestions for brain–machine interfaces have changed over time, along with the available technology. Mechanical levers and cable winches were used to move parts of the brain during the mechanical age. Sophisticated electronic wiring and remote control have arisen during the electronic age, ultimately leading to plug-and-play computer interfaces. Nonetheless, our brains are so complex that these visions, until recently, largely remained unreachable dreams. The general problem, thus far, is that most of our technology is mechanically and/or electrically engineered, whereas the brain is a living, dynamic entity. As a result, these worlds are difficult to interface with one another. Nanotechnology, which encompasses engineered solid-state objects and integrated circuits, excels at small length scales of single to a few hundred nanometers and, thus, matches the sizes of biomolecules, biomolecular assemblies, and parts of cells. Consequently, we envision nanomaterials and nanotools as opportunities to interface with the brain in alternative ways. Here, we review the existing literature on the use of nanotechnology in brain–machine interfaces and look forward in discussing perspectives and limitations based on the authors{\textquoteright} expertise across a range of complementary disciplines─from neuroscience, engineering, physics, and chemistry to biology and medicine, computer science and mathematics, and social science and jurisprudence. We focus on nanotechnology but also include information from related fields when useful and complementary.",
author = "Ahmed, {Abdullah A. A.} and Nuria Alegret and Bethany Almeida and Ram{\'o}n Alvarez-Puebla and Andrews, {Anne M.} and Laura Ballerini and Barrios-Capuchino, {Juan J.} and Charline Becker and Blick, {Robert H.} and Shahin Bonakdar and Indranath Chakraborty and Xiaodong Chen and Jinwoo Cheon and Gerwin Chilla and {Coelho Conceicao}, {Andre Luiz} and James Delehanty and Martin Dulle and Efros, {Alexander L.} and Matthias Epple and Mark Fedyk and Neus Feliu and Miao Feng and Rafael Fern{\'a}ndez-Chac{\'o}n and Irene Fernandez-Cuesta and Niels Fertig and Stephan F{\"o}rster and Garrido, {Jose A.} and Michael George and Guse, {Andreas H.} and Norbert Hampp and Jann Harberts and Jili Han and Heekeren, {Hauke R.} and Hofmann, {Ulrich G.} and Malte Holzapfel and Hessam Hosseinkazemi and Yalan Huang and Patrick Huber and Taeghwan Hyeon and Sven Ingebrandt and Marcello Ienca and Armin Iske and Yanan Kang and Gregor Kasieczka and Dae-Hyeong Kim and Kostas Kostarelos and Jae-Hyun Lee and Kai-Wei Lin and Sijin Liu and Xin Liu and Yang Liu and Christian Lohr and Volker Mail{\"a}nder and Laura Maffongelli and Saad Megahed and Alf Mews and Marina Mutas and Leroy Nack and Nako Nakatsuka and Oertner, {Thomas G.} and Andreas Offenh{\"a}usser and Martin Oheim and Ben Otange and Ferdinand Otto and Enrico Patrono and Bo Peng and Alessandra Picchiotti and Filippo Pierini and Monika P{\"o}tter-Nerger and Maria Pozzi and Arnd Pralle and Maurizio Prato and Bing Qi and Pedro Ramos-Cabrer and Genger, {Ute Resch} and Norbert Ritter and Marten Rittner and Sathi Roy and Francesca Santoro and Schuck, {Nicolas W.} and Florian Schulz and Erkin {\c S}eker and Marvin Skiba and Martin Sosniok and Holger Stephan and Ruixia Wang and Ting Wang and Wegner, {K. David} and Weiss, {Paul S.} and Ming Xu and Chenxi Yang and Zargarian, {Seyed Shahrooz} and Yuan Zeng and Yaofeng Zhou and Dingcheng Zhu and Robert Zierold and Parak, {Wolfgang J.}",
note = "doi: 10.1021/acsnano.4c10525",
year = "2025",
month = mar,
day = "25",
doi = "10.1021/acsnano.4c10525",
language = "English",
volume = "19",
pages = "10630--10717",
journal = "ACS Nano",
issn = "1936-0851",
publisher = "American Chemical Society",
number = "11",
}
@article{67fb618aec0a43f48a4c297f3a300b6d,
title = "Product kernels are efficient and flexible tools for high-dimensional scattered data interpolation",
author = "Kristof Albrecht and Juliane Entzian and Armin Iske",
year = "2025",
month = mar,
day = "20",
doi = "10.1007/s10444-025-10226-y",
language = "English",
volume = "51",
journal = "Advances in Computational Mathematics",
issn = "1572-9044",
publisher = "Springer Netherlands",
}
@inproceedings{9bf95cfcba7d4ae99f9917d3a7578536,
title = "Bounds on the Generalization Error in Active Learning",
author = "Vincent Menden and Yahya Saleh and Armin Iske",
year = "2025",
language = "English",
volume = "265",
booktitle = "Proceedings of the 6th Northern Lights Deep Learning Conference (NLDL)",
}
@inproceedings{2ab1e3e17f284b628f2194732d0aa316,
title = "Irregular Sampling of High-Dimensional Functions in Reproducing Kernel {Hilbert} Spaces",
author = "Armin Iske and Lennart Ohlsen",
year = "2025",
language = "English",
booktitle = "2025 International Conference on Sampling Theory and Applications (SampTA)",
publisher = "IEEE",
}
@inproceedings{c11f7c1f4aa14356903c71e9e82c81f9,
title = "On the Convergence of Irregular Sampling in Reproducing Kernel {Hilbert} Spaces",
author = "Armin Iske",
year = "2025",
language = "English",
booktitle = "2025 International Conference on Sampling Theory and Applications (SampTA)",
publisher = "IEEE",
}
@article{c04639c84e7b4ff9969ed2eb38bf4a6b,
title = "On the convergence of generalized kernel-based interpolation by greedy data selection algorithms",
author = "Kristof Albrecht and Armin Iske",
year = "2024",
month = dec,
day = "27",
doi = "10.1007/s10543-024-01048-3",
language = "English",
volume = "65",
journal = "BIT Numerical Mathematics",
issn = "0006-3835",
publisher = "Springer Netherlands",
}
@inproceedings{66e79ce6e46744c99d4fc9c92b6c91d3,
title = "Finetuning greedy kernel models by exchange algorithms",
author = "Tizian Wenzel and Armin Iske",
year = "2024",
language = "English",
booktitle = "Algoritmy 2024",
}
@phdthesis{bcf8faec266540bfa81310abd1b4d5be,
title = "Kernel-Based Generalized Interpolation and its Application to Computerized Tomography",
author = "Kristof Albrecht",
year = "2024",
language = "English",
school = "University of Hamburg",
}
@inproceedings{7b09636baedc4b48b84b255041cc1901,
title = "Learning phase-space flows using time-discrete implicit {Runge-Kutta} {PINNs}",
author = "{Fern{\'a}ndez Corral}, {\'A}lvaro and Nicol{\'a}s Mendoza and Armin Iske and Andrey Yachmenev and Jochen K{\"u}pper",
year = "2024",
language = "English",
booktitle = "International Conference on Scientific Computing and Machine Learning 2024",
note = "International Conference on Scientific Computing and Machine Learning 2024 ; Conference date: 19-03-2024 Through 23-03-2024",
url = "https://scml.jp/index.html",
}
@phdthesis{18d3be20c7ad45d7a1cc25ddb9fec4f5,
title = "Structure Analysis of Nonstandard Kernels for Multivariate Reconstructions",
abstract = "This dissertation concerns adaptive kernel-based approximation methods. We create a toolbox for adapting kernels to underlying problems, focusing on the interpolation of multivariate scattered data with an emphasis on anisotropies. By developing five nonstandard classes of flexible kernels – transformation, summation, and product kernels, as well as the anisotropic versions of the latter two orthogonal summation, and tensor product kernels – significant limitations of traditional radially symmetric kernels are addressed. These classes, some entirely new and others building on existing structures, provide the flexibility to select and combine kernels tailored to specific problems. Thus, they extend the variety of interpolation methods. The theoretical analysis conducted on each kernel class{\textquoteright}s native space not only expands the understanding of native spaces in general but also enlightens underlying (name-giving) structures and their associated benefits. We investigate the interpolation method for each kernel, including impacts on accuracy and stability. Numerical tests confirm the theoretical findings and show which kernel class is suitable for specific problem adaptations: We propose transformation or tensor product kernels for adapting to the point set; transformation kernels for adapting to the domain; and summation, transformation, or orthogonal summation kernels for adapting to the target function.",
author = "Juliane Entzian",
year = "2024",
language = "English",
school = "University of Hamburg",
}