[ { "id": "https://authors.library.caltech.edu/records/v7erc-8r294", "eprint_id": 121547, "eprint_status": "archive", "datestamp": "2023-08-20 08:51:00", "lastmod": "2023-12-22 23:38:11", "type": "book_section", "metadata_visibility": "show", "creators": { "items": [ { "id": "Schweikert-Christina", "name": { "family": "Schweikert", "given": "Christina" } }, { "id": "Shimojo-S", "name": { "family": "Shimojo", "given": "Shinsuke" }, "orcid": "0000-0002-1290-5232" }, { "id": "Glasser-Hannah", "name": { "family": "Glasser", "given": "Hannah" } }, { "id": "Hendsey-Rebecca", "name": { "family": "Hendsey", "given": "Rebecca" } }, { "id": "Alsaber-Rami", "name": { "family": "Alsaber", "given": "Rami" } }, { "id": "Hsu-D-Frank", "name": { "family": "Hsu", "given": "D. Frank" } } ] }, "title": "Modeling Prototypical Preference Behavior and Diversity using Rank Score Characteristic Functions", "ispublished": "unpub", "full_text_status": "public", "note": "\u00a9 2023 IEEE.", "abstract": "When given two human face images to choose, a subject's decision process is recorded as a sequence of eye movement gaze points. This sequence is then analyzed to detect and predict the preference made by the subject. In an experiment with twelve subjects, each with sixty trials, we have analyzed the 720 sequences using five attributes and combinatorial fusion. Results are promising with good accuracy and efficiency. In this paper, we characterize the decision-making behavior of each subject and measure the cognitive diversity between each of these twelve subjects and a prototypical subject. Our study contributes to improving the data and predictive quality of the experiment and the computational modeling.", "date": "2022-12", "date_type": "published", "publisher": "IEEE", "place_of_pub": "Piscataway, NJ", "pagerange": "203-207", "id_number": "CaltechAUTHORS:20230526-661743000.1", "isbn": "978-1-6654-9084-9", "book_title": "2022 IEEE 21st International Conference on Cognitive Informatics & Cognitive Computing (ICCI*CC)", "official_url": "https://resolver.caltech.edu/CaltechAUTHORS:20230526-661743000.1", "rights": "No commercial reproduction, distribution, display or performance rights in this work are provided.", "local_group": { "items": [ { "id": "Tianqiao-and-Chrissy-Chen-Institute-for-Neuroscience" }, { "id": "Division-of-Biology-and-Biological-Engineering" } ] }, "doi": "10.1109/iccicc57084.2022.10101519", "pub_year": "2022", "author_list": "Schweikert, Christina; Shimojo, Shinsuke; et al." },
{ "id": "https://authors.library.caltech.edu/records/92h16-3r143", "eprint_id": 107562, "eprint_status": "archive", "datestamp": "2023-08-20 03:24:31", "lastmod": "2023-12-22 23:39:57", "type": "book_section", "metadata_visibility": "show", "creators": { "items": [ { "id": "Sun-Jennifer-J", "name": { "family": "Sun", "given": "Jennifer J." }, "orcid": "0000-0002-0906-6589" }, { "id": "Kennedy-Ann", "name": { "family": "Kennedy", "given": "Ann" }, "orcid": "0000-0002-3782-0518" }, { "id": "Zhan-Eric", "name": { "family": "Zhan", "given": "Eric" } }, { "id": "Anderson-D-J", "name": { "family": "Anderson", "given": "David J." }, "orcid": "0000-0001-6175-3872" }, { "id": "Yue-Yisong", "name": { "family": "Yue", "given": "Yisong" }, "orcid": "0000-0001-9127-1989" }, { "id": "Perona-P", "name": { "family": "Perona", "given": "Pietro" }, "orcid": "0000-0002-7583-5809" } ] }, "title": "Task Programming: Learning Data Efficient Behavior Representations", "ispublished": "unpub", "full_text_status": "public", "note": "\u00a9 2021 IEEE. \n\nWe would like to thank Tomomi Karigo at Caltech for providing the mouse dataset. The Simons Foundation (Global Brain grant 543025 to PP) generously supported this work, and this work is partially supported by NIH Award #K99MH117264 (to AK), NSF Award #1918839 (to YY), and NSERC Award #PGSD3-532647-2019 (to JJS).\n\nSubmitted - 2011.13917.pdf", "abstract": "Specialized domain knowledge is often necessary to accurately annotate training sets for in-depth analysis, but can be burdensome and time-consuming to acquire from domain experts. This issue arises prominently in automated behavior analysis, in which agent movements or actions of interest are detected from video tracking data. To reduce annotation effort, we present TREBA: a method to learn annotation-sample efficient trajectory embedding for behavior analysis, based on multi-task self-supervised learning. The tasks in our method can be efficiently engineered by domain experts through a process we call \"task programming\", which uses programs to explicitly encode structured knowledge from domain experts. Total domain expert effort can be reduced by exchanging data annotation time for the construction of a small number of programmed tasks. We evaluate this trade-off using data from behavioral neuroscience, in which specialized domain knowledge is used to identify behaviors. We present experimental results in three datasets across two domains: mice and fruit flies. Using embeddings from TREBA, we reduce annotation burden by up to a factor of 10 without compromising accuracy compared to state-of-the-art features. Our results thus suggest that task programming and self-supervision can be an effective way to reduce annotation effort for domain experts.", "date": "2021-06", "date_type": "published", "publisher": "IEEE", "place_of_pub": "Piscataway, NJ", "pagerange": "2875-2884", "id_number": "CaltechAUTHORS:20210119-161625521", "isbn": "978-1-6654-4509-2", "book_title": "2021 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)", "official_url": "https://resolver.caltech.edu/CaltechAUTHORS:20210119-161625521", "rights": "No commercial reproduction, distribution, display or performance rights in this work are provided.", "funders": { "items": [ { "agency": "Simons Foundation", "grant_number": "543025" }, { "agency": "NIH", "grant_number": "K99MH117264" }, { "agency": "NSF", "grant_number": "CCF-1918839" }, { "agency": "Natural Sciences and Engineering Research Council of Canada (NSERC)", "grant_number": "PGSD3-532647-2019" } ] }, "local_group": { "items": [ { "id": "Tianqiao-and-Chrissy-Chen-Institute-for-Neuroscience" }, { "id": "Division-of-Biology-and-Biological-Engineering" } ] }, "doi": "10.1109/CVPR46437.2021.00290", "primary_object": { "basename": "2011.13917.pdf", "url": "https://authors.library.caltech.edu/records/92h16-3r143/files/2011.13917.pdf" }, "pub_year": "2021", "author_list": "Sun, Jennifer J.; Kennedy, Ann; et al."
}, { "id": "https://authors.library.caltech.edu/records/0k7zx-tfz21", "eprint_id": 106486, "eprint_status": "archive", "datestamp": "2023-08-20 00:35:14", "lastmod": "2023-12-22 23:40:53", "type": "book_section", "metadata_visibility": "show", "creators": { "items": [ { "id": "Huang-Yujia", "name": { "family": "Huang", "given": "Yujia" }, "orcid": "0000-0001-7667-8342" }, { "id": "Gornet-James", "name": { "family": "Gornet", "given": "James" }, "orcid": "0000-0002-5431-7340" }, { "id": "Dai-Sihui", "name": { "family": "Dai", "given": "Sihui" } }, { "id": "Yu-Zhiding", "name": { "family": "Yu", "given": "Zhiding" } }, { "id": "Nguyen-Tan-M", "name": { "family": "Nguyen", "given": "Tan" } }, { "id": "Tsao-D-Y", "name": { "family": "Tsao", "given": "Doris Y." }, "orcid": "0000-0003-1083-1919" }, { "id": "Anandkumar-A", "name": { "family": "Anandkumar", "given": "Anima" } } ] }, "title": "Neural Networks with Recurrent Generative Feedback", "ispublished": "unpub", "full_text_status": "public", "note": "We thank Chaowei Xiao, Haotao Wang, Jean Kossaifi, Francisco Luongo for the valuable feedback. Y. Huang is supported by DARPA LwLL grants. J. Gornet is supported by the NIH Predoctoral Training in Quantitative Neuroscience 1T32NS105595-01A1. D. Y. Tsao is supported by Howard Hughes Medical Institute and Tianqiao and Chrissy Chen Institute for Neuroscience. A. Anandkumar is supported in part by Bren endowed chair, DARPA LwLL grants, Tianqiao and Chrissy Chen Institute for Neuroscience, Microsoft, Google, and Adobe faculty fellowships.\n\nPublished - NeurIPS-2020-neural-networks-with-recurrent-generative-feedback-Paper.pdf\nSupplemental Material - NeurIPS-2020-neural-networks-with-recurrent-generative-feedback-Supplemental.pdf", "abstract": "Neural networks are vulnerable to input perturbations such as additive noise and adversarial attacks. In contrast, human perception is much more robust to such perturbations. The Bayesian brain hypothesis states that human brains use an internal generative model to update the posterior beliefs of the sensory input. This mechanism can be interpreted as a form of self-consistency between the maximum a posteriori (MAP) estimation of an internal generative model and the external environment. Inspired by such hypothesis, we enforce self-consistency in neural networks by incorporating generative recurrent feedback. We instantiate this design on convolutional neural networks (CNNs). The proposed framework, termed Convolutional Neural Networks with Feedback (CNN-F), introduces a generative feedback with latent variables to existing CNN architectures, where consistent predictions are made through alternating MAP inference under a Bayesian framework. In the experiments, CNN-F shows considerably improved adversarial robustness over conventional feedforward CNNs on standard benchmarks.", "date": "2020-12", "date_type": "published", "publisher": "Advances in Neural Information Processing Systems", "id_number": "CaltechAUTHORS:20201106-120201944", "official_url": "https://resolver.caltech.edu/CaltechAUTHORS:20201106-120201944", "rights": "No commercial reproduction, distribution, display or performance rights in this work are provided.", "funders": { "items": [ { "agency": "NIH Predoctoral Fellowship", "grant_number": "1T32NS105595-01A1" }, { "agency": "Howard Hughes Medical Institute (HHMI)" }, { "agency": "Tianqiao and Chrissy Chen Institute for Neuroscience" }, { "agency": "Bren Professor of Computing and Mathematical Sciences" }, { "agency": "Defense Advanced Research Projects Agency (DARPA)" }, { "agency": "Learning with Less Labels (LwLL)" }, { "agency": "Microsoft Faculty Fellowship" }, { "agency": "Google Faculty Research Award" }, { "agency": "Adobe" }
] }, "local_group": { "items": [ { "id": "Tianqiao-and-Chrissy-Chen-Institute-for-Neuroscience" }, { "id": "Division-of-Biology-and-Biological-Engineering" } ] }, "contributors": { "items": [ { "id": "Larochelle-H", "name": { "family": "Larochelle", "given": "H." } }, { "id": "Ranzato-M", "name": { "family": "Ranzato", "given": "M." } }, { "id": "Hadsell-R", "name": { "family": "Hadsell", "given": "R." } }, { "id": "Balcan-M-F", "name": { "family": "Balcan", "given": "M. F." } }, { "id": "Lin-H", "name": { "family": "Lin", "given": "H." } } ] }, "doi": "10.48550/arXiv.2007.09200", "primary_object": { "basename": "NeurIPS-2020-neural-networks-with-recurrent-generative-feedback-Paper.pdf", "url": "https://authors.library.caltech.edu/records/0k7zx-tfz21/files/NeurIPS-2020-neural-networks-with-recurrent-generative-feedback-Paper.pdf" }, "related_objects": [ { "basename": "NeurIPS-2020-neural-networks-with-recurrent-generative-feedback-Supplemental.pdf", "url": "https://authors.library.caltech.edu/records/0k7zx-tfz21/files/NeurIPS-2020-neural-networks-with-recurrent-generative-feedback-Supplemental.pdf" } ], "pub_year": "2020", "author_list": "Huang, Yujia; Gornet, James; et al."
}, { "id": "https://authors.library.caltech.edu/records/b72n6-64504", "eprint_id": 97394, "eprint_status": "archive", "datestamp": "2023-08-19 18:52:01", "lastmod": "2023-10-20 22:18:07", "type": "book_section", "metadata_visibility": "show", "creators": { "items": [ { "id": "Haghi-B-A", "name": { "family": "Haghi", "given": "Benyamin" }, "orcid": "0000-0002-4839-7647" }, { "id": "Kellis-Spencer-S", "name": { "family": "Kellis", "given": "Spencer" }, "orcid": "0000-0002-5158-1058" }, { "id": "Shah-Sahil", "name": { "family": "Shah", "given": "Sahil" } }, { "id": "Ashok-M", "name": { "family": "Ashok", "given": "Maitreyi" } }, { "id": "Bashford-L", "name": { "family": "Bashford", "given": "Luke" }, "orcid": "0000-0003-4391-2491" }, { "id": "Kramer-D-R", "name": { "family": "Kramer", "given": "Daniel" }, "orcid": "0000-0003-4551-2977" }, { "id": "Lee-Brian", "name": { "family": "Lee", "given": "Brian" } }, { "id": "Liu-Charles-Y", "name": { "family": "Liu", "given": "Charles" } }, { "id": "Andersen-R-A", "name": { "family": "Andersen", "given": "Richard A." }, "orcid": "0000-0002-7947-0472" }, { "id": "Emami-A", "name": { "family": "Emami", "given": "Azita" }, "orcid": "0000-0002-6945-9958" } ] }, "title": "Deep Multi-State Dynamic Recurrent Neural Networks Operating on Wavelet Based Neural Features for Robust Brain Machine Interfaces", "ispublished": "unpub", "full_text_status": "public", "note": "\u00a9 2019 Neural Information Processing Systems Foundation, Inc. \n\nWe thank Tianqiao and Chrissy (T&C) Chen Institute for Neuroscience at California Institute of Technology (Caltech) for supporting this IRB approved research. We also thank Dr. Erin Burkett for reviewing this manuscript.\n\nSubmitted - 710327.full.pdf\nSupplemental Material - 9594-deep-multi-state-dynamic-recurrent-neural-networks-operating-on-wavelet-based-neural-features-for-robust-brain-machine-interfaces-supplemental.zip", "abstract": "We present a new deep multi-state Dynamic Recurrent Neural Network (DRNN) architecture for Brain Machine Interface (BMI) applications. Our DRNN is used to predict Cartesian representation of a computer cursor movement kinematics from open-loop neural data recorded from the posterior parietal cortex (PPC) of a human subject in a BMI system. We design the algorithm to achieve a reasonable trade-off between performance and robustness, and we constrain memory usage in favor of future hardware implementation. We feed the predictions of the network back to the input to improve prediction performance and robustness. We apply a scheduled sampling approach to the model in order to solve a statistical distribution mismatch between the ground truth and predictions. Additionally, we configure a small DRNN to operate with a short history of input, reducing the required buffering of input data and number of memory accesses. This configuration lowers the expected power consumption in a neural network accelerator. Operating on wavelet-based neural features, we show that the average performance of DRNN surpasses other state-of-the-art methods in the literature on both single- and multi-day data recorded over 43 days. Results show that multi-state DRNN has the potential to model the nonlinear relationships between the neural data and kinematics for robust BMIs.", "date": "2019-12", "date_type": "published", "publisher": "Neural Information Processing Systems Foundation, Inc.", "id_number": "CaltechAUTHORS:20190724-154847448", "official_url": "https://resolver.caltech.edu/CaltechAUTHORS:20190724-154847448", "rights": "No commercial reproduction, distribution, display or performance rights in this work are provided.", "funders": { "items": [ { "agency": "Tianqiao and Chrissy Chen Institute for Neuroscience" } ] }, "local_group": { "items": [ { "id": "Tianqiao-and-Chrissy-Chen-Institute-for-Neuroscience" } ] }, "doi": "10.1101/710327", "primary_object": { "basename": "9594-deep-multi-state-dynamic-recurrent-neural-networks-operating-on-wavelet-based-neural-features-for-robust-brain-machine-interfaces-supplemental.zip", "url": "https://authors.library.caltech.edu/records/b72n6-64504/files/9594-deep-multi-state-dynamic-recurrent-neural-networks-operating-on-wavelet-based-neural-features-for-robust-brain-machine-interfaces-supplemental.zip" }, "related_objects": [ { "basename": "9594-deep-multi-state-dynamic-recurrent-neural-networks-operating-on-wavelet-based-neural-features-for-robust-brain-machine-interfaces.pdf", "url": "https://authors.library.caltech.edu/records/b72n6-64504/files/9594-deep-multi-state-dynamic-recurrent-neural-networks-operating-on-wavelet-based-neural-features-for-robust-brain-machine-interfaces.pdf" }, { "basename": "710327.full.pdf", "url": "https://authors.library.caltech.edu/records/b72n6-64504/files/710327.full.pdf" } ], "pub_year": "2019", "author_list": "Haghi, Benyamin; Kellis, Spencer; et al."
}, { "id": "https://authors.library.caltech.edu/records/est3h-bh929", "eprint_id": 95764, "eprint_status": "archive", "datestamp": "2023-08-19 14:37:45", "lastmod": "2023-10-20 20:32:46", "type": "book_section", "metadata_visibility": "show", "creators": { "items": [ { "id": "Shah-Sahil", "name": { "family": "Shah", "given": "Sahil" } }, { "id": "Haghi-B-A", "name": { "family": "Haghi", "given": "Benyamin" }, "orcid": "0000-0002-4839-7647" }, { "id": "Kellis-Spencer-S", "name": { "family": "Kellis", "given": "Spencer" }, "orcid": "0000-0002-5158-1058" }, { "id": "Bashford-L", "name": { "family": "Bashford", "given": "Luke" }, "orcid": "0000-0003-4391-2491" }, { "id": "Kramer-D-R", "name": { "family": "Kramer", "given": "Daniel" }, "orcid": "0000-0003-4551-2977" }, { "id": "Lee-Brian", "name": { "family": "Lee", "given": "Brian" } }, { "id": "Liu-Charles-Y", "name": { "family": "Liu", "given": "Charles" } }, { "id": "Andersen-R-A", "name": { "family": "Andersen", "given": "Richard" }, "orcid": "0000-0002-7947-0472" }, { "id": "Emami-A", "name": { "family": "Emami", "given": "Azita" }, "orcid": "0000-0002-6945-9958" } ] }, "title": "Decoding Kinematics from Human Parietal Cortex using Neural Networks", "ispublished": "unpub", "full_text_status": "restricted", "note": "\u00a9 2019 IEEE. \n\nThis IRB approved research was supported by Chen Institute for Neuroscience at the California Institute of Technology (Caltech), Pasadena, CA USA 91125.", "abstract": "Brain-machine interfaces have shown promising results in providing control over assistive devices for paralyzed patients. In this work we describe a BMI system using electrodes implanted in the parietal lobe of a tetraplegic subject. Neural data used for the decoding was recorded in five 3-minute blocks during the same session. Within each block, the subject uses motor imagery to control a cursor in a 2D center-out task. We compare performance for four different algorithms: Kalman filter, a two-layer Deep Neural Network (DNN), a Recurrent Neural Network (RNN) with SimpleRNN unit cell (SimpleRNN), and a RNN with Long-Short-Term Memory (LSTM) unit cell. The decoders achieved Pearson Correlation Coefficients (\u03c1) of 0.48, 0.39, 0.77 and 0.75, respectively, in the Y-coordinate, and 0.24, 0.20, 0.46 and 0.47, respectively, in the X-coordinate.", "date": "2019-03", "date_type": "published", "publisher": "IEEE", "place_of_pub": "Piscataway, NJ", "pagerange": "1138-1141", "id_number": "CaltechAUTHORS:20190523-133828806", "isbn": "9781538679210", "book_title": "2019 9th International IEEE/EMBS Conference on Neural Engineering (NER)", "official_url": "https://resolver.caltech.edu/CaltechAUTHORS:20190523-133828806", "rights": "No commercial reproduction, distribution, display or performance rights in this work are provided.", "funders": { "items": [ { "agency": "Tianqiao and Chrissy Chen Institute for Neuroscience" } ] }, "local_group": { "items": [ { "id": "Tianqiao-and-Chrissy-Chen-Institute-for-Neuroscience" } ] }, "doi": "10.1109/ner.2019.8717137", "pub_year": "2019", "author_list": "Shah, Sahil; Haghi, Benyamin; et al." } ]