Jan Gosmann and Chris Eliasmith. Vector-derived transformation binding: an improved binding operation for deep symbol-like processing in neural networks. Neural Computation, 31(5):849–869, May 2019. URL: https://www.mitpressjournals.org/doi/abs/10.1162/neco_a_01179, doi:10.1162/neco_a_01179.
@article{gosmann2019,
title = {Vector-Derived Transformation Binding: An Improved Binding Operation for Deep Symbol-Like Processing in Neural Networks},
abstract = {We present a new binding operation, vector-derived transformation binding (VTB), for use in vector symbolic architectures (VSAs). The performance of VTB is compared to circular convolution, used in holographic reduced representations (HRRs), in terms of list and stack encoding capacity. A special focus is given to the possibility of a neural implementation by means of the Neural Engineering Framework (NEF). While the scaling of required neural resources is slightly worse for VTB, it is found to be on par with circular convolution for list encoding and better for stack encoding. Furthermore, VTB influences the vector length less, which also benefits a neural implementation. Consequently, we argue that VTB is an improvement over HRRs for neurally implemented VSAs.},
author = {Jan Gosmann and Chris Eliasmith},
url = {https://www.mitpressjournals.org/doi/abs/10.1162/neco_a_01179},
doi = {10.1162/neco_a_01179},
journal = {Neural Computation},
year = {2019},
month = may,
volume = {31},
number = {5},
pages = {849--869},
publisher = {MIT Press},
pdf = "http://compneuro.uwaterloo.ca/files/publications/gosmann.2019b.pdf"
}
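A minimal NumPy sketch of the two binding operations the abstract compares, circular convolution (as used in HRRs) and VTB. This is not the authors' code; the VTB scaling factor and block-wise application below reflect one reading of the paper and should be checked against the original.

import numpy as np


def hrr_bind(x, y):
    """Circular convolution binding (HRR), computed via the FFT."""
    return np.fft.irfft(np.fft.rfft(x) * np.fft.rfft(y), n=len(x))


def vtb_bind(x, y):
    """VTB binding sketch: apply a transformation matrix derived from y to x.

    Assumes the dimensionality d is a perfect square, d = d' * d'.
    y is reshaped into a d' x d' matrix, scaled by sqrt(d') so the
    transformation is approximately norm-preserving, and applied to
    each d'-sized block of x (equivalent to a block-diagonal matrix).
    """
    d = len(x)
    dp = int(round(np.sqrt(d)))
    assert dp * dp == d, "VTB requires d to be a perfect square"
    Vy = np.sqrt(dp) * y.reshape(dp, dp)
    # Each row of x.reshape(dp, dp) is one block of x; map block c to Vy @ c.
    return (x.reshape(dp, dp) @ Vy.T).ravel()


if __name__ == "__main__":
    rng = np.random.default_rng(0)
    d = 64  # perfect square, so the VTB sketch is defined
    x = rng.normal(0.0, 1.0 / np.sqrt(d), d)
    y = rng.normal(0.0, 1.0 / np.sqrt(d), d)
    # The abstract notes VTB changes vector length less than circular
    # convolution; comparing the norms of the bound vectors illustrates this.
    print(np.linalg.norm(hrr_bind(x, y)), np.linalg.norm(vtb_bind(x, y)))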