memory-editable-transformer

My explorations into editing the knowledge and memories of an attention network

MIT License

My explorations into editing the knowledge and memories of an attention network.

Citations

@article{meng2022memit,
  title         = {Mass-Editing Memory in a Transformer},
  author        = {Meng, Kevin and Sen Sharma, Arnab and Andonian, Alex and Belinkov, Yonatan and Bau, David},
  journal       = {arXiv preprint arXiv:2210.07229},
  year          = {2022},
  eprint        = {2210.07229},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CL}
}
@misc{Burns2022DiscoveringLK,
  title         = {Discovering Latent Knowledge in Language Models Without Supervision},
  author        = {Burns, Collin and Ye, Haotian and Klein, Dan and Steinhardt, Jacob},
  year          = {2022},
  eprint        = {2212.03827},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CL}
}