@inproceedings{TEXTUAL,
      recid = {12683},
      author = {Levental, Maksim and Khan, Arham and Chard, Ryan and Yoshii, Kazutomo and Chard, Kyle and Foster, Ian},
      title = {BraggHLS: High-Level Synthesis for Low-Latency Deep Neural Networks for Experimental Science},
      booktitle = {Proceedings of the International Symposium on Highly Efficient Accelerators and Reconfigurable Technologies},
      date = {2024-06-19},
      abstract = {In many experiment-driven scientific domains, such as high-energy physics, material science, and cosmology, high data rate experiments impose hard constraints on data acquisition systems: collected data must either be indiscriminately stored for post-processing and analysis, thereby necessitating large storage capacity, or accurately filtered in real time, thereby necessitating low-latency processing. Deep neural networks, effective in other filtering tasks, have not been widely employed in such data acquisition systems due to design and deployment difficulties. We present BraggHLS, an open-source, lightweight compiler framework without any proprietary dependencies, based on high-level synthesis techniques, for translating high-level representations of deep neural networks to low-level representations suitable for deployment to near-sensor devices such as field-programmable gate arrays. We evaluate BraggHLS on various workloads and present a case-study implementation of a deep neural network for Bragg peak detection in the context of high-energy diffraction microscopy. We show BraggHLS is able to produce an implementation of the network with a throughput of 4.8 µs/sample, which is approximately a 4× improvement over the existing implementation.},
      url = {http://knowledge.uchicago.edu/record/12683},
}