@article{10.3844/jcssp.2021.178.187,
  article_type = {journal},
  title = {Automatic Piano Sheet Music Transcription with Machine Learning},
  author = {Saputra, Fernandes and Namyu, Un Greffin and Vincent, and Suhartono, Derwin and Gema, Aryo Pradipta},
  volume = {17},
  number = {3},
  year = {2021},
  month = {Mar},
  pages = {178-187},
  doi = {10.3844/jcssp.2021.178.187},
  url = {https://thescipub.com/abstract/jcssp.2021.178.187},
  abstract = {Automatic Music Transcription (AMT) is becoming increasingly popular and has drawn interest well beyond academic research. A successful AMT system would bridge many forms of interaction between people and music, including music education. The goal of this research is to transcribe an audio input into music notation. The research was conducted by training multiple neural network architectures on different kinds of cases. The evaluation used two approaches: objective evaluation and subjective evaluation. The research achieved an F1 score of 74.80%, and 73.3% of 30 respondents judged that the Bidirectional Long Short-Term Memory (BiLSTM) produced the best result. It can be concluded that BiLSTM is the architecture best suited for automatic music transcription.},
  journal = {Journal of Computer Science},
  publisher = {Science Publications}
}