@article{10.3844/jcssp.2021.480.489,
  author       = {Mellah, Youssef and Rhouati, Abdelkader and Ettifouri, El Hassane and Bouchentouf, Toumi and Belkasmi, Mohammed Ghaouth},
  title        = {{SQL} Generation from Natural Language: A Sequence-to-Sequence Model Powered by the Transformers Architecture and Association Rules},
  journal      = {Journal of Computer Science},
  publisher    = {Science Publications},
  article_type = {journal},
  volume       = {17},
  number       = {5},
  year         = {2021},
  month        = may,
  pages        = {480--489},
  doi          = {10.3844/jcssp.2021.480.489},
  url          = {https://thescipub.com/abstract/jcssp.2021.480.489},
  abstract     = {Using Natural Language (NL) to interacting with relational databases allows users from any background to easily query and analyze large amounts of data. This requires a system that understands user questions and automatically converts them into structured query language such as SQL. The best performing Text-to-SQL systems use supervised learning (usually formulated as a classification problem) by approaching this task as a sketch-based slot-filling problem, or by first converting questions into an Intermediate Logical Form (ILF) then convert it to the corresponding SQL query. However, non-supervised modeling that directly converts questions to SQL queries has proven more difficult. In this sense, we propose an approach to directly translate NL questions into SQL statements. In this study, we present a Sequence-to-Sequence (Seq2Seq) parsing model for the NL to SQL task, powered by the Transformers Architecture exploring the two Language Models (LM): Text-To-Text Transfer Transformer (T5) and the Multilingual pre-trained Text-To-Text Transformer (mT5). Besides, we adopt the transformation-based learning algorithm to update the aggregation predictions based on association rules. The resulting model achieves a new state-of-the-art on the WikiSQL DataSet, for the weakly supervised SQL generation.},
}