//! Module layout:
//! - [`files`] : discover and load `.sql` files from disk
//! - [`ast`] : parse SQL into an AST using [`sqlparser`]
//! - [`comments`] : extract and model comments and spans
//! - [`docs`] : assemble parsed SQL and comments into [`SqlDocs`]
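//!
//! A minimal end-to-end sketch, assuming a `migrations/` directory of `.sql`
//! files (the directory name is a placeholder):
//!
//! ```ignore
//! let docs = generate_docs_from_dir_no_deny("migrations")?;
//! for (path, doc) in &docs {
//!     for table in doc.tables() {
//!         println!("{}: table `{}`", path.display(), table.name());
//!     }
//! }
//! ```
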
use core::fmt;
use std::{
    error,
    path::{Path, PathBuf},
};

use sqlparser::parser::ParserError;

use crate::{
    ast::ParsedSqlFileSet,
    comments::{CommentError, Comments},
    docs::SqlDocs,
    files::SqlFileSet,
};
pub mod ast;
pub mod comments;
pub mod docs;
pub mod files;

/// Error type covering each failure mode of the documentation pipeline:
/// file I/O, comment parsing, and SQL parsing.
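///
/// All three wrapped error types convert into `DocError` via `From`, so the
/// `?` operator works throughout the pipeline. A minimal sketch of inspecting
/// one (the [`std::io::Error`] here is constructed purely for illustration):
///
/// ```ignore
/// let err = DocError::from(std::io::Error::new(
///     std::io::ErrorKind::NotFound,
///     "missing .sql file",
/// ));
/// assert!(matches!(err, DocError::FileReadError(_)));
/// assert_eq!(err.to_string(), "file read error: missing .sql file");
/// ```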
#[derive(Debug)]
pub enum DocError {
    /// Wrapper for the standard [`std::io::Error`]
    FileReadError(std::io::Error),
    /// Wrapper for [`CommentError`]
    CommentError(CommentError),
    /// Wrapper for [`ParserError`]
    SqlParserError(ParserError),
}

impl fmt::Display for DocError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::FileReadError(error) => write!(f, "file read error: {error}"),
            Self::CommentError(comment_error) => {
                write!(f, "comment parse error: {comment_error}")
            }
            Self::SqlParserError(parser_error) => write!(f, "SQL parse error: {parser_error}"),
        }
    }
}

impl error::Error for DocError {
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        match self {
            Self::FileReadError(e) => Some(e),
            Self::CommentError(e) => Some(e),
            Self::SqlParserError(e) => Some(e),
        }
    }
}

impl From<std::io::Error> for DocError {
    fn from(e: std::io::Error) -> Self {
        Self::FileReadError(e)
    }
}

impl From<CommentError> for DocError {
    fn from(e: CommentError) -> Self {
        Self::CommentError(e)
    }
}

impl From<ParserError> for DocError {
    fn from(e: ParserError) -> Self {
        Self::SqlParserError(e)
    }
}

/// Primary entry point. Recursively discovers `.sql` files under `dir`,
/// parses every file not on the deny list, and returns one ([`PathBuf`],
/// [`SqlDocs`]) pair per file.
///
/// # Parameters
/// - `dir`: the directory to search recursively for `.sql` files. Accepts any
///   type implementing [`AsRef<Path>`], such as `&str`, [`String`], or
///   [`PathBuf`].
/// - `deny_list`: paths of `.sql` files to skip. Other file types are ignored
///   automatically and do not need to be listed.
///
/// # Errors
/// Returns a [`DocError`] identifying which stage failed: file I/O, comment
/// parsing, or SQL parsing.
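///
/// # Examples
///
/// A minimal sketch, assuming a `migrations/` directory exists and
/// `migrations/seed.sql` should be skipped (both names are placeholders):
///
/// ```ignore
/// let docs = generate_docs_from_dir("migrations", &["migrations/seed.sql"])?;
/// for (path, doc) in &docs {
///     println!("{}: {} table(s) documented", path.display(), doc.tables().len());
/// }
/// ```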
pub fn generate_docs_from_dir<P: AsRef<Path>, S: AsRef<str>>(
    dir: P,
    deny_list: &[S],
) -> Result<Vec<(PathBuf, SqlDocs)>, DocError> {
    // Convert the deny list to a `Vec<String>`.
    let deny_vec: Vec<String> = deny_list.iter().map(|file| file.as_ref().to_string()).collect();
    // An empty deny list becomes `None`.
    let deny_option = if deny_vec.is_empty() { None } else { Some(deny_vec) };
    // Collect the `.sql` files found under the directory.
    let file_set = SqlFileSet::new(dir.as_ref(), deny_option)?;
    // Parse the SQL in every file.
    let parsed_files = ParsedSqlFileSet::parse_all(file_set)?;
    let mut sql_docs = Vec::new();
    // Generate the `SqlDocs` for each file and pair them with the file's path.
    for file in parsed_files.files() {
        let comments = Comments::parse_all_comments_from_file(file)?;
        let docs = SqlDocs::from_parsed_file(file, &comments);
        let path = file.file().path().to_path_buf();
        sql_docs.push((path, docs));
    }
    Ok(sql_docs)
}

/// Secondary entry point for when no deny list is needed; otherwise behaves
/// exactly like [`generate_docs_from_dir`], returning one ([`PathBuf`],
/// [`SqlDocs`]) pair per file.
///
/// # Parameters
/// - `dir`: the directory to search recursively for `.sql` files. Accepts any
///   type implementing [`AsRef<Path>`], such as `&str`, [`String`], or
///   [`PathBuf`].
///
/// # Errors
/// Returns a [`DocError`] identifying which stage failed: file I/O, comment
/// parsing, or SQL parsing.
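///
/// # Examples
///
/// A minimal sketch (the directory name is a placeholder); the call is
/// equivalent to `generate_docs_from_dir("migrations", &[] as &[&str])`:
///
/// ```ignore
/// let docs = generate_docs_from_dir_no_deny("migrations")?;
/// println!("documented {} file(s)", docs.len());
/// ```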
pub fn generate_docs_from_dir_no_deny<P: AsRef<Path>>(
    dir: P,
) -> Result<Vec<(PathBuf, SqlDocs)>, DocError> {
    generate_docs_from_dir::<P, &str>(dir, &[])
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_with_deny_list_from_files() {
        let generated_docs = generate_docs_from_dir(
            "sql_files",
            &[
                "sql_files/without_comments.sql",
                "sql_files/with_single_line_comments.sql",
                "sql_files/with_multiline_comments.sql",
            ],
        )
        .unwrap();
        // Only the one file outside the deny list should survive.
        assert_eq!(generated_docs.len(), 1);
        assert_eq!(generated_docs[0].0, PathBuf::from("sql_files/with_mixed_comments.sql"));
        let table_names = ["users", "posts"];
        let table_comments =
            ["Users table stores user account information", "Posts table stores blog posts"];
        // Check every table in the surviving file, not just the first.
        for (i, table) in generated_docs[0].1.tables().iter().enumerate() {
            assert_eq!(table.name(), table_names[i]);
            assert_eq!(table.doc().as_ref().unwrap(), table_comments[i]);
        }
        let user_columns = ["id", "username", "email", "created_at"];
        let user_columns_comments =
            ["Primary key", "Username for login", "Email address", "When the user registered"];
        for (i, column) in generated_docs[0].1.tables()[0].columns().iter().enumerate() {
            assert_eq!(column.name(), user_columns[i]);
            assert_eq!(column.doc().as_ref().unwrap(), user_columns_comments[i]);
        }
    }

    #[test]
    fn test_with_no_deny_list_from_files() {
        let generated_docs = generate_docs_from_dir_no_deny("sql_files").unwrap();
        let expected_paths = [
            "sql_files/without_comments.sql",
            "sql_files/with_multiline_comments.sql",
            "sql_files/with_single_line_comments.sql",
            "sql_files/with_mixed_comments.sql",
        ];
        let table_names = ["users", "posts"];
        let table_comments =
            ["Users table stores user account information", "Posts table stores blog posts"];
        let user_columns = ["id", "username", "email", "created_at"];
        let user_columns_comments =
            ["Primary key", "Username for login", "Email address", "When the user registered"];
        for (i, (buf, sql_docs)) in generated_docs.iter().enumerate() {
            // `PathBuf` compares against `Path`, not `&str`.
            assert_eq!(buf, Path::new(expected_paths[i]));
            if buf == Path::new("sql_files/with_mixed_comments.sql") {
                for (i, table) in sql_docs.tables().iter().enumerate() {
                    assert_eq!(table.name(), table_names[i]);
                    assert_eq!(table.doc().as_ref().unwrap(), table_comments[i]);
                    if table.name() == "users" {
                        for (i, column) in table.columns().iter().enumerate() {
                            assert_eq!(column.name(), user_columns[i]);
                            assert_eq!(column.doc().as_ref().unwrap(), user_columns_comments[i]);
                        }
                    }
                }
            }
        }
    }
}