Skip to content

Commit 2a93a87

Browse files
committed
completed lib mod with unit tests. Fixes #5
1 parent ffa7d0c commit 2a93a87

File tree

3 files changed

+186
-6
lines changed

3 files changed

+186
-6
lines changed

src/comments.rs

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -196,7 +196,7 @@ impl CommentWithSpan {
196196
///
197197
/// # Parameters
198198
/// - the comment as a [`String`]
199-
/// - the span of the comment as a [`CommentSpan`]
199+
/// - the span of the comment as a [`Span`]
200200
#[must_use]
201201
pub const fn new(comment: Comment, span: Span) -> Self {
202202
Self { comment, span }
@@ -208,7 +208,7 @@ impl CommentWithSpan {
208208
&self.comment
209209
}
210210

211-
/// Getter method for retrieving the [`CommentSpan`] of the comment
211+
/// Getter method for retrieving the [`Span`] of the comment
212212
#[must_use]
213213
pub const fn span(&self) -> &Span {
214214
&self.span
@@ -249,8 +249,9 @@ impl Comments {
249249
/// - `file`: the [`ParsedSqlFile`] that needs to be parsed for comments
250250
///
251251
/// # Errors
252-
/// - Will return [`CommentError::UnmatchedBlockCommentStart`] if a comment
252+
/// - Will return [`CommentError::UnmatchedMultilineCommentStart`] if a comment
253253
/// does not have an opening `/*`
254+
/// - Will return [`CommentError::UnterminatedMultiLineComment`] if a multiline comment doesn't end before `EOF`
254255
pub fn parse_all_comments_from_file(file: &ParsedSqlFile) -> CommentResult<Self> {
255256
let src = file.content();
256257
let comments = Self::scan_comments(src)?;

src/docs.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,6 @@ impl SqlDocs {
100100
#[must_use]
101101
pub fn from_parsed_file(file: &ParsedSqlFile, comments: &Comments) -> Self {
102102
let mut tables = Vec::new();
103-
dbg!(comments);
104103
for statement in file.statements() {
105104
#[allow(clippy::single_match)]
106105
match statement {

src/lib.rs

Lines changed: 182 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,188 @@
11
//! Module layout:
22
//! - [`files`] : discover and load `.sql` files from disk
3-
//! - [`ast`] : parse SQL into an AST using `sqlparser`
4-
//! - [`comments`] : extract and model comments + spans
3+
//! - [`ast`] : parse SQL into an AST using [`sqlparser`]
4+
//! - [`comments`] : extract and model comments and spans
5+
6+
use core::fmt;
7+
use std::{
8+
error,
9+
path::{Path, PathBuf},
10+
};
11+
12+
use sqlparser::parser::ParserError;
13+
14+
use crate::{
15+
ast::ParsedSqlFileSet,
16+
comments::{CommentError, Comments},
17+
docs::SqlDocs,
18+
files::SqlFileSet,
19+
};
520
pub mod ast;
621
pub mod comments;
722
pub mod docs;
823
pub mod files;
24+
25+
/// Unified error type for the documentation-generation pipeline.
///
/// Wraps the three failure sources — file I/O, comment extraction, and SQL
/// parsing — so callers handle everything through a single `Result` error
/// type. Each variant has a matching `From` impl, letting `?` convert the
/// underlying errors automatically.
#[derive(Debug)]
pub enum DocError {
    /// A file could not be read; wraps the underlying [`std::io::Error`].
    FileReadError(std::io::Error),
    /// Comment extraction failed; wraps the underlying [`CommentError`].
    CommentError(CommentError),
    /// SQL parsing failed; wraps the underlying [`ParserError`].
    SqlParserError(ParserError),
}
35+
36+
impl fmt::Display for DocError {
37+
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
38+
match self {
39+
Self::FileReadError(error) => write!(f, "file read error:{error}"),
40+
Self::CommentError(comment_error) => {
41+
write!(f, "comment parse error: {comment_error}")
42+
}
43+
Self::SqlParserError(parser_error) => write!(f, "SQL parse error {parser_error}"),
44+
}
45+
}
46+
}
47+
48+
impl error::Error for DocError {
49+
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
50+
match self {
51+
Self::FileReadError(e) => Some(e),
52+
Self::CommentError(e) => Some(e),
53+
Self::SqlParserError(e) => Some(e),
54+
}
55+
}
56+
}
57+
58+
impl From<std::io::Error> for DocError {
59+
fn from(e: std::io::Error) -> Self {
60+
Self::FileReadError(e)
61+
}
62+
}
63+
64+
impl From<CommentError> for DocError {
    /// Wraps a comment-extraction failure, enabling `?` on comment parsing.
    fn from(comment_error: CommentError) -> Self {
        Self::CommentError(comment_error)
    }
}
69+
70+
impl From<ParserError> for DocError {
    /// Wraps an SQL-parsing failure, enabling `?` on `sqlparser` calls.
    fn from(parser_error: ParserError) -> Self {
        Self::SqlParserError(parser_error)
    }
}
75+
76+
/// Primary Entry point. Returns a tuple of [`PathBuf`] and [`SqlDocs`].
77+
///
78+
/// # Parameters:
79+
/// - `dir`: the [`Path`] to recursively parse `.sql` files. Allows for coercion
80+
/// from [`String`]
81+
/// - `deny_list`: a `Vec` of the `.sql` files to ignore (do not need to specify
82+
/// other file types to ignore).
83+
///
84+
/// # Errors
85+
/// - Will return a `DocError` that specifies the error (io, comment parsing,
86+
/// sql parsing)
87+
pub fn generate_docs_from_dir<P: AsRef<Path>, S: AsRef<str>>(
88+
dir: P,
89+
deny_list: &[S],
90+
) -> Result<Vec<(PathBuf, SqlDocs)>, DocError> {
91+
// Convert deny list to a `Vec<String>`
92+
let deny_vec: Vec<String> = deny_list.iter().map(|file| file.as_ref().to_string()).collect();
93+
// verify whether deny_list is empty and return correct `Option`
94+
let deny_option = if deny_vec.is_empty() { None } else { Some(deny_vec) };
95+
// Generate the file set from the directory content
96+
let file_set = SqlFileSet::new(dir.as_ref(), deny_option)?;
97+
// parse all files sql
98+
let parsed_files = ParsedSqlFileSet::parse_all(file_set)?;
99+
let mut sql_docs = Vec::new();
100+
// iterate on each file and generate the `SqlDocs` and associate with the `Path`
101+
for file in parsed_files.files() {
102+
let comments = Comments::parse_all_comments_from_file(file)?;
103+
let docs = SqlDocs::from_parsed_file(file, &comments);
104+
let path = file.file().path().to_path_buf();
105+
sql_docs.push((path, docs));
106+
}
107+
Ok(sql_docs)
108+
}
109+
110+
/// Secondary Entry point. Returns a tuple of [`PathBuf`] and [`SqlDocs`].
111+
/// Useful when no deny list is needed
112+
///
113+
/// # Parameters:
114+
/// - `dir`: the [`Path`] to recursively parse `.sql` files. Allows for coercion
115+
/// from [`String`]
116+
///
117+
/// # Errors
118+
/// - Will return a `DocError` that specifies the error (io, comment parsing,
119+
/// sql parsing)
120+
pub fn generate_docs_from_dir_no_deny<P: AsRef<Path>>(
121+
dir: P,
122+
) -> Result<Vec<(PathBuf, SqlDocs)>, DocError> {
123+
generate_docs_from_dir::<P, &str>(dir, &[])
124+
}
125+
126+
/// Denies three of the four fixture files and verifies that only the
/// mixed-comments file survives, with the expected table and column docs.
#[cfg(test)]
#[test]
fn test_with_deny_list_from_files() {
    let generated_docs = generate_docs_from_dir(
        "sql_files",
        &[
            "sql_files/without_comments.sql",
            "sql_files/with_single_line_comments.sql",
            "sql_files/with_multiline_comments.sql",
        ],
    )
    .unwrap();

    // Exactly one file should survive the deny list.
    assert_eq!(generated_docs.len(), 1);
    let (path, sqldoc) = &generated_docs[0];
    assert_eq!(path, &PathBuf::from("sql_files/with_mixed_comments.sql"));

    // Fix: the previous loop enumerated the docs vector (length 1) while
    // indexing tables by the doc index, so only the first table ("users")
    // was ever checked and "posts" was silently skipped. Enumerate the
    // tables of the single surviving doc instead.
    let table_names = ["users", "posts"];
    let table_comments =
        ["Users table stores user account information", "Posts table stores blog posts"];
    for (i, table) in sqldoc.tables().iter().enumerate() {
        assert_eq!(table.name(), table_names[i]);
        assert_eq!(table.doc().as_ref().unwrap(), table_comments[i]);
    }

    // Column-level docs on the "users" table.
    let user_columns = ["id", "username", "email", "created_at"];
    let user_columns_comments =
        ["Primary key", "Username for login", "Email address", "When the user registered"];
    for (i, column) in sqldoc.tables()[0].columns().iter().enumerate() {
        assert_eq!(column.name(), user_columns[i]);
        assert_eq!(column.doc().as_ref().unwrap(), user_columns_comments[i]);
    }
}
157+
158+
/// Verifies that docs are generated for every fixture file when no deny
/// list is given.
///
/// Fix: the previous version asserted paths positionally against the
/// discovery order, but directory iteration order (`read_dir`) is
/// platform-dependent, making the test flaky. Paths are now compared as a
/// sorted set, and the mixed-comments file is located explicitly.
#[cfg(test)]
#[test]
fn test_with_no_deny_list_from_files() {
    let generated_docs = generate_docs_from_dir_no_deny("sql_files").unwrap();

    // All four fixture files must be present, in any order. Compare as
    // `Path`s (not raw `&str`) so the comparison is well-typed.
    let mut found: Vec<&Path> = generated_docs.iter().map(|(p, _)| p.as_path()).collect();
    found.sort_unstable();
    let mut expected: Vec<&Path> = [
        "sql_files/without_comments.sql",
        "sql_files/with_multiline_comments.sql",
        "sql_files/with_single_line_comments.sql",
        "sql_files/with_mixed_comments.sql",
    ]
    .iter()
    .map(Path::new)
    .collect();
    expected.sort_unstable();
    assert_eq!(found, expected);

    // Only the mixed-comments fixture carries documented tables; find it
    // by path instead of relying on its position.
    let mixed = generated_docs
        .iter()
        .find(|(p, _)| p.as_path() == Path::new("sql_files/with_mixed_comments.sql"))
        .map(|(_, docs)| docs)
        .expect("mixed-comments fixture should be present");

    let table_names = ["users", "posts"];
    let table_comments =
        ["Users table stores user account information", "Posts table stores blog posts"];
    let user_columns = ["id", "username", "email", "created_at"];
    let user_columns_comments =
        ["Primary key", "Username for login", "Email address", "When the user registered"];

    for (t, table) in mixed.tables().iter().enumerate() {
        assert_eq!(table.name(), table_names[t]);
        assert_eq!(table.doc().as_ref().unwrap(), table_comments[t]);
        if table.name() == "users" {
            for (c, column) in table.columns().iter().enumerate() {
                assert_eq!(column.name(), user_columns[c]);
                assert_eq!(column.doc().as_ref().unwrap(), user_columns_comments[c]);
            }
        }
    }
}

0 commit comments

Comments
 (0)