Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions docs/docs/sources/googledrive.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,16 @@ The spec takes the following fields:
* `service_account_credential_path` (`str`): full path to the service account credential file in JSON format.
* `root_folder_ids` (`list[str]`): a list of Google Drive folder IDs to import files from.
* `binary` (`bool`, optional): whether reading files as binary (instead of text).
* `included_patterns` (`list[str]`, optional): a list of glob patterns to include files, e.g. `["*.txt", "docs/**/*.md"]`.
If not specified, all files will be included.
* `excluded_patterns` (`list[str]`, optional): a list of glob patterns to exclude files, e.g. `["tmp", "**/node_modules"]`.
Any file or directory matching these patterns will be excluded even if they match `included_patterns`.
If not specified, no files will be excluded.
* `recent_changes_poll_interval` (`datetime.timedelta`, optional): when set, this source provides a change capture mechanism by periodically polling Google Drive for recently modified files.

:::info

`included_patterns` and `excluded_patterns` use Unix-style glob syntax. See [globset syntax](https://docs.rs/globset/latest/globset/index.html#syntax) for the details.

:::

:::info
Expand Down
22 changes: 18 additions & 4 deletions src/ops/sources/google_drive.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ use hyper_rustls::HttpsConnector;
use hyper_util::client::legacy::connect::HttpConnector;
use phf::phf_map;

use super::shared::pattern_matcher::PatternMatcher;
use crate::base::field_attrs;
use crate::ops::sdk::*;

Expand Down Expand Up @@ -59,13 +60,16 @@ pub struct Spec {
binary: bool,
root_folder_ids: Vec<String>,
recent_changes_poll_interval: Option<std::time::Duration>,
included_patterns: Option<Vec<String>>,
excluded_patterns: Option<Vec<String>>,
}

/// Runtime state for the Google Drive source, built from a validated `Spec`.
struct Executor {
    /// Authenticated Google Drive API client.
    drive_hub: DriveHub<HttpsConnector<HttpConnector>>,
    /// Whether file contents are read as binary instead of text (from `Spec::binary`).
    binary: bool,
    /// Deduplicated set of root folder IDs to import files from.
    root_folder_ids: IndexSet<Arc<str>>,
    /// Poll interval for the recent-changes capture mechanism; `None` disables polling.
    recent_updates_poll_interval: Option<std::time::Duration>,
    /// Glob-based include/exclude filter built from `Spec::included_patterns`
    /// and `Spec::excluded_patterns`.
    pattern_matcher: PatternMatcher,
}

impl Executor {
Expand All @@ -87,11 +91,13 @@ impl Executor {
.build(),
);
let drive_hub = DriveHub::new(client, auth);
let pattern_matcher = PatternMatcher::new(spec.included_patterns, spec.excluded_patterns)?;
Ok(Self {
drive_hub,
binary: spec.binary,
root_folder_ids: spec.root_folder_ids.into_iter().map(Arc::from).collect(),
recent_updates_poll_interval: spec.recent_changes_poll_interval,
pattern_matcher,
})
}
}
Expand Down Expand Up @@ -119,16 +125,24 @@ impl Executor {
if file.trashed == Some(true) {
return Ok(None);
}
let (id, mime_type) = match (file.id, file.mime_type) {
(Some(id), Some(mime_type)) => (Arc::<str>::from(id), mime_type),
(id, mime_type) => {
warn!("Skipping file with incomplete metadata: id={id:?}, mime_type={mime_type:?}",);
let (id, mime_type, name) = match (file.id, file.mime_type, file.name) {
(Some(id), Some(mime_type), Some(name)) => (Arc::<str>::from(id), mime_type, name),
(id, mime_type, name) => {
warn!(
"Skipping file with incomplete metadata: id={id:?}, mime_type={mime_type:?}, name={name:?}"
);
return Ok(None);
}
};
if !seen_ids.insert(id.clone()) {
return Ok(None);
}
// Apply the configured glob filters to the file name: skip files that do not
// match `included_patterns`, and skip files matching `excluded_patterns`
// (exclusion wins even when a file also matches an include pattern).
// NOTE(review): the original conditions were inverted — they returned early
// for files that SHOULD be processed and kept the ones that should be
// filtered out.
if !self.pattern_matcher.is_file_included(&name) {
    return Ok(None);
}
if self.pattern_matcher.is_excluded(&name) {
    return Ok(None);
}
let result = if mime_type == FOLDER_MIME_TYPE {
new_folder_ids.push(id);
None
Expand Down
Loading