This is an automated email from the ASF dual-hosted git repository.
xuanwo pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/opendal.git
The following commit(s) were added to refs/heads/main by this push:
new fae5526bb fix(services/gdrive): include size and modifiedTime in
list() metadata (#7058)
fae5526bb is described below
commit fae5526bb639463101b89f46c6995a9b3726266f
Author: mro68 <[email protected]>
AuthorDate: Thu Dec 18 17:22:28 2025 +0100
fix(services/gdrive): include size and modifiedTime in list() metadata
(#7058)
The Google Drive backend was returning size=0 for all files during
list() operations because:
1. The API fields parameter didn't request 'size' and 'modifiedTime'
2. The lister didn't map these fields to Metadata
This fix:
- Adds size and modifiedTime to the API fields parameter in gdrive_list()
- Maps these fields in lister.rs (matching the stat() implementation in
backend.rs)
This ensures list() returns the same metadata as stat() for consistency.
Fixes #7057
---
core/core/src/services/gdrive/core.rs | 4 ++++
core/core/src/services/gdrive/lister.rs | 16 +++++++++++++++-
2 files changed, 19 insertions(+), 1 deletion(-)
diff --git a/core/core/src/services/gdrive/core.rs b/core/core/src/services/gdrive/core.rs
index b854969fc..779e24229 100644
--- a/core/core/src/services/gdrive/core.rs
+++ b/core/core/src/services/gdrive/core.rs
@@ -102,6 +102,10 @@ impl GdriveCore {
let mut url = QueryPairsWriter::new(url);
url = url.push("pageSize", &page_size.to_string());
url = url.push("q", &percent_encode_path(&q));
+ url = url.push(
+ "fields",
+ "nextPageToken,files(id,name,mimeType,size,modifiedTime)",
+ );
if !next_page_token.is_empty() {
url = url.push("pageToken", next_page_token);
};
diff --git a/core/core/src/services/gdrive/lister.rs b/core/core/src/services/gdrive/lister.rs
index 7719f986f..7f6ff31dc 100644
--- a/core/core/src/services/gdrive/lister.rs
+++ b/core/core/src/services/gdrive/lister.rs
@@ -101,7 +101,21 @@ impl oio::PageList for GdriveLister {
self.core.path_cache.insert(&path, &file.id).await;
}
-            let entry = oio::Entry::new(&normalized_path, Metadata::new(file_type));
+            let mut metadata = Metadata::new(file_type).with_content_type(file.mime_type.clone());
+            if let Some(size) = file.size {
+                metadata = metadata.with_content_length(size.parse::<u64>().map_err(|e| {
+                    Error::new(ErrorKind::Unexpected, "parse content length").set_source(e)
+                })?);
+            }
+            if let Some(modified_time) = file.modified_time {
+                metadata = metadata.with_last_modified(
+                    modified_time.parse::<Timestamp>().map_err(|e| {
+                        Error::new(ErrorKind::Unexpected, "parse last modified time").set_source(e)
+                    })?,
+                );
+            }
+
+            let entry = oio::Entry::new(&normalized_path, metadata);
ctx.entries.push_back(entry);
}