opendal/services/onedrive/writer.rs

use std::sync::Arc;

use bytes::Buf;
use bytes::Bytes;
use http::StatusCode;

use super::core::OneDriveCore;
use super::error::parse_error;
use super::graph_model::{OneDriveItem, OneDriveUploadSessionCreationResponseBody};
use crate::raw::*;
use crate::*;

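/// One-shot writer for OneDrive files.
///
/// Payloads up to `MAX_SIMPLE_SIZE` are written with a single simple upload
/// request; larger payloads go through a chunked upload session.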
pub struct OneDriveWriter {
    core: Arc<OneDriveCore>,
    op: OpWrite,
    path: String,
}

impl OneDriveWriter {
    // Writes at or below this size use a single simple upload request;
    // anything larger goes through an upload session.
    const MAX_SIMPLE_SIZE: usize = 4 * 1024 * 1024;

    // OneDrive expects upload-session fragments to be a multiple of
    // 320 KiB (327,680 bytes); twelve multiples keeps each chunk just
    // under 4 MiB.
    const CHUNK_SIZE_FACTOR: usize = 327_680 * 12;

    pub fn new(core: Arc<OneDriveCore>, op: OpWrite, path: String) -> Self {
        OneDriveWriter { core, op, path }
    }
}

impl oio::OneShotWrite for OneDriveWriter {
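    // Choose the upload strategy by payload size: a single request for small
    // buffers, a chunked upload session otherwise.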
    async fn write_once(&self, bs: Buffer) -> Result<Metadata> {
        let size = bs.len();

        let meta = if size <= Self::MAX_SIMPLE_SIZE {
            self.write_simple(bs).await?
        } else {
            self.write_chunked(bs).await?
        };

        Ok(meta)
    }
}

impl OneDriveWriter {
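    // Upload the whole buffer in one request and build metadata from the
    // drive item that OneDrive returns.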
    async fn write_simple(&self, bs: Buffer) -> Result<Metadata> {
        let response = self
            .core
            .onedrive_upload_simple(&self.path, &self.op, bs)
            .await?;

        match response.status() {
            StatusCode::CREATED | StatusCode::OK => {
                let item: OneDriveItem = serde_json::from_reader(response.into_body().reader())
                    .map_err(new_json_deserialize_error)?;

                let mut meta = Metadata::new(EntryMode::FILE)
                    .with_etag(item.e_tag)
                    .with_content_length(item.size.max(0) as u64);

                let last_modified = item.last_modified_date_time;
                let date_utc_last_modified = parse_datetime_from_rfc3339(&last_modified)?;
                meta.set_last_modified(date_utc_last_modified);

                Ok(meta)
            }
            _ => Err(parse_error(response)),
        }
    }

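    // Upload through an upload session: create the session, then send the
    // buffer as fixed-size fragments to the returned upload URL. The final
    // fragment yields the created item, which becomes the returned metadata.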
    pub(crate) async fn write_chunked(&self, bs: Buffer) -> Result<Metadata> {
        let session_response = self.create_upload_session().await?;

        let mut offset = 0;
        let total_bytes = bs.to_bytes();
        let total_len = total_bytes.len();
        let chunks = total_bytes.chunks(OneDriveWriter::CHUNK_SIZE_FACTOR);

        for chunk in chunks {
            let mut end = offset + OneDriveWriter::CHUNK_SIZE_FACTOR;
            if end > total_bytes.len() {
                end = total_bytes.len();
            }
            // The byte range reported to the upload session is inclusive,
            // so the last index of this chunk is one before `end`.
            let chunk_end = end - 1;

            let response = self
                .core
                .onedrive_chunked_upload(
                    &session_response.upload_url,
                    &self.op,
                    offset,
                    chunk_end,
                    total_len,
                    Buffer::from(Bytes::copy_from_slice(chunk)),
                )
                .await?;

            match response.status() {
                // The fragment was accepted; keep sending the remaining chunks.
                StatusCode::ACCEPTED | StatusCode::OK => {}
                // The final fragment completed the upload; the response body
                // describes the resulting drive item.
                StatusCode::CREATED => {
                    let item: OneDriveItem =
                        serde_json::from_reader(response.into_body().reader())
                            .map_err(new_json_deserialize_error)?;

                    let mut meta = Metadata::new(EntryMode::FILE)
                        .with_etag(item.e_tag)
                        .with_content_length(item.size.max(0) as u64);

                    let last_modified = item.last_modified_date_time;
                    let date_utc_last_modified = parse_datetime_from_rfc3339(&last_modified)?;
                    meta.set_last_modified(date_utc_last_modified);
                    return Ok(meta);
                }
                _ => return Err(parse_error(response)),
            }

            offset += OneDriveWriter::CHUNK_SIZE_FACTOR;
        }

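        // A completed upload should have returned from the CREATED arm above;
        // this fallback only guards against an unexpected response sequence.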
        debug_assert!(false, "should have returned");

        Ok(Metadata::default())
    }

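    // Create an upload session for this path; the response carries the
    // upload URL that the chunked uploads above are sent to.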
    async fn create_upload_session(&self) -> Result<OneDriveUploadSessionCreationResponseBody> {
        let response = self
            .core
            .onedrive_create_upload_session(&self.path, &self.op)
            .await?;
        match response.status() {
            StatusCode::OK => {
                let bs = response.into_body();
                let result: OneDriveUploadSessionCreationResponseBody =
                    serde_json::from_reader(bs.reader()).map_err(new_json_deserialize_error)?;
                Ok(result)
            }
            _ => Err(parse_error(response)),
        }
    }
}