diff --git a/src/api2/backup.rs b/src/api2/backup.rs
index 0d580b44..939ecf6d 100644
--- a/src/api2/backup.rs
+++ b/src/api2/backup.rs
@@ -154,8 +154,8 @@ pub fn backup_api() -> Router {
 
     let router = Router::new()
         .subdir(
-            "config", Router::new()
-                .upload(api_method_upload_config())
+            "blob", Router::new()
+                .upload(api_method_upload_blob())
         )
         .subdir(
             "dynamic_chunk", Router::new()
diff --git a/src/api2/backup/upload_chunk.rs b/src/api2/backup/upload_chunk.rs
index ce8fcdce..dfc1126d 100644
--- a/src/api2/backup/upload_chunk.rs
+++ b/src/api2/backup/upload_chunk.rs
@@ -86,8 +86,8 @@ pub fn api_method_upload_fixed_chunk() -> ApiAsyncMethod {
                 .maximum(1024*1024*16)
             )
             .required("encoded-size", IntegerSchema::new("Encoded chunk size.")
-                .minimum(9)
-                // fixme: .maximum(1024*1024*16+40)
+                .minimum((std::mem::size_of::<DataBlobHeader>() as isize)+1)
+                .maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
             )
     )
 }
@@ -142,8 +142,8 @@ pub fn api_method_upload_dynamic_chunk() -> ApiAsyncMethod {
                 .maximum(1024*1024*16)
             )
             .required("encoded-size", IntegerSchema::new("Encoded chunk size.")
-                .minimum(9)
-                // fixme: .maximum(1024*1024*16+40)
+                .minimum((std::mem::size_of::<DataBlobHeader>() as isize) +1)
+                .maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
             )
     )
 }
@@ -222,19 +222,19 @@ fn upload_speedtest(
     Ok(Box::new(resp))
 }
 
-pub fn api_method_upload_config() -> ApiAsyncMethod {
+pub fn api_method_upload_blob() -> ApiAsyncMethod {
     ApiAsyncMethod::new(
-        upload_config,
-        ObjectSchema::new("Upload configuration file.")
+        upload_blob,
+        ObjectSchema::new("Upload binary blob file.")
             .required("file-name", crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA.clone())
-            .required("size", IntegerSchema::new("File size.")
-                .minimum(1)
-                .maximum(1024*1024*16)
+            .required("encoded-size", IntegerSchema::new("Encoded blob size.")
+                .minimum((std::mem::size_of::<DataBlobHeader>() as isize) +1)
+                .maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
             )
     )
 }
 
-fn upload_config(
+fn upload_blob(
     _parts: Parts,
     req_body: Body,
     param: Value,
@@ -243,13 +243,9 @@ fn upload_config(
 ) -> Result<BoxFut, Error> {
 
     let mut file_name = tools::required_string_param(&param, "file-name")?.to_owned();
-    let size = tools::required_integer_param(&param, "size")? as usize;
+    let encoded_size = tools::required_integer_param(&param, "encoded-size")? as usize;
 
-    if !file_name.ends_with(".conf") {
-        bail!("wrong config file extension: '{}'", file_name);
-    } else {
-        file_name.push_str(".zstd");
-    }
+    file_name.push_str(".blob");
 
     let env: &BackupEnvironment = rpcenv.as_ref();
 
@@ -262,17 +258,24 @@ fn upload_config(
 
     let resp = req_body
         .map_err(Error::from)
-        .concat2()
+        .fold(Vec::new(), |mut acc, chunk| {
+            acc.extend_from_slice(&*chunk);
+            Ok::<_, Error>(acc)
+        })
         .and_then(move |data| {
-            if size != data.len() {
-                bail!("got configuration file with unexpected length ({} != {})", size, data.len());
+            if encoded_size != data.len() {
+                bail!("got blob with unexpected length ({} != {})", encoded_size, data.len());
             }
 
-            let data = zstd::block::compress(&data, 0)?;
+            let orig_len = data.len(); // fixme:
 
-            tools::file_set_contents(&path, &data, None)?;
+            let mut blob = DataBlob::from_raw(data)?;
+            // always comput CRC at server side
+            blob.set_crc(blob.compute_crc());
 
-            env2.debug(format!("upload config {:?} ({} bytes, comp: {})", path, size, data.len()));
+            tools::file_set_contents(&path, blob.raw_data(), None)?;
+
+            env2.debug(format!("upload blob {:?} ({} bytes, comp: {})", path, orig_len, encoded_size));
 
             Ok(())
         })
diff --git a/src/backup/data_blob.rs b/src/backup/data_blob.rs
index 8f56dce4..10d69b08 100644
--- a/src/backup/data_blob.rs
+++ b/src/backup/data_blob.rs
@@ -24,6 +24,11 @@ impl DataBlob {
         &self.raw_data
     }
 
+    /// Consume self and returns raw_data
+    pub fn into_inner(self) -> Vec<u8> {
+        self.raw_data
+    }
+
     /// accessor to chunk type (magic number)
     pub fn magic(&self) -> &[u8; 8] {
         self.raw_data[0..8].try_into().unwrap()
@@ -42,7 +47,7 @@ impl DataBlob {
     }
 
     /// compute the CRC32 checksum
-    pub fn compute_crc(&mut self) -> u32 {
+    pub fn compute_crc(&self) -> u32 {
         let mut hasher = crc32fast::Hasher::new();
         let start = std::mem::size_of::<DataBlobHeader>(); // start after HEAD
         hasher.update(&self.raw_data[start..]);
diff --git a/src/bin/proxmox-backup-client.rs b/src/bin/proxmox-backup-client.rs
index f2109ce9..f4386e6f 100644
--- a/src/bin/proxmox-backup-client.rs
+++ b/src/bin/proxmox-backup-client.rs
@@ -482,7 +482,7 @@ fn create_backup(
         match backup_type {
             BackupType::CONFIG => {
                 println!("Upload config file '{}' to '{:?}' as {}", filename, repo, target);
-                client.upload_config(&filename, &target).wait()?;
+                client.upload_blob(&filename, &target, crypt_config.clone(), true).wait()?;
             }
             BackupType::PXAR => {
                 println!("Upload directory '{}' to '{:?}' as {}", filename, repo, target);
diff --git a/src/client/http_client.rs b/src/client/http_client.rs
index c366ceef..c3006d74 100644
--- a/src/client/http_client.rs
+++ b/src/client/http_client.rs
@@ -452,10 +452,12 @@ impl BackupClient {
         self.canceller.take().unwrap().cancel();
     }
 
-    pub fn upload_config<P: AsRef<Path>>(
+    pub fn upload_blob<P: AsRef<Path>>(
         &self,
         src_path: P,
         file_name: &str,
+        crypt_config: Option<Arc<CryptConfig>>,
+        compress: bool,
     ) -> impl Future<Item=(), Error=Error> {
 
         let h2 = self.h2.clone();
@@ -464,13 +466,22 @@ impl BackupClient {
 
         let task = tokio::fs::File::open(src_path.clone())
             .map_err(move |err| format_err!("unable to open file {:?} - {}", src_path, err))
-            .and_then(|file| {
+            .and_then(move |file| {
                 let contents = vec![];
                 tokio::io::read_to_end(file, contents)
                     .map_err(Error::from)
                     .and_then(move |(_, contents)| {
-                        let param = json!({"size": contents.len(), "file-name": file_name });
-                        h2.upload("config", Some(param), contents)
+                        let blob = if let Some(ref crypt_config) = crypt_config {
+                            DataBlob::encode(&contents, Some(crypt_config), compress)?
+                        } else {
+                            DataBlob::encode(&contents, None, compress)?
+                        };
+                        let raw_data = blob.into_inner();
+                        Ok(raw_data)
+                    })
+                    .and_then(move |raw_data| {
+                        let param = json!({"encoded-size": raw_data.len(), "file-name": file_name });
+                        h2.upload("blob", Some(param), raw_data)
                             .map(|_| {})
                     })
             });