Mirror of https://github.com/rustfs/rustfs.git (synced 2026-01-16 17:20:33 +00:00)
fix: content-range (#216)
@@ -43,15 +43,16 @@ pub async fn create_bitrot_reader(
 ) -> disk::error::Result<Option<BitrotReader<Box<dyn AsyncRead + Send + Sync + Unpin>>>> {
     // Calculate the total length to read, including the checksum overhead
     let length = length.div_ceil(shard_size) * checksum_algo.size() + length;
-
+    let offset = offset.div_ceil(shard_size) * checksum_algo.size() + offset;
     if let Some(data) = inline_data {
         // Use inline data
-        let rd = Cursor::new(data.to_vec());
+        let mut rd = Cursor::new(data.to_vec());
+        rd.set_position(offset as u64);
         let reader = BitrotReader::new(Box::new(rd) as Box<dyn AsyncRead + Send + Sync + Unpin>, shard_size, checksum_algo);
         Ok(Some(reader))
     } else if let Some(disk) = disk {
         // Read from disk
-        match disk.read_file_stream(bucket, path, offset, length).await {
+        match disk.read_file_stream(bucket, path, offset, length - offset).await {
             Ok(rd) => {
                 let reader = BitrotReader::new(rd, shard_size, checksum_algo);
                 Ok(Some(reader))
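Side note: the arithmetic above assumes each shard_size-byte shard is stored with a checksum_algo.size()-byte checksum in front of it, so a logical offset/length is inflated by one checksum per (partial) shard before touching the disk. A minimal self-contained sketch of that adjustment (illustrative only, not the repository's code):

// Illustrative sketch: mirror of the offset/length adjustment in create_bitrot_reader,
// assuming every shard_size-byte shard is prefixed by a checksum_size-byte checksum.
fn bitrot_adjusted(offset: usize, length: usize, shard_size: usize, checksum_size: usize) -> (usize, usize) {
    let adj_offset = offset.div_ceil(shard_size) * checksum_size + offset;
    let adj_length = length.div_ceil(shard_size) * checksum_size + length;
    (adj_offset, adj_length)
}

fn main() {
    // e.g. 1 MiB shards with a 32-byte HighwayHash256 checksum per shard (assumed sizes)
    let (off, len) = bitrot_adjusted(2 << 20, 4 << 20, 1 << 20, 32);
    assert_eq!(off, (2 << 20) + 2 * 32);
    assert_eq!(len, (4 << 20) + 4 * 32);
    // The on-disk read then covers len - off checksum-inclusive bytes starting at off.
    println!("read {} bytes starting at {}", len - off, off);
}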
@@ -1611,11 +1611,6 @@ impl DiskAPI for LocalDisk {
 
     #[tracing::instrument(level = "debug", skip(self))]
     async fn read_file_stream(&self, volume: &str, path: &str, offset: usize, length: usize) -> Result<FileReader> {
-        // warn!(
-        //     "disk read_file_stream: volume: {}, path: {}, offset: {}, length: {}",
-        //     volume, path, offset, length
-        // );
-
         let volume_dir = self.get_bucket_path(volume)?;
         if !skip_access_checks(volume) {
             access(&volume_dir)
@@ -2123,6 +2123,8 @@ impl SetDisks {
 
         let till_offset = erasure.shard_file_offset(part_offset, part_length, part_size);
 
+        let read_offset = (part_offset / erasure.block_size) * erasure.shard_size();
+
         let mut readers = Vec::with_capacity(disks.len());
         let mut errors = Vec::with_capacity(disks.len());
         for (idx, disk_op) in disks.iter().enumerate() {
@@ -2131,7 +2133,7 @@ impl SetDisks {
                 disk_op.as_ref(),
                 bucket,
                 &format!("{}/{}/part.{}", object, files[idx].data_dir.unwrap_or_default(), part_number),
-                part_offset,
+                read_offset,
                 till_offset,
                 erasure.shard_size(),
                 HashAlgorithm::HighwayHash256,
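Side note: read_offset rounds the part offset down to an erasure block boundary and converts it into a per-disk shard offset, which is what the bitrot reader above now receives instead of the raw part_offset. A rough sketch with assumed block and shard sizes (illustrative only):

// Illustrative sketch only; the block_size and shard_size values are assumptions.
fn shard_read_offset(part_offset: usize, block_size: usize, shard_size: usize) -> usize {
    (part_offset / block_size) * shard_size
}

fn main() {
    let block_size = 1 << 20;       // one 1 MiB stripe of data spread across the data disks
    let shard_size = (1 << 20) / 4; // with 4 data disks, each holds 256 KiB of that stripe
    // A part offset of 2.5 MiB falls in the third block, so every disk starts
    // reading at 2 * 256 KiB = 512 KiB into its part file.
    assert_eq!(shard_read_offset((5 << 20) / 2, block_size, shard_size), 2 * (256 << 10));
}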
@@ -201,7 +201,7 @@ impl GetObjectReader {
     }
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub struct HTTPRangeSpec {
     pub is_suffix_length: bool,
     pub start: i64,
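Side note: HTTPRangeSpec models an HTTP Range header (a start/end pair or a suffix length), and it now derives Clone because the handler below both passes it to get_object_reader and reuses it to build Content-Range. The sketch below is only an illustrative stand-in for resolving such a spec against an object size; the end field, the suffix encoding, and the error handling are assumptions, not the crate's actual get_offset_length:

// Illustrative stand-in only; not the repository's HTTPRangeSpec implementation.
struct RangeSpec { is_suffix_length: bool, start: i64, end: i64 }

fn offset_length(rs: &RangeSpec, total: i64) -> Option<(i64, i64)> {
    if rs.is_suffix_length {
        // "bytes=-N": the last N bytes of the object (capped at the object size)
        let n = rs.start.min(total);
        Some((total - n, n))
    } else {
        if rs.start >= total { return None; }
        // "bytes=A-B"; an end of -1 means "to the end of the object"
        let end = if rs.end < 0 || rs.end >= total { total - 1 } else { rs.end };
        Some((rs.start, end - rs.start + 1))
    }
}

fn main() {
    assert_eq!(offset_length(&RangeSpec { is_suffix_length: true, start: 100, end: -1 }, 1000), Some((900, 100)));
    assert_eq!(offset_length(&RangeSpec { is_suffix_length: false, start: 0, end: 499 }, 1000), Some((0, 500)));
}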
@@ -770,18 +770,18 @@ impl S3 for FS {
         };
 
         let reader = store
-            .get_object_reader(bucket.as_str(), key.as_str(), rs, h, &opts)
+            .get_object_reader(bucket.as_str(), key.as_str(), rs.clone(), h, &opts)
             .await
             .map_err(ApiError::from)?;
 
         let info = reader.object_info;
         let event_info = info.clone();
         let content_type = {
-            if let Some(content_type) = info.content_type {
-                match ContentType::from_str(&content_type) {
+            if let Some(content_type) = &info.content_type {
+                match ContentType::from_str(content_type) {
                     Ok(res) => Some(res),
                     Err(err) => {
-                        error!("parse content-type err {} {:?}", &content_type, err);
+                        error!("parse content-type err {} {:?}", content_type, err);
                         //
                         None
                     }
@@ -797,11 +797,29 @@ impl S3 for FS {
             info.size as usize,
         )));
 
+        let mut rs = rs;
+
+        if let Some(part_number) = part_number {
+            if rs.is_none() {
+                rs = HTTPRangeSpec::from_object_info(&info, part_number);
+            }
+        }
+
+        let content_range = if let Some(rs) = rs {
+            let total_size = info.get_actual_size().map_err(ApiError::from)?;
+            let (start, length) = rs.get_offset_length(total_size as i64).map_err(ApiError::from)?;
+            Some(format!("bytes {}-{}/{}", start, start as i64 + length - 1, total_size))
+        } else {
+            None
+        };
+
         let output = GetObjectOutput {
             body,
             content_length: Some(info.size as i64),
             last_modified,
             content_type,
+            accept_ranges: Some("bytes".to_string()),
+            content_range,
             ..Default::default()
         };
 
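Side note: the content_range value built above follows the "bytes first-last/complete" form used in 206 Partial Content responses (RFC 9110 §14.4, formerly RFC 7233). A minimal illustrative helper, separate from the handler code:

// Illustrative helper (not part of the codebase): format a Content-Range value for a
// satisfied range of `length` bytes starting at `start` within a `total_size`-byte object.
fn content_range(start: i64, length: i64, total_size: i64) -> String {
    format!("bytes {}-{}/{}", start, start + length - 1, total_size)
}

fn main() {
    // A request for "bytes=100-199" of a 1000-byte object:
    assert_eq!(content_range(100, 100, 1000), "bytes 100-199/1000");
}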