Mirror of https://github.com/rustfs/rustfs.git
@@ -14,7 +14,7 @@ use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 use std::sync::Arc;
 use time::OffsetDateTime;
-use tracing::{error, warn};
+use tracing::{error, info};

 use crate::config::common::{read_config, save_config};
 use crate::error::{Error, Result};
@@ -358,11 +358,12 @@ pub async fn load_bucket_metadata_parse(api: Arc<ECStore>, bucket: &str, parse:
     let mut bm = match read_bucket_metadata(api.clone(), bucket).await {
         Ok(res) => res,
         Err(err) => {
-            warn!("load_bucket_metadata_parse err {:?}", &err);
             if !config::error::is_not_found(&err) {
                 return Err(err);
             }

+            info!("bucketmeta {} not found with err {:?}, start to init ", bucket, &err);
+
             BucketMetadata::new(bucket)
         }
     };
@@ -761,7 +761,7 @@ impl LocalDisk {
             Ok(res) => res,
             Err(e) => {
                 if !DiskError::VolumeNotFound.is(&e) && !is_err_file_not_found(&e) {
-                    error!("scan list_dir {}, err {:?}", &current, &e);
+                    warn!("scan list_dir {}, err {:?}", &current, &e);
                 }

                 if opts.report_notfound && is_err_file_not_found(&e) && current == &opts.base_dir {
@@ -1443,7 +1443,7 @@ impl DiskAPI for LocalDisk {
         check_path_length(file_path.to_string_lossy().to_string().as_str())?;

         // TODO: writeAllDirect io.copy
-        info!("file_path: {:?}", file_path);
+        // info!("file_path: {:?}", file_path);
         if let Some(parent) = file_path.parent() {
             os::make_dir_all(parent, &volume_dir).await?;
         }
@@ -28,6 +28,7 @@ pub mod store_api;
 pub mod store_err;
 mod store_init;
 pub mod store_list_objects;
+mod store_utils;
 pub mod utils;
 pub mod xhttp;

@@ -3957,6 +3957,8 @@ impl StorageAPI for SetDisks {
         }
         let (fi, _, _) = self.get_object_fileinfo(bucket, object, opts, false).await?;

+        // warn!("get object_info fi {:?}", &fi);
+
         let oi = fi.to_object_info(bucket, object, opts.versioned || opts.version_suspended);

         Ok(oi)
@@ -4018,14 +4020,14 @@ impl StorageAPI for SetDisks {
         let obj_info = fi.to_object_info(bucket, object, opts.versioned || opts.version_suspended);

         if let Some(ref mut metadata) = fi.metadata {
-            for (k, v) in obj_info.user_defined {
+            for (k, v) in obj_info.user_defined.unwrap_or_default() {
                 metadata.insert(k, v);
             }
             fi.metadata = Some(metadata.clone())
         } else {
             let mut metadata = HashMap::new();

-            for (k, v) in obj_info.user_defined {
+            for (k, v) in obj_info.user_defined.unwrap_or_default() {
                 metadata.insert(k, v);
             }

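The two loops above copy every user-defined entry into `fi.metadata`, with `unwrap_or_default()` turning a missing map into an empty one so the loop becomes a no-op. The same merge can be written without the explicit loop; the sketch below is illustrative only (simplified standalone types, and the helper name `merge_user_defined` is made up here), not the rustfs code:

```rust
use std::collections::HashMap;

// Minimal sketch: merge optional user-defined metadata into an optional
// metadata map, mirroring the unwrap_or_default() pattern in the hunk above.
fn merge_user_defined(
    metadata: &mut Option<HashMap<String, String>>,
    user_defined: Option<HashMap<String, String>>,
) {
    // unwrap_or_default() turns None into an empty map, so extending with it is a no-op.
    metadata
        .get_or_insert_with(HashMap::new)
        .extend(user_defined.unwrap_or_default());
}

fn main() {
    let mut metadata = Some(HashMap::from([("etag".to_string(), "abc".to_string())]));
    let user_defined = Some(HashMap::from([("color".to_string(), "red".to_string())]));
    merge_user_defined(&mut metadata, user_defined);
    assert_eq!(metadata.unwrap().len(), 2);
}
```

With `get_or_insert_with`, the `if let / else` split over `fi.metadata` collapses into a single call; whether that reads better than the explicit branches is a style call.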
@@ -1,4 +1,5 @@
 use crate::heal::heal_ops::HealSequence;
+use crate::store_utils::clean_metadata;
 use crate::{
     disk::DiskStore,
     error::{Error, Result},
@@ -171,6 +172,7 @@ impl FileInfo {
             version_id = Some(Uuid::nil())
         }

+        // etag
         let (content_type, content_encoding, etag) = {
             if let Some(ref meta) = self.metadata {
                 let content_type = meta.get("content-type").cloned();
@@ -182,6 +184,7 @@
                 (None, None, None)
             }
         };
+        // tags
         let user_tags = self
             .metadata
             .as_ref()
@@ -196,6 +199,15 @@

         let inlined = self.inline_data();

+        // TODO:expires
+        // TODO:ReplicationState
+        // TODO:TransitionedObject
+
+        let metadata = self.metadata.clone().map(|mut v| {
+            clean_metadata(&mut v);
+            v
+        });
+
         ObjectInfo {
             bucket: bucket.to_string(),
             name,
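The added block clones `self.metadata` and scrubs it in place inside `map()` before handing it to `ObjectInfo`. A minimal standalone sketch of that shape follows; the body of `clean_metadata` here is a stand-in (the keys rustfs actually strips are not shown in this hunk), so treat it as illustrative only:

```rust
use std::collections::HashMap;

// Illustrative stand-in for store_utils::clean_metadata: drop internal keys
// before exposing metadata. The prefix used here is an assumption.
fn clean_metadata(meta: &mut HashMap<String, String>) {
    meta.retain(|k, _| !k.starts_with("x-rustfs-internal-"));
}

fn main() {
    let metadata: Option<HashMap<String, String>> = Some(HashMap::from([
        ("content-type".to_string(), "text/plain".to_string()),
        ("x-rustfs-internal-actual-size".to_string(), "42".to_string()),
    ]));

    // Same shape as the hunk above: clone, clean in place inside map(), return the map.
    let cleaned = metadata.clone().map(|mut v| {
        clean_metadata(&mut v);
        v
    });

    assert_eq!(cleaned.unwrap().len(), 1);
}
```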
@@ -215,6 +227,7 @@
             successor_mod_time: self.successor_mod_time,
             etag,
             inlined,
+            user_defined: metadata,
             ..Default::default()
         }
     }
@@ -627,7 +640,7 @@ pub struct ObjectInfo {
     // Actual size is the real size of the object uploaded by client.
     pub actual_size: Option<usize>,
     pub is_dir: bool,
-    pub user_defined: HashMap<String, String>,
+    pub user_defined: Option<HashMap<String, String>>,
     pub parity_blocks: usize,
     pub data_blocks: usize,
     pub version_id: Option<Uuid>,
@@ -646,8 +659,11 @@ pub struct ObjectInfo {

 impl ObjectInfo {
     pub fn is_compressed(&self) -> bool {
-        self.user_defined
-            .contains_key(&format!("{}compression", RESERVED_METADATA_PREFIX))
+        if let Some(meta) = &self.user_defined {
+            meta.contains_key(&format!("{}compression", RESERVED_METADATA_PREFIX))
+        } else {
+            false
+        }
     }

     pub fn get_actual_size(&self) -> Result<usize> {
@@ -656,13 +672,16 @@ impl ObjectInfo {
         }

         if self.is_compressed() {
-            if let Some(size_str) = self.user_defined.get(&format!("{}actual-size", RESERVED_METADATA_PREFIX)) {
-                if !size_str.is_empty() {
-                    // Todo: deal with error
-                    let size = size_str.parse::<usize>()?;
-                    return Ok(size);
+            if let Some(meta) = &self.user_defined {
+                if let Some(size_str) = meta.get(&format!("{}actual-size", RESERVED_METADATA_PREFIX)) {
+                    if !size_str.is_empty() {
+                        // Todo: deal with error
+                        let size = size_str.parse::<usize>()?;
+                        return Ok(size);
+                    }
                 }
             }

             let mut actual_size = 0;
             self.parts.iter().for_each(|part| {
                 actual_size += part.actual_size;
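Both methods now have to go through the `Option` before touching the map, which is why the flat `contains_key`/`get` calls grow an extra `if let` layer. The sketch below shows the same two lookups written with `Option` combinators; it is a standalone illustration (the prefix value and the free-function shapes are assumptions, not the rustfs definitions):

```rust
use std::collections::HashMap;

// Hypothetical constant for illustration; rustfs defines RESERVED_METADATA_PREFIX elsewhere.
const RESERVED_METADATA_PREFIX: &str = "x-rustfs-internal-";

// Option-aware lookups using combinators instead of nested if-lets;
// `user_defined` stands in for ObjectInfo::user_defined.
fn is_compressed(user_defined: &Option<HashMap<String, String>>) -> bool {
    user_defined
        .as_ref()
        .is_some_and(|meta| meta.contains_key(&format!("{RESERVED_METADATA_PREFIX}compression")))
}

fn stored_actual_size(user_defined: &Option<HashMap<String, String>>) -> Option<usize> {
    user_defined
        .as_ref()
        .and_then(|meta| meta.get(&format!("{RESERVED_METADATA_PREFIX}actual-size")))
        .filter(|s| !s.is_empty())
        .and_then(|s| s.parse::<usize>().ok())
}
```

Whether the nested `if let`s or the combinator chain reads better is a style call; both treat a missing `user_defined` map as "not compressed" and "no stored size".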
@@ -245,6 +245,8 @@ impl ECStore {
             ..Default::default()
         };

+        // warn!("list_objects_generic opts {:?}", &opts);
+
         // use get
         if !opts.prefix.is_empty() && opts.limit == 1 && opts.marker.is_none() {
             match self
@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};

 use crate::heal_commands::HealResultItem;

-#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, Default)]
 pub struct TraceType(u64);

 impl TraceType {
@@ -34,12 +34,6 @@ impl TraceType {
         }
     }

-impl Default for TraceType {
-    fn default() -> Self {
-        Self(0)
-    }
-}
-
 impl TraceType {
     pub fn contains(&self, x: &TraceType) -> bool {
         (self.0 & x.0) == x.0
@@ -54,12 +48,12 @@ impl TraceType {
     }

     pub fn merge(&mut self, other: &TraceType) {
-        self.0 = self.0 | other.0
+        self.0 |= other.0
     }

     pub fn set_if(&mut self, b: bool, other: &TraceType) {
         if b {
-            self.0 = self.0 | other.0
+            self.0 |= other.0
         }
     }

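Dropping the hand-written `Default` impl works because `#[derive(Default)]` on a tuple struct defaults each field, so the derived value is `TraceType(0)` exactly as before, and `|=` is the compound-assignment form of the same bitwise OR. A small self-contained sketch of the pattern, using an illustrative `Flags` type rather than the real `TraceType`:

```rust
// Minimal sketch of the same bitflag pattern on a standalone type.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
struct Flags(u64);

impl Flags {
    fn contains(&self, x: &Flags) -> bool {
        (self.0 & x.0) == x.0
    }

    fn merge(&mut self, other: &Flags) {
        self.0 |= other.0; // same result as self.0 = self.0 | other.0
    }

    fn set_if(&mut self, b: bool, other: &Flags) {
        if b {
            self.0 |= other.0;
        }
    }
}

fn main() {
    // #[derive(Default)] on a tuple struct defaults every field, so Flags::default() == Flags(0).
    let mut t = Flags::default();
    assert_eq!(t, Flags(0));
    t.merge(&Flags(0b10));
    t.set_if(true, &Flags(0b01));
    assert!(t.contains(&Flags(0b11)));
}
```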
@@ -52,6 +52,7 @@ use transform_stream::AsyncTryStream;
 use uuid::Uuid;

 use crate::storage::error::to_s3_error;
+use crate::storage::options::extract_metadata_from_mime;

 macro_rules! try_ {
     ($result:expr) => {
@@ -411,7 +412,7 @@ impl S3 for FS {
         };
         let last_modified = info.mod_time.map(Timestamp::from);

-        let metadata = Some(info.user_defined);
+        let metadata = info.user_defined;

         let output = HeadObjectOutput {
             content_length: Some(try_!(i64::try_from(info.size))),
@@ -642,6 +643,7 @@ impl S3 for FS {
             key,
             content_length,
             tagging,
+            metadata,
             ..
         } = input;

@@ -667,7 +669,10 @@ impl S3 for FS {
             return Err(S3Error::with_message(S3ErrorCode::InternalError, "Not init".to_string()));
         };

-        let mut metadata = extract_metadata(&req.headers);
+        let mut metadata = metadata.unwrap_or_default();
+
+        extract_metadata_from_mime(&req.headers, &mut metadata);

         if let Some(tags) = tagging {
             metadata.insert(xhttp::AMZ_OBJECT_TAGGING.to_owned(), tags);
         }
@@ -130,15 +130,19 @@ pub fn extract_metadata(headers: &HeaderMap<HeaderValue>) -> HashMap<String, Str
     metadata
 }

-fn extract_metadata_from_mime(headers: &HeaderMap<HeaderValue>, metadata: &mut HashMap<String, String>) {
+pub fn extract_metadata_from_mime(headers: &HeaderMap<HeaderValue>, metadata: &mut HashMap<String, String>) {
     for (k, v) in headers.iter() {
-        if k.as_str().starts_with("x-amz-meta-") {
-            metadata.insert(k.to_string(), String::from_utf8_lossy(v.as_bytes()).to_string());
+        if let Some(key) = k.as_str().strip_prefix("x-amz-meta-") {
+            if key.is_empty() {
+                continue;
+            }
+
+            metadata.insert(key.to_owned(), String::from_utf8_lossy(v.as_bytes()).to_string());
+            continue;
         }

-        if k.as_str().starts_with("x-rustfs-meta-") {
-            metadata.insert(k.to_string(), String::from_utf8_lossy(v.as_bytes()).to_string());
+        if let Some(key) = k.as_str().strip_prefix("x-rustfs-meta-") {
+            metadata.insert(key.to_owned(), String::from_utf8_lossy(v.as_bytes()).to_string());
+            continue;
         }
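The `strip_prefix` rewrite changes what key ends up in the map: the old code stored the full header name (prefix included), while the new code stores only the user-defined suffix and, for `x-amz-meta-`, skips a bare prefix with no suffix. Below is a standalone sketch of that behavior; it assumes the `http` crate's `HeaderMap`/`HeaderValue` (matching the signature above) and, unlike the hunk, guards the empty suffix for both prefixes:

```rust
use std::collections::HashMap;

use http::{HeaderMap, HeaderValue};

// Standalone sketch of the strip_prefix-based extraction; a plain &str key is
// used instead of http::HeaderName to keep the example short.
fn insert_user_meta(metadata: &mut HashMap<String, String>, name: &str, value: &HeaderValue) {
    for prefix in ["x-amz-meta-", "x-rustfs-meta-"] {
        if let Some(key) = name.strip_prefix(prefix) {
            // strip_prefix keeps only the user-defined suffix; skip bare prefixes.
            if !key.is_empty() {
                metadata.insert(key.to_owned(), String::from_utf8_lossy(value.as_bytes()).to_string());
            }
            return;
        }
    }
}

fn main() {
    let mut headers = HeaderMap::new();
    headers.insert("x-amz-meta-color", HeaderValue::from_static("red"));

    let mut metadata = HashMap::new();
    for (k, v) in headers.iter() {
        insert_user_meta(&mut metadata, k.as_str(), v);
    }
    // The prefix is stripped, so the stored key is "color", not "x-amz-meta-color".
    assert_eq!(metadata.get("color").map(String::as_str), Some("red"));
}
```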