Skip to content

Commit

Permalink
Issue #264 Merge remote-tracking branch 'origin/264-backup-service-su…
Browse files Browse the repository at this point in the history
…pports-conditional-filtering-of-key-data' into main
  • Loading branch information
lurenpluto committed May 13, 2023
2 parents e53ba76 + 1e17b0e commit 89110cc
Show file tree
Hide file tree
Showing 12 changed files with 136 additions and 38 deletions.
1 change: 1 addition & 0 deletions src/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions src/component/cyfs-backup-lib/src/backup/uni_backup_task.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,4 +35,7 @@ pub struct UniBackupParams {
pub password: Option<ProtectedPassword>,

pub target_file: LocalFileBackupParam,

// Key data filters in glob format
pub key_data_filters: Vec<String>,
}
1 change: 1 addition & 0 deletions src/component/cyfs-backup/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ tide = "0.16"
http-types = "2.12"
surf = { version = '2.3', default-features = false, features = ['h1-client-rustls'] }
futures = "0.3"
globset = '0.4'

[dev-dependencies]
rand = "0.8"
4 changes: 2 additions & 2 deletions src/component/cyfs-backup/src/backup/uni_backup_task.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ impl UniBackupTask {
let uni_stat = UniBackupStat::new(self.noc.clone(), self.ndc.clone());
let uni_stat = uni_stat.stat().await?;

let keydata = KeyDataManager::new_uni(&params.isolate);
let keydata = KeyDataManager::new_uni(&params.isolate, &params.key_data_filters)?;
let keydata_stat = KeyDataBackupStat::new(keydata);
let keydata_stat = keydata_stat.stat();

Expand Down Expand Up @@ -225,7 +225,7 @@ impl UniBackupTask {
}

let keydata_meta = {
let keydata = KeyDataManager::new_uni(&params.isolate);
let keydata = KeyDataManager::new_uni(&params.isolate, &params.key_data_filters)?;
let keydata_backup = KeyDataBackupManager::new(keydata, data_writer);

keydata_backup.run().await.map_err(|e| {
Expand Down
19 changes: 12 additions & 7 deletions src/component/cyfs-backup/src/data/log.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ impl BackupLogFile {
AppendCount::new(1024),
ContentLimit::BytesSurpassed(1024 * 1024 * 10),
Compression::None,
#[cfg(unix)] None,
#[cfg(unix)]
None,
);

Self { writer }
Expand Down Expand Up @@ -58,17 +59,19 @@ impl BackupLogManager {
id: &ObjectId,
e: BuckyError,
) {
let t = if id.is_chunk_id() { "chunk" } else { "object" };

let msg = match isolate_id {
Some(isolate_id) => {
let dec_id = dec_id.unwrap();
if self.state_default_isolate == Some(*isolate_id) {
format!("[{}] [{}] {}\n", dec_id, id, e)
format!("[{}] [{}] [{}] {}\n", t, dec_id, id, e)
} else {
format!("[{}] [{}] [{}] {}\n", isolate_id, dec_id, id, e)
format!("[{}] [{}] [{}] [{}] {}\n", t, isolate_id, dec_id, id, e)
}
}
None => {
format!("[{}] {}\n", id, e)
format!("[{}] [{}] {}\n", t, id, e)
}
};

Expand All @@ -81,17 +84,19 @@ impl BackupLogManager {
dec_id: Option<&ObjectId>,
id: &ObjectId,
) {
let t = if id.is_chunk_id() { "chunk" } else { "object" };

let msg = match isolate_id {
Some(isolate_id) => {
let dec_id = dec_id.unwrap();
if self.state_default_isolate == Some(*isolate_id) {
format!("[{}] [{}]\n", dec_id, id,)
format!("[{}] [{}] [{}]\n", t, dec_id, id,)
} else {
format!("[{}] [{}] [{}]\n", isolate_id, dec_id, id)
format!("[{}] [{}] [{}] [{}]\n", t, isolate_id, dec_id, id)
}
}
None => {
format!("[{}]\n", id)
format!("[{}] [{}]\n", t, id)
}
};

Expand Down
29 changes: 16 additions & 13 deletions src/component/cyfs-backup/src/key_data/backup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,27 +5,23 @@ use crate::meta::{KeyDataMeta, KeyDataType};
use cyfs_base::*;
use cyfs_util::AsyncReadWithSeek;

use std::path::PathBuf;

/// Drives the backup of the configured key-data items, pushing each
/// surviving item's contents through the supplied backup data writer.
pub struct KeyDataBackupManager {
    // Owns the key-data item list, the cyfs root path and the exclusion
    // filters; all path resolution and filtering is delegated to it.
    key_data_manager: KeyDataManager,
    // Sink that receives the backed-up key-data chunks.
    data_writer: BackupDataWriterRef,
}

impl KeyDataBackupManager {
pub fn new(keydata: KeyDataManager, data_writer: BackupDataWriterRef) -> Self {
pub fn new(key_data_manager: KeyDataManager, data_writer: BackupDataWriterRef) -> Self {
Self {
cyfs_root: keydata.cyfs_root,
list: keydata.list,
key_data_manager,
data_writer,
}
}

pub async fn run(&self) -> BuckyResult<Vec<KeyDataMeta>> {
let mut list = Vec::with_capacity(self.list.len());
let mut list = Vec::with_capacity(self.key_data_manager.list().len());

for item in &self.list {
for item in self.key_data_manager.list() {
let chunk_id = self.backup_data(item).await?;
if chunk_id.is_none() {
continue;
Expand All @@ -50,12 +46,17 @@ impl KeyDataBackupManager {
}

async fn backup_data(&self, data: &KeyData) -> BuckyResult<Option<ChunkId>> {
let file = self.cyfs_root.join(&data.local_path);
let file = self.key_data_manager.cyfs_root().join(&data.local_path);
if !file.exists() {
warn!("target key data not exists! {}", file.display());
return Ok(None);
}

if !self.key_data_manager.check_filter(&file) {
warn!("key data will be ignored by filter: {}", file.display());
return Ok(None);
}

let data = match data.data_type {
KeyDataType::File => async_std::fs::read(&file).await.map_err(|e| {
let msg = format!(
Expand All @@ -66,9 +67,11 @@ impl KeyDataBackupManager {
error!("{}", msg);
BuckyError::new(BuckyErrorCode::IoError, msg)
})?,
KeyDataType::Dir => {
ZipHelper::zip_dir_to_buffer(&file, zip::CompressionMethod::Stored)?
}
KeyDataType::Dir => ZipHelper::zip_dir_to_buffer(
&file,
zip::CompressionMethod::Stored,
&self.key_data_manager,
)?,
};

let chunk_id = ChunkId::calculate_sync(&data).unwrap();
Expand Down
56 changes: 50 additions & 6 deletions src/component/cyfs-backup/src/key_data/key_data.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
use crate::meta::KeyDataType;
use cyfs_base::*;

use std::borrow::Cow;
use std::path::PathBuf;
use std::path::{PathBuf, Path};

#[derive(Clone, Debug)]
pub struct KeyData {
Expand Down Expand Up @@ -32,19 +33,39 @@ impl KeyData {
}

pub struct KeyDataManager {
pub cyfs_root: PathBuf,
pub list: Vec<KeyData>,
cyfs_root: PathBuf,
list: Vec<KeyData>,
filter_list: Vec<globset::GlobMatcher>,
}

impl KeyDataManager {
pub fn new_uni(isolate: &str) -> Self {
pub fn new_uni(isolate: &str, filters: &Vec<String>) -> BuckyResult<Self> {
let mut filter_list = vec![];
for filter in filters {
info!("new key data filter: {}", filter);
let glob = globset::GlobBuilder::new(filter)
.case_insensitive(true)
.literal_separator(true)
.build()
.map_err(|e| {
let msg = format!(
"parse key data filter as glob error! token={}, {}",
filter, e
);
error!("{}", msg);
BuckyError::new(BuckyErrorCode::InvalidFormat, msg)
})?;

filter_list.push(glob.compile_matcher());
}

let mut list = vec![];
let data = if isolate.is_empty() {
KeyData::new_dir("etc")
} else {
KeyData::new_dir(format!("etc/{}", isolate))
};

list.push(data);

let data_dir = if isolate.is_empty() {
Expand Down Expand Up @@ -78,6 +99,29 @@ impl KeyDataManager {
list.push(data);

let cyfs_root = cyfs_util::get_cyfs_root_path();
Self { cyfs_root, list }
let ret = Self {
cyfs_root,
list,
filter_list,
};

Ok(ret)
}

/// Returns the cyfs root directory that key-data relative paths are
/// resolved against.
pub fn cyfs_root(&self) -> &Path {
    self.cyfs_root.as_path()
}

/// Returns the key-data entries managed by this instance.
///
/// Returns a slice instead of `&Vec<KeyData>`: the slice hides the
/// internal container, is the idiomatic borrowed view, and is
/// backward-compatible with every existing use (iteration and `len()`).
pub fn list(&self) -> &[KeyData] {
    &self.list
}
/// Checks a path against the configured exclusion filters.
///
/// Returns `false` when the path matches any glob (the caller should
/// skip it), `true` when the path passes all filters.
pub fn check_filter(&self, path: &Path) -> bool {
    !self.filter_list.iter().any(|filter| filter.is_match(path))
}
}
23 changes: 15 additions & 8 deletions src/component/cyfs-backup/src/key_data/stat.rs
Original file line number Diff line number Diff line change
@@ -1,45 +1,52 @@
use crate::meta::*;
use super::key_data::*;

use std::path::PathBuf;

pub struct KeyDataBackupStat {
cyfs_root: PathBuf,
list: Vec<KeyData>,
key_data_manager: KeyDataManager
}

impl KeyDataBackupStat {
pub fn new(keydata: KeyDataManager) -> Self {
pub fn new(key_data_manager: KeyDataManager) -> Self {
Self {
cyfs_root: keydata.cyfs_root,
list: keydata.list,
key_data_manager,
}
}

pub fn stat(&self) -> ObjectArchiveDataMeta {
let mut result = ObjectArchiveDataMeta::default();

for item in &self.list {
for item in self.key_data_manager.list() {
self.stat_data(&mut result, item);
}

result
}

fn stat_data(&self, result: &mut ObjectArchiveDataMeta, data: &KeyData) {
let file = self.cyfs_root.join(&data.local_path);
let file = self.key_data_manager.cyfs_root().join(&data.local_path);
if !file.exists() {
warn!("target key data not exists! {}", file.display());
return;
}

if !self.key_data_manager.check_filter(&file) {
warn!("key data will be ignored by filter: {}", file.display());
return;
}

match data.data_type {
KeyDataType::File => {
result.count += 1;
},
KeyDataType::Dir => {
let walkdir = walkdir::WalkDir::new(file);
for item in walkdir.into_iter().filter_map(|e| e.ok()) {
if !self.key_data_manager.check_filter(&item.path()) {
warn!("key data will be ignored by filter: {}", item.path().display());
return;
}

if item.path().is_file() {
result.count += 1;
}
Expand Down
15 changes: 13 additions & 2 deletions src/component/cyfs-backup/src/key_data/test.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use super::zip_helper::*;
use super::{zip_helper::*, KeyDataManager};
use cyfs_util::get_cyfs_root_path;

#[test]
Expand All @@ -11,8 +11,19 @@ fn test() {

#[test]
fn test_zip() {
cyfs_base::init_simple_log("test-key-data-backup", None);

let root = get_cyfs_root_path().join("etc");
let buf = ZipHelper::zip_dir_to_buffer(&root, zip::CompressionMethod::Stored).unwrap();

let filter_dir = get_cyfs_root_path().join("etc").join("gateway\\**");
let filters = vec![
filter_dir.as_os_str().to_string_lossy().to_string(),
];

info!("filters: {:?}", filters);

let key_data_manager = KeyDataManager::new_uni("", &filters).unwrap();
let buf = ZipHelper::zip_dir_to_buffer(&root, zip::CompressionMethod::Stored, &key_data_manager).unwrap();

let data = std::io::Cursor::new(buf);
let target = get_cyfs_root_path().join("tmp/etc");
Expand Down
10 changes: 10 additions & 0 deletions src/component/cyfs-backup/src/key_data/zip_helper.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
use cyfs_base::*;
use super::KeyDataManager;

use std::fs::File;
use std::io::prelude::*;
Expand All @@ -7,12 +8,14 @@ use std::iter::Iterator;
use std::path::Path;
use walkdir::{DirEntry, WalkDir};


pub struct ZipHelper {}

impl ZipHelper {
pub fn zip_dir_to_buffer(
src_dir: &Path,
method: zip::CompressionMethod,
key_data_manager: &KeyDataManager,
) -> BuckyResult<Vec<u8>> {
let walkdir = WalkDir::new(src_dir);
let it = walkdir.into_iter();
Expand All @@ -26,6 +29,7 @@ impl ZipHelper {
src_dir,
&mut cursor,
method,
key_data_manager,
)?;

Ok(buf)
Expand Down Expand Up @@ -136,6 +140,7 @@ impl ZipHelper {
prefix: &Path,
writer: T,
method: zip::CompressionMethod,
key_data_manager: &KeyDataManager,
) -> BuckyResult<()>
where
T: Write + Seek,
Expand All @@ -148,6 +153,11 @@ impl ZipHelper {
let mut buffer = Vec::new();
for entry in it {
let path = entry.path();
if !key_data_manager.check_filter(path) {
warn!("key data will be ignored by filter: {}", path.display());
continue;
}

let name = path.strip_prefix(prefix).unwrap();

// Write file or directory explicitly
Expand Down
1 change: 1 addition & 0 deletions src/tests/cyfs-stack-test/src/case/backup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ pub async fn test() {
isolate: isolate.clone(),
target_file: LocalFileBackupParam::default(),
password: Some(ProtectedPassword::new("123456")),
key_data_filters: vec![],
};

let target_dir = UniBackupTask::backup_dir(&params).to_path_buf();
Expand Down
Loading

0 comments on commit 89110cc

Please sign in to comment.