diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 1d0aece..6e394d0 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -27,6 +27,8 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install dependencies + run: sudo apt-get install -y protobuf-compiler - name: Build run: cargo build --verbose - name: Run clippy diff --git a/Cargo.lock b/Cargo.lock index 1fe231d..cbaf30b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -41,6 +41,12 @@ dependencies = [ "libc", ] +[[package]] +name = "anyhow" +version = "1.0.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" + [[package]] name = "autocfg" version = "1.1.0" @@ -197,9 +203,9 @@ checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" [[package]] name = "either" -version = "1.5.3" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" [[package]] name = "encode_unicode" @@ -343,9 +349,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.5" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "hermit-abi" @@ -452,9 +458,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.3.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", "hashbrown", @@ -637,7 +643,7 @@ dependencies = [ "maxminddb", "pretty_env_logger", "protobuf", - "protobuf-codegen-pure", + "protobuf-codegen", "rand", "regex", "socket2 0.5.3", @@ -772,27 +778,53 @@ dependencies = [ [[package]] name = "protobuf" -version = "2.14.0" +version = "3.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e86d370532557ae7573551a1ec8235a0f8d6cb276c7c9e6aa490b511c447485" +checksum = "a3a7c64d9bf75b1b8d981124c14c179074e8caa7dfe7b6a12e6222ddcd0c8f72" +dependencies = [ + "once_cell", + "protobuf-support", + "thiserror", +] [[package]] name = "protobuf-codegen" -version = "2.14.0" +version = "3.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de113bba758ccf2c1ef816b127c958001b7831136c9bc3f8e9ec695ac4e82b0c" +checksum = "e26b833f144769a30e04b1db0146b2aaa53fd2fd83acf10a6b5f996606c18144" dependencies = [ + "anyhow", + "once_cell", "protobuf", + "protobuf-parse", + "regex", + "tempfile", + "thiserror", ] [[package]] -name = "protobuf-codegen-pure" -version = "2.14.0" +name = "protobuf-parse" +version = "3.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d1a4febc73bf0cada1d77c459a0c8e5973179f1cfd5b0f1ab789d45b17b6440" +checksum = "322330e133eab455718444b4e033ebfac7c6528972c784fcde28d2cc783c6257" dependencies = [ + "anyhow", + "indexmap", + "log", "protobuf", - "protobuf-codegen", + "protobuf-support", + "tempfile", + "thiserror", + "which", +] + +[[package]] +name = "protobuf-support" +version = "3.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b088fd20b938a875ea00843b6faf48579462630015c3788d397ad6a786663252" +dependencies = [ + "thiserror", ] [[package]] @@ -991,6 +1023,26 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "thiserror" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + [[package]] name = "tinyvec" version = "1.6.0" @@ -1225,6 +1277,17 @@ version = "0.2.82" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6598dd0bd3c7d51095ff6531a5b23e02acdc81804e30d8f07afb77b7215a140a" +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + [[package]] name = "winapi" version = "0.3.9" diff --git a/Cargo.toml b/Cargo.toml index d2f6d85..c0b1d24 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,8 +12,8 @@ hyper = { version = "0.14", features = ["server", "http1", "http2", "runtime"] } log = "0.4" pretty_env_logger = "0.5" chrono = "0.4" -protobuf = "2.14.0" -url = "2.5.2" +protobuf = "3.7.1" +url = "2.3.1" regex = "1" ipnet = "2.9.0" treebitmap = "0.4.0" @@ -35,7 +35,7 @@ version = "1.29.1" features = ["macros", "rt-multi-thread"] [build-dependencies] -protobuf-codegen-pure = "2.14.0" +protobuf-codegen = "3.7.1" [dev-dependencies] tempfile = "3.10.1" diff --git a/build.rs b/build.rs index aa7a433..a7c558b 100644 --- a/build.rs +++ b/build.rs @@ -1,10 +1,9 @@ -extern crate protobuf_codegen_pure; +extern crate protobuf_codegen; fn main() { - protobuf_codegen_pure::Codegen::new() + protobuf_codegen::Codegen::new() .out_dir("src/bin/common/protos") .inputs(["protos/mirrormanager.proto"]) .include("protos") - .run() - .expect("Codegen failed."); + .run_from_script(); } diff --git a/src/bin/common/functions.rs b/src/bin/common/functions.rs index fc6853f..76d5dc8 100644 --- a/src/bin/common/functions.rs +++ b/src/bin/common/functions.rs @@ -9,7 +9,7 @@ pub fn find_in_file_details_cache_directory_cache( dir: &str, ) -> i64 { for (index, e) in fdcdc.iter().enumerate() { - if e.get_directory() == dir { + if e.directory() == dir { return index as i64; } } @@ -18,7 +18,7 @@ pub fn find_in_file_details_cache_directory_cache( pub fn find_in_mirrorlist_cache(mlc: &[MirrorListCacheType], dir: &str) -> i64 { for (index, mirrorlist_cache) in mlc.iter().enumerate() { - if mirrorlist_cache.get_directory() == dir { + if mirrorlist_cache.directory() == dir { return index as i64; } } @@ -28,8 +28,8 @@ pub fn find_in_mirrorlist_cache(mlc: &[MirrorListCacheType], dir: &str) -> i64 { pub fn find_in_string_string_map(ssm: &[StringStringMap], key: &str) -> String { let mut result = String::new(); for param in ssm { - if param.get_key() == key { - result.push_str(param.get_value()); + if param.key() == key { + result.push_str(param.value()); } } result @@ -37,8 +37,8 @@ pub fn find_in_string_string_map(ssm: &[StringStringMap], key: &str) -> String { pub fn find_in_string_bool_map(sbm: &[StringBoolMap], key: &str) -> bool { for param in sbm { - if param.get_key() == key { - return param.get_value(); + if 
param.key() == key { + return param.value(); } } false @@ -46,8 +46,8 @@ pub fn find_in_string_bool_map(sbm: &[StringBoolMap], key: &str) -> bool { pub fn find_in_int_int_map(iim: &[IntIntMap], key: i64) -> i64 { for e in iim { - if e.get_key() == key { - return e.get_value(); + if e.key() == key { + return e.value(); } } 0 @@ -55,8 +55,8 @@ pub fn find_in_int_int_map(iim: &[IntIntMap], key: i64) -> i64 { pub fn find_in_int_string_map(ism: &[IntStringMap], key: i64) -> String { for e in ism { - if e.get_key() == key { - return String::from(e.get_value()); + if e.key() == key { + return String::from(e.value()); } } String::new() @@ -64,7 +64,7 @@ pub fn find_in_int_string_map(ism: &[IntStringMap], key: i64) -> String { pub fn find_in_int_repeated_string_map(irsm: &[IntRepeatedStringMap], key: i64) -> i64 { for (index, param) in irsm.iter().enumerate() { - if param.get_key() == key { + if param.key() == key { return index as i64; } } @@ -73,7 +73,7 @@ pub fn find_in_int_repeated_string_map(irsm: &[IntRepeatedStringMap], key: i64) pub fn find_in_int_repeated_int_map(irim: &[IntRepeatedIntMap], key: i64) -> i64 { for (index, param) in irim.iter().enumerate() { - if param.get_key() == key { + if param.key() == key { return index as i64; } } @@ -82,7 +82,7 @@ pub fn find_in_int_repeated_int_map(irim: &[IntRepeatedIntMap], key: i64) -> i64 pub fn find_in_string_repeated_int_map(irim: &[StringRepeatedIntMap], key: &str) -> i64 { for (index, param) in irim.iter().enumerate() { - if param.get_key() == key { + if param.key() == key { return index as i64; } } @@ -94,7 +94,7 @@ pub fn find_in_file_details_cache_files_cache( file: &str, ) -> i64 { for (index, e) in fdcfc.iter().enumerate() { - if e.get_filename() == file { + if e.filename() == file { return index as i64; } } diff --git a/src/bin/common/protos/mod.rs b/src/bin/common/protos/mod.rs index dc941cb..3ada31a 100644 --- a/src/bin/common/protos/mod.rs +++ b/src/bin/common/protos/mod.rs @@ -1 +1,3 @@ +// @generated + pub mod mirrormanager; diff --git a/src/bin/generate-mirrorlist-cache.rs b/src/bin/generate-mirrorlist-cache.rs index 02942f1..21f08d8 100644 --- a/src/bin/generate-mirrorlist-cache.rs +++ b/src/bin/generate-mirrorlist-cache.rs @@ -27,8 +27,7 @@ use getopts::Options; use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle}; use ipnet::{IpNet, Ipv4Net, Ipv6Net}; use ipnetwork::IpNetwork; -use protobuf::error::ProtobufError; -use protobuf::{CodedOutputStream, Message, RepeatedField}; +use protobuf::{CodedOutputStream, Message}; use std::collections::HashMap; use std::convert::TryInto; use std::env; @@ -415,8 +414,8 @@ fn parse_ip(input: String, host: String) -> Result, String> { } /* HostBandwidthCache */ -fn get_hbc(hosts: &[Host]) -> RepeatedField { - let mut hbc: RepeatedField = RepeatedField::new(); +fn get_hbc(hosts: &[Host]) -> Vec { + let mut hbc: Vec = Vec::new(); for h in hosts { let mut hb = IntIntMap::new(); @@ -431,8 +430,8 @@ fn get_hbc(hosts: &[Host]) -> RepeatedField { } /* HostMaxConnectionCache */ -fn get_hmcc(hosts: &[Host]) -> RepeatedField { - let mut hmcc: RepeatedField = RepeatedField::new(); +fn get_hmcc(hosts: &[Host]) -> Vec { + let mut hmcc: Vec = Vec::new(); for h in hosts { let mut hmc = IntIntMap::new(); @@ -445,8 +444,8 @@ fn get_hmcc(hosts: &[Host]) -> RepeatedField { } /* HostCountryCache */ -fn get_hcc(hosts: &[Host]) -> RepeatedField { - let mut hcc: RepeatedField = RepeatedField::new(); +fn get_hcc(hosts: &[Host]) -> Vec { + let mut hcc: Vec = Vec::new(); for h in hosts { let mut hc = 
IntStringMap::new(); @@ -462,8 +461,8 @@ fn get_hcc(hosts: &[Host]) -> RepeatedField { } /* HostAsnCache */ -fn get_hac(hosts: &[Host]) -> RepeatedField { - let mut hac: RepeatedField = RepeatedField::new(); +fn get_hac(hosts: &[Host]) -> Vec { + let mut hac: Vec = Vec::new(); for h in hosts { if !h.6 { @@ -474,13 +473,12 @@ fn get_hac(hosts: &[Host]) -> RepeatedField { } let i = find_in_int_repeated_int_map(&hac, h.7.unwrap().into()); if i != -1 { - let val = &mut hac[i as usize].mut_value(); + let val = &mut hac[i as usize].value; val.push(h.0.into()); } else { let mut hc = IntRepeatedIntMap::new(); hc.set_key(h.7.unwrap().into()); - let val = hc.mut_value(); - val.push(h.0.into()); + hc.value.push(h.0.into()); hac.push(hc); } } @@ -489,8 +487,8 @@ fn get_hac(hosts: &[Host]) -> RepeatedField { } /* HostCountryAllowedCache */ -fn get_hcac(c: &mut PgConnection, hosts: &[Host]) -> RepeatedField { - let mut hcac: RepeatedField = RepeatedField::new(); +fn get_hcac(c: &mut PgConnection, hosts: &[Host]) -> Vec { + let mut hcac: Vec = Vec::new(); let hcac_raw = get_host_country_allowed(c); @@ -505,16 +503,15 @@ fn get_hcac(c: &mut PgConnection, hosts: &[Host]) -> RepeatedField RepeatedField RepeatedField { - let mut hcurl: RepeatedField = RepeatedField::new(); +fn get_hcurlc(host_category_urls: &[(i32, i32, String)]) -> Vec { + let mut hcurl: Vec = Vec::new(); for hcu in host_category_urls { let mut hc_url = IntStringMap::new(); @@ -538,8 +535,8 @@ fn get_hcurlc(host_category_urls: &[(i32, i32, String)]) -> RepeatedField RepeatedField { - let mut hnbc: RepeatedField = RepeatedField::new(); +fn get_hnbc(c: &mut PgConnection, hosts: &[Host]) -> Vec { + let mut hnbc: Vec = Vec::new(); let netblocks_and_hosts = get_netblocks(c); let debug = DEBUG.load(Ordering::SeqCst); @@ -572,13 +569,12 @@ fn get_hnbc(c: &mut PgConnection, hosts: &[Host]) -> RepeatedField RepeatedField { - let mut ratdn: RepeatedField = RepeatedField::new(); +) -> Vec { + let mut ratdn: Vec = Vec::new(); let arches = get_arches(c); @@ -653,11 +649,11 @@ fn get_mlc( directories: &[db::models::Directory], host_category_urls: &[(i32, i32, String)], ) -> ( - RepeatedField, - RepeatedField, + Vec, + Vec, ) { - let mut mlc: RepeatedField = RepeatedField::new(); - let mut fdcdc: RepeatedField = RepeatedField::new(); + let mut mlc: Vec = Vec::new(); + let mut fdcdc: Vec = Vec::new(); let categories = get_categories(c); let host_categories = get_host_categories(c); @@ -760,7 +756,7 @@ fn get_mlc( i = find_in_file_details_cache_directory_cache(&fdcdc, &d.name); } let f: &mut FileDetailsCacheDirectoryType = &mut fdcdc[i as usize]; - let fdcfc = f.mut_FileDetailsCacheFiles(); + let fdcfc = &mut f.FileDetailsCacheFiles; i = find_in_file_details_cache_files_cache(fdcfc, &fd.1); if i == -1 { let mut tmp = FileDetailsCacheFilesType::new(); @@ -769,7 +765,7 @@ fn get_mlc( i = find_in_file_details_cache_files_cache(fdcfc, &fd.1); } let fdcf: &mut FileDetailsCacheFilesType = &mut fdcfc[i as usize]; - let fdc = fdcf.mut_FileDetails(); + let fdc = &mut fdcf.FileDetails; let mut file_detail_type = FileDetailsType::new(); if fd.2.is_none() { file_detail_type.set_TimeStamp(0); @@ -834,9 +830,9 @@ fn get_mlc( }; ml.set_Subpath(subpath); let mut global: Vec = Vec::new(); - let mut by_country: RepeatedField = RepeatedField::new(); - let mut by_internet2: RepeatedField = RepeatedField::new(); - let mut by_hostid: RepeatedField = RepeatedField::new(); + let mut by_country: Vec = Vec::new(); + let mut by_internet2: Vec = Vec::new(); + let mut by_hostid: Vec = 
Vec::new(); for (h_id, hc_id) in &host_cat_hash[&category_id] { let always_up2date: bool = host_cat_id_hash[&(*hc_id as i32)]; let host = get_host(*h_id as i32, hosts); @@ -866,7 +862,7 @@ fn get_mlc( let hcurl_ids = &hcurl_cat_url_id_hash[hc_id]; let mut hcurl_id = IntRepeatedIntMap::new(); hcurl_id.set_key(*h_id); - hcurl_id.set_value(hcurl_ids.to_vec()); + hcurl_id.value = hcurl_ids.to_vec(); by_hostid.push(hcurl_id); } @@ -879,12 +875,12 @@ fn get_mlc( let country: String = host.clone().5.unwrap().to_string().to_uppercase(); let i = find_in_string_repeated_int_map(&by_internet2, &country); if i != -1 { - let val = &mut by_internet2[i as usize].mut_value(); + let val = &mut by_internet2[i as usize].value; val.push(*h_id); } else { let mut bi = StringRepeatedIntMap::new(); bi.set_key(country); - let val = bi.mut_value(); + let val = &mut bi.value; val.push(*h_id); by_internet2.push(bi); } @@ -904,20 +900,20 @@ fn get_mlc( let country: String = host.clone().5.unwrap().to_string().to_uppercase(); let i = find_in_string_repeated_int_map(&by_country, &country); if i != -1 { - let val = &mut by_country[i as usize].mut_value(); + let val = &mut by_country[i as usize].value; val.push(*h_id); } else { let mut bc = StringRepeatedIntMap::new(); bc.set_key(country); - let val = bc.mut_value(); + let val = &mut bc.value; val.push(*h_id); by_country.push(bc); } } - ml.set_Global(global); - ml.set_ByCountry(by_country); - ml.set_ByCountryInternet2(by_internet2); - ml.set_ByHostId(by_hostid); + ml.Global = global; + ml.ByCountry = by_country; + ml.ByCountryInternet2 = by_internet2; + ml.ByHostId = by_hostid; /* Not setting OrderedMirrorList as the rust mirrorlist-server does not read it. */ @@ -928,8 +924,8 @@ fn get_mlc( } /* RepositoryRedirectCache */ -fn get_rrc(c: &mut PgConnection) -> RepeatedField { - let mut rrc: RepeatedField = RepeatedField::new(); +fn get_rrc(c: &mut PgConnection) -> Vec { + let mut rrc: Vec = Vec::new(); let rrc_raw = get_repository_redirects(c); @@ -946,8 +942,8 @@ fn get_rrc(c: &mut PgConnection) -> RepeatedField { } /* NetblockCountryCache */ -fn get_ncc(c: &mut PgConnection) -> RepeatedField { - let mut ncc: RepeatedField = RepeatedField::new(); +fn get_ncc(c: &mut PgConnection) -> Vec { + let mut ncc: Vec = Vec::new(); let ncc_raw = get_netblock_countries(c); @@ -962,8 +958,8 @@ fn get_ncc(c: &mut PgConnection) -> RepeatedField { } /* CountryContinentRedirectCache */ -fn get_ccrc(c: &mut PgConnection) -> RepeatedField { - let mut ccrc: RepeatedField = RepeatedField::new(); +fn get_ccrc(c: &mut PgConnection) -> Vec { + let mut ccrc: Vec = Vec::new(); let ccrc_raw = get_country_continent_redirects(c); @@ -978,8 +974,8 @@ fn get_ccrc(c: &mut PgConnection) -> RepeatedField { } /* DisabledRepositoryCache */ -fn get_drc(repositories: &[Repository]) -> RepeatedField { - let mut drc: RepeatedField = RepeatedField::new(); +fn get_drc(repositories: &[Repository]) -> Vec { + let mut drc: Vec = Vec::new(); for r in repositories { let mut dr = StringBoolMap::new(); @@ -1056,82 +1052,82 @@ fn main() { { /* HostCountryAllowedCache */ let hcac = get_hcac(connection, &hosts); - mirrorlist.set_HostCountryAllowedCache(hcac); + mirrorlist.HostCountryAllowedCache = hcac; } { /* HCUrlCache */ let hcurls = get_hcurlc(&host_category_urls); - mirrorlist.set_HCUrlCache(hcurls); + mirrorlist.HCUrlCache = hcurls; } { /* HostNetBlockCache */ let hnbc = get_hnbc(connection, &hosts); - mirrorlist.set_HostNetblockCache(hnbc); + mirrorlist.HostNetblockCache = hnbc; } { /* HostBandwidthCache */ let hbc 
= get_hbc(&hosts); - mirrorlist.set_HostBandwidthCache(hbc); + mirrorlist.HostBandwidthCache = hbc; } { /* HostCountryCache */ let hcc = get_hcc(&hosts); - mirrorlist.set_HostCountryCache(hcc); + mirrorlist.HostCountryCache = hcc; } { /* HostAsnCache */ let hac = get_hac(&hosts); - mirrorlist.set_HostAsnCache(hac); + mirrorlist.HostAsnCache = hac; } { /* HostMaxConnectionCache - Not actually used. */ let hmcc = get_hmcc(&hosts); - mirrorlist.set_HostMaxConnectionCache(hmcc); + mirrorlist.HostMaxConnectionCache = hmcc; } { /* MirrorListCache */ let (mlc, fdc) = get_mlc(connection, &hosts, &directories, &host_category_urls); - mirrorlist.set_MirrorListCache(mlc); - mirrorlist.set_FileDetailsCache(fdc); + mirrorlist.MirrorListCache = mlc; + mirrorlist.FileDetailsCache = fdc; } { let repositories = get_repositories(connection); /* RepoArchToDirectoryName */ let ratdn = get_ratdn(connection, &directories, &repositories); - mirrorlist.set_RepoArchToDirectoryName(ratdn); + mirrorlist.RepoArchToDirectoryName = ratdn; /* DisabledRepositoryCache */ let drc = get_drc(&repositories); - mirrorlist.set_DisabledRepositoryCache(drc); + mirrorlist.DisabledRepositoryCache = drc; } { /* RepositoryRedirectCache */ let rrc = get_rrc(connection); - mirrorlist.set_RepositoryRedirectCache(rrc); + mirrorlist.RepositoryRedirectCache = rrc; } { /* CountryContinentRedirectCache */ let ccrc = get_ccrc(connection); - mirrorlist.set_CountryContinentRedirectCache(ccrc); + mirrorlist.CountryContinentRedirectCache = ccrc; } { /* NetblockCountryCache */ let ncc = get_ncc(connection); - mirrorlist.set_NetblockCountryCache(ncc); + mirrorlist.NetblockCountryCache = ncc; } print_step(format!("Writing to {}", &cache_file)); - let mut file = match File::create(&cache_file).map_err(ProtobufError::IoError) { + let mut file = match File::create(&cache_file) { Ok(file) => file, _ => { println!("Error opening file {}", &cache_file); diff --git a/src/bin/generate_mirrorlist_cache_test/mod.rs b/src/bin/generate_mirrorlist_cache_test/mod.rs index 55109ec..a8d542e 100644 --- a/src/bin/generate_mirrorlist_cache_test/mod.rs +++ b/src/bin/generate_mirrorlist_cache_test/mod.rs @@ -218,20 +218,20 @@ fn get_mlc_test_empty_topdir() { let (mlc, fdc) = get_mlc(&mut c, &hosts, &directories, &host_category_urls); assert_eq!(fdc.len(), 1); - assert_eq!(fdc[0].get_directory(), "directory/repodata".to_string()); - assert_eq!(fdc[0].get_FileDetailsCacheFiles().len(), 1); - let fdcf = &fdc[0].get_FileDetailsCacheFiles()[0]; - assert_eq!(fdcf.get_filename(), "repomd.xml".to_string()); - let fdcfd = fdcf.get_FileDetails(); + assert_eq!(fdc[0].directory(), "directory/repodata".to_string()); + assert_eq!(fdc[0].FileDetailsCacheFiles.len(), 1); + let fdcf = &fdc[0].FileDetailsCacheFiles[0]; + assert_eq!(fdcf.filename.clone().unwrap(), "repomd.xml".to_string()); + let fdcfd = fdcf.FileDetails.clone(); assert_eq!(fdcfd.len(), 1); - assert_eq!(fdcfd[0].get_Size(), 177); + assert_eq!(fdcfd[0].Size(), 177); assert_eq!(mlc.len(), 1); - assert_eq!(mlc[0].get_Subpath(), "directory/repodata".to_string()); - assert_eq!(mlc[0].get_directory(), "directory/repodata".to_string()); - assert_eq!(mlc[0].get_Global()[0], 56); - assert_eq!(mlc[0].get_ByCountry()[0].get_key(), "UQ"); - assert_eq!(mlc[0].get_ByCountry()[0].get_value()[0], 56); + assert_eq!(mlc[0].Subpath(), "directory/repodata".to_string()); + assert_eq!(mlc[0].directory(), "directory/repodata".to_string()); + assert_eq!(mlc[0].Global[0], 56); + assert_eq!(mlc[0].ByCountry[0].key.clone().unwrap(), "UQ"); + 
assert_eq!(mlc[0].ByCountry[0].value[0], 56); } #[test] @@ -259,23 +259,23 @@ fn get_mlc_test_non_empty_topdir() { assert_eq!(fdc.len(), 1); assert_eq!( - fdc[0].get_directory(), + fdc[0].directory(), "test/topdir/directory/repodata".to_string() ); - assert_eq!(fdc[0].get_FileDetailsCacheFiles().len(), 1); - let fdcf = &fdc[0].get_FileDetailsCacheFiles()[0]; - assert_eq!(fdcf.get_filename(), "repomd.xml".to_string()); - let fdcfd = fdcf.get_FileDetails(); + assert_eq!(fdc[0].FileDetailsCacheFiles.len(), 1); + let fdcf = &fdc[0].FileDetailsCacheFiles[0]; + assert_eq!(fdcf.filename.clone().unwrap(), "repomd.xml".to_string()); + let fdcfd = fdcf.FileDetails.clone(); assert_eq!(fdcfd.len(), 1); - assert_eq!(fdcfd[0].get_Size(), 177); + assert_eq!(fdcfd[0].Size(), 177); assert_eq!(mlc.len(), 1); assert_eq!( - mlc[0].get_directory(), + mlc[0].directory(), "test/topdir/directory/repodata".to_string() ); - assert_eq!(mlc[0].get_Subpath(), "directory/repodata".to_string()); - assert_eq!(mlc[0].get_Global()[0], 56); - assert_eq!(mlc[0].get_ByCountry()[0].get_key(), "UQ"); - assert_eq!(mlc[0].get_ByCountry()[0].get_value()[0], 56); + assert_eq!(mlc[0].Subpath(), "directory/repodata".to_string()); + assert_eq!(mlc[0].Global[0], 56); + assert_eq!(mlc[0].ByCountry[0].key.clone().unwrap(), "UQ"); + assert_eq!(mlc[0].ByCountry[0].value[0], 56); } diff --git a/src/bin/mirrorlist-server.rs b/src/bin/mirrorlist-server.rs index 029de72..1570069 100644 --- a/src/bin/mirrorlist-server.rs +++ b/src/bin/mirrorlist-server.rs @@ -20,7 +20,6 @@ use ipnet::IpNet; use itertools::Itertools; use log::{error, info}; use maxminddb::{geoip2, Reader}; -use protobuf::parse_from_reader; use rand::distributions::Distribution; use rand::distributions::WeightedIndex; use rand::seq::SliceRandom; @@ -87,12 +86,12 @@ fn find_in_netblock_country_cache(nbcc: &[StringStringMap], client_ip: &IpAddr) // Fedora's database this only contains one entry. No need // to implement it via tree, yet.. In for e in nbcc { - let net: IpNet = match IpNet::from_str(e.get_key()) { + let net: IpNet = match IpNet::from_str(e.key()) { Ok(net) => net, _ => return "".to_string(), }; if net.contains(client_ip) { - return String::from(e.get_value()); + return e.value.clone().unwrap(); } } "".to_string() @@ -137,7 +136,7 @@ fn trim_by_client_country( continue; } let mut found = false; - for country in hcac[index as usize].get_value() { + for country in &hcac[index as usize].value { // Check if the client country is part of host_country_allowed_cache[host] if country == &client_country { // Yes it is. 
We do not need to remove this host from the list @@ -173,7 +172,7 @@ fn do_countrylist( // Check if the country exists at all in the by_country cache let i = find_in_string_repeated_int_map(by_country, &country.to_uppercase()); if i != -1 { - for host in by_country[i as usize].get_value() { + for host in &by_country[i as usize].value { // Add all hostids to the result hosts.push(*host); } @@ -239,16 +238,16 @@ fn append_path( path_is_dir: bool, ) -> Vec<(i64, Vec)> { let mut result: Vec<(i64, Vec)> = Vec::new(); - let subpath = String::from(cache.get_Subpath()); + let subpath = String::from(cache.Subpath()); for hid in all_hosts { let mut hcurls: Vec = Vec::new(); - let by_host_id = cache.get_ByHostId(); + let by_host_id = &cache.ByHostId; let i = find_in_int_repeated_int_map(by_host_id, *hid); if i == -1 { continue; } - for hcurl_id in by_host_id[i as usize].get_value() { + for hcurl_id in &by_host_id[i as usize].value { let mut s = String::from(&find_in_int_string_map(hcurl_cache, *hcurl_id)); if !subpath.is_empty() { s.push('/'); @@ -378,7 +377,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { let mut path_is_dir = false; let mut header = String::new(); let cache: &MirrorListCacheType; - let mirrorlist_caches = &p.mirrorlist.get_MirrorListCache(); + let mirrorlist_caches = &p.mirrorlist.MirrorListCache; if check_for_param(&query_params, "path") { let mut path = get_param(&query_params, "path"); @@ -409,7 +408,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { // path was a directory path_is_dir = true; cache = &mirrorlist_caches[index as usize]; - dir.push_str(cache.get_directory()); + dir.push_str(cache.directory()); } } else { if get_param(&query_params, "repo").contains("source") { @@ -418,7 +417,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { } query_params.insert("arch", "source"); } - let repo_redirect_cache = &p.mirrorlist.get_RepositoryRedirectCache(); + let repo_redirect_cache = &p.mirrorlist.RepositoryRedirectCache; let mut repo = find_in_string_string_map(repo_redirect_cache, &get_param(&query_params, "repo")); if repo.is_empty() { @@ -426,17 +425,17 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { } let arch = get_param(&query_params, "arch"); let _ = write!(header, "# repo = {} arch = {} ", repo, arch); - if find_in_string_bool_map(p.mirrorlist.get_DisabledRepositoryCache(), &repo) { + if find_in_string_bool_map(&p.mirrorlist.DisabledRepositoryCache, &repo) { return http_response(metalink, "repo disabled".to_string(), StatusCode::OK); } let key = find_in_string_string_map( - p.mirrorlist.get_RepoArchToDirectoryName(), + &p.mirrorlist.RepoArchToDirectoryName, &format!("{}+{}", repo, arch), ); if key.is_empty() { let mut repos: Vec = Vec::new(); - for e in p.mirrorlist.get_RepoArchToDirectoryName() { - repos.push(e.get_key().to_string()); + for e in &p.mirrorlist.RepoArchToDirectoryName { + repos.push(e.key().to_string()); } repos.sort(); let mut repo_information = String::from(&header); @@ -518,16 +517,16 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { && (!check_for_param(&query_params, "netblock") || get_param(&query_params, "netblock") == "1") { - let hnbc = &p.mirrorlist.get_HostNetblockCache(); - for hnb in *hnbc { - let net: IpNet = match IpNet::from_str(hnb.get_key()) { + let hnbc = &p.mirrorlist.HostNetblockCache; + for hnb in hnbc { + let net: IpNet = match IpNet::from_str(hnb.key()) { Ok(net) => net, _ => { - let ip = match IpAddr::from_str(hnb.get_key()) { 
+ let ip = match IpAddr::from_str(hnb.key()) { Ok(ip) => ip, _ => continue, }; - let mut with_mask = String::from(hnb.get_key()); + let mut with_mask = String::from(hnb.key()); if ip.is_ipv4() { with_mask.push_str("/32"); } else { @@ -537,9 +536,9 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { } }; if net.contains(&client_ip) { - for id in hnb.get_value() { + for id in &hnb.value { // Check if the host actually caries the requested content - if find_in_int_repeated_int_map(cache.get_ByHostId(), *id) > 0 { + if find_in_int_repeated_int_map(&cache.ByHostId, *id) > 0 { netblock_results.push((String::from(&net.to_string()), *id)); } } @@ -559,14 +558,14 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { // First find the ASN from the global_netblocks file let asn = find_in_ip_tree(p.asn_cache, &client_ip); if !asn.1.is_empty() { - let host_asn_cache = &p.mirrorlist.get_HostAsnCache(); + let host_asn_cache = &p.mirrorlist.HostAsnCache; let asn_number = match asn.1.parse::() { Ok(x) => x, _ => -1, }; let i = find_in_int_repeated_int_map(host_asn_cache, asn_number); if i != -1 { - for id in host_asn_cache[i as usize].get_value() { + for id in &host_asn_cache[i as usize].value { asn_results.push(*id); } } @@ -584,7 +583,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { // First check if we assigned this IP to another country let mut client_country: String = - find_in_netblock_country_cache(p.mirrorlist.get_NetblockCountryCache(), &client_ip); + find_in_netblock_country_cache(&p.mirrorlist.NetblockCountryCache, &client_ip); if client_country.is_empty() { // Do a GeoIP 2 lookup. In the Python implementation // this was more complicated as it was doing IPv6, Teredo @@ -625,16 +624,16 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { if !requested_countries.is_empty() { for country in &requested_countries { if country.to_uppercase() == "global".to_uppercase() { - country_results.append(&mut cache.get_Global().to_vec()); + country_results.append(&mut cache.Global.to_vec()); let ret = do_global( - p.mirrorlist.get_HostCountryAllowedCache(), + &p.mirrorlist.HostCountryAllowedCache, &mut country_results, client_country.to_string(), ); header.push_str(&ret); } let ret = do_countrylist( - cache.get_ByCountry(), + &cache.ByCountry, &mut country_results, country.to_string(), ); @@ -646,7 +645,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { // No mirror in that country found, let's use all countries from the continent for country in requested_countries { let ret = get_same_continent_hosts( - cache.get_ByCountry(), + &cache.ByCountry, p.cc, country.to_string(), &mut continent_results, @@ -658,7 +657,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { } } if !country_results.is_empty() || !continent_results.is_empty() { - let hcac = &p.mirrorlist.get_HostCountryAllowedCache(); + let hcac = &p.mirrorlist.HostCountryAllowedCache; country_results = trim_by_client_country(hcac, &mut country_results, client_country.to_string()); continent_results = @@ -680,7 +679,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { if !only_country { // Use GeoIP location do get a country list let ret = do_countrylist( - cache.get_ByCountry(), + &cache.ByCountry, &mut geoip_results, client_country.to_string(), ); @@ -698,7 +697,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { if !only_country { // Use GeoIP location do get a country on continent list let ret = 
get_same_continent_hosts( - cache.get_ByCountry(), + &cache.ByCountry, p.cc, client_country.to_string(), &mut continent_results, @@ -732,7 +731,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { let mut hosts_and_urls = append_path( &actual_hosts, cache, - p.mirrorlist.get_HCUrlCache(), + &p.mirrorlist.HCUrlCache, file.clone(), path_is_dir, ); @@ -755,9 +754,9 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { let mut global_results: Vec = Vec::new(); if mirrors_found < p.minimum && !only_country { // Use mirrors from everywhere - global_results = cache.get_Global().to_vec(); + global_results = cache.Global.to_vec(); let ret = do_global( - p.mirrorlist.get_HostCountryAllowedCache(), + &p.mirrorlist.HostCountryAllowedCache, &mut global_results, client_country, ); @@ -790,7 +789,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { all_hosts.append(&mut asn_results); { - let hbc = &p.mirrorlist.get_HostBandwidthCache(); + let hbc = &p.mirrorlist.HostBandwidthCache; // Weighted shuffle by bandwidth weigthed_shuffle(&mut country_results, hbc, &mut all_hosts); weigthed_shuffle(&mut geoip_results, hbc, &mut all_hosts); @@ -802,7 +801,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { let mut hosts_and_urls = append_path( &all_hosts, cache, - p.mirrorlist.get_HCUrlCache(), + &p.mirrorlist.HCUrlCache, file.clone(), path_is_dir, ); @@ -825,7 +824,7 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { let _ = write!( header, "\n# database creation time: {}", - &p.mirrorlist.get_Time() + &p.mirrorlist.Time.unwrap(), ); } @@ -890,35 +889,35 @@ fn do_mirrorlist(req: Request, p: &mut DoMirrorlist) -> Response { fn metalink_details(fd: &FileDetailsType, indent: String) -> String { let mut result = String::new(); - if fd.get_TimeStamp() != 0 { + if fd.TimeStamp() != 0 { result.push_str(&indent); let _ = writeln!( result, " {}", - fd.get_TimeStamp() + fd.TimeStamp() ); } - if fd.get_Size() != 0 { + if fd.Size() != 0 { result.push_str(&indent); - let _ = writeln!(result, " {}", fd.get_Size()); + let _ = writeln!(result, " {}", fd.Size()); } result.push_str(&indent); result.push_str(" \n"); - if fd.get_MD5() != "" { + if fd.MD5() != "" { result.push_str(&indent); - let _ = writeln!(result, " {}", fd.get_MD5()); + let _ = writeln!(result, " {}", fd.MD5()); } - if fd.get_SHA1() != "" { + if fd.SHA1() != "" { result.push_str(&indent); - let _ = writeln!(result, " {}", fd.get_SHA1()); + let _ = writeln!(result, " {}", fd.SHA1()); } - if fd.get_SHA256() != "" { + if fd.SHA256() != "" { result.push_str(&indent); - let _ = writeln!(result, " {}", fd.get_SHA256()); + let _ = writeln!(result, " {}", fd.SHA256()); } - if fd.get_SHA512() != "" { + if fd.SHA512() != "" { result.push_str(&indent); - let _ = writeln!(result, " {}", fd.get_SHA512()); + let _ = writeln!(result, " {}", fd.SHA512()); } result.push_str(&indent); result.push_str(" \n"); @@ -934,19 +933,19 @@ fn do_metalink( ) -> (hyper::StatusCode, String) { let mut preference = 100; let fdcdc_index = - find_in_file_details_cache_directory_cache(mirrorlist.get_FileDetailsCache(), &dir); + find_in_file_details_cache_directory_cache(&mirrorlist.FileDetailsCache, &dir); if fdcdc_index == -1 { return ( StatusCode::NOT_FOUND, metalink_failuredoc(format!("{}/{} not found or has not metalink", dir, file)), ); } - let fdcf = &mirrorlist.get_FileDetailsCache()[fdcdc_index as usize] - .get_FileDetailsCacheFiles() + let fdcf = &mirrorlist.FileDetailsCache[fdcdc_index as usize] 
+ .FileDetailsCacheFiles .to_vec(); let mut wrong_file = true; for e in fdcf { - if e.get_filename() == file { + if e.filename() == file { wrong_file = false; } } @@ -961,10 +960,10 @@ fn do_metalink( let _ = writeln!(doc, " ", file); let mut count = 0; for e in fdcf { - if e.get_filename() != file { + if e.filename() != file { continue; } - for fd in e.get_FileDetails() { + for fd in &e.FileDetails { if count == 0 { // It does not make much sense that a filename can have multiple file_details // Just use the first one @@ -986,7 +985,7 @@ fn do_metalink( doc += " \n"; for (host, hcurls) in hosts_and_urls { let mut private = String::from(" mm0:private=\"True\""); - for i in cache.get_Global() { + for i in &cache.Global { if i == host { private = String::new(); } @@ -1010,7 +1009,7 @@ fn do_metalink( doc += "\" type=\""; doc += protocol[0]; doc += "\" location=\""; - doc += &find_in_int_string_map(mirrorlist.get_HostCountryCache(), *host).to_uppercase(); + doc += &find_in_int_string_map(&mirrorlist.HostCountryCache, *host).to_uppercase(); let _ = write!(doc, "\" preference=\"{}\"{}>", preference, private); doc += url; doc += "\n"; @@ -1255,7 +1254,7 @@ async fn main() { process::exit(1) } }; - let mirrorlist = Arc::new(match parse_from_reader::(&mut file) { + let mirrorlist : Arc= Arc::new(match protobuf::Message::parse_from_reader(&mut file) { Ok(f) => f, Err(e) => { error!("Parsing {} failed: {}", &cache_file, e); @@ -1263,8 +1262,8 @@ async fn main() { } }); - if let Some(t) = chrono::NaiveDateTime::from_timestamp_opt(mirrorlist.get_Time() as i64, 0) { - info!("Database creation time {} ({}) ", t, &mirrorlist.get_Time()); + if let Some(t) = chrono::NaiveDateTime::from_timestamp_opt(mirrorlist.Time.unwrap() as i64, 0) { + info!("Database creation time {} ({}) ", t, &mirrorlist.Time.unwrap()); } info!("Loading geoip database"); @@ -1287,7 +1286,7 @@ async fn main() { info!("Loading country-continents"); let cc_redirect = Arc::new(setup_continents( &cccsv, - mirrorlist.get_CountryContinentRedirectCache(), + &mirrorlist.CountryContinentRedirectCache, )); if cc_redirect.len() == 0 { diff --git a/src/bin/mirrorlist_server_test/mod.rs b/src/bin/mirrorlist_server_test/mod.rs index 2b4b530..6fd4fae 100644 --- a/src/bin/mirrorlist_server_test/mod.rs +++ b/src/bin/mirrorlist_server_test/mod.rs @@ -3,7 +3,6 @@ use crate::common::protos::mirrormanager::{ FileDetailsCacheDirectoryType, FileDetailsCacheFilesType, IntRepeatedIntMap, }; use hyper::body; -use protobuf::RepeatedField; use tempfile::tempdir; use tokio::runtime::Runtime; @@ -81,47 +80,47 @@ pub async fn read_response_body(res: Response) -> Result = RepeatedField::new(); + let mut mlc: Vec = Vec::new(); let mut ml1 = MirrorListCacheType::new(); ml1.set_directory("directory/level/three".to_string()); let mut ml2 = MirrorListCacheType::new(); ml2.set_directory("directory/level/three/repodata".to_string()); let global: Vec = vec![1, 42, 100]; - ml1.set_Global(global.clone()); - ml2.set_Global(global); + ml1.Global = global.clone(); + ml2.Global = global; - let mut by_hostid: RepeatedField = RepeatedField::new(); + let mut by_hostid: Vec = Vec::new(); let mut hcurl_id = IntRepeatedIntMap::new(); - hcurl_id.set_key(42); - hcurl_id.set_value(vec![421, 422, 423]); + hcurl_id.key = Some(42); + hcurl_id.value = vec![421, 422, 423]; by_hostid.push(hcurl_id); hcurl_id = IntRepeatedIntMap::new(); - hcurl_id.set_key(1); - hcurl_id.set_value(vec![11, 12, 13]); + hcurl_id.key = Some(1); + hcurl_id.value = vec![11, 12, 13]; by_hostid.push(hcurl_id); hcurl_id 
= IntRepeatedIntMap::new(); - hcurl_id.set_key(100); - hcurl_id.set_value(vec![1001, 1002, 1003]); + hcurl_id.key = Some(100); + hcurl_id.value = vec![1001, 1002, 1003]; by_hostid.push(hcurl_id); - ml1.set_ByHostId(by_hostid.clone()); - ml2.set_ByHostId(by_hostid); + ml1.ByHostId = by_hostid.clone(); + ml2.ByHostId = by_hostid; - let mut by_country: RepeatedField = RepeatedField::new(); + let mut by_country: Vec = Vec::new(); let mut bc = StringRepeatedIntMap::new(); - bc.set_key("SE".to_string()); - bc.set_value(vec![42]); + bc.key = Some("SE".to_string()); + bc.value = vec![42]; by_country.push(bc); - ml1.set_ByCountry(by_country.clone()); - ml2.set_ByCountry(by_country); + ml1.ByCountry = by_country.clone(); + ml2.ByCountry = by_country; mlc.push(ml1); - mirrorlist.set_MirrorListCache(mlc.clone()); + mirrorlist.MirrorListCache = mlc.clone(); - let mut hbc: RepeatedField = RepeatedField::new(); + let mut hbc: Vec = Vec::new(); let mut hb = IntIntMap::new(); hb.set_key(1); hb.set_value(100); @@ -135,9 +134,9 @@ fn do_mirrorlist_test() { hb.set_value(1000); hbc.push(hb); - mirrorlist.set_HostBandwidthCache(hbc); + mirrorlist.HostBandwidthCache = hbc; - let mut hcurl: RepeatedField = RepeatedField::new(); + let mut hcurl: Vec = Vec::new(); let vec = vec![11, 12, 13, 1001, 1002, 1003, 421, 422, 423]; for id in vec { let mut hc_url = IntStringMap::new(); @@ -145,7 +144,7 @@ fn do_mirrorlist_test() { hc_url.set_value(format!("http://hcurl{}/test-{}", id, id)); hcurl.push(hc_url); } - mirrorlist.set_HCUrlCache(hcurl); + mirrorlist.HCUrlCache = hcurl; let mut request = Request::new(Body::empty()); @@ -300,9 +299,8 @@ fn do_mirrorlist_test() { repo.set_key("repo-name+arch-name".to_string()); repo.set_value("directory/level/three".to_string()); - let mut ratdn: RepeatedField = RepeatedField::new(); - ratdn.push(repo); - mirrorlist.set_RepoArchToDirectoryName(ratdn); + let ratdn: Vec = vec![repo]; + mirrorlist.RepoArchToDirectoryName = ratdn; request = Request::new(Body::empty()); *request.uri_mut() = "/metalink?repo=repo-name&arch=arch-name&ip=89.160.20.113" @@ -319,7 +317,7 @@ fn do_mirrorlist_test() { .contains("mirrorlist cache index out of range, you broke it!")); mlc.push(ml2); - mirrorlist.set_MirrorListCache(mlc); + mirrorlist.MirrorListCache = mlc; request = Request::new(Body::empty()); *request.uri_mut() = "/metalink?repo=repo-name&arch=arch-name&ip=89.160.20.113" @@ -335,26 +333,26 @@ fn do_mirrorlist_test() { .unwrap() .contains("repomd.xml not found or has not metalink")); - let mut fdcdc: RepeatedField = RepeatedField::new(); + let mut fdcdc: Vec = Vec::new(); let mut fdcd = FileDetailsCacheDirectoryType::new(); fdcd.set_directory("directory/level/three/repodata".to_string()); fdcdc.push(fdcd); let f: &mut FileDetailsCacheDirectoryType = &mut fdcdc[0]; - let fdcfc = f.mut_FileDetailsCacheFiles(); + let fdcfc = &mut f.FileDetailsCacheFiles; let mut fdcf = FileDetailsCacheFilesType::new(); fdcf.set_filename("repomd.xml".to_string()); fdcfc.push(fdcf); let fdcf: &mut FileDetailsCacheFilesType = &mut fdcfc[0]; - let fdc = fdcf.mut_FileDetails(); + let fdc = &mut fdcf.FileDetails; let mut file_detail_type = FileDetailsType::new(); file_detail_type.set_Size(3); file_detail_type.set_TimeStamp(17); file_detail_type.set_MD5("MD5555".to_string()); fdc.push(file_detail_type); - mirrorlist.set_FileDetailsCache(fdcdc); + mirrorlist.FileDetailsCache = fdcdc; request = Request::new(Body::empty()); *request.uri_mut() = "/metalink?repo=repo-name&arch=arch-name&ip=89.160.20.113"
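
One note on the pattern behind most of the churn above: code generated by rust-protobuf 3.x exposes getters without the `get_` prefix, turns repeated fields into plain public `Vec<T>` (the 2.x `RepeatedField<T>` container is gone), and models optional scalar/string fields as public `Option<T>` alongside the familiar `set_*()` setters. A minimal before/after sketch using `IntRepeatedIntMap` from `protos/mirrormanager.proto`; the helper functions are made up for illustration and are not part of this PR:

```rust
// Illustrative only: the rust-protobuf 2.x -> 3.x accessor changes this PR applies,
// shown on IntRepeatedIntMap from protos/mirrormanager.proto.
use crate::common::protos::mirrormanager::IntRepeatedIntMap;

fn build_entry(key: i64, host_ids: &[i64]) -> IntRepeatedIntMap {
    let mut entry = IntRepeatedIntMap::new();

    // 2.x: entry.set_key(key);
    // 3.x: set_key() still exists, or the Option field can be written directly.
    entry.key = Some(key);

    // 2.x: entry.mut_value().push(...) on a RepeatedField<i64>.
    // 3.x: repeated fields are plain public Vec<i64>.
    entry.value.extend_from_slice(host_ids);

    entry
}

fn read_entry(entry: &IntRepeatedIntMap) -> (i64, usize) {
    // 2.x: entry.get_key() / entry.get_value()
    // 3.x: getter without the get_ prefix; the repeated field is read as the Vec itself.
    (entry.key(), entry.value.len())
}
```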
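
On the server side, the PR replaces the 2.x free function `protobuf::parse_from_reader::<...>()` with the `Message` trait method. Below is a sketch of the cache-file round trip under the 3.x API, assuming the top-level message generated from `mirrormanager.proto` is `MirrorList` (the `mirrorlist` variable in the diff); `generate-mirrorlist-cache.rs` itself keeps writing through a `CodedOutputStream`, so `write_to_writer` here is only the shorter equivalent for illustration:

```rust
// Sketch, not code from the PR: loading and storing the protobuf cache file with
// the protobuf 3.x API. MirrorList is assumed to be the top-level generated message.
use std::fs::File;

use protobuf::Message;

use crate::common::protos::mirrormanager::MirrorList;

fn load_cache(path: &str) -> Result<MirrorList, Box<dyn std::error::Error>> {
    let mut file = File::open(path)?;
    // 2.x: protobuf::parse_from_reader::<MirrorList>(&mut file)
    // 3.x: parse_from_reader() is a method on the Message trait.
    Ok(MirrorList::parse_from_reader(&mut file)?)
}

fn store_cache(path: &str, mirrorlist: &MirrorList) -> Result<(), Box<dyn std::error::Error>> {
    let mut file = File::create(path)?;
    // The generator in the PR streams through CodedOutputStream; write_to_writer()
    // is the one-call form of the same operation.
    mirrorlist.write_to_writer(&mut file)?;
    Ok(())
}
```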
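
On the CI change: the workflow now installs `protobuf-compiler`, and the new `protobuf-parse` dependency pulls in `which`, which it uses to locate a `protoc` binary. `protobuf-codegen` 3.x can drive either the system `protoc` or its pure-Rust parser; the `build.rs` in this PR relies on the crate default, but the choice can be pinned explicitly. A sketch, under the assumption that the upstream builder methods behave as named below; this is not the build.rs from the diff:

```rust
// build.rs variant that pins the parser choice explicitly. Illustrative only.
fn main() {
    protobuf_codegen::Codegen::new()
        // .protoc() would use the protoc binary installed in CI instead.
        .pure()
        .out_dir("src/bin/common/protos")
        .inputs(["protos/mirrormanager.proto"])
        .include("protos")
        .run_from_script();
}
```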