use std::{
    borrow::Cow,
    fmt, fs,
    fs::File,
    io::{self, Read, Write},
    path::Path,
    path::PathBuf,
    str::FromStr,
};

use bytes::Bytes;
use serde::{de::DeserializeOwned, Serialize};

use rpki::uri;

use crate::commons::{
    api::{Base64, HexEncodedHash, ListElement, Publish, Update, Withdraw},
    error::KrillIoError,
};

/// Creates a sub-directory under `base` if needed, and returns the full path to it.
pub fn sub_dir(base: &Path, name: &str) -> Result<PathBuf, KrillIoError> {
    let mut full_path = base.to_path_buf();
    full_path.push(name);
    create_dir(&full_path)?;
    Ok(full_path)
}

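/// Creates the given directory if it does not exist yet. Uses `fs::create_dir`
/// (not `create_dir_all`), so the parent directory must already exist.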
pub fn create_dir(dir: &Path) -> Result<(), KrillIoError> {
    if !dir.is_dir() {
        fs::create_dir(dir)
            .map_err(|e| KrillIoError::new(format!("could not create dir: {}", dir.to_string_lossy()), e))?;
    }
    Ok(())
}

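/// Creates a file at the given path, creating any missing parent directories
/// first. Truncates the file if it already exists.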
pub fn create_file_with_path(path: &Path) -> Result<File, KrillIoError> {
    if !path.exists() {
        if let Some(parent) = path.parent() {
            trace!("Creating path: {}", parent.to_string_lossy());
            fs::create_dir_all(parent).map_err(|e| {
                KrillIoError::new(
                    format!("Could not create dir path for: {}", parent.to_string_lossy()),
                    e,
                )
            })?;
        }
    }
    File::create(path).map_err(|e| KrillIoError::new(format!("Could not create file: {}", path.to_string_lossy()), e))
}

/// Derives the path for a file with the given name under the base path.
pub fn file_path(base_path: &Path, file_name: &str) -> PathBuf {
    let mut path = base_path.to_path_buf();
    path.push(file_name);
    path
}

/// Saves a file, creating parent dirs as needed.
pub fn save(content: &[u8], full_path: &Path) -> Result<(), KrillIoError> {
    let mut f = create_file_with_path(full_path)?;
    f.write_all(content)
        .map_err(|e| KrillIoError::new(format!("Could not write to: {}", full_path.to_string_lossy()), e))?;

    trace!("Saved file: {}", full_path.to_string_lossy());
    Ok(())
}

/// Saves an object as JSON. Note that serialization errors are unwrapped,
/// so this panics if the object cannot be serialized.
pub fn save_json<O: Serialize>(object: &O, full_path: &Path) -> Result<(), KrillIoError> {
    let json = serde_json::to_string(object).unwrap();
    save(&Bytes::from(json), full_path)
}

/// Loads a file and deserializes it as JSON for the expected type. Maps JSON
/// errors to [`KrillIoError`].
pub fn load_json<O: DeserializeOwned>(full_path: &Path) -> Result<O, KrillIoError> {
    let bytes = read(full_path)?;
    serde_json::from_slice(&bytes).map_err(|e| {
        KrillIoError::new(
            format!("Could not load json for file: {}", full_path.to_string_lossy()),
            io::Error::new(io::ErrorKind::Other, format!("could not deserialize json: {}", e)),
        )
    })
}

/// Saves a file with the given name under the base directory, creating parent
/// dirs as needed.
pub fn save_in_dir(content: &Bytes, base_path: &Path, name: &str) -> Result<(), KrillIoError> {
    let mut full_path = base_path.to_path_buf();
    full_path.push(name);
    save(content, &full_path)
}

/// Saves a file under a base directory, using the rsync uri to create
/// sub-directories preserving the rsync authority and module in dir names.
pub fn save_with_rsync_uri(content: &Bytes, base_path: &Path, uri: &uri::Rsync) -> Result<(), KrillIoError> {
    let path = path_with_rsync(base_path, uri);
    save(content, &path)
}

/// Reads a file to Bytes.
pub fn read(path: &Path) -> Result<Bytes, KrillIoError> {
    let mut f =
        File::open(path).map_err(|e| KrillIoError::new(format!("Could not open: '{}'", path.to_string_lossy()), e))?;
    let mut bytes = Vec::new();
    f.read_to_end(&mut bytes)
        .map_err(|e| KrillIoError::new(format!("Could not read: {}", path.to_string_lossy()), e))?;
    Ok(Bytes::from(bytes))
}

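/// Reads the file stored under the base directory at the relative location
/// derived from the rsync uri (authority/module/path).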
pub fn read_with_rsync_uri(base_path: &Path, uri: &uri::Rsync) -> Result<Bytes, KrillIoError> {
    let path = path_with_rsync(base_path, uri);
    read(&path)
}

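/// Deletes the file stored under the base directory at the relative location
/// derived from the rsync uri.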
pub fn delete_with_rsync_uri(base_path: &Path, uri: &uri::Rsync) -> Result<(), KrillIoError> {
    delete_file(&path_with_rsync(base_path, uri))
}

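/// Deletes the file with the given name under the base directory.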
pub fn delete_in_dir(base_path: &Path, name: &str) -> Result<(), KrillIoError> {
    let mut full_path = base_path.to_path_buf();
    full_path.push(name);
    delete_file(&full_path)
}

/// Deletes a file, but does not touch the parent directories. See [`clean_file_and_path`]
/// for an alternative that does.
pub fn delete_file(full_path: &Path) -> Result<(), KrillIoError> {
    trace!("Removing file: {}", full_path.to_string_lossy());
    fs::remove_file(full_path)
        .map_err(|e| KrillIoError::new(format!("Could not remove file: {}", full_path.to_string_lossy()), e))
}

/// Removes the file and, after removing it, any directories on its path that
/// are left **empty**.
pub fn clean_file_and_path(path: &Path) -> Result<(), KrillIoError> {
    if path.exists() {
        delete_file(path)?;

        let mut parent_opt = path.parent();

        while let Some(parent) = parent_opt {
            if parent
                .read_dir()
                .map_err(|e| KrillIoError::new(format!("Could not read directory: '{}'", parent.to_string_lossy()), e))?
                .count()
                == 0
            {
                trace!("Will delete {}", parent.to_string_lossy());
                fs::remove_dir(parent)
                    .map_err(|e| KrillIoError::new(format!("Could not remove dir: {}", parent.to_string_lossy()), e))?;
            }

            parent_opt = parent.parent();
        }
    }
    Ok(())
}

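/// Maps an rsync uri to a path under the base directory:
/// base_path/authority/module/path.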
fn path_with_rsync(base_path: &Path, uri: &uri::Rsync) -> PathBuf {
    let mut path = base_path.to_path_buf();
    path.push(uri.authority());
    path.push(uri.module_name());
    path.push(uri.path());
    path
}

/// Recurses a path on disk and returns all files found as [`CurrentFile`],
/// using the provided rsync_base URI as the rsync prefix.
/// Allows a publication client to publish the contents below some base
/// dir, in their own designated rsync URI name space.
pub fn crawl_incl_rsync_base(base_path: &Path, rsync_base: &uri::Rsync) -> Result<Vec<CurrentFile>, Error> {
    crawl_disk(base_path, base_path, Some(rsync_base))
}

/// Recurses a path on disk and returns all files found as [`CurrentFile`],
/// deriving the rsync_base URI from the directory structure. This is
/// useful when reading [`CurrentFile`] instances that were saved in some
/// base directory as is done by the `FileStore`.
pub fn crawl_derive_rsync_uri(base_path: &Path) -> Result<Vec<CurrentFile>, Error> {
    crawl_disk(base_path, base_path, None)
}

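/// Recursively walks the directory at `path` and collects all regular files
/// as [`CurrentFile`] instances, deriving each file's rsync uri from its
/// location relative to `base_path`.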
fn crawl_disk(base_path: &Path, path: &Path, rsync_base: Option<&uri::Rsync>) -> Result<Vec<CurrentFile>, Error> {
    let mut res = Vec::new();

    for entry in fs::read_dir(path).map_err(|_| Error::cannot_read(path))? {
        let entry = entry.map_err(|_| Error::cannot_read(path))?;
        let path = entry.path();
        if path.is_dir() {
            let mut other = crawl_disk(base_path, &path, rsync_base)?;
            res.append(&mut other);
        } else {
            let uri = derive_uri(base_path, &path, rsync_base)?;
            let content = read(&path).map_err(|_| Error::cannot_read(&path))?;
            let current_file = CurrentFile::new(uri, &content);

            res.push(current_file);
        }
    }

    Ok(res)
}

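/// Derives the rsync uri for a file by taking its path relative to `base_path`
/// and prefixing it with the given rsync base uri, or with "rsync://" if no
/// base is given (in which case the leading path components are taken to be
/// the authority and module).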
fn derive_uri(base_path: &Path, path: &Path, rsync_base: Option<&uri::Rsync>) -> Result<uri::Rsync, Error> {
    let rel = path.strip_prefix(base_path).map_err(|_| Error::PathOutsideBasePath)?;

    let rel_string = rel.to_string_lossy().to_string();

    let uri_string = match rsync_base {
        Some(rsync_base) => format!("{}{}", rsync_base, rel_string),
        None => format!("rsync://{}", rel_string),
    };

    let uri = uri::Rsync::from_str(&uri_string).map_err(|_| Error::UnsupportedFileName(uri_string))?;
    Ok(uri)
}

/// Recursively copies a base_path (if it's a dir, that is), preserving file
/// permissions.
///
/// This is needed when making a back-up copy when we need to do upgrades on data, which
/// could in theory fail, in which case we want to leave the old data in place.
pub fn backup_dir(base_path: &Path, target_path: &Path) -> Result<(), Error> {
    if base_path.to_string_lossy() == Cow::Borrowed("/") || target_path.to_string_lossy() == Cow::Borrowed("/") {
        Err(Error::BackupExcessive)
    } else if base_path.is_file() {
        let mut target = target_path.to_path_buf();
        target.push(base_path.file_name().unwrap());

        if target.exists() {
            Err(Error::backup_target_exists(target_path))
        } else {
            fs::copy(base_path, &target).map_err(|e| {
                KrillIoError::new(
                    format!(
                        "Could not back up file from '{}' to '{}'",
                        base_path.to_string_lossy(),
                        target.to_string_lossy()
                    ),
                    e,
                )
            })?;
            Ok(())
        }
    } else if base_path.is_dir() {
        for entry in fs::read_dir(base_path).map_err(|e| {
            KrillIoError::new(
                format!("Could not read dir '{}' for backup", base_path.to_string_lossy()),
                e,
            )
        })? {
            let path = entry
                .map_err(|e| {
                    KrillIoError::new(
                        format!(
                            "Could not read entry for dir '{}' for backup",
                            base_path.to_string_lossy()
                        ),
                        e,
                    )
                })?
                .path();
            let mut target = target_path.to_path_buf();
            target.push(path.file_name().unwrap());
            if path.is_dir() {
                backup_dir(&path, &target)?;
            } else if path.is_file() {
                if let Some(parent) = target.parent() {
                    fs::create_dir_all(parent).map_err(|e| {
                        KrillIoError::new(
                            format!("Could not create dir(s) '{}' for backup", parent.to_string_lossy()),
                            e,
                        )
                    })?;
                }
                fs::copy(&path, &target).map_err(|e| {
                    KrillIoError::new(
                        format!(
                            "Could not backup '{}' to '{}'",
                            path.to_string_lossy(),
                            target.to_string_lossy()
                        ),
                        e,
                    )
                })?;
            } else {
                return Err(Error::backup_cannot_read(&path));
            }
        }
        Ok(())
    } else {
        Err(Error::backup_cannot_read(base_path))
    }
}

//------------ CurrentFile ---------------------------------------------------

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct CurrentFile {
    /// The full uri for this file.
    uri: uri::Rsync,

    /// The actual file content. Note that we may want to store this
    /// only on disk in future (look up by sha256 hash), to save memory.
    content: Base64,

    /// The sha-256 hash of the file (as is used on the RPKI manifests and
    /// in the publication protocol for list, update and withdraw). Saving
    /// this rather than calculating on demand seems a small price for some
    /// performance gain.
    hash: HexEncodedHash,
}

impl CurrentFile {
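    /// Creates a new [`CurrentFile`] for the given uri and raw content. The
    /// content is stored base64 encoded, together with its sha-256 hash.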
    pub fn new(uri: uri::Rsync, content: &Bytes) -> Self {
        let content = Base64::from_content(content);
        let hash = content.to_encoded_hash();
        CurrentFile { uri, content, hash }
    }

    /// Saves this file under a base directory, based on the (rsync) uri of
    /// this file.
    pub fn save(&self, base_path: &Path) -> Result<(), KrillIoError> {
        save_with_rsync_uri(&self.content.to_bytes(), base_path, &self.uri)
    }

    pub fn uri(&self) -> &uri::Rsync {
        &self.uri
    }

    pub fn content(&self) -> &Base64 {
        &self.content
    }

    pub fn to_bytes(&self) -> Bytes {
        self.content.to_bytes()
    }

    pub fn hash(&self) -> &HexEncodedHash {
        &self.hash
    }

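    /// Returns a publication protocol `Publish` element for this file, using
    /// the file's hash (as a string) as the tag.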
    pub fn as_publish(&self) -> Publish {
        let tag = Some(self.hash.to_string());
        let uri = self.uri.clone();
        let content = self.content.clone();
        Publish::new(tag, uri, content)
    }

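    /// Returns a publication protocol `Update` element that replaces the
    /// object previously published at this uri (with the given old hash) by
    /// this file's content.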
    pub fn as_update(&self, old_hash: &HexEncodedHash) -> Update {
        let tag = None;
        let uri = self.uri.clone();
        let content = self.content.clone();
        let hash = old_hash.clone();
        Update::new(tag, uri, content, hash)
    }

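    /// Returns a publication protocol `Withdraw` element for this file, based
    /// on its uri and current hash.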
    pub fn as_withdraw(&self) -> Withdraw {
        let tag = None;
        let uri = self.uri.clone();
        let hash = self.hash.clone();
        Withdraw::new(tag, uri, hash)
    }

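    /// Consumes this file and returns a `ListElement` containing its uri and
    /// hash, as used in publication protocol list responses.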
    pub fn into_list_element(self) -> ListElement {
        ListElement::new(self.uri, self.hash)
    }
}

impl PartialEq for CurrentFile {
    fn eq(&self, other: &CurrentFile) -> bool {
        self.uri == other.uri && self.hash == other.hash && self.content == other.content
    }
}

impl Eq for CurrentFile {}

//------------ Error ---------------------------------------------------------

#[derive(Debug)]
pub enum Error {
    CannotRead(String),
    UnsupportedFileName(String),
    PathOutsideBasePath,
    BackupExcessive,
    BackupCannotReadSource(String),
    BackupTargetExists(String),
    Io(KrillIoError),
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Error::CannotRead(s) => write!(f, "Cannot read: {}", s),
            Error::UnsupportedFileName(name) => write!(f, "Unsupported characters: {}", name),
            Error::PathOutsideBasePath => write!(f, "Cannot use path outside of rsync jail"),
            Error::BackupExcessive => write!(f, "Do not ever use '/' as the source or target for backups"),
            Error::BackupCannotReadSource(e) => write!(f, "Source for backup cannot be read: {}", e),
            Error::BackupTargetExists(e) => write!(f, "Target for backup already exists: {}", e),
            Error::Io(e) => e.fmt(f),
        }
    }
}

impl Error {
    pub fn cannot_read(path: &Path) -> Error {
        let str = path.to_string_lossy().to_string();
        Error::CannotRead(str)
    }

    fn backup_cannot_read(path: &Path) -> Error {
        let str = path.to_string_lossy().to_string();
        Error::BackupCannotReadSource(str)
    }

    fn backup_target_exists(path: &Path) -> Error {
        let str = path.to_string_lossy().to_string();
        Error::BackupTargetExists(str)
    }
}

impl std::error::Error for Error {}

impl From<KrillIoError> for Error {
    fn from(e: KrillIoError) -> Self {
        Error::Io(e)
    }
}

//------------ Tests ---------------------------------------------------------

#[cfg(test)]
mod tests {
    use crate::test;

    use super::*;

    #[test]
    fn should_scan_disk() {
        test::test_under_tmp(|base_dir| {
            let file_1 = CurrentFile::new(
                test::rsync("rsync://host:10873/module/alice/file1.txt"),
                &Bytes::from("content 1"),
            );
            let file_2 = CurrentFile::new(
                test::rsync("rsync://host:10873/module/alice/file2.txt"),
                &Bytes::from("content 2"),
            );
            let file_3 = CurrentFile::new(
                test::rsync("rsync://host:10873/module/alice/sub/file1.txt"),
                &Bytes::from("content sub file"),
            );
            let file_4 = CurrentFile::new(
                test::rsync("rsync://host:10873/module/bob/file.txt"),
                &Bytes::from("content"),
            );

            file_1.save(&base_dir).unwrap();
            file_2.save(&base_dir).unwrap();
            file_3.save(&base_dir).unwrap();
            file_4.save(&base_dir).unwrap();

            let files = crawl_derive_rsync_uri(&base_dir).unwrap();

            assert!(files.contains(&file_1));
            assert!(files.contains(&file_2));
            assert!(files.contains(&file_3));
            assert!(files.contains(&file_4));
        });
    }
}