keyfork-shard: ignore duplicate certificate entries
This commit is contained in:
parent
001fc0bccc
commit
94617722a0
|
@@ -194,33 +194,35 @@ impl<P: PromptHandler> OpenPGP<P> {
|
|||
}
|
||||
|
||||
impl<P: PromptHandler> OpenPGP<P> {
|
||||
/// Read all OpenPGP certificates in a path and return a [`Vec`] of them. Certificates are read
|
||||
/// from a file, or from files one level deep in a directory.
|
||||
/// Read all OpenPGP certificates in a path and return a [`Vec`] of them.
|
||||
///
|
||||
/// Certificates are read from a file, or from files one level deep in a directory.
|
||||
/// Certificates with duplicated fingerprints will be discarded.
|
||||
///
|
||||
/// # Errors
|
||||
/// The function may return an error if it is unable to read the directory or if Sequoia is unable
|
||||
/// to load certificates from the file.
|
||||
/// The function may return an error if it is unable to read the directory or if Sequoia is
|
||||
/// unable to load certificates from the file.
|
||||
pub fn discover_certs(path: impl AsRef<Path>) -> Result<Vec<Cert>> {
|
||||
let path = path.as_ref();
|
||||
|
||||
let mut certs = HashMap::new();
|
||||
if path.is_file() {
|
||||
let mut vec = vec![];
|
||||
for cert in CertParser::from_file(path).map_err(Error::Sequoia)? {
|
||||
vec.push(cert.map_err(Error::Sequoia)?);
|
||||
for maybe_cert in CertParser::from_file(path).map_err(Error::Sequoia)? {
|
||||
let cert = maybe_cert.map_err(Error::Sequoia)?;
|
||||
certs.insert(cert.fingerprint(), cert);
|
||||
}
|
||||
Ok(vec)
|
||||
} else {
|
||||
let mut vec = vec![];
|
||||
for entry in path
|
||||
.read_dir()
|
||||
.map_err(Error::Io)?
|
||||
.filter_map(Result::ok)
|
||||
.filter(|p| p.path().is_file())
|
||||
{
|
||||
vec.push(Cert::from_file(entry.path()).map_err(Error::Sequoia)?);
|
||||
let cert = Cert::from_file(entry.path()).map_err(Error::Sequoia)?;
|
||||
certs.insert(cert.fingerprint(), cert);
|
||||
}
|
||||
Ok(vec)
|
||||
}
|
||||
Ok(certs.into_values().collect())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in New Issue