Compare commits
3 Commits
12fb704461...5ee7d6a30b

Author | SHA1 | Date |
---|---|---|
Ryan Heywood | 5ee7d6a30b | |
Ryan Heywood | 9a1312fa16 | |
Ryan Heywood | 496df4b353 | |

File diff suppressed because it is too large
@@ -0,0 +1,6 @@
+[workspace]
+
+members = [
+    "bloom-filter-generator",
+    "mnemonic-hash-checker",
+]
File diff suppressed because it is too large
@@ -0,0 +1,14 @@
+[package]
+name = "bloom-filter-generator"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+hex = "0.4.3"
+serde = "1.0"
+serde_derive = "1.0"
+rayon = "1.7.0"
+clap = {version = "4.0.32", features = ["derive"]}
+bloomfilter = "1"
@@ -0,0 +1,75 @@
+use std::error::Error;
+use std::fs::File;
+use std::io::{BufWriter, BufReader, BufRead, Write};
+use std::path::PathBuf;
+use bloomfilter::Bloom;
+use clap::{arg, Parser};
+
+#[derive(Parser, Debug)]
+#[command(author, version, about, long_about = None)]
+struct Opts {
+    #[arg(short, long, help = "Input file (sha256 hashes csv)")]
+    input_file: PathBuf,
+
+    #[arg(short, long, help = "Output file (bloom filter dump of sha256 hashes)")]
+    output_file: PathBuf,
+
+    #[arg(
+        long,
+        help = "Bloom filter: number of items",
+        default_value_t = 13_194_396_000
+    )]
+    num_items: usize,
+
+    #[arg(
+        long,
+        help = "Bloom filter: wanted rate of false positives",
+        default_value_t = 0.000_001
+    )]
+    fp_rate: f64,
+}
+
+fn main() -> Result<(), Box<dyn Error>> {
+    let opts: Opts = Opts::parse();
+
+    let mut bloom = Bloom::new_for_fp_rate(opts.num_items, opts.fp_rate);
+
+    let reader = BufReader::new(File::open(&opts.input_file)?).lines();
+
+    let writer = File::create(&opts.output_file)
+        .expect("error opening output file");
+    let mut writer = BufWriter::new(writer);
+
+    for (count, line) in reader.enumerate() {
+        if count & 0b1_1111_1111_1111_1111_1111 == 0 {
+            println!("Read {} lines", count);
+        }
+
+        match line {
+            Ok(hash) => {
+                bloom.set(&hash)
+            }
+            Err(err) => eprintln!("Error reading line record: {}", err)
+        }
+    }
+
+    // Dump bloom filter to file
+    println!("Serializing bloom filter to output file");
+
+    // - metadata
+    writer.write_all(&bloom.number_of_bits().to_be_bytes())?;
+    writer.write_all(&bloom.number_of_hash_functions().to_be_bytes())?;
+    writer.write_all(&bloom.sip_keys()[0].0.to_be_bytes())?;
+    writer.write_all(&bloom.sip_keys()[0].1.to_be_bytes())?;
+    writer.write_all(&bloom.sip_keys()[1].0.to_be_bytes())?;
+    writer.write_all(&bloom.sip_keys()[1].1.to_be_bytes())?;
+
+    writer.flush()?;
+
+    // - bitmap
+    writer.write_all(&bloom.bitmap())?;
+
+    writer.flush()?;
+
+    Ok(())
+}
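For reference, the dump written above is a fixed 44-byte big-endian header followed by the raw bitmap: number_of_bits (u64, 8 bytes), number_of_hash_functions (u32, 4 bytes), and the two SipHash key pairs (4 x u64, 32 bytes). That is where the `length - 8 - 4 - 32` in the loader hunks below comes from. A minimal sketch of reading that header back, not part of this diff and assuming the same bloomfilter 1.x accessors used above:

// Sketch only: parses the header the generator writes, in write order.
use std::io::Read;

fn read_header(mut r: impl Read) -> std::io::Result<(u64, u32, [(u64, u64); 2])> {
    let mut buf8 = [0u8; 8];
    let mut buf4 = [0u8; 4];

    // number_of_bits: u64, big-endian
    r.read_exact(&mut buf8)?;
    let number_of_bits = u64::from_be_bytes(buf8);

    // number_of_hash_functions: u32, big-endian
    r.read_exact(&mut buf4)?;
    let number_of_hash_functions = u32::from_be_bytes(buf4);

    // two SipHash key pairs, each (u64, u64), big-endian
    let mut sip_keys = [(0u64, 0u64); 2];
    for key in sip_keys.iter_mut() {
        r.read_exact(&mut buf8)?;
        key.0 = u64::from_be_bytes(buf8);
        r.read_exact(&mut buf8)?;
        key.1 = u64::from_be_bytes(buf8);
    }

    Ok((number_of_bits, number_of_hash_functions, sip_keys))
}

These values, plus the remaining bitmap bytes, are presumably what the `Bloom::from_existing(...)` call visible in the loader hunk below consumes.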
@@ -2,7 +2,9 @@ use std::{fs::File, io::{BufReader, Read}, path::Path};

 use color_eyre::eyre::Result;
 use bloomfilter::Bloom;
+use tracing::info;

+#[tracing::instrument]
 pub fn load(file: &Path) -> Result<Bloom<String>> {
     let file = File::open(file)?;
     let length = file.metadata().unwrap().len();
@@ -31,9 +33,11 @@ pub fn load(file: &Path) -> Result<Bloom<String>> {
         (u64::from_be_bytes(sk10), (u64::from_be_bytes(sk11))),
     ];

+    info!("Reading {length} bytes into memory");
     let mut bitmap = vec![0; (length - 8 - 4 - 32) as usize];
     buf.read_exact(&mut bitmap)?;

+    info!("Generating bloom filter from loaded bitmap");
     Ok(Bloom::from_existing(
         &bitmap,
         number_of_bits,
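A note on what "Reading {length} bytes into memory" implies: with the generator defaults (13,194,396,000 items at a 0.000001 false-positive rate), the textbook sizing formula m = -n * ln(p) / ln(2)^2 puts the bitmap at roughly 4.7e10 bytes (about 44 GiB) with around 20 hash functions, and the loader reads the whole bitmap into memory. A back-of-the-envelope sketch, not part of this diff, assuming new_for_fp_rate follows the standard formula up to rounding:

fn main() {
    let n: f64 = 13_194_396_000.0; // generator default for num_items
    let p: f64 = 0.000_001;        // generator default for fp_rate

    // Standard Bloom filter sizing: m = -n * ln(p) / ln(2)^2 bits.
    let bits = -n * p.ln() / 2f64.ln().powi(2);
    let bytes = bits / 8.0;
    let hash_fns = (bits / n * 2f64.ln()).ceil();

    // Comes out to roughly 44 GiB and 20 hash functions.
    println!(
        "{:.1} GiB bitmap, ~{} hash functions",
        bytes / (1u64 << 30) as f64,
        hash_fns
    );
}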
@@ -18,11 +18,67 @@
   </head>
   <body>
     <div class="container">
-      <h1>MilkSad Lookup Service</h1>
+      <h1>Milk Sad Lookup Service</h1>
       <p>
-        Query a SHA256 hash of your mnemonic phrase. <b>DO NOT ENTER</b> your
-        mnemonic phrase in cleartext. To generate a hash of your mnemonic, use:
-        <code>echo -n "milk sad wage cup reward umbrella raven visa give list decorate broccoli" | sha256sum</code> with your mnemonic.
+        To help people identify if they are impacted by Milksad, we are
+        providing a web service to check if your mnemonic is in the vulnerable
+        set. Note that this service <i>only</i> covers mnemonics impacted by
+        Libbitcoin Explorer (<code>bx</code>) versions <code>3.0.0</code> to
+        <code>3.6.0</code>, though it may be updated over time to cover other
+        related vulnerabilities we are researching.
+      </p>
+
+      <h2>Who should use this tool?</h2>
+      <ul>
+        <li>
+          If you know you generated your wallet with <code>bx 3.0.0</code> or
+          higher (after March 2017)
+        </li>
+        <li>
+          If you know you generated your wallet with a CLI tool, but don't
+          remember which tool.
+        </li>
+      </ul>
+
+      <h2>What do the results mean?</h2>
+      <ul>
+        <li>
+          Vulnerable means that we are 99.99% sure your mnemonic is vulnerable
+          to being brute forced. You should recreate your mnemonic and move
+          your funds.
+        </li>
+        <li>
+          Match Not Found means that we do not have a record of your mnemonic,
+          but it does not mean it may not be impacted by other issues. If you
+          are not certain your wallet is secure, you may consider changing
+          regardless.
+        </li>
+      </ul>
+
+      <h2>Security and Privacy</h2>
+      <p>
+        We do not want to store BIP39 mnemonics for this lookup service, or
+        have people submit their BIP39 mnemonic private keys to us, so we had
+        to sacrifice the user experience to provide this service safely. Our
+        server contains SHA256 hashes of all currently known vulnerable
+        mnemonics, so you can submit the SHA256 hash of your own mnemonic and
+        see if it is in our set.
+      </p>
+      <p>
+        Please note that it is usually a <i>very</i> bad idea to follow
+        invitations from strangers on the internet when it comes to sharing
+        something about your wallet private keys. Typically they are scammers
+        with bad intentions. We're aware of this and want to avoid being a poor
+        example, so we decided to avoid including a convenient HTML input field
+        to do the hashing for you (and could steal your mnemonic in the
+        process). Users must bring their own SHA256 hash of their mnemonic
+        ideally calculated in their own offline machine. If other people offer
+        a similar lookup service, especially one that accepts a mnemonic as
+        input, please be very cautious.
+      </p>
+      <p>
+        For those wishing to limit metadata sent to us or our service provider,
+        we encourage using Whonix/Tor.
       </p>

       <form action="/check" method="get" enctype="multipart/form-data">
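The new page text asks users to hash their mnemonic offline and submit only the digest through the /check form. A hypothetical helper for doing that in Rust, not part of this diff; it assumes the sha2 and hex crates, and the example phrase is the well-known demo mnemonic from the removed line above:

// Sketch only: compute the SHA256 hex digest of a mnemonic offline.
use sha2::{Digest, Sha256};

fn main() {
    // The well-known demonstration phrase; substitute your own mnemonic,
    // ideally on an offline machine.
    let mnemonic = "milk sad wage cup reward umbrella raven visa give list decorate broccoli";
    let hash = hex::encode(Sha256::digest(mnemonic.as_bytes()));
    println!("{hash}"); // hex digest to submit through the /check form
}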
@@ -7,7 +7,7 @@ use axum::{
     http::{header::CONTENT_TYPE, StatusCode},
     response::{AppendHeaders, IntoResponse},
     routing::get,
-    Json, Router,
+    Router,
 };

 use bloomfilter::Bloom;
@@ -76,13 +76,11 @@ struct CheckQuery {
 async fn check_hash_slug(
     query: Query<CheckQuery>,
     State(state): State<Arc<AppState>>,
-) -> (StatusCode, Json<bool>) {
-    let result = check_hash(&query.sha256, &state).await;
-    let status_code = match state.bloom_filter.check(&query.sha256.to_string()) {
-        true => StatusCode::OK,
-        false => StatusCode::NOT_FOUND,
-    };
-    (status_code, Json(result))
+) -> (StatusCode, &'static str) {
+    match state.bloom_filter.check(&query.sha256.to_string()) {
+        true => (StatusCode::OK, "vulnerable"),
+        false => (StatusCode::NOT_FOUND, "match not found")
+    }
 }

 #[tokio::main]
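The handler now answers with a status code and a static string instead of a JSON boolean; in axum, a (StatusCode, &'static str) tuple becomes a plain-text response with that status. A small sketch, not part of this diff, pinning down the mapping clients should expect from GET /check:

use axum::http::StatusCode;

// Stand-in for state.bloom_filter.check(&query.sha256.to_string()).
fn respond(found: bool) -> (StatusCode, &'static str) {
    match found {
        true => (StatusCode::OK, "vulnerable"),
        false => (StatusCode::NOT_FOUND, "match not found"),
    }
}

fn main() {
    assert_eq!(respond(true), (StatusCode::OK, "vulnerable"));
    assert_eq!(respond(false), (StatusCode::NOT_FOUND, "match not found"));
}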
@@ -108,7 +106,7 @@ async fn main() -> Result<()> {
         .layer(CatchPanicLayer::new())
         .layer(TraceLayer::new_for_http());

-    info!("server go nyoom");
+    info!("server go nyoom: {addr}");

     axum::Server::bind(&addr)
         .serve(app.into_make_service())