Compare commits

..

No commits in common. "main" and "merge-blob-and-values" have entirely different histories.

55 changed files with 794 additions and 8098 deletions

2793
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -3,17 +3,8 @@
resolver = "2"
members = [
"crates/icepick",
"crates/icepick-workflow",
"crates/icepick-module",
"crates/builtins/icepick-internal",
"crates/builtins/icepick-ed25519",
"crates/by-chain/icepick-solana",
"crates/by-chain/icepick-cosmos",
"crates/miniquorum",
"crates/spacemesh/api-client",
"crates/spacemesh/codec",
"crates/spacemesh/spacemesh",
"crates/by-chain/icepick-spacemesh",
]
[workspace.dependencies]

View File

@ -1,13 +0,0 @@
[package]
name = "icepick-ed25519"
version = "0.1.0"
edition = "2021"
publish = ["distrust"]
[dependencies]
ed25519-dalek = "2.1.1"
icepick-module = { version = "0.1.0", path = "../../icepick-module" }
serde.workspace = true
serde_json.workspace = true
smex = { version = "0.1.0", registry = "distrust" }
thiserror = "2.0.9"

View File

@ -1,91 +0,0 @@
use ed25519_dalek::Signer;
use icepick_module::Module;
use serde::{Deserialize, Serialize};
/// Operations supported by the Ed25519 module.
///
/// Requests are tagged by the `operation` JSON field; operation parameters
/// arrive under the `values` field, with kebab-case operation names.
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "operation", content = "values", rename_all = "kebab-case")]
pub enum Operation {
    /// Derive the public key from the first provided derived key.
    GetPubkey {},
    /// Sign `message` (raw bytes) with the first provided derived key.
    Sign { message: Vec<u8> },
}
/// A full request to this module: the operation plus caller-derived keys.
#[derive(Serialize, Deserialize, Debug)]
pub struct Request {
    // 32-byte Ed25519 secret keys derived by the caller. Only the first
    // entry is used; a missing/empty list panics when an operation runs.
    derived_keys: Option<Vec<[u8; 32]>>,
    // The operation is flattened into the same JSON object as the keys.
    #[serde(flatten)]
    operation: Operation,
}
/// Errors produced by this module. Currently empty: failure paths panic
/// (via `unwrap`) instead of returning an error.
#[derive(thiserror::Error, Debug)]
pub enum Error {}

/// Marker type implementing [`Module`] for Ed25519 key operations.
pub struct Ed25519;
impl Module for Ed25519 {
    type Error = Error;
    type Request = Request;

    /// Advertise the two operations this module offers, for help output.
    fn describe_operations() -> Vec<icepick_module::help::Operation> {
        use icepick_module::help::*;

        // Shared argument definition for operations carrying a payload.
        let message_arg = Argument::builder()
            .name("message")
            .description("The message to sign, as an array of bytes.")
            .r#type(ArgumentType::Required)
            .build();

        vec![
            Operation::builder()
                .name("get-pubkey")
                .description("Get an Ed25519 public key from the provided private key.")
                .build(),
            Operation::builder()
                .name("sign")
                .description("Sign a message using an Ed25519 private key.")
                .build()
                .argument(&message_arg),
        ]
    }

    /// Execute a request. Both operations need a signing key, so it is
    /// extracted once up front; panics if no derived key was provided.
    fn handle_request(request: Self::Request) -> Result<serde_json::Value, Self::Error> {
        let Request {
            derived_keys,
            operation,
        } = request;

        let signing_key = derived_keys
            .iter()
            .flatten()
            .next()
            .map(ed25519_dalek::SigningKey::from_bytes)
            .unwrap();

        match operation {
            Operation::GetPubkey {} => {
                let pubkey = signing_key.verifying_key().to_bytes();
                Ok(serde_json::json!({
                    "blob": {
                        "pubkey": pubkey,
                    }
                }))
            }
            Operation::Sign { message } => {
                let signature = signing_key.sign(&message);
                Ok(serde_json::json!({
                    "blob": {
                        "signature": signature.to_vec(),
                    }
                }))
            }
        }
    }
}

View File

@ -1,6 +0,0 @@
use icepick_module::Module;
use icepick_ed25519::Ed25519;
/// Binary entry point: delegate to the module's responder loop.
// NOTE(review): `run_responder` comes from `icepick_module::Module`;
// presumably it drives the JSON request/response loop — confirm there.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    Ed25519::run_responder()
}

View File

@ -1,10 +0,0 @@
[package]
name = "icepick-internal"
version = "0.1.0"
edition = "2021"
[dependencies]
icepick-module = { version = "0.1.0", path = "../../icepick-module" }
serde.workspace = true
serde_json.workspace = true
thiserror = "2.0.9"

View File

@ -1,121 +0,0 @@
use icepick_module::{
help::{Argument, ArgumentType},
Module,
};
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
/// Resolve `filename` against the Icepick data directory.
///
/// The base directory comes from the `ICEPICK_DATA_DIRECTORY` environment
/// variable, falling back to `/media/external` when it is unset.
fn path_for_filename(filename: &Path) -> PathBuf {
    // `env::var` does the lookup directly instead of linearly scanning
    // every environment variable via `env::vars()`.
    let base = std::env::var("ICEPICK_DATA_DIRECTORY")
        .unwrap_or_else(|_| String::from("/media/external"));
    PathBuf::from(base).join(filename)
}
/// Requests handled by the internal module, tagged by `operation` with
/// parameters under `values` (kebab-case operation names).
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "operation", content = "values", rename_all = "kebab-case")]
pub enum Request {
    /// Read a JSON file from the data directory, blocking until it exists.
    LoadFile {
        filename: PathBuf,
    },
    /// Write all remaining request values as JSON to a file in the data directory.
    SaveFile {
        filename: PathBuf,
        // Every other field of the request, captured as raw JSON.
        #[serde(flatten)]
        values: serde_json::Value,
    },
    /// Echo all request values back unchanged.
    Cat {
        // Every field of the request, captured as raw JSON.
        #[serde(flatten)]
        values: serde_json::Value,
    },
}
/// Errors produced by this module. Currently empty: I/O and JSON failures
/// panic (via `unwrap`) instead of being reported.
#[derive(thiserror::Error, Debug)]
pub enum Error {}

/// Marker type implementing [`Module`] for internal file/passthrough operations.
pub struct Internal;
impl Module for Internal {
    type Error = Error;
    type Request = Request;

    /// Advertise the operations this module offers, for help output.
    fn describe_operations() -> Vec<icepick_module::help::Operation> {
        let filename = Argument {
            name: "filename".to_string(),
            description: "The file to load or save data to.".to_string(),
            r#type: ArgumentType::Required,
        };
        vec![
            icepick_module::help::Operation {
                name: "load-file".to_string(),
                description: "Load data from a JSON file.".to_string(),
                arguments: vec![filename.clone()],
            },
            icepick_module::help::Operation {
                name: "save-file".to_string(),
                // Fixed: save-file writes *to* a file; the old text said "from".
                description: "Save data to a JSON file.".to_string(),
                arguments: vec![filename],
            },
            icepick_module::help::Operation {
                name: "cat".to_string(),
                description: "Return all inputs. Usable in workflows to sum up all desired outputs"
                    .to_string(),
                arguments: vec![],
            },
        ]
    }

    /// Handle one request.
    ///
    /// `LoadFile` blocks, polling once per second, until the file exists;
    /// `SaveFile` serializes the request values to the file; `Cat` echoes
    /// the values back. I/O and JSON failures panic, since [`Error`] has no
    /// variants to report them with.
    fn handle_request(request: Self::Request) -> Result<serde_json::Value, Self::Error> {
        match request {
            Request::LoadFile { filename } => {
                let path = path_for_filename(&filename);
                let mut attempt = 0;
                // Poll until the file appears; only log every 10th attempt
                // (~10 seconds) to avoid spamming stderr.
                while !std::fs::exists(&path).is_ok_and(|v| v) {
                    if attempt % 10 == 0 {
                        eprintln!(
                            "Waiting for {path} to be populated...",
                            path = path.to_string_lossy()
                        );
                    }
                    attempt += 1;
                    std::thread::sleep(std::time::Duration::from_secs(1));
                }
                // If we waited at least once, we previously printed a message;
                // confirm that we are no longer waiting. Otherwise stay quiet.
                if attempt > 0 {
                    eprintln!("File contents loaded.");
                }
                let file = std::fs::File::open(path).unwrap();
                let json: serde_json::Value = serde_json::from_reader(file).unwrap();
                Ok(serde_json::json!({
                    "blob": json,
                }))
            }
            Request::SaveFile { filename, values } => {
                let path = path_for_filename(&filename);
                let file = std::fs::File::create(path).unwrap();
                serde_json::to_writer(file, &values).unwrap();
                Ok(serde_json::json!({
                    "blob": {},
                }))
            }
            Request::Cat { values } => Ok(serde_json::json!({
                "blob": values,
            })),
        }
    }
}

View File

@ -1,6 +0,0 @@
use icepick_module::Module;
use icepick_internal::Internal;
/// Binary entry point: delegate to the module's responder loop.
// NOTE(review): `run_responder` comes from `icepick_module::Module`;
// presumably it drives the JSON request/response loop — confirm there.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    Internal::run_responder()
}

View File

@ -1,16 +0,0 @@
[package]
name = "icepick-cosmos"
version = "0.1.0"
edition = "2021"
[dependencies]
bon = "3.3.2"
cosmrs = { version = "0.21.0", features = ["rpc", "tokio"] }
icepick-module = { version = "0.1.0", path = "../../icepick-module" }
serde.workspace = true
serde_json = { workspace = true, features = ["arbitrary_precision"] }
thiserror = "2.0.9"
tokio = { version = "1.43.0", features = ["rt"] }
[dev-dependencies]
cosmrs = { version = "0.21.0", features = ["dev"] }

View File

@ -1,318 +0,0 @@
use bon::{bon, Builder};
use serde::{Deserialize, Serialize};
/// BIP-44 derivation parameters for a chain. Serialized in camelCase.
#[derive(Clone, Debug, Serialize, Deserialize, Builder)]
#[serde(rename_all = "camelCase")]
pub struct Bip44Config {
    /// Coin type for the BIP-44 derivation path (e.g. 118 for Cosmos chains here).
    pub coin_type: u32,
}
// NOTE: Are `public` variants used?
/// Bech32 prefixes for each address/key role on a chain: account, validator
/// operator, consensus node, and their public-key variants. Field names are
/// renamed to the `bech32Prefix*` keys used by chain-config JSON.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Bech32Config {
    /// Prefix for account addresses (e.g. "kyve").
    #[serde(rename = "bech32PrefixAccAddress")]
    pub account_address_prefix: String,
    /// Prefix for account public keys.
    #[serde(rename = "bech32PrefixAccPub")]
    pub account_address_public_prefix: String,
    /// Prefix for validator operator addresses.
    #[serde(rename = "bech32PrefixValOper")]
    pub validator_operator_prefix: String,
    /// Prefix for validator operator public keys.
    #[serde(rename = "bech32PrefixValPub")]
    pub validator_operator_public_prefix: String,
    /// Prefix for consensus node addresses.
    #[serde(rename = "bech32PrefixConsAddr")]
    pub consensus_node_prefix: String,
    /// Prefix for consensus node public keys.
    #[serde(rename = "bech32PrefixConsPub")]
    pub consensus_node_public_prefix: String,
}
#[bon]
impl Bech32Config {
    /// Builder constructor taking each prefix explicitly as a static string
    /// and storing them as owned `String`s.
    #[builder]
    fn new(
        account_address_prefix: &'static str,
        account_address_public_prefix: &'static str,
        validator_operator_prefix: &'static str,
        validator_operator_public_prefix: &'static str,
        consensus_node_prefix: &'static str,
        consensus_node_public_prefix: &'static str,
    ) -> Self {
        Self {
            account_address_prefix: account_address_prefix.to_string(),
            account_address_public_prefix: account_address_public_prefix.to_string(),
            validator_operator_prefix: validator_operator_prefix.to_string(),
            validator_operator_public_prefix: validator_operator_public_prefix.to_string(),
            consensus_node_prefix: consensus_node_prefix.to_string(),
            consensus_node_public_prefix: consensus_node_public_prefix.to_string(),
        }
    }

    /// Derive every prefix from a single account prefix, following the common
    /// convention (e.g. "kyve" -> "kyvevaloper", "kyvevalconspub", ...).
    fn with_similar_prefix(prefix: &'static str) -> Self {
        Self {
            // `to_string()` replaces the previous `format!("{prefix}")`,
            // which needed `#[allow(clippy::useless_format)]`.
            account_address_prefix: prefix.to_string(),
            account_address_public_prefix: format!("{prefix}pub"),
            validator_operator_prefix: format!("{prefix}valoper"),
            validator_operator_public_prefix: format!("{prefix}valoperpub"),
            consensus_node_prefix: format!("{prefix}valcons"),
            consensus_node_public_prefix: format!("{prefix}valconspub"),
        }
    }
}
/// Low/average/high gas price suggestions for fee estimation.
#[derive(Clone, Debug, Serialize, Deserialize, Builder)]
pub struct GasPriceStep {
    pub low: f64,
    pub average: f64,
    pub high: f64,
}
/// A currency supported by a chain. Serialized in camelCase.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Currency {
    /// Display denomination (e.g. "KYVE").
    pub coin_denom: String,
    /// Minimal on-chain denomination (e.g. "ukyve").
    pub coin_minimal_denom: String,
    /// Number of decimal places between minimal and display denominations.
    pub coin_decimals: u8,
    /// CoinGecko identifier for price lookups.
    pub coin_gecko_id: String,
}
#[bon]
impl Currency {
    /// Builder constructor: accepts static string literals and stores them
    /// as owned `String`s.
    #[builder]
    fn new(
        coin_denom: &'static str,
        coin_minimal_denom: &'static str,
        coin_decimals: u8,
        coin_gecko_id: &'static str,
    ) -> Self {
        Self {
            coin_denom: coin_denom.to_string(),
            coin_minimal_denom: coin_minimal_denom.to_string(),
            coin_decimals,
            coin_gecko_id: coin_gecko_id.to_string(),
        }
    }
}
/// A currency plus its gas price tiers, used for a chain's fee currencies.
/// The inner currency's fields are flattened into this object on (de)serialization.
#[derive(Clone, Debug, Serialize, Deserialize, Builder)]
#[serde(rename_all = "camelCase")]
pub struct CurrencyWithGas {
    #[serde(flatten)]
    pub currency: Currency,
    pub gas_price_step: GasPriceStep,
}
/// Full description of a Cosmos-SDK chain: identity, endpoints, address
/// formats, and currency/fee configuration. Serialized in camelCase.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Blockchain {
    pub chain_name: String,
    pub chain_id: String,
    /// RPC endpoint URL.
    pub rpc_url: String,
    /// REST endpoint URL.
    pub rest_url: String,
    /// Explorer URL template; the `%s` placeholder is presumably replaced
    /// with a transaction hash — confirm against consumers.
    pub explorer_url_format: String,
    #[serde(rename = "bip44")]
    pub bip44_config: Bip44Config,
    #[serde(rename = "bech32Config")]
    pub bech32_config: Bech32Config,
    /// All currencies the chain supports.
    pub currencies: Vec<Currency>,
    /// Currencies accepted for fees, each with gas price tiers.
    pub fee_currencies: Vec<CurrencyWithGas>,
    pub gas_price_step: GasPriceStep,
    /// Currency used for staking.
    pub stake_currency: Currency,
}
#[bon]
impl Blockchain {
    /// Builder constructor: converts static string and borrowed-slice
    /// parameters into the owned fields of [`Blockchain`].
    #[builder]
    fn new(
        chain_id: &'static str,
        chain_name: &'static str,
        rpc_url: &'static str,
        rest_url: &'static str,
        explorer_url_format: &'static str,
        bip44_config: Bip44Config,
        bech32_config: Bech32Config,
        currencies: &[Currency],
        fee_currencies: &[CurrencyWithGas],
        gas_price_step: GasPriceStep,
        stake_currency: Currency,
    ) -> Self {
        Self {
            chain_id: chain_id.to_string(),
            chain_name: chain_name.to_string(),
            rpc_url: rpc_url.to_string(),
            rest_url: rest_url.to_string(),
            explorer_url_format: explorer_url_format.to_string(),
            bip44_config,
            bech32_config,
            currencies: currencies.to_vec(),
            fee_currencies: fee_currencies.to_vec(),
            gas_price_step,
            stake_currency,
        }
    }
}
/// Chain definitions for the SEDA devnet and mainnet.
fn seda_chains() -> Vec<Blockchain> {
    // Both networks share the same currency and gas price configuration.
    let aseda = Currency::builder()
        .coin_denom("seda")
        .coin_minimal_denom("aseda")
        .coin_decimals(18)
        .coin_gecko_id("ID")
        .build();
    let aseda_gas = GasPriceStep::builder()
        .low(5000000000.)
        .average(10000000000.)
        .high(15000000000.)
        .build();

    let devnet = Blockchain::builder()
        .chain_id("seda-1-devnet")
        // NOTE: Officially, this is just "devnet", but otherwise this would conflict.
        // We'll override it in our config.
        .chain_name("seda-devnet")
        .rpc_url("https://rpc.devnet.seda.xyz")
        .rest_url("https://lcd.devnet.seda.xyz")
        .explorer_url_format("https://devnet.explorer.seda.xyz/txs/%s")
        .bip44_config(Bip44Config::builder().coin_type(118).build())
        .bech32_config(Bech32Config::with_similar_prefix("seda"))
        .currencies(&[aseda.clone()])
        .fee_currencies(&[CurrencyWithGas::builder()
            .currency(aseda.clone())
            .gas_price_step(aseda_gas.clone())
            .build()])
        .gas_price_step(aseda_gas.clone())
        .stake_currency(aseda.clone())
        .build();

    let mainnet = Blockchain::builder()
        .chain_id("seda-1")
        .chain_name("seda")
        .rpc_url("https://rpc.seda.xyz")
        .rest_url("https://lcd.seda.xyz")
        .explorer_url_format("https://explorer.seda.xyz/txs/%s")
        .bip44_config(Bip44Config::builder().coin_type(118).build())
        .bech32_config(Bech32Config::with_similar_prefix("seda"))
        .currencies(&[aseda.clone()])
        .fee_currencies(&[CurrencyWithGas::builder()
            .currency(aseda.clone())
            .gas_price_step(aseda_gas.clone())
            .build()])
        .gas_price_step(aseda_gas.clone())
        .stake_currency(aseda.clone())
        .build();

    vec![devnet, mainnet]
}
/// Chain definitions for the KYVE networks: korellia (devnet), kaon
/// (testnet), and mainnet.
fn kyve_chains() -> Vec<Blockchain> {
    // Test networks use the "tkyve" denom; mainnet uses "ukyve".
    let tkyve = Currency::builder()
        .coin_denom("KYVE")
        .coin_minimal_denom("tkyve")
        .coin_decimals(6)
        .coin_gecko_id("unknown")
        .build();
    let tkyve_gas = GasPriceStep::builder()
        .low(0.01)
        .average(0.025)
        .high(0.03)
        .build();
    let ukyve = Currency::builder()
        .coin_denom("KYVE")
        .coin_minimal_denom("ukyve")
        .coin_decimals(6)
        .coin_gecko_id("unknown")
        .build();
    let ukyve_gas = GasPriceStep::builder()
        .low(0.01)
        .average(0.025)
        .high(0.03)
        .build();

    let korellia = Blockchain::builder()
        .chain_id("korellia-2")
        .chain_name("korellia")
        .rpc_url("https://rpc.korellia.kyve.network")
        .rest_url("https://api.korellia.kyve.network")
        .explorer_url_format("https://explorer.kyve.network/korellia/tx/%s")
        .bip44_config(Bip44Config::builder().coin_type(118).build())
        .bech32_config(Bech32Config::with_similar_prefix("kyve"))
        .currencies(&[tkyve.clone()])
        .fee_currencies(&[CurrencyWithGas::builder()
            .currency(tkyve.clone())
            .gas_price_step(tkyve_gas.clone())
            .build()])
        .gas_price_step(tkyve_gas.clone())
        .stake_currency(tkyve.clone())
        .build();

    let kaon = Blockchain::builder()
        .chain_id("kaon-1")
        .chain_name("kaon")
        .rpc_url("https://rpc.kaon.kyve.network")
        .rest_url("https://api.kaon.kyve.network")
        .explorer_url_format("https://explorer.kyve.network/kaon/tx/%s")
        .bip44_config(Bip44Config::builder().coin_type(118).build())
        .bech32_config(Bech32Config::with_similar_prefix("kyve"))
        .currencies(&[tkyve.clone()])
        .fee_currencies(&[CurrencyWithGas::builder()
            .currency(tkyve.clone())
            .gas_price_step(tkyve_gas.clone())
            .build()])
        .gas_price_step(tkyve_gas.clone())
        .stake_currency(tkyve.clone())
        .build();

    let mainnet = Blockchain::builder()
        .chain_id("kyve-1")
        .chain_name("kyve")
        .rpc_url("https://rpc.kyve.network")
        .rest_url("https://api.kyve.network")
        .explorer_url_format("https://explorer.kyve.network/kyve/tx/%s")
        .bip44_config(Bip44Config::builder().coin_type(118).build())
        .bech32_config(Bech32Config::with_similar_prefix("kyve"))
        .currencies(&[ukyve.clone()])
        .fee_currencies(&[CurrencyWithGas::builder()
            .currency(ukyve.clone())
            .gas_price_step(ukyve_gas.clone())
            .build()])
        .gas_price_step(ukyve_gas.clone())
        .stake_currency(ukyve.clone())
        .build();

    vec![korellia, kaon, mainnet]
}
/// All chains known by default: the KYVE networks followed by the SEDA networks.
pub fn default_chains() -> Vec<Blockchain> {
    let mut chains = kyve_chains();
    chains.extend(seda_chains());
    chains
}

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +0,0 @@
use icepick_module::Module;
use icepick_cosmos::Cosmos;
/// Binary entry point: delegate to the module's responder loop.
// NOTE(review): `run_responder` comes from `icepick_module::Module`;
// presumably it drives the JSON request/response loop — confirm there.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    Cosmos::run_responder()
}

View File

@ -1,68 +0,0 @@
use serde::{Deserialize, Serialize};
/// A serializable coin amount and denomination.
#[derive(Serialize, Deserialize, Debug)]
pub struct Coin {
    // The `u128` amount stored as big-endian bytes (see the `From`
    // conversions below).
    amount: [u8; 16],
    denom: cosmrs::Denom,
}
impl From<&cosmrs::Coin> for Coin {
    /// Encode a cosmrs coin into the serializable form, storing the 128-bit
    /// amount in big-endian byte order.
    fn from(value: &cosmrs::Coin) -> Self {
        Self {
            amount: value.amount.to_be_bytes(),
            denom: value.denom.clone(),
        }
    }
}
impl From<&Coin> for cosmrs::Coin {
    /// Decode the serializable form back into a cosmrs coin, reading the
    /// amount as big-endian bytes.
    fn from(value: &Coin) -> Self {
        Self {
            denom: value.denom.clone(),
            amount: u128::from_be_bytes(value.amount),
        }
    }
}
/// A serializable transaction fee: coin amounts plus a gas limit.
/// Fee delegation is not represented; the conversions below assert that
/// `payer` and `granter` are `None`.
#[derive(Serialize, Deserialize, Debug)]
pub struct Fee {
    amount: Vec<Coin>,
    gas_limit: u64,
}
impl From<&cosmrs::tx::Fee> for Fee {
    /// Encode a cosmrs fee into the serializable form.
    ///
    /// Panics if the fee uses delegation (`payer`/`granter`), which is not
    /// supported yet.
    fn from(value: &cosmrs::tx::Fee) -> Self {
        assert!(value.payer.is_none(), "unimplemented: payer");
        assert!(value.granter.is_none(), "unimplemented: granter");
        Fee {
            amount: value.amount.iter().map(Coin::from).collect(),
            gas_limit: value.gas_limit,
        }
    }
}
impl From<&Fee> for cosmrs::tx::Fee {
    /// Decode the serializable form back into a cosmrs fee.
    /// Delegation fields are always `None` (not supported).
    fn from(value: &Fee) -> Self {
        Self {
            amount: value.amount.iter().map(cosmrs::Coin::from).collect(),
            gas_limit: value.gas_limit,
            payer: None,
            granter: None,
        }
    }
}

View File

@ -5,18 +5,12 @@ edition = "2021"
[dependencies]
base64 = "0.22.1"
bincode = "1.3.3"
bs58 = "0.5.1"
ed25519-dalek = "=1.0.1"
icepick-module = { version = "0.1.0", path = "../../icepick-module" }
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
solana-rpc-client = { version = "2.1.1", default-features = false }
solana-rpc-client-api = "2.1.7"
solana-rpc-client-nonce-utils = "2.1.7"
solana-sdk = { version = "2.1.1" }
solana-transaction-status = "2.1.1"
solana-transaction-status-client-types = "2.1.1"
spl-associated-token-account = "6.0.0"
spl-token = "7.0.0"
spl-token-2022 = "6.0.0"

View File

@ -63,43 +63,16 @@ use icepick_module::{
Module,
};
use serde::{Deserialize, Serialize};
use solana_rpc_client::rpc_client::SerializableTransaction;
use solana_rpc_client_api::client_error::Result as ClientResult;
use solana_sdk::{
pubkey::Pubkey,
signer::{keypair::Keypair, Signer},
system_instruction,
transaction::TransactionError,
};
use std::{collections::HashSet, str::FromStr};
use solana_sdk::signer::Signer;
use std::str::FromStr;
/// Number of lamports in one SOL.
// NOTE(review): `solana_sdk::native_token::LAMPORTS_PER_SOL` should provide
// this — confirm and prefer it over a local redefinition.
const LAMPORTS_PER_SOL: u64 = 1_000_000_000;
/// Resolve an instruction-relative account index to the actual account key.
///
/// `account_index` indexes into `instruction_keys` (the instruction's account
/// list), whose entry in turn indexes into the transaction-level
/// `account_keys`. Panics if either index is out of range, if no index was
/// given, or if the key fails to parse.
fn get_account(
    account_index: impl Into<Option<u8>>,
    account_keys: &[String],
    instruction_keys: &[u8],
) -> Pubkey {
    // Step 1: the index within the instruction's account list.
    let instruction_index =
        usize::from(account_index.into().expect("account index did not exist"));
    // Step 2: map it through the instruction to a transaction-level index.
    let account_index = usize::from(
        instruction_keys
            .get(instruction_index)
            .copied()
            .unwrap_or_else(|| panic!("instruction account {instruction_index} did not exist")),
    );
    // Step 3: fetch and parse the referenced account key.
    let account_string = account_keys
        .get(account_index)
        .unwrap_or_else(|| panic!("account at index {account_index} did not exist"));
    Pubkey::from_str(account_string).expect("could not parse account from string")
}
#[derive(thiserror::Error, Debug)]
pub enum Error {}
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub enum Cluster {
Devnet,
@ -132,21 +105,6 @@ impl std::fmt::Display for Cluster {
}
}
// NOTE: While, technically, they both fit in the same width, it is _important_ to have different
// functionality based on which is provided, as Nonce requires an increment instruction.
/// The two sources of transaction recency: a durable nonce account or a
/// recent blockhash. With `untagged`, deserialization picks whichever
/// variant's fields match the input.
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "snake_case", untagged)]
pub enum Hashable {
    /// Durable nonce: the nonce value plus its account and authority addresses.
    Nonce {
        nonce_data: String,
        nonce_address: String,
        nonce_authority: String,
    },
    /// A recent blockhash.
    Blockhash {
        blockhash: String,
    },
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetBlockhash {
cluster: Option<Cluster>,
@ -160,35 +118,11 @@ pub struct GenerateWallet {
#[derive(Serialize, Deserialize, Debug)]
pub struct GetWalletAddress {}
/// Parameters for `await-funds`: block until an address holds enough lamports.
#[derive(Serialize, Deserialize, Debug)]
pub struct AwaitFunds {
    // Address to watch.
    address: String,
    // Minimum balance to wait for, as a decimal string of lamports.
    lamports: String,
    // Cluster to query; mainnet-beta when omitted.
    cluster: Option<Cluster>,
}
/// Parameters for `get-token-info`: look up a token by its symbol.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetTokenInfo {
    token: String,
}
/// Parameters for `create-nonce-account-and-signing-key`.
#[derive(Serialize, Deserialize, Debug)]
pub struct CreateNonceAccountAndSigningKey {
    // Account authorized to use and advance the nonce.
    authorization_address: String,
}
/// Parameters for `get-nonce-account-data`: read a nonce account's state.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetNonceAccountData {
    nonce_address: String,
    // Cluster to query; mainnet-beta when omitted.
    cluster: Option<Cluster>,
}
/// Parameters for `find-nonce-accounts`: scan transaction history for nonce
/// accounts controlled by an authority.
#[derive(Serialize, Deserialize, Debug)]
pub struct FindNonceAccounts {
    // Account authorized to use and advance nonces.
    authorization_address: String,
    // Cluster to query; mainnet-beta when omitted.
    cluster: Option<Cluster>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Transfer {
amount: String,
@ -220,27 +154,10 @@ pub struct TransferToken {
fee_payer_address: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Compile {
#[serde(flatten)]
hashable: Hashable,
derivation_accounts: Option<Vec<u32>>,
instructions: Vec<solana_sdk::instruction::Instruction>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Inspect {
transaction: solana_sdk::transaction::Transaction,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Sign {
blockhash: String,
instructions: Vec<solana_sdk::instruction::Instruction>,
#[serde(default)]
signing_keys: Vec<[u8; Keypair::SECRET_KEY_LENGTH]>,
#[serde(default)]
payer_address: Option<String>,
transaction: solana_sdk::transaction::Transaction,
}
#[derive(Serialize, Deserialize, Debug)]
@ -265,16 +182,10 @@ pub enum Operation {
GetBlockhash(GetBlockhash),
GenerateWallet(GenerateWallet),
GetWalletAddress(GetWalletAddress),
AwaitFunds(AwaitFunds),
GetTokenInfo(GetTokenInfo),
CreateNonceAccountAndSigningKey(CreateNonceAccountAndSigningKey),
GetNonceAccountData(GetNonceAccountData),
FindNonceAccounts(FindNonceAccounts),
Transfer(Transfer),
CreateTokenAccount(CreateTokenAccount),
TransferToken(TransferToken),
Compile(Compile),
Inspect(Inspect),
Sign(Sign),
Broadcast(Broadcast),
}
@ -282,13 +193,14 @@ pub enum Operation {
pub struct Solana;
impl Solana {
fn keypair_from_bytes(given_bytes: [u8; 32]) -> Keypair {
fn keypair_from_bytes(given_bytes: [u8; 32]) -> solana_sdk::signer::keypair::Keypair {
use ed25519_dalek::{PublicKey, SecretKey};
let secret_key = SecretKey::from_bytes(&given_bytes).expect("key should be 32 bytes");
let mut bytes = [0u8; 64];
bytes[..32].clone_from_slice(&given_bytes);
bytes[32..].clone_from_slice(PublicKey::from(&secret_key).as_bytes());
Keypair::from_bytes(&bytes).expect("solana sdk should expect 64 bytes")
solana_sdk::signer::keypair::Keypair::from_bytes(&bytes)
.expect("solana sdk should expect 64 bytes")
}
}
@ -332,42 +244,28 @@ impl Module for Solana {
.to_string(),
r#type: ArgumentType::Required,
};
let get_blockhash = icepick_module::help::Operation {
vec![
icepick_module::help::Operation {
name: "get-blockhash".to_string(),
description: "Get the latest blockhash".to_string(),
arguments: vec![cluster.clone()],
};
let generate_wallet = icepick_module::help::Operation {
},
icepick_module::help::Operation {
name: "generate-wallet".to_string(),
description: "Generate the derivation index for a wallet.".to_string(),
arguments: vec![Argument {
name: "account".to_string(),
description: "The derivation account used for generating the wallet.".to_string(),
description: "The derivation account used for generating the wallet."
.to_string(),
r#type: ArgumentType::Optional,
}],
};
let get_wallet_address = icepick_module::help::Operation {
},
icepick_module::help::Operation {
name: "get-wallet-address".to_string(),
description: "Get the address for a given wallet.".to_string(),
arguments: vec![],
};
let await_funds = icepick_module::help::Operation {
name: "await-funds".to_string(),
description: "Await a minimum amount of funds in an account".to_string(),
arguments: vec![
Argument {
name: "address".to_string(),
description: "The address to monitor".to_string(),
r#type: ArgumentType::Required,
},
Argument {
name: "amount".to_string(),
description: "The amount of lamports to await".to_string(),
r#type: ArgumentType::Required,
},
],
};
let get_token_info = icepick_module::help::Operation {
icepick_module::help::Operation {
name: "get-token-info".to_string(),
description: "Get the address for a given token.".to_string(),
arguments: vec![Argument {
@ -375,48 +273,11 @@ impl Module for Solana {
description: "The token to look up".to_string(),
r#type: ArgumentType::Required,
}],
};
let create_nonce_account_and_signing_key = icepick_module::help::Operation {
name: "create-nonce-account-and-signing-key".to_string(),
description: "Create a nonce account for signing durable transactions".to_string(),
arguments: vec![
account.clone(),
from_address.clone(),
Argument {
name: "authorization_address".to_string(),
description: "The account authorized to use and advance the nonce.".to_string(),
r#type: ArgumentType::Required,
},
],
};
let get_nonce_account_data = icepick_module::help::Operation {
name: "get-nonce-account-data".to_string(),
description: "Get the data for a nonce account".to_string(),
arguments: vec![
cluster.clone(),
Argument {
name: "nonce_address".to_string(),
description: "The address of the nonce account.".to_string(),
r#type: ArgumentType::Required,
},
],
};
let find_nonce_accounts = icepick_module::help::Operation {
name: "find-nonce-accounts".to_string(),
description: "Find all nonce accounts for an authorized address".to_string(),
arguments: vec![
cluster.clone(),
Argument {
name: "authorization_address".to_string(),
description: "The account authorized to use and advance nonces.".to_string(),
r#type: ArgumentType::Required,
},
],
};
let transfer = icepick_module::help::Operation {
icepick_module::help::Operation {
name: "transfer".to_string(),
description: "Transfer SOL from a Keyfork wallet to an external wallet.".to_string(),
description: "Transfer SOL from a Keyfork wallet to an external wallet."
.to_string(),
arguments: vec![
Argument {
name: "amount".to_string(),
@ -434,8 +295,8 @@ impl Module for Solana {
fee_payer.clone(),
fee_payer_address.clone(),
],
};
let stake = icepick_module::help::Operation {
},
icepick_module::help::Operation {
name: "stake".to_string(),
description: "Stake SOL to earn rewards.".to_string(),
arguments: vec![
@ -450,9 +311,9 @@ impl Module for Solana {
fee_payer.clone(),
fee_payer_address.clone(),
],
};
},
// kinda BS that you have to make an account for a token, but ok.
let create_token_account = icepick_module::help::Operation {
icepick_module::help::Operation {
name: "create-token-account".to_string(),
description: "Create an account for a given token".to_string(),
arguments: vec![
@ -472,8 +333,8 @@ impl Module for Solana {
r#type: ArgumentType::Optional,
},
],
};
let transfer_token = icepick_module::help::Operation {
},
icepick_module::help::Operation {
name: "transfer-token".to_string(),
description: "Transfer tokens from a Keyfork wallet to an external wallet."
.to_string(),
@ -508,31 +369,8 @@ impl Module for Solana {
fee_payer.clone(),
fee_payer_address.clone(),
],
};
let compile = icepick_module::help::Operation {
name: "compile".to_string(),
description: "Compile instructions into a transaction".to_string(),
arguments: vec![
Argument {
name: "blockhash".to_string(),
description: "A recent blockhash, must be provided in place of nonce"
.to_string(),
r#type: ArgumentType::Optional,
},
Argument {
name: "nonce".to_string(),
description: "A durable nonce, must be provided in place of blockhash"
.to_string(),
r#type: ArgumentType::Optional,
},
],
};
let inspect = icepick_module::help::Operation {
name: "inspect".to_string(),
description: "Print a transaction using base64.".to_string(),
arguments: vec![],
};
let sign = icepick_module::help::Operation {
icepick_module::help::Operation {
name: "sign".to_string(),
description: "Sign a previously-generated transaction.".to_string(),
arguments: vec![Argument {
@ -540,29 +378,12 @@ impl Module for Solana {
description: "A recent blockhash".to_string(),
r#type: ArgumentType::Required,
}],
};
let broadcast = icepick_module::help::Operation {
},
icepick_module::help::Operation {
name: "broadcast".to_string(),
description: "Broadcast a signed transaction".to_string(),
arguments: vec![cluster.clone()],
};
vec![
get_blockhash,
generate_wallet,
get_wallet_address,
await_funds,
get_token_info,
create_nonce_account_and_signing_key,
get_nonce_account_data,
find_nonce_accounts,
transfer,
stake,
create_token_account,
transfer_token,
compile,
inspect,
sign,
broadcast,
},
]
}
@ -597,51 +418,6 @@ impl Module for Solana {
}
}))
}
Operation::AwaitFunds(AwaitFunds {
address,
lamports,
cluster,
}) => {
let cluster = cluster.unwrap_or(Cluster::MainnetBeta);
let cluster_url = format!("https://api.{cluster}.solana.com");
let client = solana_rpc_client::rpc_client::RpcClient::new(cluster_url);
let account_pk = Pubkey::from_str(&address).unwrap();
let minimum_balance = u64::from_str(&lamports).unwrap();
let sleep = || {
std::thread::sleep(std::time::Duration::from_secs(10));
};
let account_balance = loop {
let account = match client.get_account(&account_pk) {
Ok(account) => account,
Err(_) => {
eprintln!("Waiting for account to be created and funded: {account_pk}");
sleep();
continue;
}
};
let account_size = account.data.len();
let rent = client
.get_minimum_balance_for_rent_exemption(account_size)
.unwrap();
let balance = account.lamports;
if balance
.checked_sub(rent)
.is_some_and(|bal| bal > minimum_balance)
{
break balance;
}
eprintln!("Waiting for {minimum_balance} + rent ({rent}) in {account_pk}");
sleep();
};
Ok(serde_json::json!({
"blob": {
"lamports": account_balance,
},
}))
}
Operation::GetTokenInfo(GetTokenInfo { token }) => {
let values = match token.as_str() {
// Only exists on devnet
@ -655,10 +431,7 @@ impl Module for Solana {
Some((address, decimals)) => serde_json::json!({
"blob": {
"token_address": address,
// forgive me father, for i have sinned
// see: https://git.distrust.co/public/icepick/issues/26
// TransferToken { decimals: String }
"token_decimals": decimals.to_string(),
"token_decimals": decimals,
}
}),
None => serde_json::json!({
@ -667,223 +440,63 @@ impl Module for Solana {
}),
})
}
Operation::CreateNonceAccountAndSigningKey(CreateNonceAccountAndSigningKey {
authorization_address,
}) => {
// NOTE: Since this transaction is meant to be run on an online system with a
// freshly generated mnemonic, only designed to live to make the nonce account, we
// are going to assume we're not using a separate fee payer. It's a stretch having
// a `--from-account` option, really, but it is probably to be expected given the
// `from-address` variable. In truth, we will likely have the account randomly
// generated using `generate-wallet | get-wallet-address`.
// NOTE: new() calls generate() which requires CryptoRng. By default,
// this uses OsRng, which sources from getrandom() if available, which pulls from
// /dev/urandom, or sources from `/dev/urandom` directly.
let keypair = Keypair::new();
let payer_keypair = Keypair::new();
let authorization_pk = Pubkey::from_str(&authorization_address).unwrap();
let instructions = system_instruction::create_nonce_account(
&payer_keypair.pubkey(),
&keypair.pubkey(),
&authorization_pk,
// just above the approximate rent necessary for a nonce account
1500000,
);
#[allow(clippy::identity_op)]
Ok(serde_json::json!({
"blob": {
"nonce_pubkey": keypair.pubkey().to_string(),
"nonce_privkey": [keypair.secret().to_bytes()],
"payer_pubkey": payer_keypair.pubkey().to_string(),
"payer_privkey": [payer_keypair.secret().to_bytes()],
"privkeys": [
keypair.secret().to_bytes(),
payer_keypair.secret().to_bytes()
],
"transaction": instructions,
},
}))
}
Operation::GetNonceAccountData(GetNonceAccountData {
nonce_address,
cluster,
}) => {
let nonce_pk = Pubkey::from_str(&nonce_address).unwrap();
let cluster = cluster.unwrap_or(Cluster::MainnetBeta);
let cluster_url = format!("https://api.{cluster}.solana.com");
let client = solana_rpc_client::rpc_client::RpcClient::new(cluster_url);
let nonce_account = client.get_account(&nonce_pk).unwrap();
let nonce =
solana_rpc_client_nonce_utils::data_from_account(&nonce_account).unwrap();
Ok(serde_json::json!({
"blob": {
"authority": nonce.authority.to_string(),
"durable_nonce": nonce.durable_nonce.as_hash().to_string(),
"lamports_per_signature": nonce.fee_calculator.lamports_per_signature,
},
}))
}
Operation::FindNonceAccounts(FindNonceAccounts {
authorization_address,
cluster,
}) => {
use solana_sdk::{
instruction::CompiledInstruction, system_instruction::SystemInstruction,
};
use solana_transaction_status_client_types::{
EncodedConfirmedTransactionWithStatusMeta, EncodedTransaction,
EncodedTransactionWithStatusMeta, UiMessage, UiRawMessage, UiTransaction,
};
let cluster = cluster.unwrap_or(Cluster::MainnetBeta);
let cluster_url = format!("https://api.{cluster}.solana.com");
let client = solana_rpc_client::rpc_client::RpcClient::new(cluster_url);
let authorized_pk = Pubkey::from_str(&authorization_address).unwrap();
let mut nonced_accounts: HashSet<Pubkey> = HashSet::new();
let transaction_statuses =
client.get_signatures_for_address(&authorized_pk).unwrap();
for status in transaction_statuses
/*.iter().rev()*/
{
let signature = solana_sdk::signature::Signature::from_str(&status.signature)
.expect("cluster provided invalid signature");
let transaction = client
.get_transaction_with_config(&signature, Default::default())
.unwrap();
let EncodedConfirmedTransactionWithStatusMeta {
slot: _,
block_time: _,
transaction:
EncodedTransactionWithStatusMeta {
meta: _,
version: _,
transaction:
EncodedTransaction::Json(UiTransaction {
signatures: _,
message:
UiMessage::Raw(UiRawMessage {
header: _,
account_keys,
recent_blockhash: _,
address_table_lookups: _,
instructions,
}),
}),
},
}: EncodedConfirmedTransactionWithStatusMeta = transaction
else {
eprintln!("Unable to destructure transaction");
continue;
};
// search for program based on the following:
// * program is SystemProgram
// * instruction is
for ui_instruction in &instructions {
let instruction = CompiledInstruction {
program_id_index: ui_instruction.program_id_index,
accounts: ui_instruction.accounts.clone(),
data: bs58::decode(ui_instruction.data.as_bytes())
.into_vec()
.unwrap(),
};
let program_pk = account_keys
.get(instruction.program_id_index as usize)
.map(|k| &**k)
.map(Pubkey::from_str)
.transpose()
.ok()
.flatten()
.expect("could not get program key from transaction");
if solana_sdk::system_program::check_id(&program_pk) {
let parsed_instruction: SystemInstruction =
bincode::deserialize(&instruction.data).unwrap();
match parsed_instruction {
SystemInstruction::InitializeNonceAccount(pubkey) => {
// [Nonce, RecentBlockhashes, Rent]
// Argument is new authority
let nonce_account =
get_account(0, &account_keys, &instruction.accounts);
if authorized_pk == pubkey {
nonced_accounts.insert(nonce_account);
}
}
SystemInstruction::AuthorizeNonceAccount(pubkey) => {
// [Nonce, Authority]
// Argument is new authority
let nonce_account =
get_account(0, &account_keys, &instruction.accounts);
let authorizing_pk =
get_account(1, &account_keys, &instruction.accounts);
if authorized_pk == pubkey {
// we are given it
nonced_accounts.insert(nonce_account);
} else if authorizing_pk == pubkey {
// we are giving it
nonced_accounts.remove(&nonce_account);
}
}
SystemInstruction::WithdrawNonceAccount(_lamports) => {
// [Nonce, Recipient, RecentBlockhashes, Rent, Authority]
// Because the nonce account will be deleted due to nonpayment
// of rent, we do not re-insert into created accounts.
let nonce_account =
get_account(0, &account_keys, &instruction.accounts);
nonced_accounts.remove(&nonce_account);
}
_ => {}
}
}
}
}
let nonced_accounts = nonced_accounts
.iter()
.map(|account| account.to_string())
.collect::<Vec<_>>();
Ok(serde_json::json!({
"blob": {
"nonced_accounts": nonced_accounts,
}
}))
}
Operation::Transfer(Transfer {
amount,
from_account,
to_address,
from_address,
fee,
fee: _,
fee_payer,
fee_payer_address,
}) => {
if from_account.is_some() {
unimplemented!("from_account");
}
if fee.is_some() | fee_payer.is_some() | fee_payer_address.is_some() {
unimplemented!("fee")
}
// TODO:
// parse address for to_address
let amount = f64::from_str(&amount).expect("float amount");
let amount: u64 = (amount * LAMPORTS_PER_SOL as f64) as u64;
use solana_sdk::pubkey::Pubkey;
let to_pk = Pubkey::from_str(&to_address).unwrap();
let from_pk = Pubkey::from_str(&from_address).unwrap();
let instruction = system_instruction::transfer(&from_pk, &to_pk, amount);
#[allow(clippy::identity_op)]
let payer_account_and_pk = {
// If a fee payer is given, a fee payer address must also be given, since the
// address must be known before signing the transaction.
match (&fee_payer, &fee_payer_address) {
(Some(payer), Some(address)) => {
// Use the provided account
Some((
u32::from_str(payer).unwrap(),
Pubkey::from_str(address).unwrap(),
))
}
(None, None) => {
// Use the transaction account
None
}
_ => panic!("Invalid combination of fee_payer and fee_payer_address"),
}
};
let instruction =
solana_sdk::system_instruction::transfer(&from_pk, &to_pk, amount);
let message = solana_sdk::message::Message::new(
&[instruction],
payer_account_and_pk.map(|v| v.1).as_ref(),
);
let transaction = solana_sdk::transaction::Transaction::new_unsigned(message);
// TODO: error handling from_str
let from_account = from_account
.and_then(|a| u32::from_str(&a).ok())
.unwrap_or(0);
let mut requested_accounts = vec![];
requested_accounts.push(from_account | 1 << 31);
if let Some((account, _)) = &payer_account_and_pk {
requested_accounts.push(*account | 1 << 31);
}
Ok(serde_json::json!({
"blob": {
"instructions": [instruction],
// This is done in blob since it's compiled in the next step
"derivation_accounts": [0u32 | 1 << 31],
"transaction": transaction,
},
"derivation_accounts": requested_accounts,
}))
}
Operation::CreateTokenAccount(CreateTokenAccount {
@ -892,9 +505,9 @@ impl Module for Solana {
token_address,
}) => {
// TODO: allow changing derivation account of funder_address
use spl_associated_token_account as sata;
use sata::instruction::create_associated_token_account;
use solana_sdk::pubkey::Pubkey;
use spl_associated_token_account as sata;
use spl_token::ID as TOKEN_ID;
let funder_address = funder_address.unwrap_or_else(|| wallet_address.clone());
let funder_pubkey = Pubkey::from_str(&funder_address).unwrap();
@ -928,12 +541,7 @@ impl Module for Solana {
fee_payer,
fee_payer_address,
}) => {
if from_account.is_some() {
unimplemented!("from_account");
}
if fee.is_some() | fee_payer.is_some() | fee_payer_address.is_some() {
unimplemented!("fee")
}
// TODO: deduplicate code used in Transfer
let amount = f64::from_str(&amount).expect("float amount");
let decimals = u8::from_str(&decimals).expect("decimals");
@ -944,7 +552,29 @@ impl Module for Solana {
let to_pk = Pubkey::from_str(&to_address).unwrap();
let from_pk = Pubkey::from_str(&from_address).unwrap();
let token_pk = Pubkey::from_str(&token_address).unwrap();
let payer_account_and_pk = {
// If a fee payer is given, a fee payer address must also be given, since the
// address must be known before signing the transaction.
match (&fee_payer, &fee_payer_address) {
(Some(payer), Some(address)) => {
// Use the provided account
Some((
u32::from_str(payer).unwrap(),
Pubkey::from_str(address).unwrap(),
))
}
(None, None) => {
// Use the transaction account
None
}
_ => panic!("Invalid combination of fee_payer and fee_payer_address"),
}
};
let token_program_id = spl_token::ID;
let mut signers = vec![&from_pk];
if let Some((_, pk)) = payer_account_and_pk.as_ref() {
signers.push(pk);
}
let from_token_address = get_associated_token_address(&from_pk, &token_pk);
let to_token_address = get_associated_token_address(&to_pk, &token_pk);
@ -961,194 +591,74 @@ impl Module for Solana {
decimals, // decimals
)
.unwrap();
// TODO: check if this works with multisig
// TODO: check if this works with payer
// this is required because the Solana SDK does not set the primary transactional
// key as writable (the one that would be paying computation fees) in the event a
// payer is not provided. The transactional account must be writable for the
// computation fee to be paid.
if payer_account_and_pk.is_none() {
for account in instruction.accounts.iter_mut() {
if account.pubkey == from_pk {
account.is_writable = true;
}
}
}
let message = solana_sdk::message::Message::new(
&[instruction],
payer_account_and_pk.map(|v| v.1).as_ref(),
);
// message.header.num_readonly_signed_accounts = 0;
let transaction =
solana_sdk::transaction::Transaction::new_unsigned(message.clone());
/*
use base64::prelude::*;
eprintln!("{}", BASE64_STANDARD.encode(transaction.message_data()));
*/
#[allow(clippy::identity_op)]
Ok(serde_json::json!({
"blob": {
"instructions": [instruction],
"transaction": transaction,
},
"derivation_accounts": [0u32 | 1 << 31],
},
}))
}
Operation::Compile(Compile {
hashable,
derivation_accounts,
mut instructions,
}) => {
use solana_sdk::hash::Hash;
let hash = match hashable {
// We already have the account from GetNonceAccountData,
// which also gives us the authority and the nonce itself.
Hashable::Nonce {
nonce_data,
nonce_address,
nonce_authority,
} => {
let account_pk = Pubkey::from_str(&nonce_address).unwrap();
let authority_pk = Pubkey::from_str(&nonce_authority).unwrap();
let hash = Hash::from_str(&nonce_data).unwrap();
let increment_nonce =
system_instruction::advance_nonce_account(&account_pk, &authority_pk);
instructions.insert(0, increment_nonce);
hash
}
Hashable::Blockhash { blockhash } => Hash::from_str(&blockhash).unwrap(),
};
Ok(serde_json::json!({
"blob": {
"hash": hash,
"instructions": instructions,
},
"derivation_accounts": derivation_accounts.as_deref().unwrap_or(&[]),
}))
}
Operation::Inspect(Inspect { transaction }) => {
use base64::prelude::*;
Ok(serde_json::json!({
"blob": {
"formatted_transaction": BASE64_STANDARD.encode(transaction.message_data())
}
}))
}
Operation::Sign(Sign {
blockhash,
instructions,
signing_keys,
payer_address,
}) => {
use solana_sdk::{message::Message, transaction::Transaction};
Operation::Sign(Sign { blockhash, mut transaction }) => {
let keys = request
.derived_keys
.unwrap_or_default()
.iter()
.chain(&signing_keys)
.map(|k| Self::keypair_from_bytes(*k))
.collect::<Vec<_>>();
let payer_pk = payer_address
.as_deref()
.map(Pubkey::from_str)
.transpose()
.unwrap();
let message =
Message::new(&instructions, Some(&payer_pk.unwrap_or(keys[0].pubkey())));
let mut transaction = Transaction::new_unsigned(message);
let hash = solana_sdk::hash::Hash::from_str(&blockhash).unwrap();
transaction
.try_sign(&keys, hash)
.expect("not enough keys provided");
Ok(serde_json::json!({
"blob": {
"transaction": transaction,
}
}))
}
Operation::Broadcast(Broadcast {
cluster,
transaction,
}) => {
Operation::Broadcast(Broadcast { cluster, transaction }) => {
let cluster = cluster.unwrap_or(Cluster::MainnetBeta);
let cluster_url = format!("https://api.{cluster}.solana.com");
transaction.verify().expect("invalid signatures");
let client = solana_rpc_client::rpc_client::RpcClient::new(cluster_url);
let simulated_response = client.simulate_transaction(&transaction).unwrap();
if let Some(err) = simulated_response.value.err {
return Ok(serde_json::json!({
"blob": {
"status": "simulate_transaction",
"error": err.to_string(),
}
}));
}
let _simulated_response = client.simulate_transaction(&transaction).unwrap();
let response = client.send_and_confirm_transaction(&transaction);
let cluster_suffix = {
if cluster == Cluster::MainnetBeta {
String::new()
} else {
format!("?cluster={cluster}")
}
};
Ok(match response {
Ok(s) => {
serde_json::json!({
"blob": {
"status": "send_and_confirm",
"succcess": s.to_string(),
"url": format!("https://explorer.solana.com/tx/{s}{cluster_suffix}"),
}
})
}
Err(_) => {
let signature = transaction.get_signature();
let status = client.get_signature_status(signature);
blob_for_signature_status(status, signature, &cluster_suffix)
}
})
}
}
}
}
fn blob_for_signature_status(
status: ClientResult<Option<Result<(), TransactionError>>>,
signature: &solana_sdk::signature::Signature,
cluster_suffix: &str,
) -> serde_json::Value {
match status {
Ok(Some(Ok(()))) => {
// transaction passed.
eprintln!("An error occurred while broadcasting the transaction, but the transaction was confirmed manually.");
serde_json::json!({
"blob": {
"status": "send_and_confirm",
"succcess": signature.to_string(),
"url": format!("https://explorer.solana.com/tx/{signature}{cluster_suffix}"),
}
})
}
Ok(Some(Err(e))) => {
// transaction failed on-cluster
eprintln!("The transaction failed on-chain: {e}");
serde_json::json!({
"blob": {
"status": "send_and_confirm",
"error": e.to_string(),
}
})
}
Ok(None) => {
// transaction may not have been broadcast
eprintln!("The transaction was possibly not received by the cluster.");
serde_json::json!({
"blob": {
"status": "send_and_confirm",
"error": format!("Transaction {signature} does not exist on-cluster"),
}
})
}
Err(e) => {
// RPC request failed
eprintln!("An error occurred while interacting with the cluster: {e}");
serde_json::json!({
"blob": {
"status": "send_and_confirm",
@ -1156,5 +666,8 @@ fn blob_for_signature_status(
}
})
}
})
}
}
}
}

View File

@ -1,13 +0,0 @@
[package]
name = "icepick-spacemesh"
version = "0.1.0"
edition = "2021"
publish = ["distrust"]
[dependencies]
icepick-module = { version = "0.1.0", path = "../../icepick-module" }
serde.workspace = true
serde_json.workspace = true
spacemesh = { version = "0.1.0", path = "../../spacemesh/spacemesh" }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["rt", "net"] }

View File

@ -1,172 +0,0 @@
use icepick_module::Module;
use serde::{Deserialize, Serialize};
use spacemesh::bech32::{self, Hrp};
use std::str::FromStr;
/// The Spacemesh network a request targets. Defaults to mainnet.
#[derive(Serialize, Deserialize, PartialEq, Eq, Clone, Debug, Default)]
#[serde(rename_all = "kebab-case")]
pub enum Cluster {
    /// The Spacemesh test network (addresses use the `stest` prefix).
    Testnet,
    /// The Spacemesh main network (addresses use the `sm` prefix).
    #[default]
    Mainnet,
}
impl Cluster {
    /// Returns the bech32 human-readable part used for addresses on this
    /// cluster (`stest` for testnet, `sm` for mainnet).
    fn hrp(&self) -> bech32::Hrp {
        let prefix = match self {
            Cluster::Testnet => "stest",
            Cluster::Mainnet => "sm",
        };
        Hrp::parse(prefix).unwrap()
    }
}
impl std::str::FromStr for Cluster {
    type Err = &'static str;

    /// Parses the lowercase cluster names `"testnet"` and `"mainnet"`;
    /// any other string is rejected.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "testnet" {
            Ok(Self::Testnet)
        } else if s == "mainnet" {
            Ok(Self::Mainnet)
        } else {
            Err("Invalid value")
        }
    }
}
impl std::fmt::Display for Cluster {
    /// Writes the lowercase cluster name; the exact inverse of `FromStr`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Cluster::Testnet => "testnet",
            Cluster::Mainnet => "mainnet",
        };
        f.write_str(name)
    }
}
/// Module error type. Currently uninhabited: every fallible path in this
/// module panics (`unwrap`/`todo!`) instead of returning an error.
#[derive(thiserror::Error, Debug)]
pub enum Error {}
/// Request payload for the `generate-wallet` operation.
#[derive(Serialize, Deserialize, Debug)]
pub struct GenerateWallet {
    /// Derivation index as a decimal string; treated as "0" when absent.
    account: Option<String>,
}
/// Request payload for the `get-wallet-address` operation.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetWalletAddress {
    /// 32-byte wallet public key used to compute the address.
    pubkey: [u8; 32],
    /// Target cluster; defaults to mainnet when absent.
    cluster: Option<Cluster>,
}
/// Request payload for fetching account data.
///
/// NOTE(review): this struct is not referenced by any `Operation` variant in
/// this file and appears to be unused — confirm whether it is dead code or a
/// planned operation.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountData {
    /// Account address to query.
    account: String,
    /// Target cluster; defaults to mainnet when absent.
    cluster: Option<Cluster>,
}
/// Request payload for the `await-funds` operation.
///
/// NOTE(review): the handler for this operation is `todo!()` — invoking it
/// will panic.
#[derive(Serialize, Deserialize, Debug)]
pub struct AwaitFunds {
    /// Address to watch for funds.
    address: String,
    /// Amount expected, as a string.
    amount: String,
    /// Target cluster; defaults to mainnet when absent.
    cluster: Option<Cluster>,
}
/// All operations this module can perform, tagged by kebab-case name in the
/// serialized form (`operation` selects the variant, `values` carries the
/// payload).
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "operation", content = "values", rename_all = "kebab-case")]
pub enum Operation {
    GenerateWallet(GenerateWallet),
    GetWalletAddress(GetWalletAddress),
    AwaitFunds(AwaitFunds),
}
/// Top-level request envelope: keys previously derived by the framework,
/// plus the flattened operation payload.
#[derive(Serialize, Deserialize, Debug)]
pub struct Request {
    /// 32-byte keys derived for this request, if any were requested.
    derived_keys: Option<Vec<[u8; 32]>>,
    /// The operation and its values, flattened into the same JSON object.
    #[serde(flatten)]
    operation: Operation,
}
/// Runs a future to completion on a freshly built single-threaded Tokio
/// runtime with all drivers enabled, returning the future's output.
pub fn run_async<F: std::future::Future>(f: F) -> F::Output {
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .unwrap();
    runtime.block_on(f)
}
pub struct Spacemesh;
impl Module for Spacemesh {
    type Error = Error;
    type Request = Request;

    /// Describes the operations this module exposes.
    ///
    /// NOTE(review): `Operation::AwaitFunds` is accepted by `handle_request`
    /// but is not described here — confirm whether that omission is
    /// intentional. Also, `cluster` is described as required even though the
    /// request structs treat it as optional (defaulting to mainnet).
    fn describe_operations() -> Vec<icepick_module::help::Operation> {
        use icepick_module::help::*;
        // Shared argument: derivation index; handle_request defaults it to 0.
        let account = Argument::builder()
            .name("account")
            .description("The derivation index for the account.")
            .r#type(ArgumentType::Optional)
            .build();
        let cluster = Argument::builder()
            .name("cluster")
            .description("Spacemesh cluster to interact with (mainnet, testnet).")
            .r#type(ArgumentType::Required)
            .build();
        let generate_wallet = Operation::builder()
            .name("generate-wallet")
            .description("Generate a wallet for the given account.")
            .build()
            .argument(&account);
        let get_wallet_address = Operation::builder()
            .name("get-wallet-address")
            .description("Get the address for a given wallet.")
            .build()
            .argument(&cluster)
            .argument(
                // NOTE(review): described as "wallet_pubkey" but the
                // GetWalletAddress struct deserializes a field named
                // `pubkey` — verify the workflow input mapping covers this.
                &Argument::builder()
                    .name("wallet_pubkey")
                    .description("Public key of the wallet.")
                    .r#type(ArgumentType::Required)
                    .build(),
            );
        vec![generate_wallet, get_wallet_address]
    }

    /// Dispatches a deserialized request to the matching operation handler.
    ///
    /// Returns a JSON object with a `blob` of outputs and, where relevant,
    /// `derivation_accounts` listing hardened indices to derive keys for.
    fn handle_request(request: Self::Request) -> Result<serde_json::Value, Self::Error> {
        let Request {
            operation,
            derived_keys: _,
        } = request;
        match operation {
            Operation::GenerateWallet(GenerateWallet { account }) => {
                // Default to index 0; `1 << 31` marks the index as hardened.
                let account = u32::from_str(account.as_deref().unwrap_or("0")).unwrap();
                Ok(serde_json::json!({
                    "blob": {},
                    "derivation_accounts": [(account | 1 << 31)],
                }))
            }
            Operation::GetWalletAddress(GetWalletAddress { pubkey, cluster }) => {
                use spacemesh::wallet::AsAddress;
                // Derive the raw account address from the public key, then
                // bech32-encode it with the cluster's prefix.
                let account = pubkey.as_address();
                let hrp = cluster.unwrap_or_default().hrp();
                let address = bech32::encode(hrp, &account).unwrap();
                Ok(serde_json::json!({
                    "blob": {
                        "address": address,
                    },
                    "derivation_accounts": [],
                }))
            }
            // Unimplemented: panics if invoked.
            Operation::AwaitFunds(AwaitFunds {
                address,
                amount,
                cluster,
            }) => todo!(),
        }
    }
}

View File

@ -1,6 +0,0 @@
use icepick_module::Module;
use icepick_spacemesh::Spacemesh;
/// Entry point: delegates to `Spacemesh::run_responder()`, the module
/// request/response loop provided by the `Module` trait.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    Spacemesh::run_responder()
}

View File

@ -4,6 +4,5 @@ version = "0.1.0"
edition = "2021"
[dependencies]
bon = "3.3.2"
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true

View File

@ -16,26 +16,6 @@ pub mod help {
pub arguments: Vec<Argument>,
}
#[bon::bon]
impl Operation {
    /// Builder-generated constructor: creates an operation with the given
    /// name and description and an empty argument list. Arguments are
    /// attached afterwards with `argument`.
    #[builder]
    pub fn new(name: &'static str, description: &'static str) -> Self {
        Operation {
            name: name.into(),
            description: description.into(),
            arguments: vec![],
        }
    }
}
impl Operation {
    /// Appends a clone of `arg` to this operation's argument list,
    /// consuming and returning `self` for chaining.
    pub fn argument(mut self, arg: &Argument) -> Self {
        self.arguments.push(arg.clone());
        self
    }
}
/*
/// The context of whether a signature is signed, needs to be signed, or has been signed.
#[derive(Serialize, Deserialize, Clone)]
@ -70,19 +50,6 @@ pub mod help {
/// The type of argument - this may affect how it displays in the frontend.
pub r#type: ArgumentType,
}
#[bon::bon]
impl Argument {
    /// Builder-generated constructor: creates an argument with the given
    /// name, description, and argument type.
    #[builder]
    pub fn new(name: &'static str, description: &'static str, r#type: ArgumentType) -> Self {
        Argument {
            name: name.into(),
            description: description.into(),
            r#type,
        }
    }
}
}
/// Implementation methods for Icepick Modules, performed over command I/O using JSON.

View File

@ -1,10 +0,0 @@
[package]
name = "icepick-workflow"
version = "0.1.0"
edition = "2021"
[dependencies]
keyfork-derive-util = { version = "0.2.2", registry = "distrust", default-features = false }
serde.workspace = true
serde_json.workspace = true
thiserror = "2.0.11"

View File

@ -1,234 +0,0 @@
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::{BTreeMap, HashSet};
/// Errors surfaced while dry-running (simulating) a workflow without
/// invoking any operations.
#[derive(thiserror::Error, Debug)]
pub enum SimulationError {
    /// A step's `type` did not match any loaded operation name.
    #[error("Step not found: {0}")]
    StepNotFound(String),
    /// A step referenced a value that is neither in memory nor among the
    /// step's static values. Fields: step name, missing value name.
    #[error("Expected input variable or static value not found in step {0}: {1}")]
    ValueNotFound(String, String),
}
#[derive(thiserror::Error, Debug)]
pub enum WorkflowError {
#[error("Invocable operation could not be found: {0}")]
InvocableOperationNotFound(String),
#[error("Derivation configuration for operation not found: {0}")]
DerivationConfigurationNotFound(String),
#[error("An error was encountered while invoking an operation")]
InvocationError(String),
}
/// An input for a workflow argument. When inputs are read, they should be referenced by the first
/// name. Additional names can be provided as aliases, to allow chaining workflows together when
/// names may not make sense - such as a Solana address then being used as an authorization
/// address.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Input {
    /// The name of the input; this is the primary identifier.
    pub name: String,
    /// A description of the input.
    pub description: String,
    /// Aliases used when loading inputs.
    #[serde(default)]
    pub aliases: Vec<String>,
    /// Whether the workflow input is optional. When unset (`None`), the
    /// input is treated as optional by [`Input::is_required`].
    pub optional: Option<bool>,
}
impl Input {
    /// Every identifier this input answers to: the primary name first,
    /// followed by any aliases.
    pub fn identifiers(&self) -> impl Iterator<Item = &String> {
        std::iter::once(&self.name).chain(self.aliases.iter())
    }

    /// True only when the input is explicitly marked `optional: false`;
    /// an absent `optional` field counts as not required.
    pub fn is_required(&self) -> bool {
        matches!(self.optional, Some(false))
    }
}
/// A named, ordered sequence of operation invocations ("steps") and the
/// user-facing inputs those steps consume.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Workflow {
    /// The workflow's name.
    pub name: String,
    /// Human-readable description of what the workflow does.
    pub description: String,
    /// Inputs supplied by the user before the workflow runs.
    #[serde(default)]
    pub inputs: Vec<Input>,
    /// Ordered steps; serialized as repeated `step` entries.
    #[serde(rename = "step")]
    steps: Vec<WorkflowStep>,
}
pub type StringMap<T = String> = BTreeMap<String, T>;
/// One step of a workflow: which operation to run, plus the mappings
/// between the workflow's stored values and the operation's own names.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorkflowStep {
    /// Name of the operation to invoke.
    r#type: String,
    /// Static values passed to the operation verbatim (as JSON strings).
    #[serde(default)]
    values: StringMap,
    /// Map of operation argument name -> stored value name.
    #[serde(default)]
    inputs: StringMap,
    /// Map of operation output name -> stored value name.
    #[serde(default)]
    outputs: StringMap,
}
// TODO: This should probably be migrated to an actual Result type, instead of
// currently just shoving everything in "blob". Probably done after derivation_accounts
// gets hoisted out of here.
/// Result of invoking a single operation.
#[derive(Serialize, Deserialize)]
pub struct OperationResult {
    // All values returned from an operation.
    blob: StringMap<Value>,
    // Any requested accounts from an operation; the workflow runner derives
    // keys for these before the next step.
    //
    // TODO: Move this to its own step.
    #[serde(default)]
    derivation_accounts: Vec<DerivationIndex>,
}
/// Callback that derives one key per requested account index, given a
/// module's derivation algorithm and path prefix.
type DeriveKeys<'a> =
    &'a dyn Fn(&DerivationAlgorithm, &DerivationPath, &[DerivationIndex]) -> Vec<Vec<u8>>;
impl Workflow {
    /// Dry-runs the workflow without invoking any operations.
    ///
    /// `data` seeds the set of value names assumed to exist (e.g. the
    /// workflow's inputs). Each step is checked for: a matching operation,
    /// and availability of every value its `inputs` map references. Returns
    /// a list of advisory reports for step inputs that are not recognized
    /// as operation arguments (they are presumably passed another way —
    /// e.g. inside the payload).
    pub fn simulate_workflow<T: InvocableOperation + Sized>(
        &self,
        mut data: HashSet<String>,
        operations: &[T],
    ) -> Result<Vec<String>, SimulationError> {
        let mut reports = vec![];
        for step in self.steps.iter() {
            let step_type = step.r#type.clone();
            let Some(invocable) = operations.iter().find(|op| *op.name() == step_type) else {
                return Err(SimulationError::StepNotFound(step_type));
            };
            // Check we have the values the module expects
            for in_memory_name in step.inputs.values() {
                if !data.contains(in_memory_name) && !step.values.contains_key(in_memory_name) {
                    return Err(SimulationError::ValueNotFound(
                        step_type,
                        in_memory_name.to_owned(),
                    ));
                }
            }
            // Check whether the module expects the keys as arguments, or if the
            // keys will be passed as a "payload" variable.
            let mut inputs = step.inputs.keys().collect::<HashSet<_>>();
            for argument in invocable.argument_names() {
                inputs.remove(argument);
            }
            for remaining_input in inputs {
                reports.push(format!(
                    "Step {step_type}: Input {remaining_input} is not interpreted as a argument"
                ));
            }
            // Add the return values from the module into memory
            data.extend(step.outputs.values().cloned());
        }
        Ok(reports)
    }

    /// Executes the workflow's steps in order.
    ///
    /// `data` is the shared value store, seeded with the workflow's inputs;
    /// each step's inputs are drawn from it (renamed per the step's `inputs`
    /// map) and each step's outputs are merged back in (renamed per the
    /// step's `outputs` map). `derive_keys` is called between steps to
    /// satisfy any derivation-account requests made by the previous step.
    /// On success, returns the store filtered down to the final step's
    /// outputs.
    pub fn run_workflow<T: InvocableOperation>(
        &self,
        mut data: StringMap<Value>,
        operations: &[T],
        derive_keys: DeriveKeys,
    ) -> Result<StringMap<Value>, WorkflowError> {
        // Keys derived for the *next* operation, and the accounts requested
        // by the *previous* one; both are cleared each iteration.
        let mut derived_keys = vec![];
        let mut derivation_accounts = vec![];
        for step in &self.steps {
            let step_type = step.r#type.clone();
            let Some(operation) = operations.iter().find(|op| *op.name() == step_type) else {
                return Err(WorkflowError::InvocableOperationNotFound(step_type));
            };
            // Prepare all inputs for the operation invocation
            let inputs: StringMap<Value> = data
                .iter()
                .map(|(k, v)| (k, v.clone()))
                .filter_map(|(k, v)| {
                    // We have our stored name, `k`, which matches with this inner loop's `v`. We
                    // need to return our desired name, rather than our stored name, and the value
                    // in our storage, our current `v`.
                    let (desired, _stored) = step.inputs.iter().find(|(_, v)| k == *v)?;
                    Some((desired.clone(), v))
                })
                .chain(
                    // Static step values are passed through as JSON strings.
                    step.values
                        .iter()
                        .map(|(k, v)| (k.clone(), Value::String(v.clone()))),
                )
                .collect();
            let OperationResult {
                blob,
                derivation_accounts: new_accounts,
            } = operation.invoke(&inputs, &derived_keys);
            // Derived keys are single-use: consumed by the invocation above.
            derived_keys.clear();
            derivation_accounts.extend(new_accounts);
            data.extend(blob.into_iter().filter_map(|(k, v)| {
                // We have our stored name, `k`, which matches with this inner loop's `v`. We
                // need to return our desired name, rather than our stored name, and the value
                // in our storage, our current `v`.
                let (_given, stored) = step.outputs.iter().find(|(k1, _)| k == **k1)?;
                Some((stored.clone(), v))
            }));
            // Add requested derivation keys and clear derivation account requests.
            if !derivation_accounts.is_empty() {
                let Some((algo, path_prefix)) = operation.derivation_configuration() else {
                    return Err(WorkflowError::DerivationConfigurationNotFound(step_type));
                };
                derived_keys.extend(derive_keys(algo, path_prefix, &derivation_accounts));
            }
            derivation_accounts.clear();
        }
        // Only the final step's outputs are returned to the caller.
        if let Some(last_step) = &self.steps.last() {
            let values = last_step.outputs.values().collect::<HashSet<_>>();
            data.retain(|stored_name, _| values.contains(stored_name));
        }
        Ok(data)
    }
}
/// A source of user-provided workflow inputs (e.g. a CLI or JSON frontend).
pub trait WorkflowHandler {
    /// Load all inputs for the Workflow from some external source, such as CLI arguments or
    /// JSON payloads. The inputs can then be used to simulate or perform a workflow.
    fn load_inputs(&self) -> StringMap;
}
/// The configuration for an Icepick operation that can be invoked.
///
/// Implementors of this trait should include all necessary requirements to invoke the operation
/// within themselves.
pub trait InvocableOperation {
    /// Invoke the operation with the supplied inputs and derived keys.
    /// Inputs are keyed by the operation's own argument names.
    fn invoke(&self, input: &StringMap<Value>, derived_keys: &[Vec<u8>]) -> OperationResult;

    /// The name of the operation.
    fn name(&self) -> &String;

    /// The names of arguments that can be passed to the function.
    fn argument_names(&self) -> impl Iterator<Item = &String>;

    /// The derivation algorithm and derivation path to be prefixed to all derivation requests.
    /// `None` when the operation's module has no derivation configuration.
    fn derivation_configuration(&self) -> Option<(&DerivationAlgorithm, &DerivationPath)>;
}

View File

@ -4,26 +4,12 @@ version = "0.1.0"
edition = "2021"
[dependencies]
bincode = "1.3.3"
chrono = { version = "0.4.39", default-features = false, features = ["now", "serde", "std"] }
clap = { version = "4.5.20", features = ["cargo", "derive", "string"] }
icepick-module = { version = "0.1.0", path = "../icepick-module" }
icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" }
keyfork-derive-util = { version = "0.2.1", registry = "distrust" }
keyfork-prompt = { version = "0.2.1", registry = "distrust", default-features = false }
keyfork-shard = { version = "0.3.0", registry = "distrust", default-features = false, features = ["openpgp", "openpgp-card"] }
keyforkd-client = { version = "0.2.1", registry = "distrust" }
keyforkd-models = { version = "0.2.0", registry = "distrust" }
miniquorum = { version = "0.1.0", path = "../miniquorum", default-features = false }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["arbitrary_precision"] }
serde_yaml = "0.9.34"
smex = { version = "0.1.0", registry = "distrust" }
serde_json.workspace = true
thiserror = "2.0.3"
toml = "0.8.19"
[build-dependencies]
bincode = "1.3.3"
icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" }
serde_yaml = "0.9.34"
smex = { version = "0.1.0", registry = "distrust" }

View File

@ -1,55 +0,0 @@
use icepick_workflow::Workflow;
use std::{collections::BTreeMap, path::{PathBuf, Path}};
/// Reads a build-time environment variable, registering it with Cargo so
/// the build script re-runs whenever the variable changes.
///
/// Panics (with the variable name) when the variable is unset or invalid.
fn env_var(var: &'static str) -> String {
    let value = std::env::var(var);
    println!("cargo::rerun-if-env-changed={var}");
    value.expect(var)
}
/// Registers `path` with Cargo so the build script re-runs when it changes.
/// Panics if the path is not valid UTF-8.
fn track_path(path: &Path) {
    let path_str = path.to_str().unwrap();
    println!("cargo::rerun-if-changed={path_str}");
}
/// Build script: bundles every workflow YAML file found under
/// `workflows/<module>/` into a bincode-serialized, hex-encoded map written
/// to `$OUT_DIR/workflows.hex`, for the crate to embed at compile time.
///
/// Panics on any unexpected directory layout or unparseable workflow file —
/// intentional for a build script, so errors fail the build loudly.
fn main() {
    let out_dir = env_var("OUT_DIR");
    let crate_dir = env_var("CARGO_MANIFEST_DIR");
    let workflows_dir = PathBuf::from(crate_dir).join("workflows");
    track_path(&workflows_dir);
    // BTreeMap keeps module ordering deterministic; workflows are sorted
    // below so the serialized output is stable across builds.
    let mut workflows_by_module: BTreeMap<String, Vec<Workflow>> = Default::default();
    for module_dir in std::fs::read_dir(&workflows_dir).unwrap() {
        let module_dir = module_dir.unwrap();
        // NOTE(review): leftover debug output; prints to stderr every build.
        dbg!(&module_dir);
        let path = module_dir.path();
        // Only `workflows/<module>/` directories are expected at this level.
        if !path.is_dir() {
            panic!("found unexpected file {}", path.to_string_lossy());
        }
        let mut workflows = vec![];
        for workflow_file in std::fs::read_dir(&path).unwrap() {
            dbg!(&workflow_file);
            let workflow_file = workflow_file.unwrap();
            let path = workflow_file.path();
            if !path.is_file() {
                panic!("found unexpected non-file {}", path.to_string_lossy());
            }
            // Each file is a single YAML-encoded Workflow.
            let file_content = std::fs::read(path).unwrap();
            let workflow: Workflow = serde_yaml::from_slice(&file_content).unwrap();
            workflows.push(workflow);
        }
        // Sort by name for stable output regardless of directory order.
        workflows.sort_by(|a, b| a.name.cmp(&b.name));
        workflows_by_module.insert(
            module_dir.file_name().to_str().unwrap().to_owned(),
            workflows,
        );
    }
    let out_path = PathBuf::from(out_dir).join("workflows.hex");
    let result = bincode::serialize(&workflows_by_module).unwrap();
    let hexed = smex::encode(&result);
    std::fs::write(out_path, hexed).unwrap();
}

View File

@ -1,11 +1,9 @@
use clap::{builder::ArgPredicate, command, value_parser};
use clap::command;
use icepick_module::help::*;
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
use serde::{Deserialize, Serialize};
use std::{
collections::{HashMap, BTreeMap},
collections::HashMap,
io::{IsTerminal, Write},
path::PathBuf,
process::{Command, Stdio},
};
@ -19,59 +17,17 @@ pub fn get_command(bin_name: &str) -> (&str, Vec<&str>) {
}
}
/// Derives one key per account index by querying the local keyforkd daemon
/// over its discovered socket.
///
/// Each request extends `path_prefix` with a single account index; returned
/// keys are in the same order as `accounts`. Panics if the daemon is
/// unreachable, a derivation fails, or a non-derivation response arrives.
pub fn derive_keys(
    algo: &DerivationAlgorithm,
    path_prefix: &DerivationPath,
    accounts: &[DerivationIndex],
) -> Vec<Vec<u8>> {
    // Skip socket discovery entirely when nothing needs deriving.
    if accounts.is_empty() {
        return vec![];
    }
    let mut derived_keys = vec![];
    let mut client = keyforkd_client::Client::discover_socket().expect("keyforkd started");
    for account in accounts {
        let request = keyfork_derive_util::request::DerivationRequest::new(
            algo.clone(),
            &path_prefix.clone().chain_push(account.clone()),
        );
        let request = keyforkd_models::Request::Derivation(request);
        let response = client.request(&request).expect("valid derivation");
        match response {
            keyforkd_models::Response::Derivation(
                keyfork_derive_util::request::DerivationResponse { data, .. },
            ) => {
                derived_keys.push(data.to_vec());
            }
            _ => panic!("Unexpected response"),
        }
    }
    derived_keys
}
#[derive(Serialize, Deserialize, Debug)]
struct ModuleConfig {
/// The name of the module.
name: String,
/// The name of the command used to invoke the module. If not given, the default would be
/// `format!("icepick-{name}")`, using the name of the module.
command_name: Option<String>,
algorithm: keyfork_derive_util::request::DerivationAlgorithm,
/// The bip32 derivation algorithm. This is currently used for deriving keys from Keyfork, but
/// may be passed to modules within the workflow to provide additional context, such as the
/// algorithm for a generic signer.
#[serde(default)]
algorithm: Option<DerivationAlgorithm>,
#[serde(with = "serde_derivation")]
derivation_prefix: keyfork_derive_util::DerivationPath,
/// The bip44 derivation prefix. This is currently used for deriving keys from Keyfork directly
/// within Icepick, but may be passed to modules within the workflow to provide additional
/// context, such as a module for deriving keys.
#[serde(with = "serde_derivation", default)]
derivation_prefix: Option<DerivationPath>,
/// All workflows for a module.
#[serde(rename = "workflow", default)]
workflows: Vec<icepick_workflow::Workflow>,
workflows: Vec<workflow::Workflow>,
}
mod serde_derivation {
@ -79,28 +35,21 @@ mod serde_derivation {
use serde::{Deserialize, Deserializer, Serializer};
use std::str::FromStr;
pub fn serialize<S>(p: &Option<DerivationPath>, serializer: S) -> Result<S::Ok, S::Error>
pub fn serialize<S>(p: &DerivationPath, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
if let Some(p) = p {
let path = p.to_string();
serializer.serialize_str(&path)
} else {
serializer.serialize_none()
}
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<DerivationPath>, D::Error>
pub fn deserialize<'de, D>(deserializer: D) -> Result<DerivationPath, D::Error>
where
D: Deserializer<'de>,
{
use serde::de::Error;
let opt_string = Option::<String>::deserialize(deserializer)?;
opt_string
.map(|string| DerivationPath::from_str(&string).map_err(Error::custom))
.transpose()
String::deserialize(deserializer)
.and_then(|string| DerivationPath::from_str(&string).map_err(Error::custom))
}
}
@ -113,11 +62,6 @@ struct Config {
// command name, invocable binary, operations
type Commands<'a> = &'a [(String, String, Vec<Operation>)];
fn default_workflows() -> HashMap<String, Vec<icepick_workflow::Workflow>> {
let workflows_hex = include_str!(concat!(env!("OUT_DIR"), "/workflows.hex"));
bincode::deserialize(&smex::decode(workflows_hex).unwrap()).unwrap()
}
pub fn do_cli_thing() {
/* parse config file to get module names */
let config_file = std::env::vars().find_map(|(k, v)| {
@ -126,52 +70,9 @@ pub fn do_cli_thing() {
}
None
});
let config_path = config_file.unwrap_or_else(|| "/etc/icepick/icepick.toml".to_string());
let config_path = config_file.unwrap_or_else(|| "icepick.toml".to_string());
let config_content = std::fs::read_to_string(config_path).expect("can't read config file");
let mut config: Config = match toml::from_str(&config_content) {
Ok(config) => config,
Err(e) => panic!("Error while loading TOML config: {e}"),
};
config.modules.push(ModuleConfig {
name: "internal".to_string(),
command_name: Default::default(),
algorithm: Default::default(),
derivation_prefix: Default::default(),
workflows: Default::default(),
});
config.modules.push(ModuleConfig {
name: "ed25519".to_string(),
command_name: Default::default(),
algorithm: Some(DerivationAlgorithm::Ed25519),
// TODO: impl Last
derivation_prefix: Default::default(),
workflows: Default::default(),
});
let workflows = default_workflows();
for module in &mut config.modules {
if let Some(module_workflows) = workflows.get(&module.name) {
module.workflows.extend(module_workflows.iter().cloned());
}
}
let workflows_file = std::env::vars().find_map(|(k, v)| {
if k == "ICEPICK_WORKFLOWS_FILE" {
return Some(v);
}
None
});
let workflows_path = workflows_file.unwrap_or_else(|| "workflows.yaml".to_string());
if let Ok(content) = std::fs::read(&workflows_path) {
let workflows: HashMap<String, Vec<icepick_workflow::Workflow>> =
serde_yaml::from_slice(&content).unwrap();
for module in &mut config.modules {
if let Some(module_workflows) = workflows.get(&module.name) {
module.workflows.extend(module_workflows.iter().cloned());
}
}
}
let config: Config = toml::from_str(&config_content).expect("config file had invalid toml");
let mut commands = vec![];
let mut icepick_command = command!();
@ -208,89 +109,12 @@ pub fn do_cli_thing() {
}
let workflows = workflows.leak();
let mut workflow_command = clap::Command::new("workflow")
.about("Run a pre-defined Icepick workflow.")
.arg(
clap::arg!(--"run-quorum" <FILE> "Run a workflow signed by a quorum of approvers.")
.long_help(concat!(
"Run a workflow signed by a quorum of approvers. ",
"This command requires either `--shardfile` or `--keyring`. If given a ",
"Shardfile, the certificates stored within the Shardfile will be used to ",
"verify the quorum payload. If given an OpenPGP keyring, the ",
"certificates within the keyring will be used to verify the quorum ",
"payload. Both formats require all keys to be signed by the key matching a ",
"currently plugged-in OpenPGP smartcard."
))
.value_parser(value_parser!(PathBuf))
.conflicts_with_all([
"simulate-workflow",
"export-for-quorum",
"add-signature-to-quorum",
]),
)
.arg(
clap::arg!(--"add-signature-to-quorum" <FILE> "Add a signature to a workflow quorum.")
.long_help(concat!(
"Add a signature to a workflow quorum. ",
"Any existing signatures will be verified. ",
"This command requires either `--shardfile` or `--keyring`. If given a ",
"Shardfile, the certificates stored within the Shardfile will be used to ",
"verify the quorum payload. If given an OpenPGP keyring, the ",
"certificates within the keyring will be used to verify the quorum ",
"payload. Both formats require all keys to be signed by the key matching a ",
"currently plugged-in OpenPGP smartcard."
))
.value_parser(value_parser!(PathBuf)),
)
.arg(
clap::arg!(--"keyring" <FILE> "OpenPGP Keyring file for verifying quorum.")
.value_parser(value_parser!(PathBuf))
.requires_ifs([
(ArgPredicate::IsPresent, "run-quorum"),
(ArgPredicate::IsPresent, "add-signature-to-quorum"),
]),
)
.arg(
clap::arg!(--"quorum-threshold" <THRESHOLD> "Quorum of signatures required to run.")
.long_help(concat!(
"Quorum of signatures required to run. ",
"When not present, the default behavior is to require a signature from ",
"every certificate present."
))
.value_parser(value_parser!(u8))
.requires("run-quorum")
.conflicts_with("shardfile"), // Shardfile contains its own threshold.
)
.arg(
clap::arg!(--"shardfile" <FILE> "OpenPGP Shardfile for verifying quorum.")
.long_help(concat!(
"OpenPGP Shardfile for verifying quorum. ",
"An OpenPGP Smartcard will be required to decrypt the quorum threshold and ",
"OpenPGP certificates used for verifying the payload.",
))
.value_parser(value_parser!(PathBuf))
.requires_ifs([
(ArgPredicate::IsPresent, "run-quorum"),
(ArgPredicate::IsPresent, "add-signature-to-quorum"),
])
.conflicts_with("keyring"),
)
.arg(clap::arg!(--"simulate-workflow" "Simulate an Icepick Workflow.").global(true))
.arg(
clap::arg!(
--"export-for-quorum"
"Export the given inputs as a quorum file."
)
.global(true),
)
.arg(
clap::arg!(--"sign" "Sign the exported workflow values.")
.global(true)
.requires_if(ArgPredicate::IsPresent, "export-for-quorum"),
);
.about("Run a pre-defined Icepick workflow")
.arg(clap::arg!(--"simulate-workflow").global(true));
for module in workflows.iter() {
let mut module_subcommand = clap::Command::new(module.0.as_str());
for workflow in &module.1 {
module_subcommand = module_subcommand.subcommand(workflow::generate_command(workflow));
module_subcommand = module_subcommand.subcommand(workflow.generate_command());
}
workflow_command = workflow_command.subcommand(module_subcommand);
}
@ -324,7 +148,9 @@ pub fn do_cli_thing() {
// If we have a Workflow command, run the workflow and exit.
if let Some(("workflow", matches)) = matches.subcommand() {
if let Some((module_name, matches)) = matches.subcommand() {
let (module_name, matches) = matches
.subcommand()
.expect("icepick workflow: missing module");
let (workflow_name, matches) = matches
.subcommand()
.expect("icepick workflow: missing workflow");
@ -333,60 +159,7 @@ pub fn do_cli_thing() {
.find(|(module, _)| module == module_name)
.and_then(|(_, workflows)| workflows.iter().find(|x| x.name == workflow_name))
.expect("workflow from CLI should match config");
workflow::handle(workflow, module_name, matches, commands, &config.modules);
} else if let Some(payload_file) = matches.get_one::<PathBuf>("add-signature-to-quorum") {
let purpose = workflow::Purpose::AddSignature;
let mut payload = {
if let Some(keyring_file) = matches.get_one::<PathBuf>("keyring") {
workflow::parse_quorum_file(
payload_file,
keyring_file,
matches.get_one::<u8>("quorum-threshold").copied(),
purpose,
)
} else if let Some(shardfile) = matches.get_one::<PathBuf>("shardfile") {
workflow::parse_quorum_with_shardfile(payload_file, shardfile, purpose)
} else {
panic!("neither --keyring nor --shardfile were given, no keys to verify")
}
};
payload.add_signature().unwrap();
let output_file = payload_file.with_extension("tmp");
let mut file = std::fs::File::create_new(&output_file).unwrap();
serde_json::to_writer_pretty(&mut file, &payload).unwrap();
drop(file);
std::fs::copy(&output_file, payload_file).unwrap();
std::fs::remove_file(output_file).unwrap();
} else if let Some(payload_file) = matches.get_one::<PathBuf>("run-quorum") {
let purpose = workflow::Purpose::RunQuorum;
let (module_name, workflow_name, inputs) = {
if let Some(keyring_file) = matches.get_one::<PathBuf>("keyring") {
workflow::parse_quorum_file(
payload_file,
keyring_file,
matches.get_one::<u8>("quorum-threshold").copied(),
purpose,
)
.into_values()
} else if let Some(shardfile) = matches.get_one::<PathBuf>("shardfile") {
workflow::parse_quorum_with_shardfile(payload_file, shardfile, purpose)
.into_values()
} else {
panic!("neither --keyring nor --shardfile were given, no keys to verify")
}
};
let inputs: BTreeMap<String, serde_json::Value> =
serde_json::from_value(inputs).unwrap();
let workflow = workflows
.iter()
.find(|(module, _)| *module == module_name)
.and_then(|(_, workflows)| workflows.iter().find(|x| x.name == workflow_name))
.expect("workflow from CLI should match config");
workflow::handle_payload(workflow, inputs, commands, &config.modules);
}
workflow.handle(matches, commands);
return;
}
@ -446,11 +219,24 @@ pub fn do_cli_thing() {
let accounts: Vec<keyfork_derive_util::DerivationIndex> =
serde_json::from_value(accounts.clone())
.expect("valid derivation_accounts");
derived_keys.extend(derive_keys(
&algo.expect("a module requested keys but didn't provide algorithm"),
&path.expect("a module requested keys but didn't provide prefix"),
&accounts,
));
let mut client =
keyforkd_client::Client::discover_socket().expect("keyforkd started");
for account in accounts {
let request = keyfork_derive_util::request::DerivationRequest::new(
algo.clone(),
&path.clone().chain_push(account),
);
let request = keyforkd_models::Request::Derivation(request);
let response = client.request(&request).expect("valid derivation");
match response {
keyforkd_models::Response::Derivation(
keyfork_derive_util::request::DerivationResponse { data, .. },
) => {
derived_keys.push(data.to_vec());
}
_ => panic!("Unexpected response"),
}
}
}
let json = serde_json::json!({
@ -478,17 +264,10 @@ pub fn do_cli_thing() {
let mut input = child.stdin.take().unwrap();
serde_json::to_writer(&mut input, &json).unwrap();
input.write_all(b"\n{\"operation\": \"exit\"}\n").unwrap();
let output = child.wait_with_output().unwrap();
let stdout = &output.stdout;
if output.status.success() {
let json: serde_json::Value =
serde_json::from_slice(stdout).expect("valid json");
let output = child.wait_with_output().unwrap().stdout;
let json: serde_json::Value = serde_json::from_slice(&output).expect("valid json");
let json_as_str = serde_json::to_string(&json).unwrap();
println!("{json_as_str}");
} else {
eprintln!("Error while invoking operation, check logs");
std::process::exit(1);
}
}
}
}

View File

@ -1,277 +1,152 @@
use icepick_workflow::{Input, InvocableOperation, OperationResult, StringMap, Workflow};
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationPath};
use keyfork_shard::{openpgp::OpenPGP, Format};
use miniquorum::{Payload, PayloadVerification};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::{
io::Write,
process::{Command, Stdio},
};
use std::collections::{HashMap, HashSet};
use super::{derive_keys, get_command, Commands, ModuleConfig, Operation};
use super::{Commands, Operation};
/// The purpose for interacting with a payload.
pub enum Purpose {
/// Adding a signature.
AddSignature,
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Workflow {
pub name: String,
pub inputs: Vec<String>,
/// Running a quorum-signed payload.
RunQuorum,
#[serde(rename = "step")]
steps: Vec<WorkflowStep>,
}
pub type StringMap = std::collections::HashMap<String, String>;
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorkflowStep {
r#type: String,
#[serde(default)]
blob: StringMap,
#[serde(default)]
values: StringMap,
#[serde(default)]
inputs: StringMap,
#[serde(default)]
outputs: StringMap,
}
#[derive(Clone, Debug)]
struct CLIOperation {
/// The name of the operation (i.e. `transfer-token`).
struct InvocableOperation {
name: String,
/// The binary to invoke the operation.
binary: String,
/// Information describing the invocation requirements of the operation.
operation: Operation,
/// The derivation algorithm used when deriving keys for the operation.
derivation_algorithm: Option<DerivationAlgorithm>,
/// The derivation prefix used when deriving keys for the operation.
derivation_prefix: Option<DerivationPath>,
}
impl InvocableOperation for CLIOperation {
fn invoke(&self, input: &StringMap<Value>, derived_keys: &[Vec<u8>]) -> OperationResult {
let (command, args) = get_command(&self.binary);
let json = serde_json::json!({
"operation": self.operation.name,
"values": input,
"derived_keys": derived_keys,
});
let mut child = Command::new(command)
.args(args)
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()
.unwrap();
let mut child_input = child.stdin.take().unwrap();
serde_json::to_writer(&mut child_input, &json).unwrap();
child_input
.write_all(b"\n{\"operation\": \"exit\"}\n")
.unwrap();
let result = child.wait_with_output().unwrap();
if !result.status.success() {
panic!(
"Bad exit ({}: {}): {}",
&self.binary,
&self.operation.name,
String::from_utf8_lossy(&result.stderr)
);
}
let output = result.stdout;
let json: OperationResult = serde_json::from_slice(&output).expect("valid json");
json
}
fn name(&self) -> &String {
&self.name
}
fn argument_names(&self) -> impl Iterator<Item = &String> {
self.operation.arguments.iter().map(|i| &i.name)
}
fn derivation_configuration(&self) -> Option<(&DerivationAlgorithm, &DerivationPath)> {
self.derivation_algorithm
.as_ref()
.zip(self.derivation_prefix.as_ref())
}
}
pub fn generate_command(workflow: &Workflow) -> clap::Command {
let mut command = clap::Command::new(&workflow.name).about(&workflow.description);
// NOTE: all required inputs are still marked as .required(false) since they could be included
// in the `--input-file` argument.
for input in workflow.inputs.iter() {
let name = &input.name;
let arg = clap::Arg::new(name)
.required(false)
.help(&input.description)
.long(name.replace('_', "-"))
.value_name(name.to_uppercase())
.visible_aliases(&input.aliases);
command = command.arg(arg);
}
command.arg(
clap::arg!(
impl Workflow {
/// Generate a [`clap::Command`] for a [`Workflow`], where the inputs can be defined either by
/// command-line arguments or via a JSON input file.
pub fn generate_command(&self) -> clap::Command {
let mut command = clap::Command::new(&self.name).arg(clap::arg!(
--"input-file" [FILE]
"A file containing any inputs not passed on the command line"
)
.value_parser(clap::value_parser!(std::path::PathBuf)),
)
));
for input in &self.inputs {
let arg = clap::Arg::new(input)
.required(false)
.long(input.replace('_', "-"))
.value_name(input.to_uppercase());
command = command.arg(arg);
}
command
}
fn load_inputs<'a>(
inputs: impl IntoIterator<Item = &'a Input>,
matches: &clap::ArgMatches,
) -> StringMap {
fn load_inputs(&self, matches: &clap::ArgMatches) -> StringMap {
let mut map = StringMap::default();
let input_file: Option<StringMap> = matches
.get_one::<std::path::PathBuf>("input-file")
.and_then(|p| std::fs::File::open(p).ok())
.and_then(|f| serde_json::from_reader(f).ok());
for input in inputs {
let identifier = &input.name;
match matches.get_one::<String>(identifier) {
for input in &self.inputs {
match matches.get_one::<String>(input) {
Some(value) => {
map.insert(identifier.clone(), value.clone());
map.insert(input.clone(), value.clone());
continue;
}
None => {
for aliasable_identifier in input.identifiers() {
if let Some(value) = input_file
.as_ref()
.and_then(|f| f.get(aliasable_identifier))
{
map.insert(identifier.clone(), value.clone());
if let Some(value) = input_file.as_ref().and_then(|f| f.get(input)) {
map.insert(input.clone(), value.clone());
continue;
}
}
}
}
if input.is_required() {
panic!("Required workflow input was not found: {identifier}");
}
panic!("Key was not found: {input}");
}
map
}
fn load_operations(commands: Commands, config: &[ModuleConfig]) -> Vec<CLIOperation> {
fn simulate_workflow(&self, mut data: HashSet<String>, operations: &[InvocableOperation]) {
// simulate the steps by using a HashSet to traverse the inputs and outputs and ensure
// there's no inconsistencies
for (i, step) in self.steps.iter().enumerate() {
// NOTE: overflow possible but unlikely
let step_index = i + 1;
let step_type = &step.r#type;
// Find the relevant Operation
let Some(invocable) = operations.iter().find(|op| op.name == *step_type) else {
panic!("Could not find operation: {step_type}");
};
// Check if we have the keys we want to pass into the module.
for in_memory_name in step.inputs.values() {
if !data.contains(in_memory_name) && !step.values.contains_key(in_memory_name) {
panic!("Failed simulation: step #{step_index} ({step_type}): missing value {in_memory_name}");
}
}
// Check that the module accepts those keys.
for module_input_name in step.inputs.keys() {
if !invocable
.operation
.arguments
.iter()
.any(|arg| *module_input_name == arg.name)
{
eprintln!("Simulation: step #{step_index} ({step_type}): input value {module_input_name} will be passed through as JSON input");
}
}
// Add the keys we get from the module.
for in_memory_name in step.outputs.values() {
data.insert(in_memory_name.clone());
}
}
}
pub fn handle(&self, matches: &clap::ArgMatches, modules: Commands) {
let inputs = self.load_inputs(matches);
let data: HashMap<String, Value> = inputs
.into_iter()
.map(|(k, v)| (k, Value::String(v)))
.collect();
let mut operations = vec![];
for (module_name, module_binary, module_operations) in commands {
for (module_name, module_binary, module_operations) in modules {
for operation in module_operations {
let operation_name = &operation.name;
let module_config = config.iter().find(|conf| conf.name == *module_name);
let io = CLIOperation {
let io = InvocableOperation {
name: format!("{module_name}-{operation_name}"),
binary: module_binary.clone(),
operation: operation.clone(),
derivation_algorithm: module_config.and_then(|m| m.algorithm.clone()),
derivation_prefix: module_config.and_then(|m| m.derivation_prefix.clone()),
};
operations.push(io);
}
}
operations
}
pub fn parse_quorum_file(
quorum_path: impl AsRef<std::path::Path>,
cert_path: impl AsRef<std::path::Path>,
threshold: Option<u8>,
purpose: Purpose,
) -> Payload {
let (payload, certs) = Payload::load(quorum_path, cert_path).unwrap();
let threshold = threshold.unwrap_or(u8::try_from(certs.len()).expect("too many certs!"));
let policy = match purpose {
Purpose::AddSignature => {
// All signatures must be valid, but we don't require a minimum.
PayloadVerification::new().with_threshold(0)
}
Purpose::RunQuorum => PayloadVerification::new().with_threshold(threshold),
};
payload.verify_signatures(&certs, &policy, None).unwrap();
payload
}
pub fn parse_quorum_with_shardfile(
quorum_path: impl AsRef<std::path::Path>,
shardfile_path: impl AsRef<std::path::Path>,
purpose: Purpose,
) -> Payload {
let payload_file = std::fs::File::open(quorum_path).unwrap();
let payload: Payload = serde_json::from_reader(payload_file).unwrap();
let opgp = OpenPGP;
let (threshold, certs) = opgp
.decrypt_metadata_from_file(
None::<&std::path::Path>,
std::fs::File::open(shardfile_path).unwrap(),
keyfork_prompt::default_handler().unwrap(),
)
.unwrap();
let policy = match purpose {
Purpose::AddSignature => {
// All signatures must be valid, but we don't require a minimum.
PayloadVerification::new().with_threshold(0)
}
Purpose::RunQuorum => PayloadVerification::new().with_threshold(threshold),
};
payload.verify_signatures(&certs, &policy, None).unwrap();
payload
}
pub fn handle_payload(
workflow: &Workflow,
inputs: StringMap<Value>,
modules: Commands,
config: &[ModuleConfig],
) {
let operations = load_operations(modules, config);
let result = workflow
.run_workflow(inputs, &operations, &derive_keys)
.expect("Invocation failure");
println!("{}", serde_json::to_string(&result).expect("valid JSON"));
}
pub fn handle(
workflow: &Workflow,
module_name: &str,
matches: &clap::ArgMatches,
modules: Commands,
config: &[ModuleConfig],
) {
let inputs = load_inputs(&workflow.inputs, matches);
let data: StringMap<Value> = inputs
.into_iter()
.map(|(k, v)| (k, Value::String(v)))
.collect();
let operations = load_operations(modules, config);
if matches.get_flag("simulate-workflow") {
let reports = workflow
.simulate_workflow(data.into_keys().collect(), &operations)
.expect("Simulation failure");
for report in reports {
println!("{report}");
}
self.simulate_workflow(data.into_keys().collect(), &operations);
return;
}
if matches.get_flag("export-for-quorum") {
let mut payload = Payload::new(
serde_json::to_value(data).unwrap(),
module_name,
&workflow.name,
);
if matches.get_flag("sign") {
payload.add_signature().unwrap();
todo!("Unsimulated transaction!");
}
println!("{}", serde_json::to_string_pretty(&payload).unwrap());
return;
}
let result = workflow
.run_workflow(data, &operations, &derive_keys)
.expect("Invocation failure");
println!("{}", serde_json::to_string(&result).expect("valid JSON"));
}

View File

@ -1,43 +0,0 @@
name: "broadcast"
description: |-
Broadcast a transaction on a Cosmos-based blockchain.
inputs:
- name: "nonce_address"
description: >-
The address of the account used for the transaction nonce.
- name: "chain_name"
description: >-
The name of the Cosmos chain to broadcast a transaction on.
step:
- type: "cosmos-get-chain-info"
inputs:
chain_name: "chain_name"
outputs:
blockchain_config: "blockchain_config"
- type: "cosmos-get-account-data"
inputs:
account_id: "nonce_address"
blockchain_config: "blockchain_config"
outputs:
account_number: "account_number"
sequence_number: "sequence_number"
- type: "internal-save-file"
values:
filename: "account_info.json"
inputs:
account_number: "account_number"
sequence_number: "sequence_number"
- type: "internal-load-file"
values:
filename: "transaction.json"
outputs:
transaction: "transaction"
- type: "cosmos-broadcast"
inputs:
blockchain_config: "blockchain_config"
transaction: "transaction"
outputs:
status: "status"
url: "url"
error: "error"
error_code: "error_code"

View File

@ -1,26 +0,0 @@
# Generate-address workflow (Cosmos): derive a wallet for the given chain
# and report its public key.
name: generate-address
description: |-
  Generate an address on a given Cosmos-based blockchain.
inputs:
  - name: chain_name
    description: >-
      The name of the Cosmos chain you'd like to generate an address for.
  - name: account
    description: >-
      The account to use, if not the default account.
    optional: true
step:
  # Resolve chain configuration from the chain name.
  - type: cosmos-get-chain-info
    inputs:
      chain_name: chain_name
    outputs:
      blockchain_config: blockchain_config
  # Derive the wallet for the (optional) account index.
  - type: cosmos-generate-wallet
    inputs:
      account: account
      blockchain_config: blockchain_config
  # Emit the derived wallet's public key.
  - type: cosmos-get-wallet-address
    inputs:
      blockchain_config: blockchain_config
    outputs:
      pubkey: pubkey

View File

@ -1,61 +0,0 @@
# Stake workflow (Cosmos): build and sign a delegation transaction.
# Account/sequence numbers are read from "account_info.json" (captured by
# the broadcast workflow) and the signed transaction is written to
# "transaction.json" for later broadcast.
name: stake
description: |-
  Stake coins on the provided chain.
inputs:
  - name: delegate_address
    description: >-
      Address holding the coins to be staked to a validator.
  - name: validator_address
    description: >-
      Address of the validator operator.
  - name: chain_name
    description: >-
      The name of the Cosmos-based chain.
  - name: asset_name
    description: >-
      The name of the asset to stake.
  - name: asset_amount
    description: >-
      The amount of the asset to stake.
  - name: gas_factor
    description: >-
      An amount to multiply the required gas by; necessary if a chain requires
      more gas for a specific operation.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:
      chain_name: chain_name
    outputs:
      blockchain_config: blockchain_config
  # Load account/sequence numbers captured by an earlier (online) run.
  - type: internal-load-file
    values:
      filename: "account_info.json"
    outputs:
      account_number: account_number
      sequence_number: sequence_number
  # Build the delegation messages and fee.
  - type: cosmos-stake
    inputs:
      delegate_address: delegate_address
      validator_address: validator_address
      amount: asset_amount
      denom: asset_name
      blockchain_config: blockchain_config
      gas_factor: gas_factor
    outputs:
      fee: fee
      tx_messages: tx_messages
  # Sign the fee and messages, producing the final transaction.
  - type: cosmos-sign
    inputs:
      fee: fee
      tx_messages: tx_messages
      account_number: account_number
      sequence_number: sequence_number
      blockchain_config: blockchain_config
    outputs:
      transaction: signed_transaction
  # Write the signed transaction for the broadcast workflow.
  - type: internal-save-file
    values:
      filename: "transaction.json"
    inputs:
      transaction: signed_transaction

View File

@ -1,60 +0,0 @@
name: "transfer"
description: |-
Transfer a Cosmos coin.
inputs:
- name: "from_address"
description: >-
The address from which to send coin.
- name: "to_address"
description: >-
The address to send coins to.
- name: "asset_name"
description: >-
The name of the asset to send.
- name: "chain_name"
description: >-
The name of the Cosmos chain the asset lives on.
- name: "asset_amount"
description: >-
The amount of the asset to send.
- name: gas_factor
description: >-
An amount to multiply the required gas by; necessary if a chain requires
more gas for a specific operation.
optional: true
step:
- type: "cosmos-get-chain-info"
inputs:
chain_name: "chain_name"
outputs:
blockchain_config: "blockchain_config"
- type: "internal-load-file"
values:
filename: "account_info.json"
outputs:
account_number: "account_number"
sequence_number: "sequence_number"
- type: "cosmos-transfer"
inputs:
from_address: "from_address"
to_address: "to_address"
amount: "asset_amount"
denom: "asset_name"
blockchain_config: "blockchain_config"
outputs:
fee: "fee"
tx_messages: "tx_messages"
- type: "cosmos-sign"
inputs:
fee: "fee"
tx_messages: "tx_messages"
account_number: "account_number"
sequence_number: "sequence_number"
blockchain_config: "blockchain_config"
outputs:
transaction: "signed_transaction"
- type: "internal-save-file"
values:
filename: "transaction.json"
inputs:
transaction: "signed_transaction"

View File

@ -1,53 +0,0 @@
# Withdraw-rewards workflow (Cosmos): build and sign a transaction that
# claims staking rewards. Account data comes from "account_info.json"; the
# signed transaction is written to "transaction.json" for broadcast.
name: withdraw-rewards
description: |-
  Withdraw rewards gained from staking to a validator.
inputs:
  - name: delegate_address
    description: >-
      The owner of the staked coins; also, the recipient of rewards.
  - name: validator_address
    description: >-
      The validator from whom coins are staked.
  - name: chain_name
    description: >-
      The name of the Cosmos-based chain.
  - name: gas_factor
    description: >-
      An amount to multiply the required gas by; necessary if a chain requires
      more gas for a specific operation.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:
      chain_name: chain_name
    outputs:
      blockchain_config: blockchain_config
  # Load account/sequence numbers captured by an earlier (online) run.
  - type: internal-load-file
    values:
      filename: "account_info.json"
    outputs:
      account_number: account_number
      sequence_number: sequence_number
  # Build the reward-withdrawal messages and fee.
  - type: cosmos-withdraw-rewards
    inputs:
      delegate_address: delegate_address
      validator_address: validator_address
      blockchain_config: blockchain_config
      gas_factor: gas_factor
    outputs:
      fee: fee
      tx_messages: tx_messages
  # Sign the fee and messages, producing the final transaction.
  - type: cosmos-sign
    inputs:
      fee: fee
      tx_messages: tx_messages
      account_number: account_number
      sequence_number: sequence_number
      blockchain_config: blockchain_config
    outputs:
      transaction: signed_transaction
  # Write the signed transaction for the broadcast workflow.
  - type: internal-save-file
    values:
      filename: "transaction.json"
    inputs:
      transaction: signed_transaction

View File

@ -1,64 +0,0 @@
# Withdraw workflow (Cosmos): build and sign a transaction that undelegates
# staked coins from a validator. Account data comes from
# "account_info.json"; the signed transaction is written to
# "transaction.json" for broadcast.
name: withdraw
description: |-
  Withdraw staked coins from a validator.
  Staked coins may be held for an unbonding period, depending on the chain upon
  which they are staked.
inputs:
  - name: delegate_address
    description: >-
      The owner of the staked coins.
  - name: validator_address
    description: >-
      The validator from whom coins are staked.
  - name: chain_name
    description: >-
      The name of the Cosmos-based chain.
  - name: asset_name
    description: >-
      The name of the asset to withdraw.
  - name: asset_amount
    description: >-
      The amount of the asset to withdraw.
  - name: gas_factor
    description: >-
      An amount to multiply the required gas by; necessary if a chain requires
      more gas for a specific operation.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:
      chain_name: chain_name
    outputs:
      blockchain_config: blockchain_config
  # Load account/sequence numbers captured by an earlier (online) run.
  - type: internal-load-file
    values:
      filename: "account_info.json"
    outputs:
      account_number: account_number
      sequence_number: sequence_number
  # Build the undelegation messages and fee.
  - type: cosmos-withdraw
    inputs:
      delegate_address: delegate_address
      validator_address: validator_address
      amount: asset_amount
      denom: asset_name
      blockchain_config: blockchain_config
      gas_factor: gas_factor
    outputs:
      fee: fee
      tx_messages: tx_messages
  # Sign the fee and messages, producing the final transaction.
  - type: cosmos-sign
    inputs:
      fee: fee
      tx_messages: tx_messages
      account_number: account_number
      sequence_number: sequence_number
      blockchain_config: blockchain_config
    outputs:
      transaction: signed_transaction
  # Write the signed transaction for the broadcast workflow.
  - type: internal-save-file
    values:
      filename: "transaction.json"
    inputs:
      transaction: signed_transaction

View File

@ -1,40 +0,0 @@
name: "broadcast"
description: |-
Broadcast a transaction on the Solana blockchain.
inputs:
- name: "nonce_address"
description: >-
The address of the nonce account.
- name: "cluster"
description: >-
The name of the Solana cluster to broadcast the transaction on, if not
mainnet-beta.
optional: true
step:
- type: "sol-get-nonce-account-data"
inputs:
nonce_address: "nonce_address"
cluster: "cluster"
outputs:
authority: "nonce_authority"
durable_nonce: "nonce"
- type: "internal-save-file"
values:
filename: "nonce.json"
inputs:
nonce_authority: "nonce_authority"
nonce_data: "nonce"
nonce_address: "nonce_address"
- type: "internal-load-file"
values:
filename: "transaction.json"
outputs:
transaction: "transaction"
- type: "sol-broadcast"
inputs:
cluster: "cluster"
transaction: "transaction"
outputs:
status: "status"
url: "url"
error: "error"

View File

@ -1,15 +0,0 @@
# Generate-address workflow (Solana): derive a wallet and report its
# public key.
name: generate-address
description: |-
  Generate a Solana address.
inputs:
  - name: account
    description: >-
      The account to use, if not the default account.
    optional: true
step:
  # Derive the wallet for the (optional) account index.
  - type: sol-generate-wallet
    inputs:
      account: account
  # Emit the derived wallet's public key.
  - type: sol-get-wallet-address
    outputs:
      pubkey: pubkey

View File

@ -1,75 +0,0 @@
name: "generate-nonce-account"
description: |-
Using a temporary Keyfork instance, generate a nonce address for the given
authorization address.
inputs:
- name: "cluster"
description: >-
Name of the Solana cluster to generate the nonce account on, if not
mainnet-beta.
- name: "authorization_address"
description: >-
The address used to authorize advancing the nonce.
The authorization address (also called "address" or "pubkey" in other
workflows) is required to be a signer of the transaction, so the
authorization address is often the principal address - the one performing
the transaction.
aliases:
- address
- primary_address
- principal_address
- pubkey
step:
- type: "sol-get-blockhash"
inputs:
cluster: "cluster"
outputs:
blockhash: "blockhash"
- type: "sol-create-nonce-account-and-signing-key"
inputs:
authorization_address: "authorization_address"
outputs:
transaction: "instructions"
nonce_pubkey: "nonce_pubkey"
payer_pubkey: "payer_pubkey"
privkeys: "private_keys"
- type: "sol-await-funds"
inputs:
address: "payer_pubkey"
cluster: "cluster"
values:
lamports: "1510000"
- type: "sol-compile"
inputs:
instructions: "instructions"
derivation_accounts: "derivation_accounts"
blockhash: "blockhash"
outputs:
instructions: "nonced_instructions"
- type: "sol-sign"
inputs:
blockhash: "blockhash"
signing_keys: "private_keys"
instructions: "nonced_instructions"
outputs:
transaction: "signed_transaction"
- type: "sol-broadcast"
inputs:
cluster: "cluster"
transaction: "signed_transaction"
outputs:
status: "status"
url: "url"
error: "error"
- type: "internal-cat"
inputs:
status: "status"
url: "url"
nonce_address: "nonce_pubkey"
error: "error"
outputs:
status: "status"
url: "url"
nonce_address: "nonce_address"
error: "error"

View File

@ -1,60 +0,0 @@
# Transfer-token workflow (Solana): build, compile, and sign an SPL token
# transfer using a durable nonce. The signed transaction is written to
# "transaction.json" for the broadcast workflow.
name: transfer-token
description: |-
  Transfer SPL tokens held on the Solana blockchain.
inputs:
  - name: from_address
    description: >-
      The address from which to send tokens.
  - name: to_address
    description: >-
      The address to send coins to.
  - name: token_name
    description: >-
      The name of the token to transfer.
  - name: token_amount
    description: >-
      The amount of the token to transfer.
step:
  # Resolve the token's mint address and decimal precision by name.
  - type: sol-get-token-info
    inputs:
      token: token_name
    outputs:
      token_address: token_address
      token_decimals: token_decimals
  # Load durable-nonce data captured by the broadcast workflow.
  - type: internal-load-file
    values:
      filename: "nonce.json"
    outputs:
      nonce_authority: nonce_authority
      nonce_data: nonce_data
      nonce_address: nonce_address
  # Build the SPL transfer instructions.
  - type: sol-transfer-token
    inputs:
      amount: token_amount
      token_address: token_address
      decimals: token_decimals
      to_address: to_address
      from_address: from_address
    outputs:
      instructions: instructions
      derivation_accounts: derivation_accounts
  # Compile the message with the durable-nonce advance prepended.
  - type: sol-compile
    inputs:
      instructions: instructions
      derivation_accounts: derivation_accounts
      nonce_address: nonce_address
      nonce_authority: nonce_authority
      nonce_data: nonce_data
    outputs:
      instructions: nonced_instructions
  # Sign using the durable nonce as the recent blockhash.
  # Fix: store the result under "signed_transaction" so the save step below
  # can find it. Previously the output was stored as "transaction" while the
  # save step read "signed_transaction", which was never defined — workflow
  # simulation fails on the missing value. This also matches the naming
  # convention used by the other signing workflows (e.g. solana "transfer").
  - type: sol-sign
    inputs:
      instructions: nonced_instructions
      blockhash: nonce_data
    outputs:
      transaction: signed_transaction
  # Write the signed transaction for the broadcast workflow.
  - type: internal-save-file
    values:
      filename: "transaction.json"
    inputs:
      transaction: signed_transaction

View File

@ -1,49 +0,0 @@
name: "transfer"
description: |-
Transfer SOL from one address to another.
inputs:
- name: "to_address"
description: >-
The address to send SOL to.
- name: "from_address"
description: >-
The address to send SOL from.
- name: "amount"
description: >-
The amount of SOL to send.
step:
- type: "internal-load-file"
values:
filename: "nonce.json"
outputs:
nonce_authority: "nonce_authority"
nonce_data: "nonce_data"
nonce_address: "nonce_address"
- type: "sol-transfer"
inputs:
from_address: "from_address"
to_address: "to_address"
amount: "amount"
outputs:
instructions: "instructions"
derivation_accounts: "derivation_accounts"
- type: "sol-compile"
inputs:
instructions: "instructions"
derivation_accounts: "derivation_accounts"
nonce_address: "nonce_address"
nonce_authority: "nonce_authority"
nonce_data: "nonce_data"
outputs:
instructions: "nonced_instructions"
- type: "sol-sign"
inputs:
blockhash: "nonce_data"
instructions: "nonced_instructions"
outputs:
transaction: "signed_transaction"
- type: "internal-save-file"
values:
filename: "transaction.json"
inputs:
transaction: "signed_transaction"

View File

@ -1,25 +0,0 @@
name: generate-address
description: |-
  Generate a Spacemesh address
inputs:
  - name: account
    description: >-
      The account to use, if not the default account.
    optional: true
  - name: cluster
    description: >-
      The Spacemesh cluster to use, if not the mainnet.
    optional: true
step:
  # Derive wallet key material for the requested account.
  - type: spacemesh-generate-wallet
    inputs:
      account: account
  # Read the public key of the derived key.
  - type: ed25519-get-pubkey
    outputs:
      pubkey: pubkey
  # Turn the public key into a wallet address for the target cluster.
  - type: spacemesh-get-wallet-address
    inputs:
      pubkey: pubkey
      cluster: cluster
    outputs:
      address: address

View File

@ -1,21 +0,0 @@
[package]
name = "miniquorum"
version = "0.1.0"
edition = "2021"
[features]
default = ["clap"]
[dependencies]
bincode = "1.3.3"
card-backend-pcsc = "0.5.0"
chrono = { version = "0.4.39", default-features = false, features = ["std", "now", "serde"] }
clap = { version = "4.5.27", features = ["derive", "wrap_help"], optional = true }
keyfork-prompt = { version = "0.2.0", registry = "distrust", default-features = false }
openpgp-card = "0.4"
openpgp-card-sequoia = "0.2.2"
sequoia-openpgp = "1.22.0"
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
sha3 = "0.10.8"
thiserror = "2.0.11"

View File

@ -1,599 +0,0 @@
use chrono::prelude::*;
use keyfork_prompt::{
default_handler, prompt_validated_passphrase,
validators::{PinValidator, Validator},
};
use openpgp_card::{Error as CardError, StatusBytes};
use openpgp_card_sequoia::{state::Open, Card};
use sequoia_openpgp::{
self as openpgp,
armor::{Kind, Writer},
crypto::hash::Digest,
packet::{signature::SignatureBuilder, Packet},
parse::Parse,
serialize::Serialize as _,
types::{HashAlgorithm, SignatureType},
Cert, Fingerprint,
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::{collections::BTreeMap, fs::File, io::Read, path::Path};
#[derive(thiserror::Error, Debug)]
/// An error with a [`PayloadVerification`] policy.
#[error("{error} (policy: {policy:?})")]
pub struct Error {
    /// The underlying failure encountered while verifying.
    error: BaseError,
    /// The verification policy that was in effect when the failure occurred.
    policy: PayloadVerification,
}
#[non_exhaustive]
#[derive(thiserror::Error, Debug)]
/// The individual failure modes of payload signing and verification.
pub enum BaseError {
    /// In the given certificate keyring, the provided fingerprint was not found.
    #[error("fingerprint not found: {0}")]
    FingerprintNotFound(Fingerprint),
    /// No smartcard was found.
    #[error("no smartcard found")]
    NoSmartcard,
    /// None of the certificates in the given certificate keyring matched any plugged-in smartcard.
    #[error("no certs found matching any available smartcard")]
    NoCertMatchedSmartcard,
    /// The certificate was not trusted by the root of trust.
    #[error("untrusted certificate: {0} has not signed {1:?}")]
    UntrustedCertificates(Fingerprint, Vec<Fingerprint>),
    /// No certificate in the given certificate keyring matched the signature.
    #[error("no public key matched signature")]
    NoPublicKeyMatchedSignature,
    /// Not enough signatures matched based on the given threshold.
    #[error("not enough signatures: {0} < {1}")]
    NotEnoughSignatures(u8, u8),
    /// A Payload was provided when an inner [`serde_json::Value`] was expected.
    #[error("a payload was provided when a non-payload JSON value was expected")]
    UnexpectedPayloadProvided,
    /// The JSON object is not a valid value.
    #[error("the JSON object is not a valid value")]
    InvalidJSONValue,
    /// No signing key was found on smartcard.
    #[error("no signing key was found on smartcard")]
    NoSigningKey,
    /// A signature exists for the current smartcard.
    #[error("a signature exists for the key on the current smartcard: {0}")]
    ConflictingSignature(openpgp::Fingerprint),
    /// A bad packet type was encountered.
    #[error("a bad OpenPGP packet was encountered: {0}")]
    BadOpenPGPPacket(openpgp::packet::Tag),
    /// A signature could not have been added; a smartcard might not have been plugged in.
    #[error("a signature could not be added")]
    NoSignatureAdded,
    /// The signature matched a key that was already used to verify another signature.
    #[error("signature {1} matched key {0} previously used to sign signature {2}")]
    DuplicateSignature(openpgp::Fingerprint, usize, usize),
}
impl BaseError {
fn with_policy(self, policy: &PayloadVerification) -> Error {
Error {
error: self,
policy: policy.clone(),
}
}
}
/// Recursively rewrite a JSON value into a canonical, order-stable form.
///
/// Objects become arrays of `[key, value]` pairs sorted by key (via `BTreeMap`
/// ordering), arrays are canonicalized element-wise, and scalars pass through
/// untouched. This makes hashing independent of original key order.
fn canonicalize(value: Value) -> Value {
    match value {
        Value::Array(items) => Value::Array(items.into_iter().map(canonicalize).collect()),
        Value::Object(fields) => {
            // BTreeMap iteration is sorted by key, which fixes the ordering.
            let ordered: BTreeMap<String, Value> = fields
                .into_iter()
                .map(|(key, inner)| (key, canonicalize(inner)))
                .collect();
            let pairs = ordered
                .into_iter()
                .map(|(key, inner)| Value::Array(vec![Value::String(key), inner]))
                .collect();
            Value::Array(pairs)
        }
        scalar => scalar,
    }
}
/// Produce the canonical byte serialization of a payload object, with any existing
/// `signatures` field stripped so signatures never cover each other.
fn unhashed(value: Value) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    match value {
        Value::Object(mut map) => {
            map.remove("signatures");
            let canonical = canonicalize(Value::Object(map));
            Ok(bincode::serialize(&canonical)?)
        }
        _ => Err(BaseError::InvalidJSONValue.into()),
    }
}
/// Hash the canonical serialization of a payload with SHA-512, returning the digest
/// context (ready for signature verification).
fn hash(value: Value) -> Result<Box<dyn Digest>, Box<dyn std::error::Error>> {
    let mut context = HashAlgorithm::SHA512.context()?;
    context.update(&unhashed(value)?);
    Ok(context)
}
#[derive(Serialize, Deserialize, Debug)]
/// A signable bundle of workflow input values plus any signatures collected so far.
pub struct Payload {
    /// The `[module, workflow]` pair identifying what the values are for.
    workflow: [String; 2],
    /// The workflow input values covered by the signatures.
    values: Value,
    /// Creation time, in UTC.
    datetime: DateTime<Utc>,
    /// ASCII-armored OpenPGP signature packets over the canonicalized payload.
    #[serde(default)]
    signatures: Vec<String>,
}
#[derive(Clone, Debug)]
/// Policy knobs controlling how [`Payload::verify_signatures`] treats signatures.
pub struct PayloadVerification {
    /// Minimum number of matching signatures required.
    threshold: u8,
    /// Fail on any invalid signature, even if the threshold is otherwise met.
    error_on_invalid: bool,
    /// Fail when a signature matches none of the known certificates.
    error_on_missing_key: bool,
    /// Require one signature per known certificate, overriding `threshold`.
    one_each: bool,
}
impl std::default::Default for PayloadVerification {
    /// The strictest default: one signature required per known certificate
    /// (`one_each` overrides the zero `threshold`), erroring on both invalid
    /// signatures and signatures from unknown keys.
    fn default() -> Self {
        Self {
            threshold: 0,
            error_on_invalid: true,
            error_on_missing_key: true,
            one_each: true,
        }
    }
}
#[allow(dead_code)]
impl PayloadVerification {
    /// Construct the default policy (one valid signature per known certificate).
    pub fn new() -> Self {
        Default::default()
    }
    /// Require a signature per key, regardless of any given threshold.
    pub fn with_one_per_key(self, one_each: bool) -> Self {
        Self { one_each, ..self }
    }
    /// Set a threshold for required signatures.
    pub fn with_threshold(self, threshold: u8) -> Self {
        Self {
            one_each: false,
            threshold,
            ..self
        }
    }
    /// Require a single valid signature; other signatures may be invalid.
    pub fn with_any_valid(self) -> Self {
        Self {
            threshold: 1,
            error_on_invalid: false,
            ..self
        }
    }
    /// Require a threshold of signatures to be valid, allowing no invalid signatures.
    pub fn with_all_valid(self, threshold: u8) -> Self {
        Self {
            threshold,
            error_on_invalid: true,
            ..self
        }
    }
    /// Ignore invalid signatures. A threshold of valid signatures is still required.
    ///
    /// The default behavior is to error when encountering an invalid signature, even if a quorum
    /// is reached.
    pub fn ignoring_invalid_signatures(self) -> Self {
        Self {
            error_on_invalid: false,
            ..self
        }
    }
    /// Ignore signatures signed by unknown keys.
    ///
    /// The default behavior is to error when encountering an unknown signature.
    pub fn ignoring_missing_keys(self) -> Self {
        Self {
            // BUG FIX: this previously set `error_on_missing_key: true`, which is the
            // default — making the method a no-op and contradicting its documentation.
            // Ignoring missing keys means the error must be disabled.
            error_on_missing_key: false,
            ..self
        }
    }
}
/// Format a name from an OpenPGP card.
///
/// Card names are stored `Surname<<Given<names>`; `<` stands in for a space and
/// `<<` separates surname from given names. The result is `"given names surname"`.
fn format_name(input: impl AsRef<str>) -> String {
    let segments: Vec<String> = input
        .as_ref()
        .split("<<")
        .take(2)
        .map(|segment| segment.replace('<', " "))
        .collect();
    // Surname is stored first on the card; present it last.
    segments
        .into_iter()
        .rev()
        .collect::<Vec<_>>()
        .join(" ")
}
impl Payload {
    /// Create a new Payload, using the current system's time, in UTC.
    pub fn new(
        values: serde_json::Value,
        module_name: impl AsRef<str>,
        workflow_name: impl AsRef<str>,
    ) -> Self {
        Self {
            workflow: [
                module_name.as_ref().to_string(),
                workflow_name.as_ref().to_string(),
            ],
            values,
            datetime: Utc::now(),
            signatures: vec![],
        }
    }
    /// Load a Payload and the relevant certificates.
    ///
    /// # Errors
    ///
    /// The constructor may error if either file can't be read or if either file has invalid data.
    pub fn load(
        payload_path: impl AsRef<Path>,
        keyring_path: impl AsRef<Path>,
    ) -> Result<(Self, Vec<Cert>), Box<dyn std::error::Error>> {
        let payload_file = File::open(payload_path)?;
        let cert_file = File::open(keyring_path)?;
        Self::from_readers(payload_file, cert_file)
    }
    /// Parse a Payload (JSON) and a certificate keyring from arbitrary readers.
    ///
    /// # Errors
    ///
    /// The method may error if either reader yields invalid data.
    pub fn from_readers(
        payload: impl Read,
        keyring: impl Read + Send + Sync,
    ) -> Result<(Self, Vec<Cert>), Box<dyn std::error::Error>> {
        let payload: Payload = serde_json::from_reader(payload)?;
        let certs =
            openpgp::cert::CertParser::from_reader(keyring)?.collect::<Result<Vec<_>, _>>()?;
        Ok((payload, certs))
    }
    /// The number of signatures currently attached to this payload.
    pub fn signature_count(&self) -> usize {
        self.signatures.len()
    }
    /// Attach a signature from an OpenPGP card.
    ///
    /// # Errors
    ///
    /// The method may error if a signature could not be created.
    pub fn add_signature(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        let signatures = self
            .signatures
            .iter()
            .map(|signature_text| Packet::from_bytes(signature_text.as_bytes()).map_err(Into::into))
            .collect::<Result<Vec<_>, Box<dyn std::error::Error>>>()?;
        // Sign the canonical serialization, which excludes the signatures themselves.
        let unhashed = unhashed(serde_json::to_value(&self)?)?;
        let builder =
            SignatureBuilder::new(SignatureType::Binary).set_hash_algo(HashAlgorithm::SHA512);
        let mut prompt_handler = default_handler()?;
        let pin_validator = PinValidator {
            min_length: Some(6),
            ..Default::default()
        };
        let mut has_signed_any = false;
        for backend in card_backend_pcsc::PcscBackend::cards(None)? {
            let mut card = Card::<Open>::new(backend?)?;
            let mut transaction = card.transaction()?;
            let key_fps = transaction.fingerprints()?;
            let signing_key_fp = key_fps.signature().ok_or(BaseError::NoSigningKey)?;
            // Refuse to add a second signature from a card that already signed.
            for packet in &signatures {
                let Packet::Signature(signature) = packet else {
                    return Err(BaseError::BadOpenPGPPacket(packet.tag()).into());
                };
                for issuer_fp in signature.issuer_fingerprints() {
                    if issuer_fp.as_bytes() == signing_key_fp.as_bytes() {
                        return Err(BaseError::ConflictingSignature(issuer_fp.clone()).into());
                    }
                }
            }
            let cardholder_name = format_name(transaction.cardholder_name()?);
            let card_id = transaction.application_identifier()?.ident();
            let mut pin = None;
            // Prompt for the PIN while the card still allows attempts.
            while transaction.pw_status_bytes()?.err_count_pw1() > 0 && pin.is_none() {
                transaction.reload_ard()?;
                let attempts = transaction.pw_status_bytes()?.err_count_pw1();
                let rpea = "Remaining PIN entry attempts";
                let message = if cardholder_name.is_empty() {
                    format!("Unlock card {card_id}\n{rpea}: {attempts}\n\nPIN: ")
                } else {
                    format!(
                        "Unlock card {card_id} ({cardholder_name})\n{rpea}: {attempts}\n\nPIN: "
                    )
                };
                let temp_pin = prompt_validated_passphrase(
                    &mut *prompt_handler,
                    &message,
                    3,
                    pin_validator.to_fn(),
                )?;
                let verification_status = transaction.verify_user_pin(temp_pin.as_str().trim());
                match verification_status {
                    #[allow(clippy::ignored_unit_patterns)]
                    Ok(_) => {
                        pin.replace(temp_pin);
                    }
                    // NOTE: This should not be hit, because of the above validator.
                    Err(CardError::CardStatus(
                        StatusBytes::IncorrectParametersCommandDataField,
                    )) => {
                        prompt_handler.prompt_message(keyfork_prompt::Message::Text(
                            "Invalid PIN length entered.".to_string(),
                        ))?;
                    }
                    Err(_) => {}
                }
            }
            let mut signer_card = transaction.to_signing_card(pin.expect("valid PIN").as_str())?;
            // NOTE: Can't use a PromptHandler to prompt a message as it doesn't provide a way to
            // cancel a prompt when in terminal mode. Just eprintln to stderr.
            //
            // We shouldn't be writing with a PromptHandler, so the terminal should be reset.
            let mut signer =
                signer_card.signer(&|| eprintln!("Touch confirmation needed for signing"))?;
            let signature = builder.clone().sign_message(&mut signer, &unhashed)?;
            let signature = Packet::from(signature);
            let mut armored_signature = vec![];
            let mut writer = Writer::new(&mut armored_signature, Kind::Signature)?;
            signature.serialize(&mut writer)?;
            writer.finalize()?;
            self.signatures.push(String::from_utf8(armored_signature)?);
            has_signed_any = true;
        }
        if has_signed_any {
            Ok(())
        } else {
            Err(BaseError::NoSignatureAdded.into())
        }
    }
    /// Verify the keychain and certificates using either a Key ID or an OpenPGP card.
    ///
    /// # Errors
    ///
    /// The method may error if no certificate could be verified or if any signatures are invalid.
    pub fn verify_signatures(
        &self,
        certs: &[Cert],
        verification_policy: &PayloadVerification,
        fingerprint: Option<Fingerprint>,
    ) -> Result<&serde_json::Value, Box<dyn std::error::Error>> {
        let policy = openpgp::policy::StandardPolicy::new();
        let validated_cert = find_matching_certificate(fingerprint, certs, &policy)?;
        let (certs, invalid_certs) = validate_cross_signed_certs(&validated_cert, certs, &policy)?;
        if !invalid_certs.is_empty() {
            return Err(BaseError::UntrustedCertificates(
                validated_cert.fingerprint(),
                invalid_certs.iter().map(Cert::fingerprint).collect(),
            ))?;
        }
        let hashed = hash(serde_json::to_value(self)?)?;
        let PayloadVerification {
            mut threshold,
            error_on_invalid,
            error_on_missing_key,
            one_each,
        } = *verification_policy;
        let mut matches = 0;
        if one_each {
            threshold = certs.len() as u8;
        }
        let mut seen = std::collections::HashMap::new();
        for (index, signature) in self.signatures.iter().enumerate() {
            // BUG FIX: removed a leftover `dbg!(&index)` that printed debug output on
            // every verification.
            let packet = Packet::from_bytes(signature.as_bytes())?;
            let Packet::Signature(signature) = packet else {
                panic!("bad packet found: {}", packet.tag());
            };
            let mut signature_matched = false;
            // NOTE: It is allowable, by the specification, to have a packet that doesn't include
            // an issuer fingerprint, but instead just a key ID. However, filtering by both key ID
            // and by fingerprint triggers the "duplicate signature" mechanism. For that reason, we
            // are only going to filter over fingerprints.
            //
            // Any program that makes these signatures should be using fingerprints.
            for issuer in signature.issuer_fingerprints() {
                let mut currently_seen = std::collections::HashMap::new();
                for cert in &certs {
                    if let Some(seen_index) = seen.get(&cert.fingerprint()) {
                        return Err(BaseError::DuplicateSignature(
                            cert.fingerprint(),
                            index,
                            *seen_index,
                        )
                        .into());
                    }
                    match cert
                        .with_policy(&policy, None)?
                        .keys()
                        .alive()
                        .for_signing()
                        .key_handle(issuer.clone())
                        .next()
                        .map(|signing_key| signature.verify_hash(&signing_key, hashed.clone()))
                    {
                        Some(Ok(())) => {
                            // key found, signature matched
                            signature_matched = true;
                            // mark the cert as seen, so it isn't reusable
                            currently_seen.insert(cert.fingerprint(), index);
                        }
                        Some(Err(e)) => {
                            if error_on_invalid {
                                return Err(e)?;
                            }
                        }
                        None => {
                            // key not found, but we have more certs to go through
                        }
                    }
                }
                seen.extend(currently_seen);
            }
            if signature_matched {
                matches += 1;
            } else if error_on_missing_key {
                return Err(
                    BaseError::NoPublicKeyMatchedSignature.with_policy(verification_policy)
                )?;
            }
        }
        if matches < threshold {
            return Err(
                BaseError::NotEnoughSignatures(matches, threshold).with_policy(verification_policy)
            )?;
        }
        Ok(&self.values)
    }
    /// Destructure into `(module, workflow, values)`, discarding time and signatures.
    pub fn into_values(self) -> (String, String, serde_json::Value) {
        let Payload {
            workflow, values, ..
        } = self;
        let [module, workflow] = workflow;
        (module, workflow, values)
    }
}
/// Locate the certificate that anchors the web of trust for verification.
///
/// When `fingerprint` is given, it is looked up directly in `certs`. Otherwise each
/// plugged-in OpenPGP smartcard is scanned, and the first certificate whose live
/// signing subkey matches a card's signing-key slot is returned.
fn find_matching_certificate(
    fingerprint: Option<Fingerprint>,
    certs: &[Cert],
    policy: &sequoia_openpgp::policy::StandardPolicy<'_>,
) -> Result<Cert, Box<dyn std::error::Error>> {
    if let Some(fingerprint) = fingerprint {
        Ok(certs
            .iter()
            .find(|cert| cert.fingerprint() == fingerprint)
            .ok_or(BaseError::FingerprintNotFound(fingerprint))?
            .clone())
    } else {
        let mut any_smartcard = false;
        for backend in card_backend_pcsc::PcscBackend::cards(None)? {
            any_smartcard = true;
            let mut card = Card::<Open>::new(backend?)?;
            let mut transaction = card.transaction()?;
            let signing_fingerprint = transaction
                .fingerprint(openpgp_card::KeyType::Signing)?
                .expect("smartcard signing key is unavailable");
            for cert in certs {
                let valid_cert = cert.with_policy(policy, None)?;
                // NOTE: We must verify that it is for_signing because back signatures
                // mean that the signing key verifies the certificate.
                //
                // We don't want a certificate to be able to adopt, for example, an encryption key
                // because that means there is no back signature and the encryption key can be
                // adopted onto a malicious certificate.
                for key in valid_cert.keys().alive().for_signing() {
                    let fpr = key.fingerprint();
                    if fpr.as_bytes() == signing_fingerprint.as_bytes() {
                        return Ok(cert.clone());
                    }
                }
            }
        }
        // Distinguish "no card plugged in" from "cards present but none matched".
        if any_smartcard {
            Err(BaseError::NoCertMatchedSmartcard.into())
        } else {
            Err(BaseError::NoSmartcard.into())
        }
    }
}
/// Validate that `certs` are signed by `validated_cert`, either by a signature directly upon the
/// primary key of that certificate, or a signature on a user ID of the certificate.
///
/// Returns a list of trusted certs and a list of untrusted certs.
fn validate_cross_signed_certs(
    validated_cert: &Cert,
    certs: &[Cert],
    policy: &sequoia_openpgp::policy::StandardPolicy,
) -> Result<(Vec<Cert>, Vec<Cert>), Box<dyn std::error::Error>> {
    let our_pkey = validated_cert.primary_key();
    // The anchor certificate always trusts itself.
    let mut verified_certs = vec![validated_cert.clone()];
    let mut unverified_certs = vec![];
    for cert in certs
        .iter()
        .filter(|cert| cert.fingerprint() != validated_cert.fingerprint())
    {
        let mut has_valid_userid_signature = false;
        let cert_pkey = cert.primary_key();
        // check signatures on User IDs
        let userids = cert
            .userids()
            .map(|ua| (ua.certifications(), ua.userid().clone()));
        for (signatures, userid) in userids {
            for signature in signatures {
                if signature
                    .verify_userid_binding(&our_pkey, &*cert_pkey, &userid)
                    .is_ok()
                {
                    has_valid_userid_signature = true;
                }
            }
        }
        // check signatures on the primary key itself
        let has_valid_direct_signature = cert_pkey
            .active_certifications_by_key(policy, None, &***our_pkey.role_as_unspecified())
            .next()
            .is_some();
        // Either form of certification is sufficient to trust this cert.
        if has_valid_userid_signature || has_valid_direct_signature {
            verified_certs.push(cert.clone());
        } else {
            unverified_certs.push(cert.clone());
        }
    }
    Ok((verified_certs, unverified_certs))
}

View File

@ -1,117 +0,0 @@
use clap::Parser;
use miniquorum::{Payload, PayloadVerification};
use sequoia_openpgp::Fingerprint;
use std::{fs::File, path::PathBuf};
#[derive(clap::Parser)]
/// An Icepick-specific subset of the Quorum decision-making system.
// Each variant becomes a CLI subcommand via the clap derive.
enum MiniQuorum {
    /// Verify signatures on an Icepick Payload file.
    VerifySignatures {
        /// The file containing OpenPGP Certificates used for verifying signatures.
        keyring_file: PathBuf,
        /// The file provided as input.
        ///
        /// If no file is passed, standard input is used.
        input_file: Option<PathBuf>,
        /// An OpenPGP Fingerprint to use in place of on-smartcard certificate detection.
        ///
        /// This functionality is only recommended if verifying a payload without the physical
        /// presence of any signer, and builds a web of trust from the signer fingerprint provided.
        #[arg(long)]
        fingerprint: Option<Fingerprint>,
        /// The file to write the resulting payload to, if verification is successful.
        #[arg(long)]
        output_file: Option<PathBuf>,
    },
    /// Add a signature to an Icepick Payload file.
    AddSignature {
        /// The file to use as input.
        ///
        /// If no file is provided, standard input is used. If a file is provided and no output
        /// file is provided, it will be used in-place as the output file with the additional
        /// signature added.
        input_file: Option<PathBuf>,
        /// The file to use as output.
        ///
        /// If no file is provided, but an input file is provided, the input file is used. If no
        /// input file is provided, standard output is used.
        #[arg(long)]
        output_file: Option<PathBuf>,
    },
}
/// CLI entry point: dispatch to signature verification or signature addition.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    match MiniQuorum::parse() {
        MiniQuorum::VerifySignatures {
            keyring_file,
            input_file,
            fingerprint,
            output_file,
        } => {
            // Refuse to overwrite the signed input with the verified output.
            //
            // BUG FIX: the previous unconditional `assert_ne!(input_file, output_file)`
            // also fired when BOTH were `None` (stdin -> stdout), where no overwrite is
            // possible. Only compare when both paths were actually given.
            if let (Some(input), Some(output)) = (&input_file, &output_file) {
                assert_ne!(
                    input, output,
                    "output is verified data; not overwriting signed input data"
                );
            }
            let (payload, certs) = match input_file {
                Some(input_file) => Payload::load(&input_file, &keyring_file)?,
                None => {
                    let stdin = std::io::stdin();
                    let keyring_file = File::open(&keyring_file)?;
                    Payload::from_readers(stdin, keyring_file)?
                }
            };
            // Require a signature from every certificate in the keyring.
            let policy = PayloadVerification::new().with_threshold(certs.len().try_into()?);
            payload.verify_signatures(&certs, &policy, fingerprint)?;
            if let Some(output_file) = output_file {
                let file = File::create(output_file)?;
                serde_json::to_writer_pretty(file, &payload)?;
            } else {
                let stdout = std::io::stdout();
                serde_json::to_writer_pretty(stdout, &payload)?;
            }
        }
        MiniQuorum::AddSignature {
            input_file,
            output_file,
        } => {
            let mut payload: Payload = match &input_file {
                Some(input_file) => {
                    let input_file = File::open(input_file)?;
                    serde_json::from_reader(input_file)?
                }
                None => {
                    let stdin = std::io::stdin();
                    serde_json::from_reader(stdin)?
                }
            };
            payload.add_signature()?;
            if let Some(output_file) = output_file {
                // write to output
                let file = File::create(output_file)?;
                serde_json::to_writer_pretty(file, &payload)?;
            } else if let Some(input_file) = input_file {
                // write to tempfile, move to input_file
                let output_file = input_file.with_extension("tmp");
                let mut file = File::create_new(&output_file)?;
                serde_json::to_writer_pretty(&mut file, &payload)?;
                drop(file);
                std::fs::copy(&output_file, input_file)?;
                std::fs::remove_file(output_file)?;
            } else {
                // write to standard output?
                println!("{}", serde_json::to_string_pretty(&payload)?);
            }
        }
    }
    Ok(())
}

View File

@ -1,26 +0,0 @@
[package]
name = "spacemesh-api-client"
version = "0.1.0"
edition = "2021"
publish = ["distrust"]
[dependencies]
futures = "0.3"
progenitor-client = { git = "https://github.com/geoffreygarrett/progenitor", rev = "8726ea91eb19f92e1357f1ceeeab507477dcfeb6" }
reqwest = { version = "0.11", features = ["json", "stream"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
chrono = { version = "0.4", features = ["serde"] }
base64 = "0.22.1"
smex = { version = "0.1.0", registry = "distrust" }
[build-dependencies]
prettyplease = "0.2.22"
progenitor = { git = "https://github.com/geoffreygarrett/progenitor", rev = "8726ea91eb19f92e1357f1ceeeab507477dcfeb6" }
serde_json = "1.0"
syn = "2.0"
[dev-dependencies]
base64 = "0.22.1"
smex = { version = "0.1.0", registry = "distrust" }
tokio = { version = "1.43.0", features = ["macros", "net", "rt", "test-util"] }

View File

@ -1,22 +0,0 @@
/*
curl -X 'GET' \
'https://converter.swagger.io/api/convert?url=https%3A%2F%2Fmainnet-api-docs.spacemesh.network%2Fv1.7.12%2Fapi.swagger.json' \
-H 'accept: application/json'
*/
/// Build script: generate the Spacemesh API client from the checked-in OpenAPI spec
/// and write the resulting Rust source to `$OUT_DIR/codegen.rs`.
fn main() {
    let spec_path = "openapi.json";
    println!("cargo:rerun-if-changed={spec_path}");
    let reader = std::fs::File::open(spec_path).unwrap();
    let spec = serde_json::from_reader(reader).unwrap();
    let mut generator = progenitor::Generator::default();
    let tokens = generator.generate_tokens(&spec).unwrap();
    let ast = syn::parse2(tokens).unwrap();
    let formatted = prettyplease::unparse(&ast);
    let dest = std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap()).join("codegen.rs");
    std::fs::write(dest, formatted).unwrap();
}

File diff suppressed because one or more lines are too long

View File

@ -1,42 +0,0 @@
#![allow(warnings, unused)]
include!(concat!(env!("OUT_DIR"), "/codegen.rs"));
// NOTE: The RPC API requires base64-encoded transaction IDs rather than hex-encoded.
// That was confusing, after all their branding is `0x` based.
/// Re-encode a hex transaction ID as base64, which the RPC API expects.
pub fn encode_transaction_id(txid: impl AsRef<str>) -> Result<String, smex::DecodeError> {
    use base64::prelude::*;
    smex::decode(txid).map(|raw| BASE64_STANDARD.encode(raw))
}
#[cfg(test)]
mod tests {
    use super::*;
    use base64::prelude::*;
    // NOTE(review): live-network test — it queries the Spacemesh mainnet API and
    // requires connectivity; it will fail offline.
    #[tokio::test]
    async fn it_works() {
        let client = Client::new("https://mainnet-api.spacemesh.network");
        // A known mainnet transaction ID, hex-encoded.
        let txid = "638442a2033f20b5a7280b9a4f2bfc73022f6e7ec64b1497b85335444381d99d";
        let txid = smex::decode(txid).unwrap();
        // The RPC API takes base64-encoded transaction IDs.
        let txid = BASE64_STANDARD.encode(txid);
        let result = client
            .transaction_service_list(&types::Spacemeshv2alpha1TransactionRequest {
                txid: vec![txid],
                limit: Some(100.to_string()),
                ..Default::default()
            })
            .await
            .unwrap()
            .into_inner();
        // The endpoint returns either an error status or the transaction list;
        // only the latter is acceptable here.
        let result = match result {
            types::GooglerpcStatusOrSpacemeshv2alpha1TransactionList::GooglerpcStatus(googlerpc_status) => panic!("{:?}", googlerpc_status.message),
            types::GooglerpcStatusOrSpacemeshv2alpha1TransactionList::Spacemeshv2alpha1TransactionList(transaction_list) => {
                transaction_list
            },
        };
    }
}

View File

@ -1,12 +0,0 @@
[package]
name = "spacemesh-codec"
version = "0.1.0"
edition = "2021"
publish = ["distrust"]
[dependencies]
parity-scale-codec = { version = "3.6.12", features = ["derive"] }
[dev-dependencies]
base64 = "0.22.1"
bech32 = "0.11.0"

View File

@ -1,530 +0,0 @@
//! Spacemesh transaction encoding and decoding.
//! Based loosely on: <https://github.com/spacemeshos/sm-codec/>.
//!
//! # Encoding Transactions
//!
//! ```rust,ignore
//! let principal = [0u8; 24];
//! let destination = [1u8; 24];
//!
//! let single_sig_spend = Spend {
//! header: TxHeader {
//! principal,
//! },
//! payload: SpendPayload {
//! nonce: Compact(2),
//! gas_price: Compact(1),
//! arguments: SpendArguments {
//! destination,
//! amount: Compact(100000),
//! },
//! },
//! // unsigned transaction
//! signature: [0; 64],
//! };
//! ```
//!
//! # Decoding Transactions
//!
//! Transactions can be decoded to bytes using the [`base64`][base64] crate. Using the Spacemesh
//! client, the transaction should also include `template` and `method` values. With those values,
//! [`tx_types::decode_by_address_and_method()`] can be used to attempt to parse the transaction.
//!
//! ```rust,ignore
//! use base64::prelude::*;
//!
//! let encoded_tx = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAIBAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAYIaBgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
//! let raw_tx = BASE64_STANDARD.decode(encoded_tx).unwrap();
//! let spend = tx_types::single_signature::Spend::decode(&mut &raw_tx[..]).unwrap();
//! ```
//!
//! [base64]: https://docs.rs/base64/latest/base64/
pub use parity_scale_codec::{Compact, Decode, Encode};
pub mod constants {
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/codecs/constants.ts
    /// The length of an address, in bytes.
    pub const ADDRESS_BYTES_LENGTH: usize = 24;
}
pub mod core {
    use super::*;
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/codecs/core.ts
    // NOTE: Encoding an array doesn't encode length, matching the same functionality
    // as Bytes in scale-ts.
    /// A raw 24-byte account address.
    pub type Address = [u8; constants::ADDRESS_BYTES_LENGTH];
    /// A raw 32-byte public key.
    pub type PublicKey = [u8; 32];
    /// Account nonce, SCALE compact-encoded.
    pub type Nonce = Compact<u64>;
    /// Gas price, SCALE compact-encoded.
    pub type GasPrice = Compact<u64>;
}
pub mod signatures {
use super::*;
// ref: https://github.com/spacemeshos/sm-codec/blob/master/src/codecs/signatures.ts
pub type SingleSig = [u8; 64];
#[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
pub struct MultiSigPart {
pub r#ref: Compact<u8>,
pub sig: SingleSig,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MultiSig {
pub parts: Vec<MultiSigPart>,
}
impl Encode for MultiSig {
fn size_hint(&self) -> usize {
self.parts.len() * std::mem::size_of::<SingleSig>()
}
fn encode(&self) -> Vec<u8> {
// NOTE: No inline length is included.
let mut r = Vec::with_capacity(self.size_hint());
for sig in &self.parts {
sig.encode_to(&mut r);
}
r
}
}
impl Decode for MultiSig {
fn decode<I: parity_scale_codec::Input>(
input: &mut I,
) -> Result<Self, parity_scale_codec::Error> {
let mut parts = vec![];
// NOTE: We can't rely on the length of the input. It may not be available.
// Unfortunately, we also don't have enough context to know if the reason it can't
// decode is because we ran out of input, or because there was a format error.
while let Ok(part) = MultiSigPart::decode(input) {
parts.push(part);
}
Ok(Self { parts })
}
}
}
pub mod tx {
    use super::*;
    /// Implemented by transaction types to expose which template method they invoke.
    pub trait TransactionMethod {
        fn method_selector() -> u8;
    }
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/codecs/tx.ts
    /// Header for a non-spawn transaction. The const parameter `M` is the template
    /// method selector, checked/emitted on the wire but not stored in the struct.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct TxHeader<const M: u8> {
        // should always be 0
        // pub transaction_type: Compact<u8>,
        pub principal: core::Address,
        // covered by const M
        // pub method_selector: Compact<u8>,
    }
    impl<const M: u8> Encode for TxHeader<M> {
        fn encode(&self) -> Vec<u8> {
            let mut r = Vec::with_capacity(self.size_hint());
            // Wire layout: compact transaction type (always 0), principal address,
            // then the compact method selector M.
            let transaction_type = Compact(0u8);
            transaction_type.encode_to(&mut r);
            self.principal.encode_to(&mut r);
            let method_selector = Compact(M);
            method_selector.encode_to(&mut r);
            r
        }
    }
    impl<const M: u8> Decode for TxHeader<M> {
        fn decode<I: parity_scale_codec::Input>(
            input: &mut I,
        ) -> Result<Self, parity_scale_codec::Error> {
            let transaction_type = Compact::<u8>::decode(input)?;
            if transaction_type.0 != 0 {
                return Err("transaction_type != 0".into());
            }
            let principal = core::Address::decode(input)?;
            let method_selector = Compact::<u8>::decode(input)?;
            if method_selector.0 != M {
                return Err("method_selector != M".into());
            }
            Ok(Self {
                principal,
            })
        }
    }
    // NOTE: This is used in place of `withTemplateAddress()`.
    // The original source implementation placed `template_address` as the last field,
    // but I don't think that's correct based on the implementation of `withTemplateAddress()`.
    /// Header for a spawn transaction: like [`TxHeader`] but prefixed with the
    /// address of the template being spawned.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct SpawnTxHeader<const M: u8> {
        pub template_address: core::Address,
        // should always be 0
        // pub transaction_type: Compact<u8>,
        pub principal: core::Address,
        // covered by const M
        // pub method_selector: Compact<u8>,
    }
    impl<const M: u8> Encode for SpawnTxHeader<M> {
        fn encode(&self) -> Vec<u8> {
            let mut r = Vec::with_capacity(self.size_hint());
            // Wire layout: template address, compact transaction type (always 0),
            // principal address, then the compact method selector M.
            self.template_address.encode_to(&mut r);
            let transaction_type = Compact(0u8);
            transaction_type.encode_to(&mut r);
            self.principal.encode_to(&mut r);
            let method_selector = Compact(M);
            method_selector.encode_to(&mut r);
            r
        }
    }
    impl<const M: u8> Decode for SpawnTxHeader<M> {
        fn decode<I: parity_scale_codec::Input>(
            input: &mut I,
        ) -> Result<Self, parity_scale_codec::Error> {
            let template_address = core::Address::decode(input)?;
            let transaction_type = Compact::<u8>::decode(input)?;
            if transaction_type.0 != 0 {
                return Err("transaction_type != 0".into());
            }
            let principal = core::Address::decode(input)?;
            let method_selector = Compact::<u8>::decode(input)?;
            if method_selector.0 != M {
                return Err("method_selector != M".into());
            }
            Ok(Self {
                template_address,
                principal,
            })
        }
    }
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/transaction.ts
    mod sealed {
        use super::signatures;
        // Restricts transaction signature parameters to the supported encodings.
        pub trait Signature {}
        impl Signature for signatures::SingleSig {}
        impl Signature for signatures::MultiSig {}
    }
    /// A spawn transaction: spawn header, method payload, and signature.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpawnTransaction<Payload, Signature: sealed::Signature, const M: u8> {
        pub header: SpawnTxHeader<M>,
        pub payload: Payload,
        pub signature: Signature,
    }
    /// A regular (non-spawn) transaction: header, method payload, and signature.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct Transaction<Payload, Signature: sealed::Signature, const M: u8> {
        pub header: TxHeader<M>,
        pub payload: Payload,
        pub signature: Signature,
    }
}
pub mod tx_types {
use super::*;
pub type DecodeResult<T> = Option<Result<T, parity_scale_codec::Error>>;
pub mod common {
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/std/common.ts
    use super::*;
    /// The payload layout shared by all template methods: nonce, gas price, and
    /// method-specific `arguments`.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct TxPayload<Arguments> {
        pub nonce: core::Nonce,
        pub gas_price: core::GasPrice,
        pub arguments: Arguments,
    }
}
pub mod vault {
    use super::*;
    use common::TxPayload;
    use signatures::SingleSig;
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/std/vault.ts
    /// Well-known address of the vault template.
    pub const VAULT_TEMPLATE_ADDRESS: core::Address = [
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4,
    ];
    /// Arguments for spawning a vault.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpawnArguments {
        pub owner: core::Address,
        pub total_amount: Compact<u64>,
        pub initial_unlock_amount: Compact<u64>,
        pub vesting_start: Compact<u32>,
        pub vesting_end: Compact<u32>,
    }
    /// Arguments for spending from a vault.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpendArguments {
        pub destination: core::Address,
        pub amount: Compact<u64>,
    }
    pub type SpawnPayload = TxPayload<SpawnArguments>;
    pub type SpendPayload = TxPayload<SpendArguments>;
    // Method selectors: 0 = spawn, 16 = spend.
    pub type Spawn = tx::SpawnTransaction<SpawnPayload, SingleSig, 0>;
    pub type Spend = tx::Transaction<SpendPayload, SingleSig, 16>;
    /// A decoded vault transaction, discriminated by method selector.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub enum Method {
        Spawn(Spawn),
        Spend(Spend),
    }
    /// Decode `input` as the vault method selected by `method`; `None` when the
    /// selector is unknown.
    pub fn decode_by_method(method: u8, input: &[u8]) -> DecodeResult<Method> {
        match method {
            0 => Some(Spawn::decode(&mut &*input).map(Method::Spawn)),
            16 => Some(Spend::decode(&mut &*input).map(Method::Spend)),
            _ => None,
        }
    }
}
pub mod vesting {
    use super::*;
    use common::TxPayload;
    use signatures::MultiSig;
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/std/vesting.ts
    /// Well-known address of the vesting template.
    pub const VESTING_TEMPLATE_ADDRESS: core::Address = [
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3,
    ];
    /// Arguments for spawning a vesting account: a k-of-n multisig key set.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpawnArguments {
        pub required: Compact<u8>,
        pub public_keys: Vec<core::PublicKey>,
    }
    /// Arguments for spending from a vesting account.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpendArguments {
        pub destination: core::Address,
        pub amount: Compact<u64>,
    }
    /// Arguments for draining an associated vault into a destination address.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct DrainVaultArguments {
        pub vault: core::Address,
        pub destination: core::Address,
        pub amount: Compact<u64>,
    }
    pub type SpawnPayload = TxPayload<SpawnArguments>;
    pub type SpendPayload = TxPayload<SpendArguments>;
    pub type DrainVaultPayload = TxPayload<DrainVaultArguments>;
    // Method selectors: 0 = spawn, 16 = spend, 17 = drain vault.
    pub type Spawn = tx::SpawnTransaction<SpawnPayload, MultiSig, 0>;
    pub type Spend = tx::Transaction<SpendPayload, MultiSig, 16>;
    pub type DrainVault = tx::Transaction<DrainVaultPayload, MultiSig, 17>;
    /// A decoded vesting transaction, discriminated by method selector.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub enum Method {
        Spawn(Spawn),
        Spend(Spend),
        DrainVault(DrainVault),
    }
    /// Decode `input` as the vesting method selected by `method`; `None` when the
    /// selector is unknown.
    pub fn decode_by_method(method: u8, input: &[u8]) -> DecodeResult<Method> {
        match method {
            0 => Some(Spawn::decode(&mut &*input).map(Method::Spawn)),
            16 => Some(Spend::decode(&mut &*input).map(Method::Spend)),
            17 => Some(DrainVault::decode(&mut &*input).map(Method::DrainVault)),
            _ => None,
        }
    }
}
pub mod single_signature {
    use super::*;
    use common::TxPayload;
    use signatures::SingleSig;
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/std/singlesig.ts
    /// Well-known address of the single-signature wallet template
    /// (last byte = 1).
    pub const SINGLE_SIG_TEMPLATE_ADDRESS: core::Address = [
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
    ];
    /// Arguments for spawning a single-sig wallet: the wallet's
    /// 32-byte public key.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpawnArguments {
        pub public_key: core::PublicKey,
    }
    /// Arguments for a spend: destination account and amount.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpendArguments {
        pub destination: core::Address,
        pub amount: Compact<u64>,
    }
    pub type SpawnPayload = TxPayload<SpawnArguments>;
    pub type SpendPayload = TxPayload<SpendArguments>;
    /// Single-sig spawn transaction (method selector 0).
    pub type Spawn = tx::SpawnTransaction<SpawnPayload, SingleSig, 0>;
    /// Single-sig spend transaction (method selector 16).
    pub type Spend = tx::Transaction<SpendPayload, SingleSig, 16>;
    /// A decoded single-sig transaction, one variant per method.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub enum Method {
        Spawn(Spawn),
        Spend(Spend),
    }
    /// Decode `input` as the single-sig method identified by `method`
    /// (0 = spawn, 16 = spend). Returns `None` for unknown selectors,
    /// `Some(Err(_))` when the bytes fail to SCALE-decode.
    pub fn decode_by_method(method: u8, input: &[u8]) -> DecodeResult<Method> {
        match method {
            0 => Some(Spawn::decode(&mut &*input).map(Method::Spawn)),
            16 => Some(Spend::decode(&mut &*input).map(Method::Spend)),
            _ => None,
        }
    }
}
pub mod multi_signature {
    use super::*;
    use common::TxPayload;
    use signatures::MultiSig;
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/std/singlesig.ts
    // NOTE(review): the reference above looks copy-pasted from the
    // single-sig module; it should presumably point at multisig.ts.
    /// Well-known address of the multi-signature wallet template
    /// (last byte = 2).
    pub const MULTI_SIG_TEMPLATE_ADDRESS: core::Address = [
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2,
    ];
    /// Arguments for spawning a multi-sig wallet: an m-of-n key set,
    /// where `required` is the signature threshold.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpawnArguments {
        pub required: Compact<u8>,
        // NOTE(review): this field holds multiple keys; the equivalent
        // field in `vesting::SpawnArguments` is named `public_keys`.
        // Renaming would not change the SCALE encoding but would break
        // struct-literal users, so it is only flagged here.
        pub public_key: Vec<core::PublicKey>,
    }
    /// Arguments for a spend: destination account and amount.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpendArguments {
        pub destination: core::Address,
        pub amount: Compact<u64>,
    }
    pub type SpawnPayload = TxPayload<SpawnArguments>;
    pub type SpendPayload = TxPayload<SpendArguments>;
    /// Multi-sig spawn transaction (method selector 0).
    pub type Spawn = tx::SpawnTransaction<SpawnPayload, MultiSig, 0>;
    /// Multi-sig spend transaction (method selector 16).
    pub type Spend = tx::Transaction<SpendPayload, MultiSig, 16>;
    /// A decoded multi-sig transaction, one variant per method.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub enum Method {
        Spawn(Spawn),
        Spend(Spend),
    }
    /// Decode `input` as the multi-sig method identified by `method`
    /// (0 = spawn, 16 = spend). Returns `None` for unknown selectors,
    /// `Some(Err(_))` when the bytes fail to SCALE-decode.
    pub fn decode_by_method(method: u8, input: &[u8]) -> DecodeResult<Method> {
        match method {
            0 => Some(Spawn::decode(&mut &*input).map(Method::Spawn)),
            16 => Some(Spend::decode(&mut &*input).map(Method::Spend)),
            _ => None,
        }
    }
}
/// A decoded transaction from any of the four standard templates,
/// tagged by the template it belongs to.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ModuleMethod {
    Vault(vault::Method),
    Vesting(vesting::Method),
    SingleSig(single_signature::Method),
    MultiSig(multi_signature::Method),
}
/// Decode a raw transaction by dispatching on the template `address`
/// and the `method` selector, mirroring each template module's
/// `decode_by_method`.
///
/// Returns `None` when either the template address or the method
/// selector is unrecognized, and `Some(Err(_))` when the bytes fail
/// to SCALE-decode as the selected transaction type.
pub fn decode_by_address_and_method(
    address: core::Address,
    method: u8,
    input: &[u8],
) -> DecodeResult<ModuleMethod> {
    match address {
        vault::VAULT_TEMPLATE_ADDRESS => {
            vault::decode_by_method(method, input).map(|method| method.map(ModuleMethod::Vault))
        }
        vesting::VESTING_TEMPLATE_ADDRESS => {
            vesting::decode_by_method(method, input).map(|method| method.map(ModuleMethod::Vesting))
        }
        single_signature::SINGLE_SIG_TEMPLATE_ADDRESS => {
            single_signature::decode_by_method(method, input)
                .map(|method| method.map(ModuleMethod::SingleSig))
        }
        multi_signature::MULTI_SIG_TEMPLATE_ADDRESS => {
            multi_signature::decode_by_method(method, input)
                .map(|method| method.map(ModuleMethod::MultiSig))
        }
        // An unknown template address is "not decodable", not a
        // programmer error: return None (consistent with the per-method
        // `None` for unknown selectors) instead of panicking via
        // `unimplemented!()`, so callers can handle transactions from
        // templates this crate does not know about.
        _ => None,
    }
}
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Decodes a real mainnet single-sig spend captured from the
    /// explorer: checks the bech32 template address, round-trips the
    /// raw SCALE bytes, and verifies the recipient address.
    #[test]
    fn it_works() {
        use base64::prelude::*;
        use bech32::Bech32;
        // The single-sig template address, bech32-encoded under the
        // "sm" (mainnet) human-readable prefix.
        let (hrp, data) =
            bech32::decode("sm1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqg56ypy7").unwrap();
        assert_eq!(hrp.as_str(), "sm");
        assert_eq!(
            &data,
            &tx_types::single_signature::SINGLE_SIG_TEMPLATE_ADDRESS
        );
        // A captured single-sig spend transaction, base64-encoded.
        let encoded_tx = "AAAAAAAvqmgSN6hBGS16FVNfNDURojTRU0AQBAAAAABJThXbKEnjnty59ht5e/5EkjDK8AeANolPDOAiIHlzj7CIG60FzFRpuR/fLVRQsmzRbApYBryfg4RKcnZgmmWPywafADHyuVjkLNGup0gpvhnXAHICeSXveAs=";
        let raw_tx = BASE64_STANDARD.decode(encoded_tx).unwrap();
        let spend = tx_types::single_signature::Spend::decode(&mut &raw_tx[..]).unwrap();
        // Encoding must be the exact inverse of decoding.
        let equivalence = spend.encode();
        assert_eq!(raw_tx, equivalence);
        let recipient_address =
            bech32::encode::<Bech32>(hrp, &spend.payload.arguments.destination).unwrap();
        assert_eq!(
            recipient_address,
            "sm1qqqqqqzffc2ak2zfuw0dew0krduhhljyjgcv4uqdt6nrd"
        );
    }
    /// Encode→decode round-trip of a locally constructed single-sig
    /// spend with a zeroed principal and signature.
    #[test]
    fn recode() {
        use tx::*;
        use tx_types::single_signature::*;
        let principal = [0u8; 24];
        let single_sig_spend = Spend {
            header: TxHeader {
                principal,
            },
            payload: SpendPayload {
                nonce: Compact(2),
                gas_price: Compact(1),
                arguments: SpendArguments {
                    destination: [1; 24],
                    amount: Compact(100000),
                },
            },
            signature: [0; 64],
        };
        let encoded = single_sig_spend.encode();
        let recoded = Spend::decode(&mut &*encoded).unwrap();
        assert_eq!(single_sig_spend, recoded);
    }
}

View File

@ -1,16 +0,0 @@
[package]
name = "spacemesh"
version = "0.1.0"
edition = "2021"
publish = ["distrust"]
[dependencies]
bech32 = "0.11.0"
spacemesh-api-client = { version = "0.1.0", path = "../api-client" }
spacemesh-codec = { version = "0.1.0", path = "../codec" }
[dev-dependencies]
base64 = "0.22.1"
bech32 = "0.11.0"
smex = { version = "0.1.0", registry = "distrust" }
tokio = { version = "1.43.0", features = ["net", "rt", "macros"] }

View File

@ -1,58 +0,0 @@
pub use spacemesh_api_client as client;
pub use spacemesh_api_client::Client;
pub use spacemesh_codec as codec;
pub use spacemesh_codec::tx_types as transaction;
pub mod wallet;
pub mod bech32 {
    pub use bech32::*;
    /// Encode `input` under `hrp` using the Bech32 (not Bech32m)
    /// checksum — the variant used for Spacemesh addresses (see the
    /// codec tests, which round-trip "sm…" addresses with `Bech32`).
    pub fn encode(hrp: Hrp, input: &[u8]) -> Result<String, EncodeError> {
        bech32::encode::<Bech32>(hrp, input)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use base64::prelude::*;
    use spacemesh_api_client::*;
    /// Fetches a known mainnet transaction and checks it decodes with
    /// this crate's codec. NOTE(review): this test depends on the live
    /// mainnet API being reachable, so it can fail for network reasons.
    #[tokio::test]
    async fn decodes_live_transaction() {
        let client = Client::new("https://mainnet-api.spacemesh.network");
        // Known transaction id, hex-encoded; the API expects base64.
        let txid = "638442a2033f20b5a7280b9a4f2bfc73022f6e7ec64b1497b85335444381d99d";
        let txid = smex::decode(txid).unwrap();
        let txid = BASE64_STANDARD.encode(txid);
        let result = client
            .transaction_service_list(&types::Spacemeshv2alpha1TransactionRequest {
                txid: vec![txid],
                limit: Some(100.to_string()),
                ..Default::default()
            })
            .await
            .unwrap()
            .into_inner();
        // The API returns either an error status or a transaction list;
        // an error status is a test failure.
        let mut result = match result {
            types::GooglerpcStatusOrSpacemeshv2alpha1TransactionList::GooglerpcStatus(googlerpc_status) => panic!("{:?}", googlerpc_status.message),
            types::GooglerpcStatusOrSpacemeshv2alpha1TransactionList::Spacemeshv2alpha1TransactionList(transaction_list) => {
                transaction_list
            },
        };
        let tx = result.transactions.pop().unwrap().tx.unwrap();
        // The template comes back bech32-encoded; the raw tx, base64.
        let (_hrp, address) = bech32::decode(&tx.template.unwrap()).unwrap();
        let tx_raw = BASE64_STANDARD.decode(tx.raw.unwrap()).unwrap();
        let decoded = transaction::decode_by_address_and_method(
            address.try_into().unwrap(),
            tx.method.unwrap() as u8,
            &tx_raw,
        )
        .unwrap()
        .unwrap();
        // Success is "decoding did not fail"; the value itself is unused.
        drop(decoded);
    }
}

View File

@ -1,77 +0,0 @@
//! Spacemesh wallet management.
pub use crate::codec::core::Address;
use crate::codec::tx::*;
use crate::codec::Compact;
use crate::transaction::single_signature;
// Number of leading bytes that remain zero in every derived address.
const ADDRESS_RESERVED: usize = 4;
mod sealed {
    /// Marker trait restricting `AsAddress` to types chosen by this
    /// crate.
    pub trait Sealed {}
}
/// Conversion into a Spacemesh [`Address`]. Sealed: implemented only
/// for [`Address`] itself and for 32-byte public keys.
pub trait AsAddress: sealed::Sealed {
    fn as_address(&self) -> Address;
}
impl sealed::Sealed for Address {}
impl AsAddress for Address {
    /// An address converts to itself by copy.
    #[inline(always)]
    fn as_address(&self) -> Address {
        *self
    }
}
impl sealed::Sealed for [u8; 32] {}
impl AsAddress for [u8; 32] {
    /// Derive an address from a 32-byte public key: the first
    /// `ADDRESS_RESERVED` bytes of the address stay zero, and the
    /// remaining bytes are taken from the tail of the key.
    #[inline(always)]
    fn as_address(&self) -> Address {
        const ADDR_LEN: usize = std::mem::size_of::<Address>();
        let mut address = [0u8; ADDR_LEN];
        // Number of key bytes that actually land in the address.
        let copied = ADDR_LEN - ADDRESS_RESERVED;
        address[ADDRESS_RESERVED..].copy_from_slice(&self[self.len() - copied..]);
        address
    }
}
/// Build an unsigned single-signature spawn (account-creation)
/// transaction for the account whose public key is `principal`.
///
/// The returned transaction carries a zeroed signature and must be
/// signed before broadcast.
pub fn spawn(principal: [u8; 32], nonce: u64, gas_price: u64) -> single_signature::Spawn {
    single_signature::Spawn {
        header: SpawnTxHeader {
            // The principal address is derived from the key itself.
            principal: principal.as_address(),
            template_address: single_signature::SINGLE_SIG_TEMPLATE_ADDRESS,
        },
        payload: single_signature::SpawnPayload {
            nonce: Compact(nonce),
            gas_price: Compact(gas_price),
            arguments: single_signature::SpawnArguments {
                // Spawn takes the full 32-byte public key as argument.
                public_key: principal,
            },
        },
        // Placeholder; replaced when the transaction is signed.
        signature: [0u8; 64],
    }
}
/// Build an unsigned single-signature spend transferring `amount`
/// from `principal` to `recipient`.
///
/// Both endpoints accept anything convertible to an address (an
/// [`Address`] or a 32-byte public key). The returned transaction
/// carries a zeroed signature and must be signed before broadcast.
pub fn transfer(
    principal: impl AsAddress,
    recipient: impl AsAddress,
    amount: u64,
    nonce: u64,
    gas_price: u64,
) -> single_signature::Spend {
    single_signature::Spend {
        header: TxHeader {
            principal: principal.as_address(),
        },
        payload: single_signature::SpendPayload {
            nonce: Compact(nonce),
            gas_price: Compact(gas_price),
            arguments: single_signature::SpendArguments {
                destination: recipient.as_address(),
                amount: Compact(amount),
            },
        },
        // Placeholder; replaced when the transaction is signed.
        signature: [0u8; 64],
    }
}

View File

@ -44,12 +44,8 @@ RUN <<EOF
cargo fetch --locked
cargo build --frozen --release --target x86_64-unknown-linux-musl --bin icepick
cargo build --frozen --release --target x86_64-unknown-linux-musl --bin icepick-sol
cargo build --frozen --release --target x86_64-unknown-linux-musl --bin icepick-internal
cp /app/target/x86_64-unknown-linux-musl/release/icepick /usr/bin
cp /app/target/x86_64-unknown-linux-musl/release/icepick-internal /usr/bin
cp /app/target/x86_64-unknown-linux-musl/release/icepick-sol /usr/bin
EOF
ENV ICEPICK_DATA_DIRECTORY=/data
WORKDIR /

View File

@ -13,11 +13,15 @@ from_address="$(jq -r .from_address /data/input.json)"
to_address="$(jq -r .to_address /data/input.json)"
token_name="$(jq -r .token_name /data/input.json)"
token_amount="$(jq -r .token_amount /data/input.json)"
blockhash="$(jq -r .blockhash /data/input.json)"
token_address="$(icepick sol get-token-info "$token_name" | jq -r .blob.token_address)"
token_decimals="$(icepick sol get-token-info "$token_name" | jq -r .blob.token_decimals)"
jq . /data/input.json
echo "Do these values look correct? If not, press ctrl-c. Otherwise, press Enter."
read -r _
read _
echo "Creating and signing transaction"
icepick workflow sol transfer-token --from-address "$from_address" --to-address "$to_address" --token-name "$token_name" --token-amount "$token_amount"
icepick sol transfer-token "$token_amount" "$token_address" "$to_address" "$from_address" "$token_decimals" | icepick sol sign "$blockhash" > /data/output.json.tmp
mv /data/output.json.tmp /data/output.json

View File

@ -1,27 +1,32 @@
printf "%s" "Public key of the sender address: "
read -r from_address
read from_address
printf "%s" "Public key of the recipient address: "
read -r to_address
printf "%s" "Public key of the nonce account: "
read -r nonce_address
read to_address
printf "%s" "Name of the token to transfer: "
read -r token_name
read token_name
printf "%s" "Amount of token to transfer: "
read -r token_amount
read token_amount
echo "Saving inputs to file"
echo "Acquiring blockhash..."
blockhash="$(icepick sol get-blockhash --cluster devnet | jq -r .blob.blockhash)"
echo "Saving information to file"
cat <<EOF > /data/input.json
{
"from_address": "$from_address",
"to_address": "$to_address",
"token_name": "$token_name",
"token_amount": "$token_amount"
"token_amount": "$token_amount",
"blockhash": "$blockhash"
}
EOF
icepick workflow sol broadcast --cluster devnet --nonce-address "$nonce_address"
echo "Waiting for signed transaction..."
while test ! -f /data/output.json; do sleep 1; done
echo "Broadcasting transaction"
icepick sol broadcast --cluster devnet < /data/output.json

View File

@ -3,12 +3,87 @@ name = "sol"
derivation_prefix = "m/44'/501'/0'"
algorithm = "Ed25519"
[[module]]
name = "cosmos"
derivation_prefix = "m/44'/118'/0'"
algorithm = "Secp256k1"
[[module.workflow]]
# The name of the workflow, which can be called by:
# `icepick workflow sol transfer-token`
name = "transfer-token"
[[module]]
name = "spacemesh"
derivation_prefix = "m/44'/540'/0'/0'"
algorithm = "Ed25519"
# These values are used as inputs for other workflows, acquired from the CLI.
# These values can only be strings, but other values can be any value that can
# be serialized by serde_json::Value.
# These values can also be loaded using "internal-load-file", using some form
# of later-defined signature validation.
inputs = ["from_address", "to_address", "token_name", "token_amount"]
## Load the Blockhash from the SD card
#[[module.workflow.step]]
#type = "internal-load-file"
#
## Pre-defined values to be passed to the module
#values = { filename = "blockhash.json" }
#
## This value is marked to be saved in-memory, and can be used as an input for
## later steps.
#outputs = { blockhash = "blockhash" }
# Get the token address and token decimals for the given token
[[module.workflow.step]]
type = "sol-get-token-info"
# The key is the key that is passed to the program in the
# `values` field. The value is the item in storage. In this case,
# we read a `token-name` from our input, but the operation expects `token`.
inputs = { token = "token_name" }
# Because these two fields are currently unused in our storage, we can grab
# them from the outputs of our module. The key is the key of the output value
# we want to store, and the value is the name to be assigned in storage.
outputs = { token_address = "token_address", token_decimals = "token_decimals" }
[[module.workflow.step]]
# Generate an unsigned Transaction
type = "sol-transfer-token"
# If using a lot of inputs, it may be best to use a non-inline table.
# Non-inline tables _must_ be the last step, as otherwise, `outputs` for
# example would be considered a member of `inputs`. In this case, we use a
# non-inline table for `outputs` even though it would fit on one line, to avoid
# the ambiguity.
[module.workflow.step.inputs]
amount = "token_amount"
token_address = "token_address"
decimals = "token_decimals"
to_address = "to_address"
from_address = "from_address"
[module.workflow.step.outputs]
transaction = "unsigned_transaction"
# Get a blockhash
[[module.workflow.step]]
type = "sol-get-blockhash"
outputs = { blockhash = "blockhash" }
# Sign the transaction
[[module.workflow.step]]
type = "sol-sign"
[module.workflow.step.inputs]
transaction = "unsigned_transaction"
blockhash = "blockhash"
[module.workflow.step.outputs]
transaction = "signed_transaction"
## Write the signed transaction to a file
#[[module.workflow.step]]
#type = "internal-save-file"
#
## We are using a static filename here, so we use `values` instead of `inputs`.
#values = { filename = "transaction.json" }
#
## All fields in both `inputs` and `values`, other than `filename`, will be
## persisted to the file. In this case, the `transaction` field of the file will
## contain the signed transaction.
#inputs = { transaction = "signed_transaction" }

View File

@ -1,34 +0,0 @@
mnemonics:
keyfork: ENC[AES256_GCM,data:kz2vAo1XMCylVY6WtDfZ9Z0xKvccLRrOvfP2x0IJtJkRu3HmShTEzPlrTfRXrKcuxLqqJlxOnGPR7/Y7bPhRvH/nRj59Lz1SLocVl8UVq9YXsIpgymLJ0Hp2I6XUBuItOhGonvc61iAe7cXFTAO+T2VUMK0Tf40xoJcT2eBC9qOjkC5xOrHTa+FBDFcvQdHcMobm+y7Nv1BzpzbODaA=,iv:m3p+sAgZjQReM3YAld6n1uKppkQSn51IgQGsxlYHnn4=,tag:xrG7WLr9w4zE45TiHX6a8w==,type:str]
solana: ENC[AES256_GCM,data:5/OKpwkZT+Vf6AvTiVj7zafVoqiqkKwLRLwjIHA6MGbei0ssCWqxM8QAtka+BBNGGhe5SUTlr/nAqGfoiP0t6fwUyjxUnOgu,iv:8Ctui1cO/RCZAdtfjiCnqvYyINdOcMHZfIZD0nGj2Kg=,tag:5ASiLG+hehhCYwdJ+1MZFg==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age: []
lastmodified: "2025-01-03T23:53:42Z"
mac: ENC[AES256_GCM,data:/WYbQmisG9jvUKvcmMwQWop0X3EbLbCurUDnoMoOKJ7XxKRpGOKh/GkqqSFgMzpC8A6X9Cmjqo4gatiYBBGjDi5PIk+9fGvUE0ZSc4p5p5+0RLu7KyxYeRtsmhEjUYJllVi9aDLJT4x+GXta91uTWZFUWExcZ1wJHh42wSFsbo4=,iv:ZLSH09zdyeDom5koxrS5KBCv2xd3cCEkJO4/hAmzUPM=,tag:AGtJTuWUEslY+uD7OFCM/g==,type:str]
pgp:
- created_at: "2025-01-03T23:53:41Z"
enc: |
-----BEGIN PGP MESSAGE-----
hQIMAw95Vf08z8oUAQ/8CC594sGEYJLHzFZy9UsExxy7CQl2q1SKQA8frZCU1cBu
CyIex99UgQzKTSUqttlz5hxqfyodvpoRfBiZUOcyfOgVgTPtDJ9UfByMbsMc0wy0
q8hErtNYhBmzWRway4xoAThJUrfi6jXl/m1doFVH6Ug0Q9qi56Eo8DYaUtsE+NFU
HjHslQpMLWm3uf/i2mQhftmwE00tWTVmBfBtuAycj5jLc3AJAveNvB5jK1O22c9N
PHhWeHQB6K3dQfTLS1O549oSfGTfrXXxq4cHYT9BZNHDi0T4/tH1xHwmLHOwnUiZ
i0tQ8CTYL8eALyKxj/BQQxbLXKpmor7Yli1QH1UWGw5AddvVqIz1zIyukHN/AGN7
E475zcvkc2uLPBwnZ3JS3n7e1X9TCa/iZlW/msEqmkLeh6eW47t8/p13yj0WnkCD
1SqA6qFEIcH8TaWqC03vLZG9ue2gSZ11db+3ZeGzqykUAG/4NR8ncD+qdhRbCZtp
ZPASpfZnByweyGVrnfMgR/sL+i8/C7KgCqj8pUOOS5Z5Av8DNMpNushPndhdHJDU
XAzNe2gu5StPvqqlH9wONvxiYJSmNy/dWnnvgwozvm9aPPCboYjmO9fwxsy0Zl+x
20Bb8G5nl6C6ZvToztzxKPzToxaX1x2MFwovqnHT2GACtZ6/tAmMjg3oCFd+k/PS
XgHFcFzyleUy9LF8Yb7DJcEDe3Tue2wvvY8XlNsIYeMnpfJ/TCq9Grzho1/w31uX
swHv2T4SnwFnoBQoXk8cSOMqrWK3XyWi0RI9X16m+rTGXZ13I8hggi/ne8QbMsI=
=szJ5
-----END PGP MESSAGE-----
fp: 8E401478A3FBEF72
unencrypted_suffix: _unencrypted
version: 3.7.3