Compare commits

..

No commits in common. "main" and "ryansquared/sol/durable-nonces" have entirely different histories.

50 changed files with 580 additions and 6423 deletions

1932
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -3,17 +3,9 @@
resolver = "2"
members = [
"crates/icepick",
"crates/icepick-workflow",
"crates/icepick-module",
"crates/builtins/icepick-internal",
"crates/builtins/icepick-ed25519",
"crates/by-chain/icepick-solana",
"crates/by-chain/icepick-cosmos",
"crates/miniquorum",
"crates/spacemesh/api-client",
"crates/spacemesh/codec",
"crates/spacemesh/spacemesh",
"crates/by-chain/icepick-spacemesh",
]
[workspace.dependencies]

View File

@ -1,13 +0,0 @@
[package]
name = "icepick-ed25519"
version = "0.1.0"
edition = "2021"
publish = ["distrust"]
[dependencies]
ed25519-dalek = "2.1.1"
icepick-module = { version = "0.1.0", path = "../../icepick-module" }
serde.workspace = true
serde_json.workspace = true
smex = { version = "0.1.0", registry = "distrust" }
thiserror = "2.0.9"

View File

@ -1,91 +0,0 @@
use ed25519_dalek::Signer;
use icepick_module::Module;
use serde::{Deserialize, Serialize};
/// The operations this module can perform, dispatched on the `operation`
/// tag of the incoming JSON request.
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "operation", content = "values", rename_all = "kebab-case")]
pub enum Operation {
    /// Return the public key for the provided derived private key.
    GetPubkey {},
    /// Sign `message` with the provided derived private key.
    Sign { message: Vec<u8> },
}

/// A request to this module: optional derived key material plus the
/// operation to perform (flattened into the same JSON object).
#[derive(Serialize, Deserialize, Debug)]
pub struct Request {
    // 32-byte Ed25519 secret keys derived upstream; only the first one is
    // used by `handle_request`.
    derived_keys: Option<Vec<[u8; 32]>>,
    #[serde(flatten)]
    operation: Operation,
}

/// No operation-specific errors are currently produced; missing key material
/// panics in `handle_request` instead.
#[derive(thiserror::Error, Debug)]
pub enum Error {}

/// Marker type implementing `Module` for Ed25519 signing.
pub struct Ed25519;
impl Module for Ed25519 {
    type Error = Error;
    type Request = Request;

    /// Describe the operations this module exposes: `get-pubkey` and `sign`.
    fn describe_operations() -> Vec<icepick_module::help::Operation> {
        use icepick_module::help::*;
        // `sign` takes the message to be signed as its only argument.
        let message = Argument::builder()
            .name("message")
            .description("The message to sign, as an array of bytes.")
            .r#type(ArgumentType::Required)
            .build();
        let get_pubkey = Operation::builder()
            .name("get-pubkey")
            .description("Get an Ed25519 public key from the provided private key.")
            .build();
        let sign = Operation::builder()
            .name("sign")
            .description("Sign a message using an Ed25519 private key.")
            .build()
            .argument(&message);
        vec![get_pubkey, sign]
    }

    /// Handle a single request, returning a JSON "blob" holding either the
    /// public key or the signature.
    fn handle_request(request: Self::Request) -> Result<serde_json::Value, Self::Error> {
        let Request {
            derived_keys,
            operation,
        } = request;
        // Both operations need the same signing key; extract it once instead
        // of duplicating the derivation in each match arm. A missing key is a
        // caller bug, so panic with a descriptive message.
        let key = derived_keys
            .iter()
            .flatten()
            .next()
            .map(ed25519_dalek::SigningKey::from_bytes)
            .expect("a derived signing key should be provided in the request");
        match operation {
            Operation::GetPubkey {} => {
                let pubkey = key.verifying_key().to_bytes();
                Ok(serde_json::json!({
                    "blob": {
                        "pubkey": pubkey,
                    }
                }))
            }
            Operation::Sign { message } => {
                let signature = key.sign(&message);
                Ok(serde_json::json!({
                    "blob": {
                        "signature": signature.to_vec(),
                    }
                }))
            }
        }
    }
}

View File

@ -1,6 +0,0 @@
use icepick_module::Module;
use icepick_ed25519::Ed25519;
/// Entry point: run this module's request/response loop.
/// `run_responder` is in scope via the `Module` trait import — presumably it
/// serves requests over process I/O (confirm against the trait definition).
fn main() -> Result<(), Box<dyn std::error::Error>> {
    Ed25519::run_responder()
}

View File

@ -1,16 +0,0 @@
[package]
name = "icepick-cosmos"
version = "0.1.0"
edition = "2021"
[dependencies]
bon = "3.3.2"
cosmrs = { version = "0.21.0", features = ["rpc", "tokio"] }
icepick-module = { version = "0.1.0", path = "../../icepick-module" }
serde.workspace = true
serde_json = { workspace = true, features = ["arbitrary_precision"] }
thiserror = "2.0.9"
tokio = { version = "1.43.0", features = ["rt"] }
[dev-dependencies]
cosmrs = { version = "0.21.0", features = ["dev"] }

View File

@ -1,249 +0,0 @@
use bon::{bon, Builder};
use serde::{Deserialize, Serialize};
/// BIP-44 derivation settings for a chain (serialized as `coinType`).
#[derive(Clone, Debug, Serialize, Deserialize, Builder)]
#[serde(rename_all = "camelCase")]
pub struct Bip44Config {
    pub coin_type: u32,
}

// NOTE: Are `public` variants used?
/// Bech32 address prefixes for the various key/address roles of a chain,
/// serialized under the `bech32Prefix*` field names.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Bech32Config {
    #[serde(rename = "bech32PrefixAccAddress")]
    pub account_address_prefix: String,
    #[serde(rename = "bech32PrefixAccPub")]
    pub account_address_public_prefix: String,
    #[serde(rename = "bech32PrefixValOper")]
    pub validator_operator_prefix: String,
    #[serde(rename = "bech32PrefixValPub")]
    pub validator_operator_public_prefix: String,
    #[serde(rename = "bech32PrefixConsAddr")]
    pub consensus_node_prefix: String,
    #[serde(rename = "bech32PrefixConsPub")]
    pub consensus_node_public_prefix: String,
}
#[bon]
impl Bech32Config {
    /// Construct a config with each prefix given explicitly.
    #[builder]
    fn new(
        account_address_prefix: &'static str,
        account_address_public_prefix: &'static str,
        validator_operator_prefix: &'static str,
        validator_operator_public_prefix: &'static str,
        consensus_node_prefix: &'static str,
        consensus_node_public_prefix: &'static str,
    ) -> Self {
        Self {
            account_address_prefix: account_address_prefix.to_string(),
            account_address_public_prefix: account_address_public_prefix.to_string(),
            validator_operator_prefix: validator_operator_prefix.to_string(),
            validator_operator_public_prefix: validator_operator_public_prefix.to_string(),
            consensus_node_prefix: consensus_node_prefix.to_string(),
            consensus_node_public_prefix: consensus_node_public_prefix.to_string(),
        }
    }

    /// Construct a config where every prefix is derived from the account
    /// prefix (e.g. `kyve`, `kyvevaloper`, `kyvevalconspub`, ...).
    fn with_similar_prefix(prefix: &'static str) -> Self {
        Self {
            // Plain conversion instead of the former `format!("{prefix}")`
            // (clippy `useless_format`).
            account_address_prefix: prefix.to_string(),
            account_address_public_prefix: format!("{prefix}pub"),
            validator_operator_prefix: format!("{prefix}valoper"),
            validator_operator_public_prefix: format!("{prefix}valoperpub"),
            consensus_node_prefix: format!("{prefix}valcons"),
            consensus_node_public_prefix: format!("{prefix}valconspub"),
        }
    }
}
/// Gas price tiers (in the chain's minimal denomination) for fee estimation.
#[derive(Clone, Debug, Serialize, Deserialize, Builder)]
pub struct GasPriceStep {
    pub low: f64,
    pub average: f64,
    pub high: f64,
}

/// A currency as displayed and denominated on a chain.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Currency {
    // Display denomination (e.g. "KYVE").
    pub coin_denom: String,
    // Minimal on-chain denomination (e.g. "tkyve").
    pub coin_minimal_denom: String,
    // Decimal places between minimal and display denominations.
    pub coin_decimals: u8,
    // CoinGecko identifier; placeholder values appear for unlisted assets.
    pub coin_gecko_id: String,
}

#[bon]
impl Currency {
    /// Builder-style constructor converting `&'static str` inputs into owned
    /// `String` fields.
    #[builder]
    fn new(
        coin_denom: &'static str,
        coin_minimal_denom: &'static str,
        coin_decimals: u8,
        coin_gecko_id: &'static str,
    ) -> Self {
        Self {
            coin_denom: coin_denom.to_string(),
            coin_minimal_denom: coin_minimal_denom.to_string(),
            coin_decimals,
            coin_gecko_id: coin_gecko_id.to_string(),
        }
    }
}

/// A fee-paying currency together with its gas price tiers; the currency
/// fields are flattened into the same serialized object.
#[derive(Clone, Debug, Serialize, Deserialize, Builder)]
#[serde(rename_all = "camelCase")]
pub struct CurrencyWithGas {
    #[serde(flatten)]
    pub currency: Currency,
    pub gas_price_step: GasPriceStep,
}

/// Full description of a chain: endpoints, derivation and address-prefix
/// configuration, and supported currencies.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Blockchain {
    pub chain_name: String,
    pub chain_id: String,
    pub rpc_url: String,
    pub rest_url: String,
    // Explorer URL with `%s` as the transaction-hash placeholder.
    pub explorer_url_format: String,
    #[serde(rename = "bip44")]
    pub bip44_config: Bip44Config,
    #[serde(rename = "bech32Config")]
    pub bech32_config: Bech32Config,
    pub currencies: Vec<Currency>,
    pub fee_currencies: Vec<CurrencyWithGas>,
    pub gas_price_step: GasPriceStep,
    pub stake_currency: Currency,
}

#[bon]
impl Blockchain {
    /// Builder-style constructor taking static strings and slices and
    /// converting them into owned values.
    #[builder]
    fn new(
        chain_id: &'static str,
        chain_name: &'static str,
        rpc_url: &'static str,
        rest_url: &'static str,
        explorer_url_format: &'static str,
        bip44_config: Bip44Config,
        bech32_config: Bech32Config,
        currencies: &[Currency],
        fee_currencies: &[CurrencyWithGas],
        gas_price_step: GasPriceStep,
        stake_currency: Currency,
    ) -> Self {
        Self {
            chain_id: chain_id.to_string(),
            chain_name: chain_name.to_string(),
            rpc_url: rpc_url.to_string(),
            rest_url: rest_url.to_string(),
            explorer_url_format: explorer_url_format.to_string(),
            bip44_config,
            bech32_config,
            currencies: currencies.to_vec(),
            fee_currencies: fee_currencies.to_vec(),
            gas_price_step,
            stake_currency,
        }
    }
}
/// Chain definitions for the SEDA network.
fn seda_chains() -> Vec<Blockchain> {
    // The native currency, `aseda`, with 18 decimal places.
    let aseda = Currency::builder()
        .coin_denom("seda")
        .coin_minimal_denom("aseda")
        .coin_decimals(18)
        .coin_gecko_id("ID")
        .build();
    let aseda_gas = GasPriceStep::builder()
        .low(5000000000.)
        .average(10000000000.)
        .high(15000000000.)
        .build();
    // Build the single-element list directly instead of `let mut` + `push`.
    vec![Blockchain::builder()
        .chain_id("seda-1-devnet")
        // NOTE: Officially, this is just "devnet", but otherwise this would conflict.
        // We'll override it in our config.
        .chain_name("seda-devnet")
        .rpc_url("https://rpc.devnet.seda.xyz")
        .rest_url("https://lcd.devnet.seda.xyz")
        .explorer_url_format("https://devnet.explorer.seda.xyz/txs/%s")
        .bip44_config(Bip44Config::builder().coin_type(118).build())
        .bech32_config(Bech32Config::with_similar_prefix("seda"))
        .currencies(&[aseda.clone()])
        .fee_currencies(&[CurrencyWithGas::builder()
            .currency(aseda.clone())
            .gas_price_step(aseda_gas.clone())
            .build()])
        .gas_price_step(aseda_gas)
        .stake_currency(aseda)
        .build()]
}
/// Chain definitions for the KYVE network.
fn kyve_chains() -> Vec<Blockchain> {
    // The KYVE test denomination, `tkyve`, with 6 decimal places.
    let tkyve = Currency::builder()
        .coin_denom("KYVE")
        .coin_minimal_denom("tkyve")
        .coin_decimals(6)
        .coin_gecko_id("unknown")
        .build();
    let tkyve_gas = GasPriceStep::builder()
        .low(0.01)
        .average(0.025)
        .high(0.03)
        .build();
    // Build the single-element list directly instead of `let mut` + `push`;
    // the final uses of `tkyve`/`tkyve_gas` move the values rather than
    // cloning them (clippy `redundant_clone`).
    vec![Blockchain::builder()
        .chain_id("korellia-2")
        .chain_name("korellia")
        .rpc_url("https://rpc.korellia.kyve.network")
        .rest_url("https://api.korellia.kyve.network")
        .explorer_url_format("https://explorer.kyve.network/korellia/tx/%s")
        .bip44_config(Bip44Config::builder().coin_type(118).build())
        .bech32_config(Bech32Config::with_similar_prefix("kyve"))
        .currencies(&[tkyve.clone()])
        .fee_currencies(&[CurrencyWithGas::builder()
            .currency(tkyve.clone())
            .gas_price_step(tkyve_gas.clone())
            .build()])
        .gas_price_step(tkyve_gas)
        .stake_currency(tkyve)
        .build()]
}
/// All chains known to this module, across every supported network
/// (KYVE first, then SEDA — same order as before).
pub fn default_chains() -> Vec<Blockchain> {
    kyve_chains()
        .into_iter()
        .chain(seda_chains())
        .collect()
}

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +0,0 @@
use icepick_module::Module;
use icepick_cosmos::Cosmos;
/// Entry point: run this module's request/response loop.
/// `run_responder` is in scope via the `Module` trait import — presumably it
/// serves requests over process I/O (confirm against the trait definition).
fn main() -> Result<(), Box<dyn std::error::Error>> {
    Cosmos::run_responder()
}

View File

@ -1,68 +0,0 @@
use serde::{Deserialize, Serialize};
/// A transferable amount of a given denomination.
#[derive(Serialize, Deserialize, Debug)]
pub struct Coin {
    // A u128 amount stored as big-endian bytes (see the `From` impls below);
    // presumably to avoid 128-bit integer serialization issues — confirm.
    amount: [u8; 16],
    denom: cosmrs::Denom,
}
impl From<&cosmrs::Coin> for Coin {
    /// Convert from the `cosmrs` coin, encoding the amount as big-endian bytes.
    fn from(value: &cosmrs::Coin) -> Self {
        Self {
            amount: value.amount.to_be_bytes(),
            denom: value.denom.clone(),
        }
    }
}
impl From<&Coin> for cosmrs::Coin {
    /// Convert back into the `cosmrs` coin, decoding the big-endian amount.
    fn from(value: &Coin) -> Self {
        Self {
            amount: u128::from_be_bytes(value.amount),
            denom: value.denom.clone(),
        }
    }
}
/// Transaction fee: one or more coin amounts plus a gas limit.
/// `payer`/`granter` are not represented; the `From` impls below reject them.
#[derive(Serialize, Deserialize, Debug)]
pub struct Fee {
    amount: Vec<Coin>,
    gas_limit: u64,
}
impl From<&cosmrs::tx::Fee> for Fee {
    /// Convert from the `cosmrs` fee type.
    ///
    /// Panics when the fee carries a `payer` or `granter`, since neither is
    /// supported by this representation.
    fn from(value: &cosmrs::tx::Fee) -> Self {
        assert!(value.payer.is_none(), "unimplemented: payer");
        assert!(value.granter.is_none(), "unimplemented: granter");
        Self {
            amount: value.amount.iter().map(Coin::from).collect(),
            gas_limit: value.gas_limit,
        }
    }
}
impl From<&Fee> for cosmrs::tx::Fee {
    /// Convert back into the `cosmrs` fee type; `payer` and `granter` are
    /// always `None` (the forward conversion rejects them).
    fn from(value: &Fee) -> Self {
        Self {
            amount: value.amount.iter().map(cosmrs::Coin::from).collect(),
            gas_limit: value.gas_limit,
            payer: None,
            granter: None,
        }
    }
}

View File

@ -12,7 +12,6 @@ icepick-module = { version = "0.1.0", path = "../../icepick-module" }
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
solana-rpc-client = { version = "2.1.1", default-features = false }
solana-rpc-client-api = "2.1.7"
solana-rpc-client-nonce-utils = "2.1.7"
solana-sdk = { version = "2.1.1" }
solana-transaction-status = "2.1.1"

View File

@ -63,13 +63,10 @@ use icepick_module::{
Module,
};
use serde::{Deserialize, Serialize};
use solana_rpc_client::rpc_client::SerializableTransaction;
use solana_rpc_client_api::client_error::Result as ClientResult;
use solana_sdk::{
pubkey::Pubkey,
signer::{keypair::Keypair, Signer},
system_instruction,
transaction::TransactionError,
};
use std::{collections::HashSet, str::FromStr};
@ -705,8 +702,8 @@ impl Module for Solana {
"nonce_pubkey": keypair.pubkey().to_string(),
"nonce_privkey": [keypair.secret().to_bytes()],
"transaction": instructions,
"derivation_accounts": [0u32 | 1 << 31],
},
"derivation_accounts": [0u32 | 1 << 31],
}))
}
Operation::GetNonceAccountData(GetNonceAccountData {
@ -1084,63 +1081,16 @@ impl Module for Solana {
}
})
}
Err(_) => {
let signature = transaction.get_signature();
let status = client.get_signature_status(signature);
blob_for_signature_status(status, signature, &cluster_suffix)
Err(e) => {
serde_json::json!({
"blob": {
"status": "send_and_confirm",
"error": e.to_string(),
}
})
}
})
}
}
}
}
/// Build a response blob describing the result of manually checking a
/// transaction's signature status, used after broadcasting reported an error.
fn blob_for_signature_status(
    status: ClientResult<Option<Result<(), TransactionError>>>,
    signature: &solana_sdk::signature::Signature,
    cluster_suffix: &str,
) -> serde_json::Value {
    match status {
        Ok(Some(Ok(()))) => {
            // transaction passed.
            eprintln!("An error occurred while broadcasting the transaction, but the transaction was confirmed manually.");
            serde_json::json!({
                "blob": {
                    "status": "send_and_confirm",
                    // NOTE(review): this key was previously misspelled
                    // "succcess"; confirm no consumer relies on the typo.
                    "success": signature.to_string(),
                    "url": format!("https://explorer.solana.com/tx/{signature}{cluster_suffix}"),
                }
            })
        }
        Ok(Some(Err(e))) => {
            // transaction failed on-cluster
            eprintln!("The transaction failed on-chain: {e}");
            serde_json::json!({
                "blob": {
                    "status": "send_and_confirm",
                    "error": e.to_string(),
                }
            })
        }
        Ok(None) => {
            // transaction may not have been broadcast
            eprintln!("The transaction was possibly not received by the cluster.");
            serde_json::json!({
                "blob": {
                    "status": "send_and_confirm",
                    "error": format!("Transaction {signature} does not exist on-cluster"),
                }
            })
        }
        Err(e) => {
            // RPC request failed
            eprintln!("An error occurred while interacting with the cluster: {e}");
            serde_json::json!({
                "blob": {
                    "status": "send_and_confirm",
                    "error": e.to_string(),
                }
            })
        }
    }
}

View File

@ -1,13 +0,0 @@
[package]
name = "icepick-spacemesh"
version = "0.1.0"
edition = "2021"
publish = ["distrust"]
[dependencies]
icepick-module = { version = "0.1.0", path = "../../icepick-module" }
serde.workspace = true
serde_json.workspace = true
spacemesh = { version = "0.1.0", path = "../../spacemesh/spacemesh" }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["rt", "net"] }

View File

@ -1,172 +0,0 @@
use icepick_module::Module;
use serde::{Deserialize, Serialize};
use spacemesh::bech32::{self, Hrp};
use std::str::FromStr;
/// Which Spacemesh cluster to target; this selects the bech32
/// human-readable prefix used for addresses.
#[derive(Serialize, Deserialize, PartialEq, Eq, Clone, Debug, Default)]
#[serde(rename_all = "kebab-case")]
pub enum Cluster {
    Testnet,
    /// Mainnet is assumed when no cluster is specified.
    #[default]
    Mainnet,
}

impl Cluster {
    /// The bech32 human-readable part for addresses on this cluster.
    fn hrp(&self) -> bech32::Hrp {
        match self {
            // Known-good literal prefixes, so `parse` should not fail.
            Cluster::Testnet => Hrp::parse("stest").unwrap(),
            Cluster::Mainnet => Hrp::parse("sm").unwrap(),
        }
    }
}
impl std::str::FromStr for Cluster {
    type Err = &'static str;

    /// Parse "testnet" or "mainnet"; any other input is rejected.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "testnet" {
            Ok(Self::Testnet)
        } else if s == "mainnet" {
            Ok(Self::Mainnet)
        } else {
            Err("Invalid value")
        }
    }
}
impl std::fmt::Display for Cluster {
    /// Render the cluster name in the same lowercase form `FromStr` accepts.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Cluster::Testnet => "testnet",
            Cluster::Mainnet => "mainnet",
        };
        f.write_str(name)
    }
}
/// No module-specific errors are currently produced.
#[derive(thiserror::Error, Debug)]
pub enum Error {}

/// Inputs for `generate-wallet`: an optional derivation account index as a
/// decimal string (the handler defaults to "0").
#[derive(Serialize, Deserialize, Debug)]
pub struct GenerateWallet {
    account: Option<String>,
}

/// Inputs for `get-wallet-address`.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetWalletAddress {
    // 32-byte wallet public key.
    pubkey: [u8; 32],
    // Defaults to mainnet when absent.
    cluster: Option<Cluster>,
}

/// Inputs for fetching account data; not currently wired to any `Operation`
/// variant.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountData {
    account: String,
    cluster: Option<Cluster>,
}

/// Inputs for `await-funds` (currently unimplemented in the handler).
#[derive(Serialize, Deserialize, Debug)]
pub struct AwaitFunds {
    address: String,
    amount: String,
    cluster: Option<Cluster>,
}

/// Operations dispatched on the `operation` tag of the incoming request.
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "operation", content = "values", rename_all = "kebab-case")]
pub enum Operation {
    GenerateWallet(GenerateWallet),
    GetWalletAddress(GetWalletAddress),
    AwaitFunds(AwaitFunds),
}

/// A request to this module: optional derived key material plus the
/// operation to perform (flattened into the same JSON object).
#[derive(Serialize, Deserialize, Debug)]
pub struct Request {
    derived_keys: Option<Vec<[u8; 32]>>,
    #[serde(flatten)]
    operation: Operation,
}
/// Run a future to completion on a freshly-built single-threaded Tokio
/// runtime with all drivers enabled.
pub fn run_async<F: std::future::Future>(f: F) -> F::Output {
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .unwrap();
    runtime.block_on(f)
}
/// Marker type implementing `Module` for Spacemesh wallets.
pub struct Spacemesh;

impl Module for Spacemesh {
    type Error = Error;
    type Request = Request;

    /// Describe the operations this module exposes. `await-funds` is not
    /// advertised because its handler is still unimplemented.
    fn describe_operations() -> Vec<icepick_module::help::Operation> {
        use icepick_module::help::*;
        let account = Argument::builder()
            .name("account")
            .description("The derivation index for the account.")
            .r#type(ArgumentType::Optional)
            .build();
        let cluster = Argument::builder()
            .name("cluster")
            .description("Spacemesh cluster to interact with (mainnet, testnet).")
            .r#type(ArgumentType::Required)
            .build();
        let generate_wallet = Operation::builder()
            .name("generate-wallet")
            .description("Generate a wallet for the given account.")
            .build()
            .argument(&account);
        let get_wallet_address = Operation::builder()
            .name("get-wallet-address")
            .description("Get the address for a given wallet.")
            .build()
            .argument(&cluster)
            .argument(
                &Argument::builder()
                    .name("wallet_pubkey")
                    .description("Public key of the wallet.")
                    .r#type(ArgumentType::Required)
                    .build(),
            );
        vec![generate_wallet, get_wallet_address]
    }

    fn handle_request(request: Self::Request) -> Result<serde_json::Value, Self::Error> {
        let Request {
            operation,
            derived_keys: _,
        } = request;
        match operation {
            Operation::GenerateWallet(GenerateWallet { account }) => {
                // Default to account index 0 when none was supplied; a
                // non-numeric index is a caller error.
                let account = u32::from_str(account.as_deref().unwrap_or("0"))
                    .expect("account should be a numeric derivation index");
                // Setting the high bit requests a hardened derivation index
                // — TODO confirm against the derivation implementation.
                Ok(serde_json::json!({
                    "blob": {},
                    "derivation_accounts": [(account | 1 << 31)],
                }))
            }
            Operation::GetWalletAddress(GetWalletAddress { pubkey, cluster }) => {
                use spacemesh::wallet::AsAddress;
                let account = pubkey.as_address();
                let hrp = cluster.unwrap_or_default().hrp();
                let address = bech32::encode(hrp, &account).unwrap();
                Ok(serde_json::json!({
                    "blob": {
                        "address": address,
                    },
                    "derivation_accounts": [],
                }))
            }
            // Bind no fields: the previous pattern captured `address`,
            // `amount`, and `cluster` only to leave them unused.
            Operation::AwaitFunds(AwaitFunds { .. }) => {
                todo!("await-funds is not implemented")
            }
        }
    }
}

View File

@ -1,6 +0,0 @@
use icepick_module::Module;
use icepick_spacemesh::Spacemesh;
/// Entry point: run this module's request/response loop.
/// `run_responder` is in scope via the `Module` trait import — presumably it
/// serves requests over process I/O (confirm against the trait definition).
fn main() -> Result<(), Box<dyn std::error::Error>> {
    Spacemesh::run_responder()
}

View File

@ -4,6 +4,5 @@ version = "0.1.0"
edition = "2021"
[dependencies]
bon = "3.3.2"
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true

View File

@ -16,26 +16,6 @@ pub mod help {
pub arguments: Vec<Argument>,
}
#[bon::bon]
impl Operation {
#[builder]
pub fn new(name: &'static str, description: &'static str) -> Self {
Operation {
name: name.into(),
description: description.into(),
arguments: vec![],
}
}
}
impl Operation {
pub fn argument(mut self, arg: &Argument) -> Self {
self.arguments.push(arg.clone());
self
}
}
/*
/// The context of whether a signature is signed, needs to be signed, or has been signed.
#[derive(Serialize, Deserialize, Clone)]
@ -70,19 +50,6 @@ pub mod help {
/// The type of argument - this may affect how it displays in the frontend.
pub r#type: ArgumentType,
}
#[bon::bon]
impl Argument {
#[builder]
pub fn new(name: &'static str, description: &'static str, r#type: ArgumentType) -> Self {
Argument {
name: name.into(),
description: description.into(),
r#type,
}
}
}
}
/// Implementation methods for Icepick Modules, performed over command I/O using JSON.

View File

@ -1,10 +0,0 @@
[package]
name = "icepick-workflow"
version = "0.1.0"
edition = "2021"
[dependencies]
keyfork-derive-util = { version = "0.2.2", registry = "distrust", default-features = false }
serde.workspace = true
serde_json.workspace = true
thiserror = "2.0.11"

View File

@ -1,234 +0,0 @@
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::{BTreeMap, HashSet};
/// Errors that can occur while dry-running (simulating) a workflow.
#[derive(thiserror::Error, Debug)]
pub enum SimulationError {
    /// No registered operation matched the step's `type`.
    #[error("Step not found: {0}")]
    StepNotFound(String),
    /// A step input was neither in memory nor among the step's static values.
    #[error("Expected input variable or static value not found in step {0}: {1}")]
    ValueNotFound(String, String),
}
#[derive(thiserror::Error, Debug)]
pub enum WorkflowError {
#[error("Invocable operation could not be found: {0}")]
InvocableOperationNotFound(String),
#[error("Derivation configuration for operation not found: {0}")]
DerivationConfigurationNotFound(String),
#[error("An error was encountered while invoking an operation")]
InvocationError(String),
}
/// An input for a workflow argument. When inputs are read, they should be referenced by the first
/// name. Additional names can be provided as aliases, to allow chaining workflows together when
/// names may not make sense - such as a Solana address then being used as an authorization
/// address.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Input {
    /// An input with a single identifier.
    /// The name of the input.
    pub name: String,
    /// A description of the input.
    pub description: String,
    /// Aliases used when loading inputs.
    #[serde(default)]
    pub aliases: Vec<String>,
    /// Whether the workflow input is optional.
    pub optional: Option<bool>,
}

impl Input {
    /// All names this input may be referenced by: the primary name first,
    /// then any aliases.
    pub fn identifiers(&self) -> impl Iterator<Item = &String> {
        [&self.name].into_iter().chain(self.aliases.iter())
    }

    /// Whether a value for this input must be provided.
    ///
    /// An input is required unless it is explicitly marked `optional: true`.
    /// NOTE(review): the previous implementation returned the inverse — it
    /// was true exactly when the input was optional; audit callers.
    pub fn is_required(&self) -> bool {
        !self.optional.unwrap_or(false)
    }
}
/// A named, described sequence of steps with declared workflow-level inputs.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Workflow {
    pub name: String,
    pub description: String,
    #[serde(default)]
    pub inputs: Vec<Input>,
    // Serialized under the singular key "step".
    #[serde(rename = "step")]
    steps: Vec<WorkflowStep>,
}

/// Convenience alias for string-keyed maps; values default to `String`.
pub type StringMap<T = String> = BTreeMap<String, T>;

/// One step of a workflow: the operation to run (`type`), static `values`,
/// and renaming maps for `inputs` (desired name -> stored name) and
/// `outputs` (returned name -> stored name), as consumed by `run_workflow`.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorkflowStep {
    r#type: String,
    #[serde(default)]
    values: StringMap,
    #[serde(default)]
    inputs: StringMap,
    #[serde(default)]
    outputs: StringMap,
}

// TODO: This should probably be migrated to an actual Result type, instead of
// currently just shoving everything in "blob". Probably done after derivation_accounts
// gets hoisted out of here.
#[derive(Serialize, Deserialize)]
pub struct OperationResult {
    // All values returned from an operation.
    blob: StringMap<Value>,
    // Any requested accounts from an operation.
    //
    // TODO: Move this to its own step.
    #[serde(default)]
    derivation_accounts: Vec<DerivationIndex>,
}

// Callback used to derive key material for requested accounts, given a
// module's derivation algorithm and path prefix.
type DeriveKeys<'a> =
    &'a dyn Fn(&DerivationAlgorithm, &DerivationPath, &[DerivationIndex]) -> Vec<Vec<u8>>;
impl Workflow {
    /// Dry-run the workflow against a set of known input names without
    /// invoking any operation. Returns advisory reports (inputs that will
    /// not be used as arguments); errors when a step or value is missing.
    pub fn simulate_workflow<T: InvocableOperation + Sized>(
        &self,
        mut data: HashSet<String>,
        operations: &[T],
    ) -> Result<Vec<String>, SimulationError> {
        let mut reports = vec![];
        for step in self.steps.iter() {
            let step_type = step.r#type.clone();
            let Some(invocable) = operations.iter().find(|op| *op.name() == step_type) else {
                return Err(SimulationError::StepNotFound(step_type));
            };
            // Check we have the values the module expects
            for in_memory_name in step.inputs.values() {
                if !data.contains(in_memory_name) && !step.values.contains_key(in_memory_name) {
                    return Err(SimulationError::ValueNotFound(
                        step_type,
                        in_memory_name.to_owned(),
                    ));
                }
            }
            // Check whether the module expects the keys as arguments, or if the
            // keys will be passed as a "payload" variable.
            let mut inputs = step.inputs.keys().collect::<HashSet<_>>();
            for argument in invocable.argument_names() {
                inputs.remove(argument);
            }
            for remaining_input in inputs {
                // Fixed grammar: previously read "as a argument".
                reports.push(format!(
                    "Step {step_type}: Input {remaining_input} is not interpreted as an argument"
                ));
            }
            // Add the return values from the module into memory
            data.extend(step.outputs.values().cloned());
        }
        Ok(reports)
    }

    /// Execute each step in order, threading stored values between steps and
    /// deriving key material on demand via `derive_keys`. Returns the stored
    /// values produced by the final step.
    pub fn run_workflow<T: InvocableOperation>(
        &self,
        mut data: StringMap<Value>,
        operations: &[T],
        derive_keys: DeriveKeys,
    ) -> Result<StringMap<Value>, WorkflowError> {
        let mut derived_keys = vec![];
        let mut derivation_accounts = vec![];
        for step in &self.steps {
            let step_type = step.r#type.clone();
            let Some(operation) = operations.iter().find(|op| *op.name() == step_type) else {
                return Err(WorkflowError::InvocableOperationNotFound(step_type));
            };
            // Prepare all inputs for the operation invocation
            let inputs: StringMap<Value> = data
                .iter()
                .map(|(k, v)| (k, v.clone()))
                .filter_map(|(k, v)| {
                    // We have our stored name, `k`, which matches with this inner loop's `v`. We
                    // need to return our desired name, rather than our stored name, and the value
                    // in our storage, our current `v`.
                    let (desired, _stored) = step.inputs.iter().find(|(_, v)| k == *v)?;
                    Some((desired.clone(), v))
                })
                .chain(
                    step.values
                        .iter()
                        .map(|(k, v)| (k.clone(), Value::String(v.clone()))),
                )
                .collect();
            let OperationResult {
                blob,
                derivation_accounts: new_accounts,
            } = operation.invoke(&inputs, &derived_keys);
            // Keys are single-use: clear them after each invocation.
            derived_keys.clear();
            derivation_accounts.extend(new_accounts);
            data.extend(blob.into_iter().filter_map(|(k, v)| {
                // We have our stored name, `k`, which matches with this inner loop's `v`. We
                // need to return our desired name, rather than our stored name, and the value
                // in our storage, our current `v`.
                let (_given, stored) = step.outputs.iter().find(|(k1, _)| k == **k1)?;
                Some((stored.clone(), v))
            }));
            // Add requested derivation keys and clear derivation account requests.
            if !derivation_accounts.is_empty() {
                let Some((algo, path_prefix)) = operation.derivation_configuration() else {
                    return Err(WorkflowError::DerivationConfigurationNotFound(step_type));
                };
                derived_keys.extend(derive_keys(algo, path_prefix, &derivation_accounts));
            }
            derivation_accounts.clear();
        }
        // Only expose the outputs of the final step to the caller.
        if let Some(last_step) = &self.steps.last() {
            let values = last_step.outputs.values().collect::<HashSet<_>>();
            data.retain(|stored_name, _| values.contains(stored_name));
        }
        Ok(data)
    }
}
/// Loading of workflow inputs from an external source.
pub trait WorkflowHandler {
    /// Load all inputs for the Workflow from some external source, such as CLI arguments or
    /// JSON payloads. The inputs can then be used to simulate or perform a workflow.
    fn load_inputs(&self) -> StringMap;
}

/// The configuration for an Icepick operation that can be invoked.
///
/// Implementors of this trait should include all necessary requirements to invoke the operation
/// within themselves.
pub trait InvocableOperation {
    /// Invoke the operation with the supplied inputs and derived keys.
    fn invoke(&self, input: &StringMap<Value>, derived_keys: &[Vec<u8>]) -> OperationResult;

    /// The name of the operation.
    fn name(&self) -> &String;

    /// The names of arguments that can be passed to the function.
    fn argument_names(&self) -> impl Iterator<Item = &String>;

    /// The derivation algorithm and derivation path to be prefixed to all derivation requests.
    fn derivation_configuration(&self) -> Option<(&DerivationAlgorithm, &DerivationPath)>;
}

View File

@ -4,26 +4,12 @@ version = "0.1.0"
edition = "2021"
[dependencies]
bincode = "1.3.3"
chrono = { version = "0.4.39", default-features = false, features = ["now", "serde", "std"] }
clap = { version = "4.5.20", features = ["cargo", "derive", "string"] }
icepick-module = { version = "0.1.0", path = "../icepick-module" }
icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" }
keyfork-derive-util = { version = "0.2.1", registry = "distrust" }
keyfork-prompt = { version = "0.2.1", registry = "distrust", default-features = false }
keyfork-shard = { version = "0.3.0", registry = "distrust", default-features = false, features = ["openpgp", "openpgp-card"] }
keyforkd-client = { version = "0.2.1", registry = "distrust" }
keyforkd-models = { version = "0.2.0", registry = "distrust" }
miniquorum = { version = "0.1.0", path = "../miniquorum", default-features = false }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["arbitrary_precision"] }
serde_yaml = "0.9.34"
smex = { version = "0.1.0", registry = "distrust" }
serde_json.workspace = true
thiserror = "2.0.3"
toml = "0.8.19"
[build-dependencies]
bincode = "1.3.3"
icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" }
serde_yaml = "0.9.34"
smex = { version = "0.1.0", registry = "distrust" }

View File

@ -1,55 +0,0 @@
use icepick_workflow::Workflow;
use std::{collections::BTreeMap, path::{PathBuf, Path}};
/// Read a required environment variable, registering it with Cargo so the
/// build script re-runs when it changes.
///
/// Panics with a descriptive message when the variable is unset or not valid
/// Unicode (the previous `.expect(var)` panic message was only the name).
fn env_var(var: &'static str) -> String {
    println!("cargo::rerun-if-env-changed={var}");
    std::env::var(var)
        .unwrap_or_else(|e| panic!("required environment variable {var}: {e}"))
}
/// Tell Cargo to re-run the build script whenever `path` changes.
fn track_path(path: &Path) {
    // `Path::display` renders non-UTF-8 paths lossily instead of panicking
    // like the previous `to_str().unwrap()`.
    println!("cargo::rerun-if-changed={}", path.display());
}
/// Build script: collect per-module workflow YAML files from `workflows/`,
/// serialize them with bincode, hex-encode, and write `workflows.hex` into
/// OUT_DIR for inclusion at compile time.
fn main() {
    let out_dir = env_var("OUT_DIR");
    let crate_dir = env_var("CARGO_MANIFEST_DIR");
    let workflows_dir = PathBuf::from(crate_dir).join("workflows");
    track_path(&workflows_dir);
    // Workflows are grouped by module: workflows/<module>/<workflow>.yaml
    let mut workflows_by_module: BTreeMap<String, Vec<Workflow>> = Default::default();
    for module_dir in std::fs::read_dir(&workflows_dir).unwrap() {
        // `dbg!` debug leftovers removed from this loop.
        let module_dir = module_dir.unwrap();
        let path = module_dir.path();
        if !path.is_dir() {
            panic!("found unexpected file {}", path.to_string_lossy());
        }
        let mut workflows = vec![];
        for workflow_file in std::fs::read_dir(&path).unwrap() {
            let workflow_file = workflow_file.unwrap();
            let path = workflow_file.path();
            if !path.is_file() {
                panic!("found unexpected non-file {}", path.to_string_lossy());
            }
            let file_content = std::fs::read(path).unwrap();
            let workflow: Workflow = serde_yaml::from_slice(&file_content).unwrap();
            workflows.push(workflow);
        }
        // Sort for deterministic output regardless of readdir order.
        workflows.sort_by(|a, b| a.name.cmp(&b.name));
        workflows_by_module.insert(
            module_dir.file_name().to_str().unwrap().to_owned(),
            workflows,
        );
    }
    let out_path = PathBuf::from(out_dir).join("workflows.hex");
    let result = bincode::serialize(&workflows_by_module).unwrap();
    let hexed = smex::encode(&result);
    std::fs::write(out_path, hexed).unwrap();
}

View File

@ -1,11 +1,10 @@
use clap::{builder::ArgPredicate, command, value_parser};
use clap::command;
use icepick_module::help::*;
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
use serde::{Deserialize, Serialize};
use std::{
collections::{HashMap, BTreeMap},
collections::HashMap,
io::{IsTerminal, Write},
path::PathBuf,
process::{Command, Stdio},
};
@ -71,7 +70,7 @@ struct ModuleConfig {
/// All workflows for a module.
#[serde(rename = "workflow", default)]
workflows: Vec<icepick_workflow::Workflow>,
workflows: Vec<workflow::Workflow>,
}
mod serde_derivation {
@ -113,11 +112,6 @@ struct Config {
// command name, invocable binary, operations
type Commands<'a> = &'a [(String, String, Vec<Operation>)];
fn default_workflows() -> HashMap<String, Vec<icepick_workflow::Workflow>> {
let workflows_hex = include_str!(concat!(env!("OUT_DIR"), "/workflows.hex"));
bincode::deserialize(&smex::decode(workflows_hex).unwrap()).unwrap()
}
pub fn do_cli_thing() {
/* parse config file to get module names */
let config_file = std::env::vars().find_map(|(k, v)| {
@ -126,12 +120,9 @@ pub fn do_cli_thing() {
}
None
});
let config_path = config_file.unwrap_or_else(|| "/etc/icepick/icepick.toml".to_string());
let config_path = config_file.unwrap_or_else(|| "icepick.toml".to_string());
let config_content = std::fs::read_to_string(config_path).expect("can't read config file");
let mut config: Config = match toml::from_str(&config_content) {
Ok(config) => config,
Err(e) => panic!("Error while loading TOML config: {e}"),
};
let mut config: Config = toml::from_str(&config_content).expect("config file had invalid toml");
config.modules.push(ModuleConfig {
name: "internal".to_string(),
command_name: Default::default(),
@ -139,39 +130,6 @@ pub fn do_cli_thing() {
derivation_prefix: Default::default(),
workflows: Default::default(),
});
config.modules.push(ModuleConfig {
name: "ed25519".to_string(),
command_name: Default::default(),
algorithm: Some(DerivationAlgorithm::Ed25519),
// TODO: impl Last
derivation_prefix: Default::default(),
workflows: Default::default(),
});
let workflows = default_workflows();
for module in &mut config.modules {
if let Some(module_workflows) = workflows.get(&module.name) {
module.workflows.extend(module_workflows.iter().cloned());
}
}
let workflows_file = std::env::vars().find_map(|(k, v)| {
if k == "ICEPICK_WORKFLOWS_FILE" {
return Some(v);
}
None
});
let workflows_path = workflows_file.unwrap_or_else(|| "workflows.yaml".to_string());
if let Ok(content) = std::fs::read(&workflows_path) {
let workflows: HashMap<String, Vec<icepick_workflow::Workflow>> =
serde_yaml::from_slice(&content).unwrap();
for module in &mut config.modules {
if let Some(module_workflows) = workflows.get(&module.name) {
module.workflows.extend(module_workflows.iter().cloned());
}
}
}
let mut commands = vec![];
let mut icepick_command = command!();
@ -208,89 +166,12 @@ pub fn do_cli_thing() {
}
let workflows = workflows.leak();
let mut workflow_command = clap::Command::new("workflow")
.about("Run a pre-defined Icepick workflow.")
.arg(
clap::arg!(--"run-quorum" <FILE> "Run a workflow signed by a quorum of approvers.")
.long_help(concat!(
"Run a workflow signed by a quorum of approvers. ",
"This command requires either `--shardfile` or `--keyring`. If given a ",
"Shardfile, the certificates stored within the Shardfile will be used to ",
"verify the quorum payload. If given an OpenPGP keyring, the ",
"certificates within the keyring will be used to verify the quorum ",
"payload. Both formats require all keys to be signed by the key matching a ",
"currently plugged-in OpenPGP smartcard."
))
.value_parser(value_parser!(PathBuf))
.conflicts_with_all([
"simulate-workflow",
"export-for-quorum",
"add-signature-to-quorum",
]),
)
.arg(
clap::arg!(--"add-signature-to-quorum" <FILE> "Add a signature to a workflow quorum.")
.long_help(concat!(
"Add a signature to a workflow quorum. ",
"Any existing signatures will be verified. ",
"This command requires either `--shardfile` or `--keyring`. If given a ",
"Shardfile, the certificates stored within the Shardfile will be used to ",
"verify the quorum payload. If given an OpenPGP keyring, the ",
"certificates within the keyring will be used to verify the quorum ",
"payload. Both formats require all keys to be signed by the key matching a ",
"currently plugged-in OpenPGP smartcard."
))
.value_parser(value_parser!(PathBuf)),
)
.arg(
clap::arg!(--"keyring" <FILE> "OpenPGP Keyring file for verifying quorum.")
.value_parser(value_parser!(PathBuf))
.requires_ifs([
(ArgPredicate::IsPresent, "run-quorum"),
(ArgPredicate::IsPresent, "add-signature-to-quorum"),
]),
)
.arg(
clap::arg!(--"quorum-threshold" <THRESHOLD> "Quorum of signatures required to run.")
.long_help(concat!(
"Quorum of signatures required to run. ",
"When not present, the default behavior is to require a signature from ",
"every certificate present."
))
.value_parser(value_parser!(u8))
.requires("run-quorum")
.conflicts_with("shardfile"), // Shardfile contains its own threshold.
)
.arg(
clap::arg!(--"shardfile" <FILE> "OpenPGP Shardfile for verifying quorum.")
.long_help(concat!(
"OpenPGP Shardfile for verifying quorum. ",
"An OpenPGP Smartcard will be required to decrypt the quorum threshold and ",
"OpenPGP certificates used for verifying the payload.",
))
.value_parser(value_parser!(PathBuf))
.requires_ifs([
(ArgPredicate::IsPresent, "run-quorum"),
(ArgPredicate::IsPresent, "add-signature-to-quorum"),
])
.conflicts_with("keyring"),
)
.arg(clap::arg!(--"simulate-workflow" "Simulate an Icepick Workflow.").global(true))
.arg(
clap::arg!(
--"export-for-quorum"
"Export the given inputs as a quorum file."
)
.global(true),
)
.arg(
clap::arg!(--"sign" "Sign the exported workflow values.")
.global(true)
.requires_if(ArgPredicate::IsPresent, "export-for-quorum"),
);
.about("Run a pre-defined Icepick workflow")
.arg(clap::arg!(--"simulate-workflow").global(true));
for module in workflows.iter() {
let mut module_subcommand = clap::Command::new(module.0.as_str());
for workflow in &module.1 {
module_subcommand = module_subcommand.subcommand(workflow::generate_command(workflow));
module_subcommand = module_subcommand.subcommand(workflow.generate_command());
}
workflow_command = workflow_command.subcommand(module_subcommand);
}
@ -324,69 +205,18 @@ pub fn do_cli_thing() {
// If we have a Workflow command, run the workflow and exit.
if let Some(("workflow", matches)) = matches.subcommand() {
if let Some((module_name, matches)) = matches.subcommand() {
let (workflow_name, matches) = matches
.subcommand()
.expect("icepick workflow: missing workflow");
let workflow = workflows
.iter()
.find(|(module, _)| module == module_name)
.and_then(|(_, workflows)| workflows.iter().find(|x| x.name == workflow_name))
.expect("workflow from CLI should match config");
workflow::handle(workflow, module_name, matches, commands, &config.modules);
} else if let Some(payload_file) = matches.get_one::<PathBuf>("add-signature-to-quorum") {
let purpose = workflow::Purpose::AddSignature;
let mut payload = {
if let Some(keyring_file) = matches.get_one::<PathBuf>("keyring") {
workflow::parse_quorum_file(
payload_file,
keyring_file,
matches.get_one::<u8>("quorum-threshold").copied(),
purpose,
)
} else if let Some(shardfile) = matches.get_one::<PathBuf>("shardfile") {
workflow::parse_quorum_with_shardfile(payload_file, shardfile, purpose)
} else {
panic!("neither --keyring nor --shardfile were given, no keys to verify")
}
};
payload.add_signature().unwrap();
let output_file = payload_file.with_extension("tmp");
let mut file = std::fs::File::create_new(&output_file).unwrap();
serde_json::to_writer_pretty(&mut file, &payload).unwrap();
drop(file);
std::fs::copy(&output_file, payload_file).unwrap();
std::fs::remove_file(output_file).unwrap();
} else if let Some(payload_file) = matches.get_one::<PathBuf>("run-quorum") {
let purpose = workflow::Purpose::RunQuorum;
let (module_name, workflow_name, inputs) = {
if let Some(keyring_file) = matches.get_one::<PathBuf>("keyring") {
workflow::parse_quorum_file(
payload_file,
keyring_file,
matches.get_one::<u8>("quorum-threshold").copied(),
purpose,
)
.into_values()
} else if let Some(shardfile) = matches.get_one::<PathBuf>("shardfile") {
workflow::parse_quorum_with_shardfile(payload_file, shardfile, purpose)
.into_values()
} else {
panic!("neither --keyring nor --shardfile were given, no keys to verify")
}
};
let inputs: BTreeMap<String, serde_json::Value> =
serde_json::from_value(inputs).unwrap();
let workflow = workflows
.iter()
.find(|(module, _)| *module == module_name)
.and_then(|(_, workflows)| workflows.iter().find(|x| x.name == workflow_name))
.expect("workflow from CLI should match config");
workflow::handle_payload(workflow, inputs, commands, &config.modules);
}
let (module_name, matches) = matches
.subcommand()
.expect("icepick workflow: missing module");
let (workflow_name, matches) = matches
.subcommand()
.expect("icepick workflow: missing workflow");
let workflow = workflows
.iter()
.find(|(module, _)| module == module_name)
.and_then(|(_, workflows)| workflows.iter().find(|x| x.name == workflow_name))
.expect("workflow from CLI should match config");
workflow.handle(matches, commands, &config.modules);
return;
}
@ -478,17 +308,10 @@ pub fn do_cli_thing() {
let mut input = child.stdin.take().unwrap();
serde_json::to_writer(&mut input, &json).unwrap();
input.write_all(b"\n{\"operation\": \"exit\"}\n").unwrap();
let output = child.wait_with_output().unwrap();
let stdout = &output.stdout;
if output.status.success() {
let json: serde_json::Value =
serde_json::from_slice(stdout).expect("valid json");
let json_as_str = serde_json::to_string(&json).unwrap();
println!("{json_as_str}");
} else {
eprintln!("Error while invoking operation, check logs");
std::process::exit(1);
}
let output = child.wait_with_output().unwrap().stdout;
let json: serde_json::Value = serde_json::from_slice(&output).expect("valid json");
let json_as_str = serde_json::to_string(&json).unwrap();
println!("{json_as_str}");
}
}
}

View File

@ -1,44 +1,69 @@
use icepick_workflow::{Input, InvocableOperation, OperationResult, StringMap, Workflow};
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationPath};
use keyfork_shard::{openpgp::OpenPGP, Format};
use miniquorum::{Payload, PayloadVerification};
use keyfork_derive_util::DerivationIndex;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::{
collections::{HashMap, HashSet},
io::Write,
process::{Command, Stdio},
};
use super::{derive_keys, get_command, Commands, ModuleConfig, Operation};
/// The purpose for interacting with a payload.
pub enum Purpose {
/// Adding a signature.
AddSignature,
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Workflow {
pub name: String,
/// Running a quorum-signed payload.
RunQuorum,
#[serde(default)]
pub inputs: Vec<String>,
#[serde(rename = "step")]
steps: Vec<WorkflowStep>,
}
pub type StringMap = std::collections::HashMap<String, String>;
/// A single step of a workflow: one operation invocation plus the mappings
/// between the workflow's in-memory storage and the operation's named
/// inputs/outputs.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorkflowStep {
    /// The operation to invoke, e.g. `"cosmos-sign"`; matched against the
    /// `{module}-{operation}` names built when loading operations.
    r#type: String,
    // NOTE(review): `blob` is deserialized here but not referenced by the
    // visible step-handling code — confirm whether it is still used.
    #[serde(default)]
    blob: StringMap,
    /// Literal values passed to the operation as-is: key = operation argument
    /// name, value = constant string (wrapped as a JSON string at invocation).
    #[serde(default)]
    values: StringMap,
    /// Input mappings: key = operation argument name, value = the in-memory
    /// storage name whose current value is supplied for that argument.
    #[serde(default)]
    inputs: StringMap,
    /// Output mappings: key = name returned by the operation, value = the
    /// in-memory storage name the returned value is saved under.
    #[serde(default)]
    outputs: StringMap,
}
#[derive(Clone, Debug)]
struct CLIOperation {
/// The name of the operation (i.e. `transfer-token`).
struct InvocableOperation {
module: String,
name: String,
/// The binary to invoke the operation.
binary: String,
/// Information describing the invocation requirements of the operation.
operation: Operation,
/// The derivation algorithm used when deriving keys for the operation.
derivation_algorithm: Option<DerivationAlgorithm>,
/// The derivation prefix used when deriving keys for the operation.
derivation_prefix: Option<DerivationPath>,
}
impl InvocableOperation for CLIOperation {
fn invoke(&self, input: &StringMap<Value>, derived_keys: &[Vec<u8>]) -> OperationResult {
// TODO: This should probably be migrated to an actual Result type, instead of
// currently just shoving everything in "blob". Probably done after derivation_accounts
// gets hoisted out of here.
/// The JSON document an operation writes to stdout when invoked.
#[derive(Serialize, Deserialize)]
struct OperationResult {
    // All values returned from an operation, keyed by the operation's output
    // names; steps map these into workflow storage via their `outputs` table.
    blob: HashMap<String, Value>,
    // Any requested accounts from an operation; keys for these are derived
    // before the next operation is invoked.
    //
    // TODO: Move this to its own step.
    #[serde(default)]
    derivation_accounts: Vec<DerivationIndex>,
}
impl InvocableOperation {
fn invoke(&self, input: &HashMap<String, Value>, derived_keys: &[Vec<u8>]) -> OperationResult {
let (command, args) = get_command(&self.binary);
let json = serde_json::json!({
@ -62,218 +87,204 @@ impl InvocableOperation for CLIOperation {
let result = child.wait_with_output().unwrap();
if !result.status.success() {
panic!(
"Bad exit ({}: {}): {}",
&self.binary,
&self.operation.name,
String::from_utf8_lossy(&result.stderr)
);
panic!("Bad exit: {}", String::from_utf8_lossy(&result.stderr));
}
let output = result.stdout;
let json: OperationResult = serde_json::from_slice(&output).expect("valid json");
json
}
fn name(&self) -> &String {
&self.name
}
fn argument_names(&self) -> impl Iterator<Item = &String> {
self.operation.arguments.iter().map(|i| &i.name)
}
fn derivation_configuration(&self) -> Option<(&DerivationAlgorithm, &DerivationPath)> {
self.derivation_algorithm
.as_ref()
.zip(self.derivation_prefix.as_ref())
}
}
pub fn generate_command(workflow: &Workflow) -> clap::Command {
let mut command = clap::Command::new(&workflow.name).about(&workflow.description);
// NOTE: all required inputs are still marked as .required(false) since they could be included
// in the `--input-file` argument.
for input in workflow.inputs.iter() {
for arg in input.identifiers() {
let arg = clap::Arg::new(arg)
impl Workflow {
/// Generate a [`clap::Command`] for a [`Workflow`], where the inputs can be defined either by
/// command-line arguments or via a JSON input file.
pub fn generate_command(&self) -> clap::Command {
let mut command = clap::Command::new(&self.name).arg(clap::arg!(
--"input-file" [FILE]
"A file containing any inputs not passed on the command line"
));
for input in &self.inputs {
let arg = clap::Arg::new(input)
.required(false)
.help(&input.description)
.long(arg.replace('_', "-"))
.value_name(arg.to_uppercase())
.conflicts_with_all(
input
.identifiers()
.filter(|name| *name != arg)
.collect::<Vec<_>>(),
);
.long(input.replace('_', "-"))
.value_name(input.to_uppercase());
command = command.arg(arg);
}
command
}
command.arg(clap::arg!(
--"input-file" [FILE]
"A file containing any inputs not passed on the command line"
))
}
fn load_inputs<'a>(
inputs: impl IntoIterator<Item = &'a Input>,
matches: &clap::ArgMatches,
) -> StringMap {
let mut map = StringMap::default();
let input_file: Option<StringMap> = matches
.get_one::<std::path::PathBuf>("input-file")
.and_then(|p| std::fs::File::open(p).ok())
.and_then(|f| serde_json::from_reader(f).ok());
for input in inputs {
let identifier = &input.name;
match input
.identifiers()
.filter_map(|name| matches.get_one::<String>(name))
.next()
{
Some(value) => {
map.insert(identifier.clone(), value.clone());
continue;
}
None => {
if let Some(value) = input_file.as_ref().and_then(|f| f.get(identifier)) {
map.insert(identifier.clone(), value.clone());
fn load_inputs(&self, matches: &clap::ArgMatches) -> StringMap {
let mut map = StringMap::default();
let input_file: Option<StringMap> = matches
.get_one::<std::path::PathBuf>("input-file")
.and_then(|p| std::fs::File::open(p).ok())
.and_then(|f| serde_json::from_reader(f).ok());
for input in &self.inputs {
match matches.get_one::<String>(input) {
Some(value) => {
map.insert(input.clone(), value.clone());
continue;
}
None => {
if let Some(value) = input_file.as_ref().and_then(|f| f.get(input)) {
map.insert(input.clone(), value.clone());
continue;
}
}
}
panic!("Key was not found: {input}");
}
map
}
fn simulate_workflow(&self, mut data: HashSet<String>, operations: &[InvocableOperation]) {
// simulate the steps by using a HashSet to traverse the inputs and outputs and ensure
// there's no inconsistencies
for (i, step) in self.steps.iter().enumerate() {
// NOTE: overflow possible but unlikely
let step_index = i + 1;
let step_type = &step.r#type;
// Find the relevant Operation
let Some(invocable) = operations.iter().find(|op| op.name == *step_type) else {
panic!("Could not find operation: {step_type}");
};
// Check if we have the keys we want to pass into the module.
for in_memory_name in step.inputs.values() {
if !data.contains(in_memory_name) && !step.values.contains_key(in_memory_name) {
panic!("Failed simulation: step #{step_index} ({step_type}): missing value {in_memory_name}");
}
}
// Check that the module accepts those keys.
for module_input_name in step.inputs.keys() {
if !invocable
.operation
.arguments
.iter()
.any(|arg| *module_input_name == arg.name)
{
eprintln!("Simulation: step #{step_index} ({step_type}): input value {module_input_name} will be passed through as JSON input");
}
}
// Add the keys we get from the module.
for in_memory_name in step.outputs.values() {
data.insert(in_memory_name.clone());
}
}
if input.is_required() {
panic!("Required workflow input was not found: {identifier}");
}
}
map
}
fn run_workflow(
&self,
mut data: HashMap<String, Value>,
operations: &[InvocableOperation],
config: &[ModuleConfig],
) {
let mut derived_keys = vec![];
let mut derivation_accounts = vec![];
fn load_operations(commands: Commands, config: &[ModuleConfig]) -> Vec<CLIOperation> {
let mut operations = vec![];
for step in &self.steps {
let operation = operations
.iter()
.find(|op| op.name == step.r#type)
.expect("operation matched step type");
for (module_name, module_binary, module_operations) in commands {
for operation in module_operations {
let operation_name = &operation.name;
let module_config = config.iter().find(|conf| conf.name == *module_name);
let io = CLIOperation {
name: format!("{module_name}-{operation_name}"),
binary: module_binary.clone(),
operation: operation.clone(),
derivation_algorithm: module_config.and_then(|m| m.algorithm.clone()),
derivation_prefix: module_config.and_then(|m| m.derivation_prefix.clone()),
};
operations.push(io);
// Load keys from Keyfork, from previously requested workflow
let config = config
.iter()
.find(|module| module.name == operation.module)
.expect("could not find module config");
let algo = &config.algorithm;
let path_prefix = &config.derivation_prefix;
if !derivation_accounts.is_empty() {
derived_keys.extend(derive_keys(
algo.as_ref()
.expect("a module requested keys but didn't provide algorithm"),
path_prefix
.as_ref()
.expect("a module requested keys but didn't provide prefix"),
&derivation_accounts,
));
}
derivation_accounts.clear();
// Prepare all inputs for the operation invocation
//
// NOTE: this could be .clone().into_iter() but it would create an extra allocation of
// the HashMap, and an unnecessary alloc of the key.
let inputs: HashMap<String, Value> = data
.iter()
.map(|(k, v)| (k, v.clone()))
.filter_map(|(k, v)| {
// We have our stored name, `k`, which matches with this inner loop's `v`. We
// need to return our desired name, rather than our stored name, and the value
// in our storage, our current `v`.
let (desired, _stored) = step.inputs.iter().find(|(_, v)| k == *v)?;
Some((desired.clone(), v))
})
.chain(
step.values
.iter()
.map(|(k, v)| (k.clone(), Value::String(v.clone()))),
)
.collect();
let OperationResult {
blob,
derivation_accounts: new_accounts,
} = operation.invoke(&inputs, &derived_keys);
derived_keys.clear();
derivation_accounts.extend(new_accounts);
data.extend(blob.into_iter().filter_map(|(k, v)| {
// We have our stored name, `k`, which matches with this inner loop's `v`. We
// need to return our desired name, rather than our stored name, and the value
// in our storage, our current `v`.
let (_given, stored) = step.outputs.iter().find(|(k1, _)| k == **k1)?;
Some((stored.clone(), v))
}));
}
let last_outputs = &self.steps.last().unwrap().outputs;
data.retain(|stored_name, _| {
last_outputs
.values()
.any(|storage_name| stored_name == storage_name)
});
let json_as_str = serde_json::to_string(&data).unwrap();
println!("{json_as_str}");
}
operations
}
pub fn handle(&self, matches: &clap::ArgMatches, modules: Commands, config: &[ModuleConfig]) {
let inputs = self.load_inputs(matches);
let data: HashMap<String, Value> = inputs
.into_iter()
.map(|(k, v)| (k, Value::String(v)))
.collect();
pub fn parse_quorum_file(
quorum_path: impl AsRef<std::path::Path>,
cert_path: impl AsRef<std::path::Path>,
threshold: Option<u8>,
purpose: Purpose,
) -> Payload {
let (payload, certs) = Payload::load(quorum_path, cert_path).unwrap();
let threshold = threshold.unwrap_or(u8::try_from(certs.len()).expect("too many certs!"));
let policy = match purpose {
Purpose::AddSignature => {
// All signatures must be valid, but we don't require a minimum.
PayloadVerification::new().with_threshold(0)
let mut operations = vec![];
for (module_name, module_binary, module_operations) in modules {
for operation in module_operations {
let operation_name = &operation.name;
let io = InvocableOperation {
module: module_name.clone(),
name: format!("{module_name}-{operation_name}"),
binary: module_binary.clone(),
operation: operation.clone(),
};
operations.push(io);
}
}
Purpose::RunQuorum => PayloadVerification::new().with_threshold(threshold),
};
payload.verify_signatures(&certs, &policy, None).unwrap();
payload
}
pub fn parse_quorum_with_shardfile(
quorum_path: impl AsRef<std::path::Path>,
shardfile_path: impl AsRef<std::path::Path>,
purpose: Purpose,
) -> Payload {
let payload_file = std::fs::File::open(quorum_path).unwrap();
let payload: Payload = serde_json::from_reader(payload_file).unwrap();
let opgp = OpenPGP;
let (threshold, certs) = opgp
.decrypt_metadata_from_file(
None::<&std::path::Path>,
std::fs::File::open(shardfile_path).unwrap(),
keyfork_prompt::default_handler().unwrap(),
)
.unwrap();
let policy = match purpose {
Purpose::AddSignature => {
// All signatures must be valid, but we don't require a minimum.
PayloadVerification::new().with_threshold(0)
if matches.get_flag("simulate-workflow") {
self.simulate_workflow(data.into_keys().collect(), &operations);
return;
}
Purpose::RunQuorum => PayloadVerification::new().with_threshold(threshold),
};
payload.verify_signatures(&certs, &policy, None).unwrap();
payload
}
pub fn handle_payload(
workflow: &Workflow,
inputs: StringMap<Value>,
modules: Commands,
config: &[ModuleConfig],
) {
let operations = load_operations(modules, config);
let result = workflow
.run_workflow(inputs, &operations, &derive_keys)
.expect("Invocation failure");
println!("{}", serde_json::to_string(&result).expect("valid JSON"));
}
pub fn handle(
workflow: &Workflow,
module_name: &str,
matches: &clap::ArgMatches,
modules: Commands,
config: &[ModuleConfig],
) {
let inputs = load_inputs(&workflow.inputs, matches);
let data: StringMap<Value> = inputs
.into_iter()
.map(|(k, v)| (k, Value::String(v)))
.collect();
let operations = load_operations(modules, config);
if matches.get_flag("simulate-workflow") {
let reports = workflow
.simulate_workflow(data.into_keys().collect(), &operations)
.expect("Simulation failure");
for report in reports {
println!("{report}");
}
return;
self.run_workflow(data, &operations, config);
}
if matches.get_flag("export-for-quorum") {
let mut payload = Payload::new(
serde_json::to_value(data).unwrap(),
module_name,
&workflow.name,
);
if matches.get_flag("sign") {
payload.add_signature().unwrap();
}
println!("{}", serde_json::to_string_pretty(&payload).unwrap());
return;
}
let result = workflow
.run_workflow(data, &operations, &derive_keys)
.expect("Invocation failure");
println!("{}", serde_json::to_string(&result).expect("valid JSON"));
}

View File

@ -1,43 +0,0 @@
# Workflow: broadcast a previously signed Cosmos transaction, and capture the
# account data the offline signing workflows need.
name: "broadcast"
description: |-
  Broadcast a transaction on a Cosmos-based blockchain.
inputs:
  - name: "nonce_address"
    description: >-
      The address of the account used for the transaction nonce.
  - name: "chain_name"
    description: >-
      The name of the Cosmos chain to broadcast a transaction on.
step:
  # Resolve chain configuration from its name.
  - type: "cosmos-get-chain-info"
    inputs:
      chain_name: "chain_name"
    outputs:
      blockchain_config: "blockchain_config"
  # Fetch the account/sequence numbers for the nonce account.
  - type: "cosmos-get-account-data"
    inputs:
      account_id: "nonce_address"
      blockchain_config: "blockchain_config"
    outputs:
      account_number: "account_number"
      sequence_number: "sequence_number"
  # Persist account data; the signing workflows load this file.
  - type: "internal-save-file"
    values:
      filename: "account_info.json"
    inputs:
      account_number: "account_number"
      sequence_number: "sequence_number"
  # Load the transaction produced by a signing workflow.
  - type: "internal-load-file"
    values:
      filename: "transaction.json"
    outputs:
      transaction: "transaction"
  # Submit the signed transaction to the network.
  - type: "cosmos-broadcast"
    inputs:
      blockchain_config: "blockchain_config"
      transaction: "transaction"
    outputs:
      status: "status"
      url: "url"
      error: "error"
      error_code: "error_code"

View File

@ -1,26 +0,0 @@
# Workflow: derive a wallet and report its address on a Cosmos chain.
name: generate-address
description: |-
  Generate an address on a given Cosmos-based blockchain.
inputs:
  - name: chain_name
    description: >-
      The name of the Cosmos chain you'd like to generate an address for.
  - name: account
    description: >-
      The account to use, if not the default account.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:
      chain_name: chain_name
    outputs:
      blockchain_config: blockchain_config
  # Derive the wallet for the (optional) account index.
  - type: cosmos-generate-wallet
    inputs:
      account: account
      blockchain_config: blockchain_config
  - type: cosmos-get-wallet-address
    inputs:
      blockchain_config: blockchain_config
    outputs:
      pubkey: pubkey

View File

@ -1,61 +0,0 @@
# Workflow: delegate (stake) coins to a validator on a Cosmos chain and save
# the signed transaction for later broadcast.
name: stake
description: |-
  Stake coins on the provided chain.
inputs:
  - name: delegate_address
    description: >-
      Address holding the coins to be staked to a validator.
  - name: validator_address
    description: >-
      Address of the validator operator.
  - name: chain_name
    description: >-
      The name of the Cosmos-based chain.
  - name: asset_name
    description: >-
      The name of the asset to stake.
  - name: asset_amount
    description: >-
      The amount of the asset to stake.
  - name: gas_factor
    description: >-
      An amount to multiply the required gas by; necessary if a chain requires
      more gas for a specific operation.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:
      chain_name: chain_name
    outputs:
      blockchain_config: blockchain_config
  # Account/sequence numbers are produced by the `broadcast` workflow and
  # carried over through this file.
  - type: internal-load-file
    values:
      filename: "account_info.json"
    outputs:
      account_number: account_number
      sequence_number: sequence_number
  - type: cosmos-stake
    inputs:
      delegate_address: delegate_address
      validator_address: validator_address
      amount: asset_amount
      denom: asset_name
      blockchain_config: blockchain_config
      gas_factor: gas_factor
    outputs:
      fee: fee
      tx_messages: tx_messages
  - type: cosmos-sign
    inputs:
      fee: fee
      tx_messages: tx_messages
      account_number: account_number
      sequence_number: sequence_number
      blockchain_config: blockchain_config
    outputs:
      transaction: signed_transaction
  # Persist the signed transaction for the `broadcast` workflow.
  - type: internal-save-file
    values:
      filename: "transaction.json"
    inputs:
      transaction: signed_transaction

View File

@ -1,55 +0,0 @@
# Workflow: sign a coin transfer on a Cosmos chain and save the signed
# transaction for later broadcast.
name: "transfer"
description: |-
  Transfer a Cosmos coin.
inputs:
  - name: "from_address"
    description: >-
      The address from which to send coin.
  - name: "to_address"
    description: >-
      The address to send coins to.
  - name: "asset_name"
    description: >-
      The name of the asset to send.
  - name: "chain_name"
    description: >-
      The name of the Cosmos chain the asset lives on.
  - name: "asset_amount"
    description: >-
      The amount of the asset to send.
step:
  - type: "cosmos-get-chain-info"
    inputs:
      chain_name: "chain_name"
    outputs:
      blockchain_config: "blockchain_config"
  # Account/sequence numbers come from the `broadcast` workflow's saved file.
  - type: "internal-load-file"
    values:
      filename: "account_info.json"
    outputs:
      account_number: "account_number"
      sequence_number: "sequence_number"
  - type: "cosmos-transfer"
    inputs:
      from_address: "from_address"
      to_address: "to_address"
      amount: "asset_amount"
      denom: "asset_name"
      blockchain_config: "blockchain_config"
    outputs:
      fee: "fee"
      tx_messages: "tx_messages"
  - type: "cosmos-sign"
    inputs:
      fee: "fee"
      tx_messages: "tx_messages"
      account_number: "account_number"
      sequence_number: "sequence_number"
      blockchain_config: "blockchain_config"
    outputs:
      transaction: "signed_transaction"
  # Persist the signed transaction for the `broadcast` workflow.
  - type: "internal-save-file"
    values:
      filename: "transaction.json"
    inputs:
      transaction: "signed_transaction"

View File

@ -1,53 +0,0 @@
# Workflow: sign a staking-rewards withdrawal on a Cosmos chain and save the
# signed transaction for later broadcast.
name: withdraw-rewards
description: |-
  Withdraw rewards gained from staking to a validator.
inputs:
  - name: delegate_address
    description: >-
      The owner of the staked coins; also, the recipient of rewards.
  - name: validator_address
    description: >-
      The validator from whom coins are staked.
  - name: chain_name
    description: >-
      The name of the Cosmos-based chain.
  - name: gas_factor
    description: >-
      An amount to multiply the required gas by; necessary if a chain requires
      more gas for a specific operation.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:
      chain_name: chain_name
    outputs:
      blockchain_config: blockchain_config
  # Account/sequence numbers come from the `broadcast` workflow's saved file.
  - type: internal-load-file
    values:
      filename: "account_info.json"
    outputs:
      account_number: account_number
      sequence_number: sequence_number
  - type: cosmos-withdraw-rewards
    inputs:
      delegate_address: delegate_address
      validator_address: validator_address
      blockchain_config: blockchain_config
      gas_factor: gas_factor
    outputs:
      fee: fee
      tx_messages: tx_messages
  - type: cosmos-sign
    inputs:
      fee: fee
      tx_messages: tx_messages
      account_number: account_number
      sequence_number: sequence_number
      blockchain_config: blockchain_config
    outputs:
      transaction: signed_transaction
  # Persist the signed transaction for the `broadcast` workflow.
  - type: internal-save-file
    values:
      filename: "transaction.json"
    inputs:
      transaction: signed_transaction

View File

@ -1,64 +0,0 @@
# Workflow: sign an unstake (undelegate) transaction on a Cosmos chain and
# save the signed transaction for later broadcast.
name: withdraw
description: |-
  Withdraw staked coins from a validator.
  Staked coins may be held for an unbonding period, depending on the chain upon
  which they are staked.
inputs:
  - name: delegate_address
    description: >-
      The owner of the staked coins.
  - name: validator_address
    description: >-
      The validator from whom coins are staked.
  - name: chain_name
    description: >-
      The name of the Cosmos-based chain.
  - name: asset_name
    description: >-
      The name of the asset to withdraw.
  - name: asset_amount
    description: >-
      The amount of the asset to withdraw.
  - name: gas_factor
    description: >-
      An amount to multiply the required gas by; necessary if a chain requires
      more gas for a specific operation.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:
      chain_name: chain_name
    outputs:
      blockchain_config: blockchain_config
  # Account/sequence numbers come from the `broadcast` workflow's saved file.
  - type: internal-load-file
    values:
      filename: "account_info.json"
    outputs:
      account_number: account_number
      sequence_number: sequence_number
  - type: cosmos-withdraw
    inputs:
      delegate_address: delegate_address
      validator_address: validator_address
      amount: asset_amount
      denom: asset_name
      blockchain_config: blockchain_config
      gas_factor: gas_factor
    outputs:
      fee: fee
      tx_messages: tx_messages
  - type: cosmos-sign
    inputs:
      fee: fee
      tx_messages: tx_messages
      account_number: account_number
      sequence_number: sequence_number
      blockchain_config: blockchain_config
    outputs:
      transaction: signed_transaction
  # Persist the signed transaction for the `broadcast` workflow.
  - type: internal-save-file
    values:
      filename: "transaction.json"
    inputs:
      transaction: signed_transaction

View File

@ -1,40 +0,0 @@
# Workflow: broadcast a previously signed Solana transaction, and capture the
# durable-nonce data the offline signing workflows need.
name: "broadcast"
description: |-
  Broadcast a transaction on the Solana blockchain.
inputs:
  - name: "nonce_address"
    description: >-
      The address of the nonce account.
  - name: "cluster"
    description: >-
      The name of the Solana cluster to broadcast the transaction on, if not
      mainnet-beta.
    optional: true
step:
  # Read the durable nonce value and its authority from the chain.
  - type: "sol-get-nonce-account-data"
    inputs:
      nonce_address: "nonce_address"
      cluster: "cluster"
    outputs:
      authority: "nonce_authority"
      durable_nonce: "nonce"
  # Persist nonce data; the signing workflows load this file.
  - type: "internal-save-file"
    values:
      filename: "nonce.json"
    inputs:
      nonce_authority: "nonce_authority"
      nonce_data: "nonce"
      nonce_address: "nonce_address"
  # Load the transaction produced by a signing workflow.
  - type: "internal-load-file"
    values:
      filename: "transaction.json"
    outputs:
      transaction: "transaction"
  # Submit the signed transaction to the cluster.
  - type: "sol-broadcast"
    inputs:
      cluster: "cluster"
      transaction: "transaction"
    outputs:
      status: "status"
      url: "url"
      error: "error"

View File

@ -1,15 +0,0 @@
# Workflow: derive a Solana wallet and report its public address.
name: generate-address
description: |-
  Generate a Solana address.
inputs:
  - name: account
    description: >-
      The account to use, if not the default account.
    optional: true
step:
  - type: sol-generate-wallet
    inputs:
      account: account
  - type: sol-get-wallet-address
    outputs:
      pubkey: pubkey

View File

@ -1,75 +0,0 @@
# Workflow: create and fund a Solana durable-nonce account whose nonce is
# controlled by the given authorization address.
name: "generate-nonce-account"
description: |-
  Using a temporary Keyfork instance, generate a nonce address for the given
  authorization address.
inputs:
  - name: "cluster"
    description: >-
      Name of the Solana cluster to generate the nonce account on, if not
      mainnet-beta.
  - name: "authorization_address"
    description: >-
      The address used to authorize advancing the nonce.

      The authorization address (also called "address" or "pubkey" in other
      workflows) is required to be a signer of the transaction, so the
      authorization address is often the principal address - the one performing
      the transaction.
step:
  # Derive a fresh funding wallet.
  - type: "sol-generate-wallet"
  - type: "sol-get-wallet-address"
    outputs:
      pubkey: "wallet_pubkey"
  # Block until the funding wallet holds enough lamports.
  - type: "sol-await-funds"
    inputs:
      address: "wallet_pubkey"
      cluster: "cluster"
    values:
      lamports: "1510000"
  - type: "sol-get-blockhash"
    inputs:
      cluster: "cluster"
    outputs:
      blockhash: "blockhash"
  # Build the create-nonce-account instructions and an ephemeral signing key.
  - type: "sol-create-nonce-account-and-signing-key"
    inputs:
      from_address: "wallet_pubkey"
      authorization_address: "authorization_address"
    outputs:
      transaction: "instructions"
      nonce_pubkey: "nonce_pubkey"
      nonce_privkey: "private_keys"
      derivation_accounts: "derivation_accounts"
  - type: "sol-compile"
    inputs:
      instructions: "instructions"
      derivation_accounts: "derivation_accounts"
      blockhash: "blockhash"
    outputs:
      transaction: "unsigned_transaction"
  - type: "sol-sign"
    inputs:
      blockhash: "blockhash"
      signing_keys: "private_keys"
      transaction: "unsigned_transaction"
    outputs:
      transaction: "signed_transaction"
  - type: "sol-broadcast"
    inputs:
      cluster: "cluster"
      transaction: "signed_transaction"
    outputs:
      status: "status"
      url: "url"
      error: "error"
  # Collect the final report, including the new nonce account address.
  - type: "internal-cat"
    inputs:
      status: "status"
      url: "url"
      nonce_account: "nonce_pubkey"
      error: "error"
    outputs:
      status: "status"
      url: "url"
      nonce_account: "nonce_account"
      error: "error"

View File

@ -1,60 +0,0 @@
# Workflow: sign an SPL token transfer using a saved durable nonce, and save
# the signed transaction for later broadcast.
name: transfer-token
description: |-
  Transfer SPL tokens held on the Solana blockchain.
inputs:
  - name: from_address
    description: >-
      The address from which to send tokens.
  - name: to_address
    description: >-
      The address to send coins to.
  - name: token_name
    description: >-
      The name of the token to transfer.
  - name: token_amount
    description: >-
      The amount of the token to transfer.
step:
  - type: sol-get-token-info
    inputs:
      token: token_name
    outputs:
      token_address: token_address
      token_decimals: token_decimals
  # Nonce data is produced by the `broadcast` workflow and carried over
  # through this file.
  - type: internal-load-file
    values:
      filename: "nonce.json"
    outputs:
      nonce_authority: nonce_authority
      nonce_data: nonce_data
      nonce_address: nonce_address
  - type: sol-transfer-token
    inputs:
      amount: token_amount
      token_address: token_address
      decimals: token_decimals
      to_address: to_address
      from_address: from_address
    outputs:
      instructions: instructions
      derivation_accounts: derivation_accounts
  - type: sol-compile
    inputs:
      instructions: instructions
      derivation_accounts: derivation_accounts
      nonce_address: nonce_address
      nonce_authority: nonce_authority
      nonce_data: nonce_data
    outputs:
      transaction: unsigned_transaction
  - type: sol-sign
    inputs:
      transaction: unsigned_transaction
      blockhash: nonce_data
    outputs:
      # BUGFIX: this previously stored the signed transaction as `transaction`,
      # but the save step below reads the stored name `signed_transaction`,
      # which no step produced — the workflow would fail with a missing value.
      # Store it under the name the save step expects (matching the `transfer`
      # workflow).
      transaction: signed_transaction
  # Persist the signed transaction for the `broadcast` workflow.
  - type: internal-save-file
    values:
      filename: "transaction.json"
    inputs:
      transaction: signed_transaction

View File

@ -1,49 +0,0 @@
name: "transfer"
description: |-
  Transfer SOL from one address to another.
inputs:
  - name: "to_address"
    description: >-
      The address to send SOL to.
  - name: "from_address"
    description: >-
      The address to send SOL from.
  - name: "amount"
    description: >-
      The amount of SOL to send.
step:
  # Load the durable-nonce details persisted by an earlier (online) step.
  - type: "internal-load-file"
    values:
      filename: "nonce.json"
    outputs:
      nonce_authority: "nonce_authority"
      nonce_data: "nonce_data"
      nonce_address: "nonce_address"
  # Build the SOL transfer instruction set.
  - type: "sol-transfer"
    inputs:
      from_address: "from_address"
      to_address: "to_address"
      amount: "amount"
    outputs:
      instructions: "instructions"
      derivation_accounts: "derivation_accounts"
  # Compile the instructions into an unsigned, nonce-backed transaction.
  - type: "sol-compile"
    inputs:
      instructions: "instructions"
      derivation_accounts: "derivation_accounts"
      nonce_address: "nonce_address"
      nonce_authority: "nonce_authority"
      nonce_data: "nonce_data"
    outputs:
      transaction: "unsigned_transaction"
  # Sign offline, using the nonce data in place of a recent blockhash.
  - type: "sol-sign"
    inputs:
      blockhash: "nonce_data"
      transaction: "unsigned_transaction"
    outputs:
      transaction: "signed_transaction"
  # Persist the signed transaction for later broadcast.
  - type: "internal-save-file"
    values:
      filename: "transaction.json"
    inputs:
      transaction: "signed_transaction"

View File

@ -1,25 +0,0 @@
name: generate-address
description: |-
  Generate a Spacemesh address
inputs:
  - name: account
    description: >-
      The account to use, if not the default account.
    optional: true
  - name: cluster
    description: >-
      The Spacemesh cluster to use, if not the mainnet.
    optional: true
step:
  # Derive wallet key material for the requested account.
  - type: spacemesh-generate-wallet
    inputs:
      account: account
  # Extract the Ed25519 public key from the derived key.
  - type: ed25519-get-pubkey
    outputs:
      pubkey: pubkey
  # Encode the public key as a wallet address for the chosen cluster.
  - type: spacemesh-get-wallet-address
    inputs:
      pubkey: pubkey
      cluster: cluster
    outputs:
      address: address

View File

@ -1,21 +0,0 @@
# MiniQuorum: an Icepick-specific subset of the Quorum decision-making system.
[package]
name = "miniquorum"
version = "0.1.0"
edition = "2021"

[features]
# `clap` gates the CLI binary's argument parsing; enabled by default.
default = ["clap"]

[dependencies]
bincode = "1.3.3"
card-backend-pcsc = "0.5.0"
chrono = { version = "0.4.39", default-features = false, features = ["std", "now", "serde"] }
clap = { version = "4.5.27", features = ["derive", "wrap_help"], optional = true }
keyfork-prompt = { version = "0.2.0", registry = "distrust", default-features = false }
openpgp-card = "0.4"
openpgp-card-sequoia = "0.2.2"
sequoia-openpgp = "1.22.0"
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
sha3 = "0.10.8"
thiserror = "2.0.11"

View File

@ -1,526 +0,0 @@
use chrono::prelude::*;
use keyfork_prompt::{
default_handler, prompt_validated_passphrase,
validators::{PinValidator, Validator},
};
use openpgp_card::{Error as CardError, StatusBytes};
use openpgp_card_sequoia::{state::Open, Card};
use sequoia_openpgp::{
self as openpgp,
armor::{Kind, Writer},
crypto::hash::Digest,
packet::{signature::SignatureBuilder, Packet},
parse::Parse,
serialize::Serialize as _,
types::{HashAlgorithm, SignatureType},
Cert, Fingerprint,
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::{collections::BTreeMap, fs::File, io::Read, path::Path};
#[derive(thiserror::Error, Debug)]
/// An error with a [`PayloadVerification`] policy.
#[error("{error} (policy: {policy:?})")]
pub struct Error {
    // The underlying failure.
    error: BaseError,
    // The policy in force when the failure occurred (shown in Display output).
    policy: PayloadVerification,
}
#[non_exhaustive]
#[derive(thiserror::Error, Debug)]
/// The underlying cause of a failed payload verification.
pub enum BaseError {
    /// In the given certificate keyring, the provided fingerprint was not found.
    #[error("fingerprint not found: {0}")]
    FingerprintNotFound(Fingerprint),
    /// No smartcard was found.
    #[error("no smartcard found")]
    NoSmartcard,
    /// None of the certificates in the given certificate keyring matched any plugged-in smartcard.
    #[error("no certs found matching any available smartcard")]
    NoCertMatchedSmartcard,
    /// The certificate was not trusted by the root of trust.
    #[error("untrusted certificate: {0} has not signed {1:?}")]
    UntrustedCertificates(Fingerprint, Vec<Fingerprint>),
    /// No certificate in the given certificate keyring matched the signature.
    #[error("no public key matched signature")]
    NoPublicKeyMatchedSignature,
    /// Not enough signatures matched based on the given threshold
    #[error("not enough signatures: {0} < {1}")]
    NotEnoughSignatures(u8, u8),
    /// A Payload was provided when an inner [`serde_json::Value`] was expected.
    #[error("a payload was provided when a non-payload JSON value was expected")]
    UnexpectedPayloadProvided,
    /// The JSON object is not a valid value.
    #[error("the JSON object is not a valid value")]
    InvalidJSONValue,
}
impl BaseError {
    /// Wrap this error with the verification policy that was in force,
    /// producing the public [`Error`] type.
    fn with_policy(self, policy: &PayloadVerification) -> Error {
        Error {
            error: self,
            policy: policy.clone(),
        }
    }
}
/// Recursively rewrite a JSON value into a canonical, deterministic form.
///
/// Objects become arrays of `[key, value]` pairs sorted by key (via a
/// `BTreeMap`), so two objects with identical contents but different key
/// order canonicalize to the same value. Arrays are canonicalized
/// element-wise; every other value passes through unchanged.
fn canonicalize(value: Value) -> Value {
    match value {
        Value::Object(map) => {
            // BTreeMap iteration order is sorted by key.
            let sorted: BTreeMap<String, Value> = map
                .into_iter()
                .map(|(key, val)| (key, canonicalize(val)))
                .collect();
            let pairs = sorted
                .into_iter()
                .map(|(key, val)| Value::Array(vec![Value::String(key), val]))
                .collect();
            Value::Array(pairs)
        }
        Value::Array(items) => Value::Array(items.into_iter().map(canonicalize).collect()),
        other => other,
    }
}
/// Serialize a payload JSON object to the canonical byte form that is signed.
///
/// The `signatures` field is removed first (a signature cannot cover itself),
/// the remainder is canonicalized for deterministic key ordering, and the
/// result is bincode-encoded.
fn unhashed(value: Value) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    let Value::Object(mut value) = value else {
        return Err(BaseError::InvalidJSONValue.into());
    };
    value.remove("signatures");
    let value = canonicalize(Value::Object(value));
    let bincoded = bincode::serialize(&value)?;
    Ok(bincoded)
}
/// Compute the SHA-512 digest of the canonical signable bytes of `value`.
fn hash(value: Value) -> Result<Box<dyn Digest>, Box<dyn std::error::Error>> {
    let bincoded = unhashed(value)?;
    let mut digest = HashAlgorithm::SHA512.context()?;
    digest.update(&bincoded);
    Ok(digest)
}
/// A signable Icepick workflow invocation: which workflow to run, its input
/// values, a creation timestamp, and any collected OpenPGP signatures.
#[derive(Serialize, Deserialize, Debug)]
pub struct Payload {
    // [module_name, workflow_name]
    workflow: [String; 2],
    values: Value,
    datetime: DateTime<Utc>,
    // ASCII-armored OpenPGP signature packets; defaults to empty when absent.
    #[serde(default)]
    signatures: Vec<String>,
}
/// Policy describing how many — and which — signatures a [`Payload`] must carry.
#[derive(Clone, Debug)]
pub struct PayloadVerification {
    // Minimum number of matching signatures (overridden when `one_each` is set).
    threshold: u8,
    // Error on any cryptographically invalid signature.
    error_on_invalid: bool,
    // Error on signatures made by keys absent from the keyring.
    error_on_missing_key: bool,
    // Require one signature per keyring certificate, ignoring `threshold`.
    one_each: bool,
}
impl std::default::Default for PayloadVerification {
    /// The strictest policy: one valid signature per keyring certificate,
    /// erroring on invalid signatures and on signatures from unknown keys.
    fn default() -> Self {
        Self {
            threshold: 0,
            error_on_invalid: true,
            error_on_missing_key: true,
            one_each: true,
        }
    }
}
#[allow(dead_code)]
impl PayloadVerification {
    /// Construct the default (strictest) policy; see [`Default`].
    pub fn new() -> Self {
        Default::default()
    }

    /// Require a signature per key, regardless of any given threshold.
    pub fn with_one_per_key(self, one_each: bool) -> Self {
        Self { one_each, ..self }
    }

    /// Set a threshold for required signatures.
    pub fn with_threshold(self, threshold: u8) -> Self {
        Self {
            one_each: false,
            threshold,
            ..self
        }
    }

    /// Require a single valid signature; other signatures may be invalid.
    pub fn with_any_valid(self) -> Self {
        Self {
            threshold: 1,
            error_on_invalid: false,
            ..self
        }
    }

    /// Require a threshold of signatures to be valid, allowing no invalid signatures.
    pub fn with_all_valid(self, threshold: u8) -> Self {
        Self {
            threshold,
            error_on_invalid: true,
            ..self
        }
    }

    /// Ignore invalid signatures. A threshold of valid signatures is still required.
    ///
    /// The default behavior is to error when encountering an invalid signature, even if a quorum
    /// is reached.
    pub fn ignoring_invalid_signatures(self) -> Self {
        Self {
            error_on_invalid: false,
            ..self
        }
    }

    /// Ignore signatures signed by unknown keys.
    ///
    /// The default behavior is to error when encountering an unknown signature.
    pub fn ignoring_missing_keys(self) -> Self {
        Self {
            // BUG FIX: this previously set `error_on_missing_key: true`, which
            // is already the default and made the method a no-op; to actually
            // ignore unknown keys, the flag must be cleared.
            error_on_missing_key: false,
            ..self
        }
    }
}
/// Format a name from an OpenPGP card.
///
/// Card-holder names are stored as `Surname<<Givenname`, with single `<`
/// characters standing in for spaces; this reorders the two parts into
/// `Givenname Surname` form.
fn format_name(input: impl AsRef<str>) -> String {
    let mut parts: Vec<String> = Vec::with_capacity(2);
    // Inserting at the front reverses the surname/given-name order.
    for segment in input.as_ref().split("<<").take(2) {
        parts.insert(0, segment.replace('<', " "));
    }
    parts.join(" ")
}
impl Payload {
    /// Create a new Payload, using the current system's time, in UTC.
    pub fn new(
        values: serde_json::Value,
        module_name: impl AsRef<str>,
        workflow_name: impl AsRef<str>,
    ) -> Self {
        Self {
            workflow: [
                module_name.as_ref().to_string(),
                workflow_name.as_ref().to_string(),
            ],
            values,
            datetime: Utc::now(),
            signatures: vec![],
        }
    }

    /// Load a Payload and the relevant certificates.
    ///
    /// # Errors
    ///
    /// The constructor may error if either file can't be read or if either file has invalid data.
    pub fn load(
        payload_path: impl AsRef<Path>,
        keyring_path: impl AsRef<Path>,
    ) -> Result<(Self, Vec<Cert>), Box<dyn std::error::Error>> {
        let payload_file = File::open(payload_path)?;
        let cert_file = File::open(keyring_path)?;
        Self::from_readers(payload_file, cert_file)
    }

    /// Read a Payload (JSON) and an OpenPGP certificate keyring from readers.
    ///
    /// # Errors
    ///
    /// Errors if the payload is not valid JSON or the keyring fails to parse.
    pub fn from_readers(
        payload: impl Read,
        keyring: impl Read + Send + Sync,
    ) -> Result<(Self, Vec<Cert>), Box<dyn std::error::Error>> {
        let payload: Payload = serde_json::from_reader(payload)?;
        let certs =
            openpgp::cert::CertParser::from_reader(keyring)?.collect::<Result<Vec<_>, _>>()?;
        Ok((payload, certs))
    }

    /// The number of signatures currently attached to this payload.
    pub fn signature_count(&self) -> usize {
        self.signatures.len()
    }

    /// Attach a signature from an OpenPGP card.
    ///
    /// # Errors
    ///
    /// The method may error if a signature could not be created.
    pub fn add_signature(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        // Sign the canonical signable bytes (the signatures field excluded).
        let unhashed = unhashed(serde_json::to_value(&self)?)?;
        let builder =
            SignatureBuilder::new(SignatureType::Binary).set_hash_algo(HashAlgorithm::SHA512);
        let mut prompt_handler = default_handler()?;
        let pin_validator = PinValidator {
            min_length: Some(6),
            ..Default::default()
        };
        // One signature is collected from every plugged-in smartcard.
        for backend in card_backend_pcsc::PcscBackend::cards(None)? {
            let mut card = Card::<Open>::new(backend?)?;
            let mut transaction = card.transaction()?;
            let cardholder_name = format_name(transaction.cardholder_name()?);
            let card_id = transaction.application_identifier()?.ident();
            let mut pin = None;
            // Keep prompting while the card still allows PIN attempts.
            while transaction.pw_status_bytes()?.err_count_pw1() > 0 && pin.is_none() {
                transaction.reload_ard()?;
                let attempts = transaction.pw_status_bytes()?.err_count_pw1();
                let rpea = "Remaining PIN entry attempts";
                let message = if cardholder_name.is_empty() {
                    format!("Unlock card {card_id}\n{rpea}: {attempts}\n\nPIN: ")
                } else {
                    format!(
                        "Unlock card {card_id} ({cardholder_name})\n{rpea}: {attempts}\n\nPIN: "
                    )
                };
                let temp_pin = prompt_validated_passphrase(
                    &mut *prompt_handler,
                    &message,
                    3,
                    pin_validator.to_fn(),
                )?;
                let verification_status = transaction.verify_user_pin(temp_pin.as_str().trim());
                match verification_status {
                    #[allow(clippy::ignored_unit_patterns)]
                    Ok(_) => {
                        pin.replace(temp_pin);
                    }
                    // NOTE: This should not be hit, because of the above validator.
                    Err(CardError::CardStatus(
                        StatusBytes::IncorrectParametersCommandDataField,
                    )) => {
                        prompt_handler.prompt_message(keyfork_prompt::Message::Text(
                            "Invalid PIN length entered.".to_string(),
                        ))?;
                    }
                    // Wrong PIN: loop again until attempts are exhausted.
                    Err(_) => {}
                }
            }
            let mut signer_card = transaction.to_signing_card(pin.expect("valid PIN").as_str())?;
            // NOTE: Can't use a PromptHandler to prompt a message as it doesn't provide a way to
            // cancel a prompt when in terminal mode. Just eprintln to stderr.
            //
            // We shouldn't be writing with a PromptHandler, so the terminal should be reset.
            let mut signer =
                signer_card.signer(&|| eprintln!("Touch confirmation needed for signing"))?;
            let signature = builder.clone().sign_message(&mut signer, &unhashed)?;
            let signature = Packet::from(signature);
            // Store the signature ASCII-armored so the payload stays JSON-friendly.
            let mut armored_signature = vec![];
            let mut writer = Writer::new(&mut armored_signature, Kind::Signature)?;
            signature.serialize(&mut writer)?;
            writer.finalize()?;
            self.signatures.push(String::from_utf8(armored_signature)?);
        }
        Ok(())
    }

    /// Verify the keychain and certificates using either a Key ID or an OpenPGP card.
    ///
    /// # Errors
    ///
    /// The method may error if no certificate could be verified or if any signatures are invalid.
    pub fn verify_signatures(
        &self,
        certs: &[Cert],
        verification_policy: &PayloadVerification,
        fingerprint: Option<Fingerprint>,
    ) -> Result<&serde_json::Value, Box<dyn std::error::Error>> {
        let policy = openpgp::policy::StandardPolicy::new();
        // Establish the root of trust, then require every other cert to be
        // cross-signed by it.
        let validated_cert = find_matching_certificate(fingerprint, certs, &policy)?;
        let (certs, invalid_certs) = validate_cross_signed_certs(&validated_cert, certs, &policy)?;
        if !invalid_certs.is_empty() {
            return Err(BaseError::UntrustedCertificates(
                validated_cert.fingerprint(),
                invalid_certs.iter().map(Cert::fingerprint).collect(),
            ))?;
        }
        let hashed = hash(serde_json::to_value(self)?)?;
        let PayloadVerification {
            mut threshold,
            error_on_invalid,
            error_on_missing_key,
            one_each,
        } = *verification_policy;
        let mut matches = 0;
        if one_each {
            // One signature per trusted certificate overrides the threshold.
            threshold = certs.len() as u8;
        }
        for signature in &self.signatures {
            let packet = Packet::from_bytes(signature.as_bytes())?;
            let Packet::Signature(signature) = packet else {
                panic!("bad packet found: {}", packet.tag());
            };
            let mut signature_matched = false;
            for issuer in signature.get_issuers() {
                for cert in &certs {
                    match cert
                        .with_policy(&policy, None)?
                        .keys()
                        .alive()
                        .for_signing()
                        .key_handle(issuer.clone())
                        .next()
                        .map(|signing_key| signature.verify_hash(&signing_key, hashed.clone()))
                    {
                        Some(Ok(())) => {
                            // key found, signature matched
                            signature_matched = true;
                        }
                        Some(Err(e)) => {
                            if error_on_invalid {
                                return Err(e)?;
                            }
                        }
                        None => {
                            // key not found, but we have more certs to go through
                        }
                    }
                }
            }
            if signature_matched {
                matches += 1;
            } else if error_on_missing_key {
                return Err(
                    BaseError::NoPublicKeyMatchedSignature.with_policy(verification_policy)
                )?;
            }
        }
        if matches < threshold {
            return Err(
                BaseError::NotEnoughSignatures(matches, threshold).with_policy(verification_policy)
            )?;
        }
        Ok(&self.values)
    }

    /// Consume the payload, yielding `(module name, workflow name, values)`.
    pub fn into_values(self) -> (String, String, serde_json::Value) {
        let Payload {
            workflow, values, ..
        } = self;
        let [module, workflow] = workflow;
        (module, workflow, values)
    }
}
/// Resolve the root-of-trust certificate: either the keyring certificate
/// matching an explicitly provided fingerprint, or the one whose signing key
/// matches a plugged-in OpenPGP smartcard.
fn find_matching_certificate(
    fingerprint: Option<Fingerprint>,
    certs: &[Cert],
    policy: &sequoia_openpgp::policy::StandardPolicy<'_>,
) -> Result<Cert, Box<dyn std::error::Error>> {
    if let Some(fingerprint) = fingerprint {
        Ok(certs
            .iter()
            .find(|cert| cert.fingerprint() == fingerprint)
            .ok_or(BaseError::FingerprintNotFound(fingerprint))?
            .clone())
    } else {
        let mut any_smartcard = false;
        for backend in card_backend_pcsc::PcscBackend::cards(None)? {
            any_smartcard = true;
            let mut card = Card::<Open>::new(backend?)?;
            let mut transaction = card.transaction()?;
            let signing_fingerprint = transaction
                .fingerprint(openpgp_card::KeyType::Signing)?
                .expect("smartcard signing key is unavailable");
            for cert in certs {
                let valid_cert = cert.with_policy(policy, None)?;
                // NOTE: We must verify that it is for_signing because back signatures
                // mean that the signing key verifies the certificate.
                //
                // We don't want a certificate to be able to adopt, for example, an encryption key
                // because that means there is no back signature and the encryption key can be
                // adopted onto a malicious certificate.
                for key in valid_cert.keys().alive().for_signing() {
                    let fpr = key.fingerprint();
                    if fpr.as_bytes() == signing_fingerprint.as_bytes() {
                        return Ok(cert.clone());
                    }
                }
            }
        }
        // Distinguish "no card plugged in" from "card matched no certificate".
        if any_smartcard {
            Err(BaseError::NoCertMatchedSmartcard.into())
        } else {
            Err(BaseError::NoSmartcard.into())
        }
    }
}
/// Validate that `certs` are signed by `validated_cert`, either by a signature directly upon the
/// primary key of that certificate, or a signature on a user ID of the certificate.
///
/// Returns a list of trusted certs and a list of untrusted certs.
fn validate_cross_signed_certs(
    validated_cert: &Cert,
    certs: &[Cert],
    policy: &sequoia_openpgp::policy::StandardPolicy,
) -> Result<(Vec<Cert>, Vec<Cert>), Box<dyn std::error::Error>> {
    let our_pkey = validated_cert.primary_key();
    // The root of trust is trivially trusted.
    let mut verified_certs = vec![validated_cert.clone()];
    let mut unverified_certs = vec![];
    for cert in certs
        .iter()
        .filter(|cert| cert.fingerprint() != validated_cert.fingerprint())
    {
        let mut has_valid_userid_signature = false;
        let cert_pkey = cert.primary_key();
        // check signatures on User IDs
        let userids = cert
            .userids()
            .map(|ua| (ua.certifications(), ua.userid().clone()));
        for (signatures, userid) in userids {
            for signature in signatures {
                if signature
                    .verify_userid_binding(&our_pkey, &*cert_pkey, &userid)
                    .is_ok()
                {
                    has_valid_userid_signature = true;
                }
            }
        }
        // check signatures on the primary key itself
        let has_valid_direct_signature = cert_pkey
            .active_certifications_by_key(policy, None, &***our_pkey.role_as_unspecified())
            .next()
            .is_some();
        // Either form of certification establishes trust.
        if has_valid_userid_signature || has_valid_direct_signature {
            verified_certs.push(cert.clone());
        } else {
            unverified_certs.push(cert.clone());
        }
    }
    Ok((verified_certs, unverified_certs))
}

View File

@ -1,117 +0,0 @@
use clap::Parser;
use miniquorum::{Payload, PayloadVerification};
use sequoia_openpgp::Fingerprint;
use std::{fs::File, path::PathBuf};
#[derive(clap::Parser)]
/// An Icepick-specific subset of the Quorum decision-making system.
// Each variant is exposed as a CLI subcommand by the clap derive.
enum MiniQuorum {
    /// Verify signatures on an Icepick Payload file.
    VerifySignatures {
        /// The file containing OpenPGP Certificates used for verifying signatures.
        keyring_file: PathBuf,

        /// The file provided as input.
        ///
        /// If no file is passed, standard input is used.
        input_file: Option<PathBuf>,

        /// An OpenPGP Fingerprint to use in place of on-smartcard certificate detection.
        ///
        /// This functionality is only recommended if verifying a payload without the physical
        /// presence of any signer, and builds a web of trust from the signer fingerprint provided.
        #[arg(long)]
        fingerprint: Option<Fingerprint>,

        /// The file to write the resulting payload to, if verification is successful.
        #[arg(long)]
        output_file: Option<PathBuf>,
    },
    /// Add a signature to an Icepick Payload file.
    AddSignature {
        /// The file to use as input.
        ///
        /// If no file is provided, standard input is used. If a file is provided and no output
        /// file is provided, it will be used in-place as the output file with the additional
        /// signature added.
        input_file: Option<PathBuf>,

        /// The file to use as output.
        ///
        /// If no file is provided, but an input file is provided, the input file is used. If no
        /// input file is provided, standard output is used.
        #[arg(long)]
        output_file: Option<PathBuf>,
    },
}
/// Entry point: dispatch the parsed CLI subcommand.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    match MiniQuorum::parse() {
        MiniQuorum::VerifySignatures {
            keyring_file,
            input_file,
            fingerprint,
            output_file,
        } => {
            // Refuse to overwrite the signed input with verified output.
            // BUG FIX: this check used to run unconditionally, so the
            // stdin -> stdout case (both paths `None`) incorrectly panicked
            // even though there is nothing to overwrite.
            if input_file.is_some() {
                assert_ne!(
                    input_file, output_file,
                    "output is verified data; not overwriting signed input data"
                );
            }
            let (payload, certs) = match input_file {
                Some(input_file) => Payload::load(&input_file, &keyring_file)?,
                None => {
                    let stdin = std::io::stdin();
                    let keyring_file = File::open(&keyring_file)?;
                    Payload::from_readers(stdin, keyring_file)?
                }
            };
            // Every certificate in the keyring must have signed the payload.
            let policy = PayloadVerification::new().with_threshold(certs.len().try_into()?);
            payload.verify_signatures(&certs, &policy, fingerprint)?;
            if let Some(output_file) = output_file {
                let file = File::create(output_file)?;
                serde_json::to_writer_pretty(file, &payload)?;
            } else {
                let stdout = std::io::stdout();
                serde_json::to_writer_pretty(stdout, &payload)?;
            }
        }
        MiniQuorum::AddSignature {
            input_file,
            output_file,
        } => {
            let mut payload: Payload = match &input_file {
                Some(input_file) => {
                    let input_file = File::open(input_file)?;
                    serde_json::from_reader(input_file)?
                }
                None => {
                    let stdin = std::io::stdin();
                    serde_json::from_reader(stdin)?
                }
            };
            payload.add_signature()?;
            if let Some(output_file) = output_file {
                // An explicit output file takes priority over in-place update.
                let file = File::create(output_file)?;
                serde_json::to_writer_pretty(file, &payload)?;
            } else if let Some(input_file) = input_file {
                // In-place update: write to a tempfile next to the input, then
                // atomically move it over the original so a crash mid-write
                // cannot leave a truncated payload behind.
                // (Previously this copied and then removed the tempfile, which
                // is not atomic and left a window with a partially-written file.)
                let output_file = input_file.with_extension("tmp");
                let mut file = File::create_new(&output_file)?;
                serde_json::to_writer_pretty(&mut file, &payload)?;
                drop(file);
                std::fs::rename(&output_file, input_file)?;
            } else {
                // No files at all: emit the signed payload on standard output.
                println!("{}", serde_json::to_string_pretty(&payload)?);
            }
        }
    }
    Ok(())
}

View File

@ -1,26 +0,0 @@
# Generated REST client for the Spacemesh node API (see build.rs / progenitor).
[package]
name = "spacemesh-api-client"
version = "0.1.0"
edition = "2021"
publish = ["distrust"]

[dependencies]
futures = "0.3"
progenitor-client = { git = "https://github.com/geoffreygarrett/progenitor", rev = "8726ea91eb19f92e1357f1ceeeab507477dcfeb6" }
reqwest = { version = "0.11", features = ["json", "stream"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
chrono = { version = "0.4", features = ["serde"] }
base64 = "0.22.1"
smex = { version = "0.1.0", registry = "distrust" }

[build-dependencies]
# build.rs feeds openapi.json through progenitor and pretty-prints the output.
prettyplease = "0.2.22"
progenitor = { git = "https://github.com/geoffreygarrett/progenitor", rev = "8726ea91eb19f92e1357f1ceeeab507477dcfeb6" }
serde_json = "1.0"
syn = "2.0"

[dev-dependencies]
base64 = "0.22.1"
smex = { version = "0.1.0", registry = "distrust" }
tokio = { version = "1.43.0", features = ["macros", "net", "rt", "test-util"] }

View File

@ -1,22 +0,0 @@
/*
curl -X 'GET' \
'https://converter.swagger.io/api/convert?url=https%3A%2F%2Fmainnet-api-docs.spacemesh.network%2Fv1.7.12%2Fapi.swagger.json' \
-H 'accept: application/json'
*/
/// Build script: generate the API client from the checked-in OpenAPI spec.
fn main() {
    let src = "openapi.json";
    println!("cargo:rerun-if-changed={src}");

    // Parse the OpenAPI document (already converted from Swagger; see the
    // curl command above).
    let spec = serde_json::from_reader(std::fs::File::open(src).unwrap()).unwrap();

    // Generate the client code, then pretty-print it for readable output.
    let mut generator = progenitor::Generator::default();
    let ast = syn::parse2(generator.generate_tokens(&spec).unwrap()).unwrap();
    let content = prettyplease::unparse(&ast);

    // Emit the result into OUT_DIR, where lib.rs include!s it.
    let out_file =
        std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap()).join("codegen.rs");
    std::fs::write(out_file, content).unwrap();
}

File diff suppressed because one or more lines are too long

View File

@ -1,42 +0,0 @@
#![allow(warnings, unused)]
include!(concat!(env!("OUT_DIR"), "/codegen.rs"));
// NOTE: The RPC API requires base64-encoded transaction IDs rather than hex-encoded.
// That was confusing, after all their branding is `0x` based.
/// Re-encode a hex transaction ID into the base64 form the RPC API expects.
///
/// # Errors
///
/// Returns a [`smex::DecodeError`] if `txid` is not valid hexadecimal.
pub fn encode_transaction_id(txid: impl AsRef<str>) -> Result<String, smex::DecodeError> {
    use base64::prelude::*;
    smex::decode(txid).map(|raw| BASE64_STANDARD.encode(raw))
}
#[cfg(test)]
mod tests {
    use super::*;
    use base64::prelude::*;

    // NOTE(review): integration test — hits the public mainnet API over the
    // network, so it requires connectivity to pass.
    #[tokio::test]
    async fn it_works() {
        let client = Client::new("https://mainnet-api.spacemesh.network");
        let txid = "638442a2033f20b5a7280b9a4f2bfc73022f6e7ec64b1497b85335444381d99d";
        // The RPC API wants base64 transaction IDs, not hex.
        let txid = smex::decode(txid).unwrap();
        let txid = BASE64_STANDARD.encode(txid);
        let result = client
            .transaction_service_list(&types::Spacemeshv2alpha1TransactionRequest {
                txid: vec![txid],
                limit: Some(100.to_string()),
                ..Default::default()
            })
            .await
            .unwrap()
            .into_inner();
        // Only asserts that the call succeeds and returns a transaction list
        // (panics on an RPC error status); the list itself is unused.
        let result = match result {
            types::GooglerpcStatusOrSpacemeshv2alpha1TransactionList::GooglerpcStatus(googlerpc_status) => panic!("{:?}", googlerpc_status.message),
            types::GooglerpcStatusOrSpacemeshv2alpha1TransactionList::Spacemeshv2alpha1TransactionList(transaction_list) => {
                transaction_list
            },
        };
    }
}

View File

@ -1,12 +0,0 @@
# SCALE codec types for Spacemesh transactions (port of spacemeshos/sm-codec).
[package]
name = "spacemesh-codec"
version = "0.1.0"
edition = "2021"
publish = ["distrust"]

[dependencies]
parity-scale-codec = { version = "3.6.12", features = ["derive"] }

[dev-dependencies]
# Used by doctests/tests to decode base64- and bech32-encoded fixtures.
base64 = "0.22.1"
bech32 = "0.11.0"

View File

@ -1,530 +0,0 @@
//! Spacemesh transaction encoding and decoding.
//! Based loosely on: <https://github.com/spacemeshos/sm-codec/>.
//!
//! # Encoding Transactions
//!
//! ```rust
//! use spacemesh_codec::Compact;
//! use spacemesh_codec::tx::TxHeader;
//! use spacemesh_codec::tx_types::single_signature::{Spend, SpendArguments, SpendPayload};
//!
//! let principal = [0u8; 24];
//! let destination = [1u8; 24];
//!
//! let single_sig_spend = Spend {
//!     header: TxHeader {
//!         principal,
//!     },
//!     payload: SpendPayload {
//!         nonce: Compact(2),
//!         gas_price: Compact(1),
//!         arguments: SpendArguments {
//!             destination,
//!             amount: Compact(100000),
//!         },
//!     },
//!     // unsigned transaction
//!     signature: [0; 64],
//! };
//! ```
//!
//! # Decoding Transactions
//!
//! Transactions can be decoded to bytes using the [`base64`][base64] crate. Using the Spacemesh
//! client, the transaction should also include `template` and `method` values. With those values,
//! [`tx_types::decode_by_address_and_method()`] can be used to attempt to parse the transaction.
//!
//! ```rust
//! use base64::prelude::*;
//! use spacemesh_codec::{tx_types, Decode};
//!
//! let encoded_tx = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAIBAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAYIaBgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
//! let raw_tx = BASE64_STANDARD.decode(encoded_tx).unwrap();
//! let spend = tx_types::single_signature::Spend::decode(&mut &raw_tx[..]).unwrap();
//! ```
//!
//! [base64]: https://docs.rs/base64/latest/base64/
pub use parity_scale_codec::{Compact, Decode, Encode};
/// Protocol-wide size constants.
pub mod constants {
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/codecs/constants.ts

    /// The length of an address.
    pub const ADDRESS_BYTES_LENGTH: usize = 24;
}
/// Core type aliases shared by every transaction template.
pub mod core {
    use super::*;
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/codecs/core.ts

    // NOTE: Encoding an array doesn't encode length, matching the same functionality
    // as Bytes in scale-ts.
    pub type Address = [u8; constants::ADDRESS_BYTES_LENGTH];
    pub type PublicKey = [u8; 32];
    // Nonce and gas price are SCALE compact-encoded integers.
    pub type Nonce = Compact<u64>;
    pub type GasPrice = Compact<u64>;
}
/// Signature encodings for single- and multi-signature templates.
pub mod signatures {
    use super::*;
    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/codecs/signatures.ts

    /// A single raw signature: 64 bytes, no length prefix.
    pub type SingleSig = [u8; 64];

    /// One part of a multi-signature: the index (`ref`) of the signing key
    /// plus the signature itself.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct MultiSigPart {
        pub r#ref: Compact<u8>,
        pub sig: SingleSig,
    }

    /// A set of signature parts encoded back-to-back with no inline length;
    /// decoding consumes parts until the input runs out.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct MultiSig {
        pub parts: Vec<MultiSigPart>,
    }

    impl Encode for MultiSig {
        fn size_hint(&self) -> usize {
            // Each part is a SingleSig plus its SCALE-compact key index,
            // which occupies at least one byte. FIX: the previous hint
            // (`parts.len() * size_of::<SingleSig>()`) ignored the index
            // byte and always under-reserved the encode buffer.
            self.parts.len() * (std::mem::size_of::<SingleSig>() + 1)
        }

        fn encode(&self) -> Vec<u8> {
            // NOTE: No inline length is included.
            let mut r = Vec::with_capacity(self.size_hint());
            for part in &self.parts {
                part.encode_to(&mut r);
            }
            r
        }
    }

    impl Decode for MultiSig {
        fn decode<I: parity_scale_codec::Input>(
            input: &mut I,
        ) -> Result<Self, parity_scale_codec::Error> {
            let mut parts = vec![];
            // NOTE: We can't rely on the length of the input. It may not be available.
            // Unfortunately, we also don't have enough context to know if the reason it can't
            // decode is because we ran out of input, or because there was a format error.
            while let Ok(part) = MultiSigPart::decode(input) {
                parts.push(part);
            }
            Ok(Self { parts })
        }
    }
}
/// Transaction headers and the generic transaction containers.
pub mod tx {
    use super::*;

    /// Associates a transaction type with its on-chain method selector.
    pub trait TransactionMethod {
        fn method_selector() -> u8;
    }

    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/codecs/tx.ts

    /// Header for a non-spawn transaction. The method selector is carried in
    /// the const parameter `M` and is written/checked during encode/decode
    /// rather than stored as a field.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct TxHeader<const M: u8> {
        // should always be 0
        // pub transaction_type: Compact<u8>,
        pub principal: core::Address,
        // covered by const M
        // pub method_selector: Compact<u8>,
    }

    impl<const M: u8> Encode for TxHeader<M> {
        fn encode(&self) -> Vec<u8> {
            let mut r = Vec::with_capacity(self.size_hint());
            // Wire format: transaction_type (always 0), principal, selector M.
            let transaction_type = Compact(0u8);
            transaction_type.encode_to(&mut r);
            self.principal.encode_to(&mut r);
            let method_selector = Compact(M);
            method_selector.encode_to(&mut r);
            r
        }
    }

    impl<const M: u8> Decode for TxHeader<M> {
        fn decode<I: parity_scale_codec::Input>(
            input: &mut I,
        ) -> Result<Self, parity_scale_codec::Error> {
            let transaction_type = Compact::<u8>::decode(input)?;
            if transaction_type.0 != 0 {
                return Err("transaction_type != 0".into());
            }
            let principal = core::Address::decode(input)?;
            // The selector must match the const parameter for this type.
            let method_selector = Compact::<u8>::decode(input)?;
            if method_selector.0 != M {
                return Err("method_selector != M".into());
            }
            Ok(Self {
                principal,
            })
        }
    }

    // NOTE: This is used in place of `withTemplateAddress()`.
    // The original source implementation placed `template_address` as the last field,
    // but I don't think that's correct based on the implementation of `withTemplateAddress()`.
    /// Header for a spawn transaction: a template address followed by the
    /// same fields as [`TxHeader`].
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct SpawnTxHeader<const M: u8> {
        pub template_address: core::Address,
        // should always be 0
        // pub transaction_type: Compact<u8>,
        pub principal: core::Address,
        // covered by const M
        // pub method_selector: Compact<u8>,
    }

    impl<const M: u8> Encode for SpawnTxHeader<M> {
        fn encode(&self) -> Vec<u8> {
            let mut r = Vec::with_capacity(self.size_hint());
            self.template_address.encode_to(&mut r);
            let transaction_type = Compact(0u8);
            transaction_type.encode_to(&mut r);
            self.principal.encode_to(&mut r);
            let method_selector = Compact(M);
            method_selector.encode_to(&mut r);
            r
        }
    }

    impl<const M: u8> Decode for SpawnTxHeader<M> {
        fn decode<I: parity_scale_codec::Input>(
            input: &mut I,
        ) -> Result<Self, parity_scale_codec::Error> {
            let template_address = core::Address::decode(input)?;
            let transaction_type = Compact::<u8>::decode(input)?;
            if transaction_type.0 != 0 {
                return Err("transaction_type != 0".into());
            }
            let principal = core::Address::decode(input)?;
            let method_selector = Compact::<u8>::decode(input)?;
            if method_selector.0 != M {
                return Err("method_selector != M".into());
            }
            Ok(Self {
                template_address,
                principal,
            })
        }
    }

    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/transaction.ts
    // Restrict the Signature type parameter to the two supported encodings.
    mod sealed {
        use super::signatures;

        pub trait Signature {}
        impl Signature for signatures::SingleSig {}
        impl Signature for signatures::MultiSig {}
    }

    /// A spawn transaction: spawn header, method payload, signature(s).
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpawnTransaction<Payload, Signature: sealed::Signature, const M: u8> {
        pub header: SpawnTxHeader<M>,
        pub payload: Payload,
        pub signature: Signature,
    }

    /// A non-spawn transaction: header, method payload, signature(s).
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct Transaction<Payload, Signature: sealed::Signature, const M: u8> {
        pub header: TxHeader<M>,
        pub payload: Payload,
        pub signature: Signature,
    }
}
pub mod tx_types {
use super::*;
pub type DecodeResult<T> = Option<Result<T, parity_scale_codec::Error>>;
    pub mod common {
        // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/std/common.ts
        use super::*;

        /// Payload shared by every transaction method: account nonce, gas
        /// price, and the method-specific arguments.
        #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
        pub struct TxPayload<Arguments> {
            pub nonce: core::Nonce,
            pub gas_price: core::GasPrice,
            pub arguments: Arguments,
        }
    }
    pub mod vault {
        use super::*;
        use common::TxPayload;
        use signatures::SingleSig;

        // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/std/vault.ts

        /// Well-known template address of the vault contract.
        pub const VAULT_TEMPLATE_ADDRESS: core::Address = [
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4,
        ];

        /// Arguments for spawning a vault: owner plus its vesting schedule.
        #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
        pub struct SpawnArguments {
            pub owner: core::Address,
            pub total_amount: Compact<u64>,
            pub initial_unlock_amount: Compact<u64>,
            pub vesting_start: Compact<u32>,
            pub vesting_end: Compact<u32>,
        }

        /// Arguments for spending from a vault.
        #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
        pub struct SpendArguments {
            pub destination: core::Address,
            pub amount: Compact<u64>,
        }

        pub type SpawnPayload = TxPayload<SpawnArguments>;
        pub type SpendPayload = TxPayload<SpendArguments>;

        // Method selectors: 0 = spawn, 16 = spend.
        pub type Spawn = tx::SpawnTransaction<SpawnPayload, SingleSig, 0>;
        pub type Spend = tx::Transaction<SpendPayload, SingleSig, 16>;

        /// All methods callable on the vault template.
        #[derive(Clone, Debug, PartialEq, Eq)]
        pub enum Method {
            Spawn(Spawn),
            Spend(Spend),
        }

        /// Decode a raw transaction by numeric method selector; `None` when
        /// the selector is unknown for this template.
        pub fn decode_by_method(method: u8, input: &[u8]) -> DecodeResult<Method> {
            match method {
                0 => Some(Spawn::decode(&mut &*input).map(Method::Spawn)),
                16 => Some(Spend::decode(&mut &*input).map(Method::Spend)),
                _ => None,
            }
        }
    }
    pub mod vesting {
        use super::*;
        use common::TxPayload;
        use signatures::MultiSig;

        // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/std/vesting.ts

        /// Well-known template address of the vesting contract.
        pub const VESTING_TEMPLATE_ADDRESS: core::Address = [
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3,
        ];

        /// Arguments for spawning a vesting account: the signing threshold
        /// and the participating public keys.
        #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
        pub struct SpawnArguments {
            pub required: Compact<u8>,
            pub public_keys: Vec<core::PublicKey>,
        }

        /// Arguments for spending from a vesting account.
        #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
        pub struct SpendArguments {
            pub destination: core::Address,
            pub amount: Compact<u64>,
        }

        /// Arguments for draining an associated vault into a destination.
        #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
        pub struct DrainVaultArguments {
            pub vault: core::Address,
            pub destination: core::Address,
            pub amount: Compact<u64>,
        }

        pub type SpawnPayload = TxPayload<SpawnArguments>;
        pub type SpendPayload = TxPayload<SpendArguments>;
        pub type DrainVaultPayload = TxPayload<DrainVaultArguments>;

        // Method selectors: 0 = spawn, 16 = spend, 17 = drain vault.
        pub type Spawn = tx::SpawnTransaction<SpawnPayload, MultiSig, 0>;
        pub type Spend = tx::Transaction<SpendPayload, MultiSig, 16>;
        pub type DrainVault = tx::Transaction<DrainVaultPayload, MultiSig, 17>;

        /// All methods callable on the vesting template.
        #[derive(Clone, Debug, PartialEq, Eq)]
        pub enum Method {
            Spawn(Spawn),
            Spend(Spend),
            DrainVault(DrainVault),
        }

        /// Decode a raw transaction by numeric method selector; `None` when
        /// the selector is unknown for this template.
        pub fn decode_by_method(method: u8, input: &[u8]) -> DecodeResult<Method> {
            match method {
                0 => Some(Spawn::decode(&mut &*input).map(Method::Spawn)),
                16 => Some(Spend::decode(&mut &*input).map(Method::Spend)),
                17 => Some(DrainVault::decode(&mut &*input).map(Method::DrainVault)),
                _ => None,
            }
        }
    }
    pub mod single_signature {
        use super::*;
        use common::TxPayload;
        use signatures::SingleSig;

        // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/std/singlesig.ts

        /// Well-known template address of the single-signature wallet.
        pub const SINGLE_SIG_TEMPLATE_ADDRESS: core::Address = [
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
        ];

        /// Arguments for spawning a single-signature wallet.
        #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
        pub struct SpawnArguments {
            pub public_key: core::PublicKey,
        }

        /// Arguments for spending from a single-signature wallet.
        #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
        pub struct SpendArguments {
            pub destination: core::Address,
            pub amount: Compact<u64>,
        }

        pub type SpawnPayload = TxPayload<SpawnArguments>;
        pub type SpendPayload = TxPayload<SpendArguments>;

        // Method selectors: 0 = spawn, 16 = spend.
        pub type Spawn = tx::SpawnTransaction<SpawnPayload, SingleSig, 0>;
        pub type Spend = tx::Transaction<SpendPayload, SingleSig, 16>;

        /// All methods callable on the single-signature template.
        #[derive(Clone, Debug, PartialEq, Eq)]
        pub enum Method {
            Spawn(Spawn),
            Spend(Spend),
        }

        /// Decode a raw transaction by numeric method selector; `None` when
        /// the selector is unknown for this template.
        pub fn decode_by_method(method: u8, input: &[u8]) -> DecodeResult<Method> {
            match method {
                0 => Some(Spawn::decode(&mut &*input).map(Method::Spawn)),
                16 => Some(Spend::decode(&mut &*input).map(Method::Spend)),
                _ => None,
            }
        }
    }
/// Transaction types for the standard multi-signature wallet template.
pub mod multi_signature {
    use super::*;
    use common::TxPayload;
    use signatures::MultiSig;

    // ref: https://github.com/spacemeshos/sm-codec/blob/master/src/std/singlesig.ts
    // NOTE(review): the ref above points at singlesig.ts; the multisig
    // template lives in multisig.ts upstream — confirm and fix the link.
    /// Well-known address of the multi-sig template (last byte = 2).
    pub const MULTI_SIG_TEMPLATE_ADDRESS: core::Address = [
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2,
    ];

    /// Spawn arguments: the `required` signature threshold and member keys.
    /// NOTE: field order defines the encoded wire format — do not reorder.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpawnArguments {
        pub required: Compact<u8>,
        // NOTE(review): plural data under a singular name (cf. vesting's
        // `public_keys`); renaming would break the public API — confirm first.
        pub public_key: Vec<core::PublicKey>,
    }

    /// Spend arguments: recipient address and amount.
    #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode)]
    pub struct SpendArguments {
        pub destination: core::Address,
        pub amount: Compact<u64>,
    }

    pub type SpawnPayload = TxPayload<SpawnArguments>;
    pub type SpendPayload = TxPayload<SpendArguments>;
    // Const generics 0 / 16 are the template's method selectors.
    pub type Spawn = tx::SpawnTransaction<SpawnPayload, MultiSig, 0>;
    pub type Spend = tx::Transaction<SpendPayload, MultiSig, 16>;

    /// A decoded multi-sig transaction, tagged by method selector.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub enum Method {
        Spawn(Spawn),
        Spend(Spend),
    }

    /// Decodes a raw multi-sig transaction by method selector
    /// (0 = spawn, 16 = spend); `None` otherwise.
    pub fn decode_by_method(method: u8, input: &[u8]) -> DecodeResult<Method> {
        match method {
            0 => Some(Spawn::decode(&mut &*input).map(Method::Spawn)),
            16 => Some(Spend::decode(&mut &*input).map(Method::Spend)),
            _ => None,
        }
    }
}
/// A decoded transaction from any of the four standard templates, tagged by
/// the template it targets.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ModuleMethod {
    Vault(vault::Method),
    Vesting(vesting::Method),
    SingleSig(single_signature::Method),
    MultiSig(multi_signature::Method),
}
/// Decodes a raw transaction by the template `address` it targets and its
/// `method` selector, dispatching to the matching template module.
///
/// Returns `None` when the address is not one of the known standard
/// templates, or when the method selector is not defined for that template.
/// (Previously an unknown address panicked via `unimplemented!()`; callers
/// decoding arbitrary on-chain data should get `None`, not a crash.)
pub fn decode_by_address_and_method(
    address: core::Address,
    method: u8,
    input: &[u8],
) -> DecodeResult<ModuleMethod> {
    match address {
        vault::VAULT_TEMPLATE_ADDRESS => vault::decode_by_method(method, input)
            .map(|method| method.map(ModuleMethod::Vault)),
        vesting::VESTING_TEMPLATE_ADDRESS => vesting::decode_by_method(method, input)
            .map(|method| method.map(ModuleMethod::Vesting)),
        single_signature::SINGLE_SIG_TEMPLATE_ADDRESS => {
            single_signature::decode_by_method(method, input)
                .map(|method| method.map(ModuleMethod::SingleSig))
        }
        multi_signature::MULTI_SIG_TEMPLATE_ADDRESS => {
            multi_signature::decode_by_method(method, input)
                .map(|method| method.map(ModuleMethod::MultiSig))
        }
        // Not a known template: undecodable, but not a programming error.
        _ => None,
    }
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Round-trips a captured single-sig spend transaction: decode the raw
    /// bytes, re-encode them, and verify the recipient address.
    #[test]
    fn it_works() {
        use base64::prelude::*;
        use bech32::Bech32;
        // The single-sig template address, bech32-encoded with the "sm" HRP.
        let (hrp, data) =
            bech32::decode("sm1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqg56ypy7").unwrap();
        assert_eq!(hrp.as_str(), "sm");
        assert_eq!(
            &data,
            &tx_types::single_signature::SINGLE_SIG_TEMPLATE_ADDRESS
        );
        // Raw signed spend transaction (base64) — presumably captured from a
        // real network; treated here as a fixed test vector.
        let encoded_tx = "AAAAAAAvqmgSN6hBGS16FVNfNDURojTRU0AQBAAAAABJThXbKEnjnty59ht5e/5EkjDK8AeANolPDOAiIHlzj7CIG60FzFRpuR/fLVRQsmzRbApYBryfg4RKcnZgmmWPywafADHyuVjkLNGup0gpvhnXAHICeSXveAs=";
        let raw_tx = BASE64_STANDARD.decode(encoded_tx).unwrap();
        let spend = tx_types::single_signature::Spend::decode(&mut &raw_tx[..]).unwrap();
        // Re-encoding must reproduce the original bytes exactly.
        let equivalence = spend.encode();
        assert_eq!(raw_tx, equivalence);
        let recipient_address =
            bech32::encode::<Bech32>(hrp, &spend.payload.arguments.destination).unwrap();
        assert_eq!(
            recipient_address,
            "sm1qqqqqqzffc2ak2zfuw0dew0krduhhljyjgcv4uqdt6nrd"
        );
    }

    /// Encode-then-decode of a hand-built spend must yield an equal value.
    #[test]
    fn recode() {
        use tx::*;
        use tx_types::single_signature::*;
        let principal = [0u8; 24];
        let single_sig_spend = Spend {
            header: TxHeader {
                principal,
            },
            payload: SpendPayload {
                nonce: Compact(2),
                gas_price: Compact(1),
                arguments: SpendArguments {
                    destination: [1; 24],
                    amount: Compact(100000),
                },
            },
            // Placeholder signature; signature validity is not under test.
            signature: [0; 64],
        };
        let encoded = single_sig_spend.encode();
        let recoded = Spend::decode(&mut &*encoded).unwrap();
        assert_eq!(single_sig_spend, recoded);
    }
}

View File

@ -1,16 +0,0 @@
[package]
name = "spacemesh"
version = "0.1.0"
edition = "2021"
publish = ["distrust"]
[dependencies]
bech32 = "0.11.0"
spacemesh-api-client = { version = "0.1.0", path = "../api-client" }
spacemesh-codec = { version = "0.1.0", path = "../codec" }
[dev-dependencies]
base64 = "0.22.1"
bech32 = "0.11.0"
smex = { version = "0.1.0", registry = "distrust" }
tokio = { version = "1.43.0", features = ["net", "rt", "macros"] }

View File

@ -1,58 +0,0 @@
pub use spacemesh_api_client as client;
pub use spacemesh_api_client::Client;
pub use spacemesh_codec as codec;
pub use spacemesh_codec::tx_types as transaction;
pub mod wallet;
/// Re-export of the `bech32` crate plus an `encode` helper pinned to the
/// classic `Bech32` checksum variant (as used elsewhere in this crate for
/// Spacemesh addresses).
pub mod bech32 {
    pub use bech32::*;

    /// Encodes `input` under the human-readable part `hrp` with the
    /// `Bech32` (non-m) variant.
    pub fn encode(hrp: Hrp, input: &[u8]) -> Result<String, EncodeError> {
        bech32::encode::<Bech32>(hrp, input)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use base64::prelude::*;
    use spacemesh_api_client::*;

    /// Fetches a known transaction from the public API and checks that it
    /// decodes through `decode_by_address_and_method`.
    ///
    /// NOTE(review): requires live network access to
    /// mainnet-api.spacemesh.network; fails offline.
    #[tokio::test]
    async fn decodes_live_transaction() {
        let client = Client::new("https://mainnet-api.spacemesh.network");
        // Transaction id: hex string decoded (via smex) to raw bytes, then
        // base64-encoded — the representation the API expects.
        let txid = "638442a2033f20b5a7280b9a4f2bfc73022f6e7ec64b1497b85335444381d99d";
        let txid = smex::decode(txid).unwrap();
        let txid = BASE64_STANDARD.encode(txid);
        let result = client
            .transaction_service_list(&types::Spacemeshv2alpha1TransactionRequest {
                txid: vec![txid],
                limit: Some(100.to_string()),
                ..Default::default()
            })
            .await
            .unwrap()
            .into_inner();
        // The API returns either a gRPC status (an error) or a list of
        // transactions; panic with the status message on error.
        let mut result = match result {
            types::GooglerpcStatusOrSpacemeshv2alpha1TransactionList::GooglerpcStatus(googlerpc_status) => panic!("{:?}", googlerpc_status.message),
            types::GooglerpcStatusOrSpacemeshv2alpha1TransactionList::Spacemeshv2alpha1TransactionList(transaction_list) => {
                transaction_list
            },
        };
        let tx = result.transactions.pop().unwrap().tx.unwrap();
        // The template field is a bech32 address; decode it to raw bytes to
        // dispatch on the template.
        let (_hrp, address) = bech32::decode(&tx.template.unwrap()).unwrap();
        let tx_raw = BASE64_STANDARD.decode(tx.raw.unwrap()).unwrap();
        let decoded = transaction::decode_by_address_and_method(
            address.try_into().unwrap(),
            tx.method.unwrap() as u8,
            &tx_raw,
        )
        .unwrap()
        .unwrap();
        // Only decoding success is asserted; the value itself is unused.
        drop(decoded);
    }
}

View File

@ -1,77 +0,0 @@
//! Spacemesh wallet management.
pub use crate::codec::core::Address;
use crate::codec::tx::*;
use crate::codec::Compact;
use crate::transaction::single_signature;
// Number of leading address bytes that are reserved (left zeroed).
const ADDRESS_RESERVED: usize = 4;

// Private marker trait so `AsAddress` cannot be implemented outside this
// module (sealed-trait pattern).
mod sealed {
    pub trait Sealed {}
}

/// Conversion into a Spacemesh [`Address`]; sealed, so only the impls in
/// this module exist.
pub trait AsAddress: sealed::Sealed {
    fn as_address(&self) -> Address;
}
impl sealed::Sealed for Address {}
impl AsAddress for Address {
    /// Identity conversion: an [`Address`] is already an address.
    #[inline(always)]
    fn as_address(&self) -> Address {
        *self
    }
}
impl sealed::Sealed for [u8; 32] {}
impl AsAddress for [u8; 32] {
    /// Derives a 24-byte address from a 32-byte public key: the first
    /// `ADDRESS_RESERVED` (4) bytes stay zero and the remaining 20 bytes
    /// are the trailing 20 bytes of the key.
    #[inline(always)]
    fn as_address(&self) -> Address {
        let mut output = [0u8; std::mem::size_of::<Address>()];
        // 32 - 24 + 4 = 12: copy self[12..] (20 bytes) into output[4..]
        // (also 20 bytes).
        const START: usize = 32 - std::mem::size_of::<Address>() + ADDRESS_RESERVED;
        output[ADDRESS_RESERVED..].copy_from_slice(
            &self[START..],
        );
        output
    }
}
/// Builds an unsigned single-signature spawn (account-creation) transaction
/// for `principal`, with the given `nonce` and `gas_price`.
///
/// The returned transaction carries a zeroed placeholder signature; it must
/// be signed before broadcast.
pub fn spawn(principal: [u8; 32], nonce: u64, gas_price: u64) -> single_signature::Spawn {
    let header = SpawnTxHeader {
        principal: principal.as_address(),
        template_address: single_signature::SINGLE_SIG_TEMPLATE_ADDRESS,
    };
    let arguments = single_signature::SpawnArguments {
        public_key: principal,
    };
    let payload = single_signature::SpawnPayload {
        nonce: Compact(nonce),
        gas_price: Compact(gas_price),
        arguments,
    };
    single_signature::Spawn {
        header,
        payload,
        signature: [0u8; 64],
    }
}
/// Builds an unsigned single-signature spend (transfer) transaction moving
/// `amount` from `principal` to `recipient`.
///
/// The returned transaction carries a zeroed placeholder signature; it must
/// be signed before broadcast.
pub fn transfer(
    principal: impl AsAddress,
    recipient: impl AsAddress,
    amount: u64,
    nonce: u64,
    gas_price: u64,
) -> single_signature::Spend {
    let arguments = single_signature::SpendArguments {
        destination: recipient.as_address(),
        amount: Compact(amount),
    };
    let payload = single_signature::SpendPayload {
        nonce: Compact(nonce),
        gas_price: Compact(gas_price),
        arguments,
    };
    single_signature::Spend {
        header: TxHeader {
            principal: principal.as_address(),
        },
        payload,
        signature: [0u8; 64],
    }
}

View File

@ -4,9 +4,6 @@ read -r from_address
printf "%s" "Public key of the recipient address: "
read -r to_address
printf "%s" "Public key of the nonce account: "
read -r nonce_address
printf "%s" "Name of the token to transfer: "
read -r token_name
@ -24,4 +21,4 @@ cat <<EOF > /data/input.json
}
EOF
icepick workflow sol broadcast --cluster devnet --nonce-address "$nonce_address"
icepick workflow sol broadcast --cluster devnet

View File

@ -3,12 +3,248 @@ name = "sol"
derivation_prefix = "m/44'/501'/0'"
algorithm = "Ed25519"
[[module]]
name = "cosmos"
derivation_prefix = "m/44'/118'/0'"
algorithm = "Secp256k1"
[[module.workflow]]
# The name of the workflow, which can be called by:
# `icepick workflow sol transfer-token`
name = "transfer-token"
[[module]]
name = "spacemesh"
derivation_prefix = "m/44'/540'/0'/0'"
algorithm = "Ed25519"
# These values are used as inputs for other workflows, acquired from the CLI.
# These values can only be strings, but other values can be any value that can
# be serialized by serde_json::Value.
# These values can also be loaded using "internal-load-file", using some form
# of later-defined signature validation.
inputs = ["from_address", "to_address", "token_name", "token_amount"]
# Get the token address and token decimals for the given token
[[module.workflow.step]]
type = "sol-get-token-info"
# The key is the key that is passed to the program in the
# `values` field. The value is the item in storage. In this case,
# we read a `token-name` from our input, but the operation expects `token`.
inputs = { token = "token_name" }
# Because these two fields are currently unused in our storage, we can grab
# them from the outputs of our module. The key is the key of the output value
# we want to store, and the value is the name to be assigned in storage.
outputs = { token_address = "token_address", token_decimals = "token_decimals" }
# Load the Blockhash from the SD card
[[module.workflow.step]]
type = "internal-load-file"
# Pre-defined values to be passed to the module
values = { filename = "blockhash.json" }
# This value is marked to be saved in-memory, and can be used as an input for
# later steps.
outputs = { blockhash = "blockhash" }
[[module.workflow.step]]
# Generate an unsigned Transaction
# This step MUST run immediately before sol-sign, as in the current version of
# Icepick, keys are only held in memory in-between a single module invocation.
type = "sol-transfer-token"
# If using a lot of inputs, it may be best to use a non-inline table.
# Non-inline tables _must_ be the last step, as otherwise, `outputs` for
# example would be considered a member of `inputs`. In this case, we use a
# non-inline table for `outputs` even though it would fit on one line, to avoid
# the ambiguity.
[module.workflow.step.inputs]
amount = "token_amount"
token_address = "token_address"
decimals = "token_decimals"
to_address = "to_address"
from_address = "from_address"
[module.workflow.step.outputs]
instructions = "instructions"
derivation_accounts = "derivation_accounts"
[[module.workflow.step]]
type = "sol-compile"
[module.workflow.step.inputs]
instructions = "instructions"
derivation_accounts = "derivation_accounts"
blockhash = "blockhash"
[module.workflow.step.outputs]
transaction = "unsigned_transaction"
# Sign the transaction
[[module.workflow.step]]
type = "sol-sign"
[module.workflow.step.inputs]
transaction = "unsigned_transaction"
blockhash = "blockhash"
[module.workflow.step.outputs]
transaction = "signed_transaction"
# Write the signed transaction to a file
[[module.workflow.step]]
type = "internal-save-file"
# We are using a static filename here, so we use `values` instead of `inputs`.
values = { filename = "transaction.json" }
# All fields in both `inputs` and `values`, other than `filename`, will be
# persisted to the file. In this case, the `transaction` field of the file will
# contain the signed transaction.
inputs = { transaction = "signed_transaction" }
[[module.workflow]]
name = "broadcast"
inputs = ["cluster"]
[[module.workflow.step]]
type = "sol-get-blockhash"
inputs = { cluster = "cluster" }
outputs = { blockhash = "blockhash" }
[[module.workflow.step]]
type = "internal-save-file"
values = { filename = "blockhash.json" }
inputs = { blockhash = "blockhash" }
[[module.workflow.step]]
type = "internal-load-file"
values = { filename = "transaction.json" }
outputs = { transaction = "transaction" }
[[module.workflow.step]]
type = "sol-broadcast"
inputs = { cluster = "cluster", transaction = "transaction" }
outputs = { status = "status", url = "url", error = "error" }
[[module.workflow]]
name = "generate-nonce-account"
inputs = ["cluster", "authorization_address"]
[[module.workflow.step]]
type = "sol-generate-wallet"
[[module.workflow.step]]
type = "sol-get-wallet-address"
outputs = { pubkey = "wallet_pubkey" }
[[module.workflow.step]]
type = "sol-await-funds"
inputs = { address = "wallet_pubkey", cluster = "cluster" }
# enough to cover two signatures and the 1_500_000 approx. rent fee
values = { lamports = "1510000" }
[[module.workflow.step]]
type = "sol-get-blockhash"
inputs = { cluster = "cluster" }
outputs = { blockhash = "blockhash" }
[[module.workflow.step]]
type = "sol-create-nonce-account-and-signing-key"
[module.workflow.step.inputs]
from_address = "wallet_pubkey"
authorization_address = "authorization_address"
[module.workflow.step.outputs]
transaction = "unsigned_transaction"
nonce_pubkey = "nonce_pubkey"
nonce_privkey = "private_keys"
[[module.workflow.step]]
type = "sol-sign"
[module.workflow.step.inputs]
blockhash = "blockhash"
signing_keys = "private_keys"
transaction = "unsigned_transaction"
[module.workflow.step.outputs]
transaction = "signed_transaction"
[[module.workflow.step]]
type = "sol-broadcast"
inputs = { cluster = "cluster", transaction = "signed_transaction" }
outputs = { status = "status", url = "url" }
[[module.workflow.step]]
type = "internal-cat"
inputs = { status = "status", url = "url", nonce_account = "nonce_pubkey" }
outputs = { status = "status", url = "url", nonce_account = "nonce_account" }
[[module.workflow]]
name = "transfer-online-nonced"
inputs = ["nonce_address", "to_address", "from_address", "amount", "cluster"]
[[module.workflow.step]]
type = "sol-get-nonce-account-data"
inputs = { nonce_address = "nonce_address", cluster = "cluster" }
outputs = { authority = "nonce_authority", durable_nonce = "nonce" }
[[module.workflow.step]]
type = "sol-transfer"
inputs = { from_address = "from_address", to_address = "to_address", amount = "amount" }
outputs = { instructions = "instructions", derivation_accounts = "derivation_accounts" }
[[module.workflow.step]]
type = "sol-compile"
[module.workflow.step.inputs]
instructions = "instructions"
derivation_accounts = "derivation_accounts"
nonce_data = "nonce"
nonce_address = "nonce_address"
nonce_authority = "from_address"
[module.workflow.step.outputs]
transaction = "unsigned_transaction"
[[module.workflow.step]]
type = "sol-sign"
inputs = { blockhash = "nonce", transaction = "unsigned_transaction" }
outputs = { transaction = "signed_transaction" }
[[module.workflow.step]]
type = "sol-broadcast"
inputs = { cluster = "cluster", transaction = "signed_transaction" }
outputs = { status = "status", url = "url" }
[[module.workflow]]
name = "transfer-online"
inputs = ["to_address", "from_address", "amount", "cluster"]
[[module.workflow.step]]
type = "sol-get-blockhash"
inputs = { cluster = "cluster" }
outputs = { blockhash = "blockhash" }
[[module.workflow.step]]
type = "sol-transfer"
inputs = { from_address = "from_address", to_address = "to_address", amount = "amount" }
outputs = { instructions = "instructions", derivation_accounts = "derivation_accounts" }
[[module.workflow.step]]
type = "sol-compile"
[module.workflow.step.inputs]
instructions = "instructions"
derivation_accounts = "derivation_accounts"
blockhash = "blockhash"
[module.workflow.step.outputs]
transaction = "unsigned_transaction"
[[module.workflow.step]]
type = "sol-sign"
inputs = { blockhash = "blockhash", transaction = "unsigned_transaction" }
outputs = { transaction = "signed_transaction" }
[[module.workflow.step]]
type = "sol-broadcast"
inputs = { cluster = "cluster", transaction = "signed_transaction" }
outputs = { status = "status", url = "url" }

View File

@ -1,34 +0,0 @@
mnemonics:
keyfork: ENC[AES256_GCM,data:kz2vAo1XMCylVY6WtDfZ9Z0xKvccLRrOvfP2x0IJtJkRu3HmShTEzPlrTfRXrKcuxLqqJlxOnGPR7/Y7bPhRvH/nRj59Lz1SLocVl8UVq9YXsIpgymLJ0Hp2I6XUBuItOhGonvc61iAe7cXFTAO+T2VUMK0Tf40xoJcT2eBC9qOjkC5xOrHTa+FBDFcvQdHcMobm+y7Nv1BzpzbODaA=,iv:m3p+sAgZjQReM3YAld6n1uKppkQSn51IgQGsxlYHnn4=,tag:xrG7WLr9w4zE45TiHX6a8w==,type:str]
solana: ENC[AES256_GCM,data:5/OKpwkZT+Vf6AvTiVj7zafVoqiqkKwLRLwjIHA6MGbei0ssCWqxM8QAtka+BBNGGhe5SUTlr/nAqGfoiP0t6fwUyjxUnOgu,iv:8Ctui1cO/RCZAdtfjiCnqvYyINdOcMHZfIZD0nGj2Kg=,tag:5ASiLG+hehhCYwdJ+1MZFg==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age: []
lastmodified: "2025-01-03T23:53:42Z"
mac: ENC[AES256_GCM,data:/WYbQmisG9jvUKvcmMwQWop0X3EbLbCurUDnoMoOKJ7XxKRpGOKh/GkqqSFgMzpC8A6X9Cmjqo4gatiYBBGjDi5PIk+9fGvUE0ZSc4p5p5+0RLu7KyxYeRtsmhEjUYJllVi9aDLJT4x+GXta91uTWZFUWExcZ1wJHh42wSFsbo4=,iv:ZLSH09zdyeDom5koxrS5KBCv2xd3cCEkJO4/hAmzUPM=,tag:AGtJTuWUEslY+uD7OFCM/g==,type:str]
pgp:
- created_at: "2025-01-03T23:53:41Z"
enc: |
-----BEGIN PGP MESSAGE-----
hQIMAw95Vf08z8oUAQ/8CC594sGEYJLHzFZy9UsExxy7CQl2q1SKQA8frZCU1cBu
CyIex99UgQzKTSUqttlz5hxqfyodvpoRfBiZUOcyfOgVgTPtDJ9UfByMbsMc0wy0
q8hErtNYhBmzWRway4xoAThJUrfi6jXl/m1doFVH6Ug0Q9qi56Eo8DYaUtsE+NFU
HjHslQpMLWm3uf/i2mQhftmwE00tWTVmBfBtuAycj5jLc3AJAveNvB5jK1O22c9N
PHhWeHQB6K3dQfTLS1O549oSfGTfrXXxq4cHYT9BZNHDi0T4/tH1xHwmLHOwnUiZ
i0tQ8CTYL8eALyKxj/BQQxbLXKpmor7Yli1QH1UWGw5AddvVqIz1zIyukHN/AGN7
E475zcvkc2uLPBwnZ3JS3n7e1X9TCa/iZlW/msEqmkLeh6eW47t8/p13yj0WnkCD
1SqA6qFEIcH8TaWqC03vLZG9ue2gSZ11db+3ZeGzqykUAG/4NR8ncD+qdhRbCZtp
ZPASpfZnByweyGVrnfMgR/sL+i8/C7KgCqj8pUOOS5Z5Av8DNMpNushPndhdHJDU
XAzNe2gu5StPvqqlH9wONvxiYJSmNy/dWnnvgwozvm9aPPCboYjmO9fwxsy0Zl+x
20Bb8G5nl6C6ZvToztzxKPzToxaX1x2MFwovqnHT2GACtZ6/tAmMjg3oCFd+k/PS
XgHFcFzyleUy9LF8Yb7DJcEDe3Tue2wvvY8XlNsIYeMnpfJ/TCq9Grzho1/w31uX
swHv2T4SnwFnoBQoXk8cSOMqrWK3XyWi0RI9X16m+rTGXZ13I8hggi/ne8QbMsI=
=szJ5
-----END PGP MESSAGE-----
fp: 8E401478A3FBEF72
unencrypted_suffix: _unencrypted
version: 3.7.3