Compare commits

...

4 Commits

15 changed files with 1039 additions and 363 deletions

53
Cargo.lock generated
View File

@ -1659,12 +1659,14 @@ version = "0.1.0"
dependencies = [
"clap",
"icepick-module",
"icepick-workflow",
"keyfork-derive-util",
"keyforkd-client",
"keyforkd-models",
"serde",
"serde_json",
"thiserror 2.0.9",
"serde_yaml",
"thiserror 2.0.11",
"toml 0.8.19",
]
@ -1677,7 +1679,7 @@ dependencies = [
"icepick-module",
"serde",
"serde_json",
"thiserror 2.0.9",
"thiserror 2.0.11",
"tokio",
]
@ -1688,7 +1690,7 @@ dependencies = [
"icepick-module",
"serde",
"serde_json",
"thiserror 2.0.9",
"thiserror 2.0.11",
]
[[package]]
@ -1720,7 +1722,17 @@ dependencies = [
"spl-associated-token-account 6.0.0",
"spl-token 7.0.0",
"spl-token-2022 6.0.0",
"thiserror 2.0.9",
"thiserror 2.0.11",
]
[[package]]
name = "icepick-workflow"
version = "0.1.0"
dependencies = [
"keyfork-derive-util",
"serde",
"serde_json",
"thiserror 2.0.11",
]
[[package]]
@ -1993,9 +2005,9 @@ checksum = "fb5eae1e7471415b59f852ccb43b7858f0650a5d158ccbfb1d39088d0881f582"
[[package]]
name = "keyfork-derive-util"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://git.distrust.co/public/_cargo-index.git"
checksum = "12df1b3dbe1ac978fa53d86889156da010e290edb60ec308064906696d6a85b7"
checksum = "9205ca562716366941d69b16f0610b570811fe428b884879efbe68897d57edbc"
dependencies = [
"digest 0.10.7",
"ed25519-dalek 2.1.1",
@ -3095,6 +3107,19 @@ dependencies = [
"syn 2.0.92",
]
[[package]]
name = "serde_yaml"
version = "0.9.34+deprecated"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
dependencies = [
"indexmap",
"itoa",
"ryu",
"serde",
"unsafe-libyaml",
]
[[package]]
name = "sha2"
version = "0.9.9"
@ -4958,11 +4983,11 @@ dependencies = [
[[package]]
name = "thiserror"
version = "2.0.9"
version = "2.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc"
checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc"
dependencies = [
"thiserror-impl 2.0.9",
"thiserror-impl 2.0.11",
]
[[package]]
@ -4978,9 +5003,9 @@ dependencies = [
[[package]]
name = "thiserror-impl"
version = "2.0.9"
version = "2.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4"
checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2"
dependencies = [
"proc-macro2",
"quote",
@ -5205,6 +5230,12 @@ dependencies = [
"void",
]
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
[[package]]
name = "untrusted"
version = "0.9.0"

View File

@ -3,9 +3,11 @@
resolver = "2"
members = [
"crates/icepick",
"crates/icepick-workflow",
"crates/icepick-module",
"crates/builtins/icepick-internal",
"crates/by-chain/icepick-solana", "crates/by-chain/icepick-cosmos",
"crates/by-chain/icepick-solana",
"crates/by-chain/icepick-cosmos",
]
[workspace.dependencies]

View File

@ -6,7 +6,7 @@ use cosmrs::{
};
use icepick_module::Module;
use serde::{Deserialize, Serialize};
use std::str::FromStr;
use std::{collections::HashMap, str::FromStr};
use cosmrs::crypto::secp256k1;
@ -21,6 +21,18 @@ pub struct GetChainInfo {
chain_name: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetValidatorNames {
blockchain_config: coin_denoms::Blockchain,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetDelegation {
delegator_address: String,
validator_address: String,
blockchain_config: coin_denoms::Blockchain,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GenerateWallet {
account: Option<String>,
@ -28,7 +40,7 @@ pub struct GenerateWallet {
#[derive(Serialize, Deserialize, Debug)]
pub struct GetWalletAddress {
address_prefix: String,
blockchain_config: coin_denoms::Blockchain,
}
#[derive(Serialize, Deserialize, Debug)]
@ -54,11 +66,30 @@ pub struct Transfer {
to_address: String,
from_account: Option<String>,
from_address: String,
// TODO: find a way to simulate the transaction and calculate the gas necessary.
// For now, 0.01 KYVE seems to be a reasonable mainnet number;
// for testing purposes, we go much lower: 0.0001.
gas_factor: Option<String>,
gas_factor: Option<String>,
blockchain_config: coin_denoms::Blockchain,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Stake {
amount: String,
denom: String,
delegate_address: String,
validator_address: String,
gas_factor: Option<String>,
blockchain_config: coin_denoms::Blockchain,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Withdraw {
amount: String,
denom: String,
delegate_address: String,
validator_address: String,
gas_factor: Option<String>,
blockchain_config: coin_denoms::Blockchain,
}
@ -92,11 +123,15 @@ pub struct Request {
#[allow(clippy::large_enum_variant)]
pub enum Operation {
GetChainInfo(GetChainInfo),
GetValidatorNames(GetValidatorNames),
GetDelegation(GetDelegation),
GenerateWallet(GenerateWallet),
GetWalletAddress(GetWalletAddress),
GetAccountData(GetAccountData),
AwaitFunds(AwaitFunds),
Transfer(Transfer),
Stake(Stake),
Withdraw(Withdraw),
Sign(Sign),
Broadcast(Broadcast),
}
@ -175,6 +210,30 @@ impl Module for Cosmos {
.build(),
);
let get_validators = Operation::builder()
.name("get-validator-names")
.description("Get a list of all validators, by name (if the validator provides one)")
.build();
let get_delegation = Operation::builder()
.name("get-delegation")
.description("Get the delegate information for a delegator-validator pair.")
.build()
.argument(
&Argument::builder()
.name("delegator_address")
.description("The address of the delegator.")
.r#type(ArgumentType::Required)
.build(),
)
.argument(
&Argument::builder()
.name("validator_address")
.description("The address of the validator.")
.r#type(ArgumentType::Required)
.build(),
);
let generate_wallet = Operation::builder()
.name("generate-wallet")
.description("Generate a wallet for the given account.")
@ -261,6 +320,86 @@ impl Module for Cosmos {
.build(),
);
let stake = Operation::builder()
.name("stake")
.description("Delegate coins to a specified validator.")
.build()
.argument(
&Argument::builder()
.name("amount")
.description("The amount of coins to stake.")
.r#type(ArgumentType::Required)
.build(),
)
.argument(
&Argument::builder()
.name("denom")
.description("The denomination of coin to stake.")
.r#type(ArgumentType::Required)
.build(),
)
.argument(
&Argument::builder()
.name("delegate_address")
.description("The address holding funds to be staked.")
.r#type(ArgumentType::Required)
.build(),
)
.argument(
&Argument::builder()
.name("validator_address")
.description("The address of the validator operator to stake upon.")
.r#type(ArgumentType::Required)
.build(),
)
.argument(
&Argument::builder()
.name("gas_factor")
.description("The factor to multiply the default gas amount by.")
.r#type(ArgumentType::Optional)
.build(),
);
let withdraw = Operation::builder()
.name("withdraw")
.description("Delegate coins to a specified validator.")
.build()
.argument(
&Argument::builder()
.name("amount")
.description("The amount of coins to withdraw.")
.r#type(ArgumentType::Required)
.build(),
)
.argument(
&Argument::builder()
.name("denom")
.description("The denomination of coin to withdraw.")
.r#type(ArgumentType::Required)
.build(),
)
.argument(
&Argument::builder()
.name("delegate_address")
.description("The address holding funds to be withdrawn.")
.r#type(ArgumentType::Required)
.build(),
)
.argument(
&Argument::builder()
.name("validator_address")
.description("The address of the validator operator to withdraw from.")
.r#type(ArgumentType::Required)
.build(),
)
.argument(
&Argument::builder()
.name("gas_factor")
.description("The factor to multiply the default gas amount by.")
.r#type(ArgumentType::Optional)
.build(),
);
let sign = Operation::builder()
.name("sign")
.description("Sign a previously-generated transaction.")
@ -287,12 +426,16 @@ impl Module for Cosmos {
vec![
get_chain_info,
get_validators,
get_delegation,
generate_wallet,
get_wallet_address,
get_account_info,
await_funds,
transfer,
sign,
stake,
withdraw,
broadcast,
]
}
@ -317,6 +460,84 @@ impl Module for Cosmos {
},
}))
}
Operation::GetValidatorNames(GetValidatorNames { blockchain_config }) => {
use cosmrs::proto::cosmos::staking::v1beta1::*;
let validators = run_async(async {
let client =
cosmrs::rpc::HttpClient::new(blockchain_config.rpc_url.as_str()).unwrap();
// TODO: Pagination
let validator: QueryValidatorsResponse = abci_query(
&client,
"/cosmos.staking.v1beta1.Query/Validators",
Some(&QueryValidatorsRequest {
status: BondStatus::Bonded.as_str_name().to_string(),
pagination: None,
}),
None,
false,
)
.await
.unwrap();
validator.validators
});
let id_to_name = validators
.iter()
.map(|val| {
let name = val
.description
.as_ref()
.map(|desc| &desc.moniker)
.filter(|moniker| !moniker.is_empty())
.unwrap_or(&val.operator_address);
(val.operator_address.clone(), name.clone())
})
.collect::<HashMap<String, String>>();
Ok(serde_json::json!({
"blob": {
"validators": id_to_name,
}
}))
}
Operation::GetDelegation(GetDelegation {
delegator_address,
validator_address,
blockchain_config,
}) => {
use cosmrs::proto::cosmos::staking::v1beta1::*;
let delegation = run_async(async {
let client =
cosmrs::rpc::HttpClient::new(blockchain_config.rpc_url.as_str()).unwrap();
let delegation: QueryDelegationResponse = abci_query(
&client,
"/cosmos.staking.v1beta1.Query/Delegation",
Some(&QueryDelegationRequest {
delegator_addr: delegator_address,
validator_addr: validator_address,
}),
None,
false,
)
.await
.unwrap();
delegation.delegation_response.unwrap()
});
let DelegationResponse {
delegation: Some(delegation),
balance: Some(balance),
} = delegation
else {
panic!("Either delegation or balance were not accessible");
};
// NOTE: The return value here is an i128. Do not parse it. serde becomes unhappy.
Ok(serde_json::json!({
"blob": {
"shares": delegation.shares,
"balance": balance.amount,
}
}))
}
Operation::GenerateWallet(GenerateWallet { account }) => {
let account = u32::from_str(account.as_deref().unwrap_or("0")).unwrap();
Ok(serde_json::json!({
@ -324,12 +545,14 @@ impl Module for Cosmos {
"derivation_accounts": [(account | 1 << 31)],
}))
}
Operation::GetWalletAddress(GetWalletAddress { address_prefix }) => {
Operation::GetWalletAddress(GetWalletAddress { blockchain_config }) => {
// NOTE: panics if doesn't exist
let key = request.derived_keys.unwrap()[0];
let privkey = secp256k1::SigningKey::from_slice(&key).unwrap();
let pubkey = privkey.public_key();
let sender_account_id = pubkey.account_id(&address_prefix).unwrap();
let sender_account_id = pubkey
.account_id(&blockchain_config.bech32_config.account_address_prefix)
.unwrap();
Ok(serde_json::json!({
"blob": {
"pubkey": sender_account_id,
@ -431,6 +654,158 @@ impl Module for Cosmos {
}
}))
}
Operation::Stake(Stake {
amount,
denom,
delegate_address,
validator_address,
gas_factor,
blockchain_config,
}) => {
// Check if given denom is min denom or normal and adjust accordingly
let Some(relevant_denom) = blockchain_config.currencies.iter().find(|c| {
[&c.coin_denom, &c.coin_minimal_denom]
.iter()
.any(|name| **name == denom)
}) else {
panic!("{denom} not in {blockchain_config:?}");
};
let gas_factor = gas_factor
.as_deref()
.map(f64::from_str)
.transpose()
.unwrap()
.unwrap_or(1.0);
let amount = f64::from_str(&amount).unwrap();
let adjusted_amount = if relevant_denom.coin_denom == denom {
amount * 10f64.powi(i32::from(relevant_denom.coin_decimals))
} else if relevant_denom.coin_minimal_denom == denom {
amount
} else {
unreachable!("broke invariant: check denom checker");
} as u128;
let delegate_id = AccountId::from_str(&delegate_address).unwrap();
let validator_id = AccountId::from_str(&validator_address).unwrap();
let coin = cosmrs::Coin {
denom: relevant_denom.coin_minimal_denom.parse().unwrap(),
amount: adjusted_amount,
};
let msg_delegate = cosmrs::staking::MsgDelegate {
delegator_address: delegate_id,
validator_address: validator_id,
amount: coin,
}
.to_any()
.unwrap();
let expected_gas = 200_000u64;
// convert gas "price" to minimum denom,
// multiply by amount of gas required,
// multiply by gas factor
let expected_fee =
blockchain_config.gas_price_step.high * expected_gas as f64 * gas_factor;
let fee_coin = cosmrs::Coin {
denom: relevant_denom.coin_minimal_denom.parse().unwrap(),
amount: expected_fee as u128,
};
let fee = Fee::from_amount_and_gas(fee_coin, expected_gas);
#[allow(clippy::identity_op)]
Ok(serde_json::json!({
"blob": {
"fee": remote_serde::Fee::from(&fee),
// TODO: Body does not implement Serialize and
// needs to be constructed in Sign
"tx_messages": [msg_delegate],
// re-export, but in general this should be copied over
// using workflows
},
"derivation_accounts": [0u32 | 1 << 31],
}))
}
Operation::Withdraw(Withdraw {
amount,
denom,
delegate_address,
validator_address,
gas_factor,
blockchain_config,
}) => {
// Check if given denom is min denom or normal and adjust accordingly
let Some(relevant_denom) = blockchain_config.currencies.iter().find(|c| {
[&c.coin_denom, &c.coin_minimal_denom]
.iter()
.any(|name| **name == denom)
}) else {
panic!("{denom} not in {blockchain_config:?}");
};
let gas_factor = gas_factor
.as_deref()
.map(f64::from_str)
.transpose()
.unwrap()
.unwrap_or(1.0);
let amount = f64::from_str(&amount).unwrap();
let adjusted_amount = if relevant_denom.coin_denom == denom {
amount * 10f64.powi(i32::from(relevant_denom.coin_decimals))
} else if relevant_denom.coin_minimal_denom == denom {
amount
} else {
unreachable!("broke invariant: check denom checker");
} as u128;
let delegate_id = AccountId::from_str(&delegate_address).unwrap();
let validator_id = AccountId::from_str(&validator_address).unwrap();
let coin = cosmrs::Coin {
denom: relevant_denom.coin_minimal_denom.parse().unwrap(),
amount: adjusted_amount,
};
let msg_undelegate = cosmrs::staking::MsgUndelegate {
delegator_address: delegate_id,
validator_address: validator_id,
amount: coin,
}
.to_any()
.unwrap();
let expected_gas = 250_000u64;
// convert gas "price" to minimum denom,
// multiply by amount of gas required,
// multiply by gas factor
let expected_fee =
blockchain_config.gas_price_step.high * expected_gas as f64 * gas_factor;
let fee_coin = cosmrs::Coin {
denom: relevant_denom.coin_minimal_denom.parse().unwrap(),
amount: expected_fee as u128,
};
let fee = Fee::from_amount_and_gas(fee_coin, expected_gas);
#[allow(clippy::identity_op)]
Ok(serde_json::json!({
"blob": {
"fee": remote_serde::Fee::from(&fee),
// TODO: Body does not implement Serialize and
// needs to be constructed in Sign
"tx_messages": [msg_undelegate],
// re-export, but in general this should be copied over
// using workflows
},
"derivation_accounts": [0u32 | 1 << 31],
}))
}
Operation::Transfer(Transfer {
amount,
denom,
@ -484,21 +859,16 @@ impl Module for Cosmos {
let expected_gas = 100_000u64;
// convert gas "price" to minimum denom,
// multiply by amount of gas required,
// multiply by gas factor if necessary.
let expected_fee = blockchain_config.gas_price_step.high
// * dbg!(10f64.powi(relevant_denom.coin_decimals as i32))
* expected_gas as f64
* gas_factor;
// multiply by gas factor
let expected_fee =
blockchain_config.gas_price_step.high * expected_gas as f64 * gas_factor;
let fee_coin = cosmrs::Coin {
denom: relevant_denom.coin_minimal_denom.parse().unwrap(),
amount: expected_fee as u128,
};
let fee = Fee::from_amount_and_gas(
fee_coin,
expected_gas,
);
let fee = Fee::from_amount_and_gas(fee_coin, expected_gas);
#[allow(clippy::identity_op)]
Ok(serde_json::json!({
@ -509,7 +879,6 @@ impl Module for Cosmos {
"tx_messages": [msg_send],
// re-export, but in general this should be copied over
// using workflows
"blockchain_config": blockchain_config,
},
"derivation_accounts": [0u32 | 1 << 31],
}))
@ -541,7 +910,6 @@ impl Module for Cosmos {
Ok(serde_json::json!({
"blob": {
"transaction": signed_tx.to_bytes().unwrap(),
"blockchain_config": blockchain_config,
}
}))
}
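
The Stake, Withdraw, and Transfer arms above share the same amount and fee arithmetic. Below is a minimal runnable sketch of that math (not part of these commits); the 0.025 gas price and 6 coin decimals are made-up illustrative values, while the formulas, the 200_000 gas constant, and the 1 << 31 offset (the BIP-32 hardened-index bit) come from the code above.

// Minimal sketch (not part of these commits) of the amount/fee math above.
fn main() {
    // User-facing amount in the display denom, e.g. 1.5 ATOM with 6 decimals,
    // converted to the minimal denom before building the Coin.
    let amount = 1.5_f64;
    let coin_decimals: i32 = 6; // illustrative; stands in for relevant_denom.coin_decimals
    let adjusted_amount = (amount * 10f64.powi(coin_decimals)) as u128;
    assert_eq!(adjusted_amount, 1_500_000);

    // Fee = gas price (minimal denom per gas unit) * gas limit * optional factor.
    let gas_price_high = 0.025_f64; // illustrative; stands in for blockchain_config.gas_price_step.high
    let expected_gas = 200_000_u64; // the constant used for MsgDelegate above
    let gas_factor = 1.0_f64; // default when the gas_factor argument is omitted
    let expected_fee = (gas_price_high * expected_gas as f64 * gas_factor) as u128;
    assert_eq!(expected_fee, 5_000);

    // Derivation accounts are requested hardened: index | 1 << 31.
    let account = 0u32;
    assert_eq!(account | 1 << 31, 0x8000_0000);
}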

View File

@ -0,0 +1,10 @@
[package]
name = "icepick-workflow"
version = "0.1.0"
edition = "2021"
[dependencies]
keyfork-derive-util = { version = "0.2.2", registry = "distrust", default-features = false }
serde.workspace = true
serde_json.workspace = true
thiserror = "2.0.11"

View File

@ -0,0 +1,205 @@
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::{HashMap, HashSet};
#[derive(thiserror::Error, Debug)]
pub enum SimulationError {
#[error("Step not found: {0}")]
StepNotFound(String),
#[error("Expected input variable or static value not found in step {0}: {1}")]
ValueNotFound(String, String),
}
#[derive(thiserror::Error, Debug)]
pub enum WorkflowError {
#[error("Invocable operation could not be found: {0}")]
InvocableOperationNotFound(String),
#[error("Derivation configuration for operation not found: {0}")]
DerivationConfigurationNotFound(String),
#[error("An error was encountered while invoking an operation")]
InvocationError(String),
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Workflow {
pub name: String,
#[serde(default)]
pub inputs: Vec<String>,
#[serde(default)]
pub optional_inputs: Vec<String>,
#[serde(rename = "step")]
steps: Vec<WorkflowStep>,
}
pub type StringMap = HashMap<String, String>;
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorkflowStep {
r#type: String,
#[serde(default)]
values: StringMap,
#[serde(default)]
inputs: StringMap,
#[serde(default)]
outputs: StringMap,
}
// TODO: This should probably be migrated to an actual Result type, instead of
// currently just shoving everything in "blob". Probably done after derivation_accounts
// gets hoisted out of here.
#[derive(Serialize, Deserialize)]
pub struct OperationResult {
// All values returned from an operation.
blob: HashMap<String, Value>,
// Any requested accounts from an operation.
//
// TODO: Move this to its own step.
#[serde(default)]
derivation_accounts: Vec<DerivationIndex>,
}
type DeriveKeys<'a> = &'a dyn Fn(&DerivationAlgorithm, &DerivationPath, &[DerivationIndex]) -> Vec<Vec<u8>>;
impl Workflow {
pub fn simulate_workflow<T: InvocableOperation + Sized>(
&self,
mut data: HashSet<String>,
operations: &[T],
) -> Result<Vec<String>, SimulationError> {
let mut reports = vec![];
for step in self.steps.iter() {
let step_type = step.r#type.clone();
let Some(invocable) = operations.iter().find(|op| *op.name() == step_type) else {
return Err(SimulationError::StepNotFound(step_type));
};
// Check we have the values the module expects
for in_memory_name in step.inputs.values() {
if !data.contains(in_memory_name) && !step.values.contains_key(in_memory_name) {
return Err(SimulationError::ValueNotFound(
step_type,
in_memory_name.to_owned(),
));
}
}
// Check whether the module expects the keys as arguments, or if the
// keys will be passed as a "payload" variable.
let mut inputs = step.inputs.keys().collect::<HashSet<_>>();
for argument in invocable.argument_names() {
inputs.remove(argument);
}
for remaining_input in inputs {
reports.push(format!(
"Step {step_type}: Input {remaining_input} is not interpreted as a argument"
));
}
// Add the return values from the module into memory
data.extend(step.outputs.values().cloned());
}
Ok(reports)
}
pub fn run_workflow<T: InvocableOperation>(
&self,
mut data: HashMap<String, Value>,
operations: &[T],
derive_keys: DeriveKeys,
) -> Result<HashMap<String, Value>, WorkflowError> {
let mut derived_keys = vec![];
let mut derivation_accounts = vec![];
for step in &self.steps {
let step_type = step.r#type.clone();
let Some(operation) = operations.iter().find(|op| *op.name() == step_type) else {
return Err(WorkflowError::InvocableOperationNotFound(step_type));
};
// Add requested derivation keys and clear derivation account requests.
if !derivation_accounts.is_empty() {
let Some((algo, path_prefix)) = operation.derivation_configuration() else {
return Err(WorkflowError::DerivationConfigurationNotFound(step_type));
};
derived_keys.extend(derive_keys(algo, path_prefix, &derivation_accounts));
}
derivation_accounts.clear();
// Prepare all inputs for the operation invocation
let inputs: HashMap<String, Value> = data
.iter()
.map(|(k, v)| (k, v.clone()))
.filter_map(|(k, v)| {
// We have our stored name, `k`, which matches with this inner loop's `v`. We
// need to return our desired name, rather than our stored name, and the value
// in our storage, our current `v`.
let (desired, _stored) = step.inputs.iter().find(|(_, v)| k == *v)?;
Some((desired.clone(), v))
})
.chain(
step.values
.iter()
.map(|(k, v)| (k.clone(), Value::String(v.clone()))),
)
.collect();
let OperationResult {
blob,
derivation_accounts: new_accounts,
} = operation.invoke(&inputs, &derived_keys);
derived_keys.clear();
derivation_accounts.extend(new_accounts);
data.extend(blob.into_iter().filter_map(|(k, v)| {
// We have our stored name, `k`, which matches with this inner loop's `v`. We
// need to return our desired name, rather than our stored name, and the value
// in our storage, our current `v`.
let (_given, stored) = step.outputs.iter().find(|(k1, _)| k == **k1)?;
Some((stored.clone(), v))
}));
}
if let Some(last_step) = &self.steps.last() {
let values = last_step.outputs.values().collect::<HashSet<_>>();
data.retain(|stored_name, _| {
values.contains(stored_name)
});
}
Ok(data)
}
}
pub trait WorkflowHandler {
/// Load all inputs for the Workflow from some external source, such as CLI arguments or
/// JSON payloads. The inputs can then be used to simulate or perform a workflow.
fn load_inputs(&self) -> StringMap;
}
/// The configuration for an Icepick operation that can be invoked.
///
/// Implementors of this trait should include all necessary requirements to invoke the operation
/// within themselves.
pub trait InvocableOperation {
/// Invoke the operation with the supplied inputs and derived keys.
fn invoke(&self, input: &HashMap<String, Value>, derived_keys: &[Vec<u8>]) -> OperationResult;
/// The name of the operation.
fn name(&self) -> &String;
/// The names of arguments that can be passed to the function.
fn argument_names(&self) -> impl Iterator<Item = &String>;
/// The derivation algorithm and derivation path to be prefixed to all derivation requests.
fn derivation_configuration(&self) -> Option<(&DerivationAlgorithm, &DerivationPath)>;
}
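
The public surface of the new icepick-workflow crate is the Workflow type, OperationResult, and the InvocableOperation trait above. The following is a minimal sketch (not part of these commits) of how a consumer could implement the trait for a hypothetical in-process "echo" operation and dry-run a one-step workflow with simulate_workflow. Because Workflow's steps and OperationResult's fields are not public, both are constructed through their Deserialize impls, mirroring how the CLI deserializes module output.

use std::collections::{HashMap, HashSet};

use icepick_workflow::{InvocableOperation, OperationResult, Workflow};
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationPath};
use serde_json::{json, Value};

// Hypothetical in-process operation: echoes its inputs back as the result blob.
struct EchoOperation {
    name: String,
    arguments: Vec<String>,
}

impl InvocableOperation for EchoOperation {
    fn invoke(&self, input: &HashMap<String, Value>, _derived_keys: &[Vec<u8>]) -> OperationResult {
        // OperationResult's fields are private; build it the way the CLI does,
        // by deserializing a module-style JSON response.
        serde_json::from_value(json!({ "blob": input })).expect("valid OperationResult")
    }
    fn name(&self) -> &String {
        &self.name
    }
    fn argument_names(&self) -> impl Iterator<Item = &String> {
        self.arguments.iter()
    }
    fn derivation_configuration(&self) -> Option<(&DerivationAlgorithm, &DerivationPath)> {
        None // this operation never requests derived keys
    }
}

fn main() {
    // A one-step workflow, deserialized because `steps` is not public.
    let workflow: Workflow = serde_json::from_value(json!({
        "name": "echo-example",
        "inputs": ["message"],
        "step": [
            { "type": "echo", "inputs": { "message": "message" }, "outputs": { "message": "reply" } }
        ]
    }))
    .expect("valid workflow");

    let operations = vec![EchoOperation {
        name: "echo".to_string(),
        arguments: vec!["message".to_string()],
    }];

    // simulate_workflow only checks value flow; no operation is actually invoked.
    let data: HashSet<String> = ["message".to_string()].into();
    let reports = workflow
        .simulate_workflow(data, &operations)
        .expect("simulation should succeed");
    assert!(reports.is_empty());
}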

View File

@ -6,10 +6,16 @@ edition = "2021"
[dependencies]
clap = { version = "4.5.20", features = ["cargo", "derive", "string"] }
icepick-module = { version = "0.1.0", path = "../icepick-module" }
icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" }
keyfork-derive-util = { version = "0.2.1", registry = "distrust" }
keyforkd-client = { version = "0.2.1", registry = "distrust" }
keyforkd-models = { version = "0.2.0", registry = "distrust" }
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
serde_yaml = "0.9.34"
thiserror = "2.0.3"
toml = "0.8.19"
[build-dependencies]
icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" }
serde_yaml = "0.9.34"

50
crates/icepick/build.rs Normal file
View File

@ -0,0 +1,50 @@
use icepick_workflow::Workflow;
use std::{collections::HashMap, path::{PathBuf, Path}};
fn env_var(var: &'static str) -> String {
println!("cargo::rerun-if-env-changed={var}");
std::env::var(var).expect(var)
}
fn track_path(path: &Path) {
println!("cargo::rerun-if-changed={}", path.to_str().unwrap());
}
fn main() {
let out_dir = env_var("CARGO_TARGET_DIR");
let crate_dir = env_var("CARGO_MANIFEST_DIR");
let workflows_dir = PathBuf::from(crate_dir).join("workflows");
track_path(&workflows_dir);
let mut workflows_by_module: HashMap<String, Vec<Workflow>> = Default::default();
for module_dir in std::fs::read_dir(&workflows_dir).unwrap() {
let module_dir = module_dir.unwrap();
let path = module_dir.path();
if !path.is_dir() {
panic!("found unexpected file {}", path.to_string_lossy());
}
let mut workflows = vec![];
for workflow_file in std::fs::read_dir(&path).unwrap() {
let workflow_file = workflow_file.unwrap();
let path = workflow_file.path();
if !path.is_file() {
panic!("found unexpected non-file {}", path.to_string_lossy());
}
let file_content = std::fs::read(path).unwrap();
let workflow: Workflow = serde_yaml::from_slice(&file_content).unwrap();
workflows.push(workflow);
}
workflows_by_module.insert(
module_dir.file_name().to_str().unwrap().to_owned(),
workflows,
);
}
let out_path = PathBuf::from(out_dir).join("workflows.yaml");
let out_file = std::fs::File::create(&out_path).unwrap();
serde_yaml::to_writer(out_file, &workflows_by_module).unwrap();
}

View File

@ -70,7 +70,7 @@ struct ModuleConfig {
/// All workflows for a module.
#[serde(rename = "workflow", default)]
workflows: Vec<workflow::Workflow>,
workflows: Vec<icepick_workflow::Workflow>,
}
mod serde_derivation {
@ -134,6 +134,23 @@ pub fn do_cli_thing() {
workflows: Default::default(),
});
let workflows_file = std::env::vars().find_map(|(k, v)| {
if k == "ICEPICK_WORKFLOWS_FILE" {
return Some(v);
}
None
});
let workflows_path = workflows_file.unwrap_or_else(|| "workflows.yaml".to_string());
let workflows_content = std::fs::read(&workflows_path).expect("can't read workflows from file");
let workflows: HashMap<String, Vec<icepick_workflow::Workflow>> =
serde_yaml::from_slice(&workflows_content).unwrap();
for module in &mut config.modules {
if let Some(module_workflows) = workflows.get(&module.name) {
module.workflows.extend(module_workflows.iter().cloned());
}
}
let mut commands = vec![];
let mut icepick_command = command!();
for module in &config.modules {
@ -174,7 +191,7 @@ pub fn do_cli_thing() {
for module in workflows.iter() {
let mut module_subcommand = clap::Command::new(module.0.as_str());
for workflow in &module.1 {
module_subcommand = module_subcommand.subcommand(workflow.generate_command());
module_subcommand = module_subcommand.subcommand(workflow::generate_command(workflow));
}
workflow_command = workflow_command.subcommand(module_subcommand);
}
@ -219,7 +236,7 @@ pub fn do_cli_thing() {
.find(|(module, _)| module == module_name)
.and_then(|(_, workflows)| workflows.iter().find(|x| x.name == workflow_name))
.expect("workflow from CLI should match config");
workflow.handle(matches, commands, &config.modules);
workflow::handle(workflow, matches, commands, &config.modules);
return;
}

View File

@ -1,68 +1,35 @@
use keyfork_derive_util::DerivationIndex;
use serde::{Deserialize, Serialize};
use icepick_workflow::{InvocableOperation, OperationResult, Workflow};
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationPath};
use serde_json::Value;
use std::{
collections::{HashMap, HashSet},
collections::HashMap,
io::Write,
process::{Command, Stdio},
};
use super::{derive_keys, get_command, Commands, ModuleConfig, Operation};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Workflow {
pub name: String,
#[serde(default)]
pub inputs: Vec<String>,
#[serde(rename = "step")]
steps: Vec<WorkflowStep>,
}
pub type StringMap = std::collections::HashMap<String, String>;
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorkflowStep {
r#type: String,
#[serde(default)]
blob: StringMap,
#[serde(default)]
values: StringMap,
#[serde(default)]
inputs: StringMap,
#[serde(default)]
outputs: StringMap,
}
#[derive(Clone, Debug)]
struct InvocableOperation {
module: String,
struct CLIOperation {
/// The name of the operation (i.e. `transfer-token`).
name: String,
/// The binary to invoke the operation.
binary: String,
/// Information describing the invocation requirements of the operation.
operation: Operation,
/// The derivation algorithm used when deriving keys for the operation.
derivation_algorithm: Option<DerivationAlgorithm>,
/// The derivation prefix used when deriving keys for the operation.
derivation_prefix: Option<DerivationPath>,
}
// TODO: This should probably be migrated to an actual Result type, instead of
// currently just shoving everything in "blob". Probably done after derivation_accounts
// gets hoisted out of here.
#[derive(Serialize, Deserialize)]
struct OperationResult {
// All values returned from an operation.
blob: HashMap<String, Value>,
// Any requested accounts from an operation.
//
// TODO: Move this to its own step.
#[serde(default)]
derivation_accounts: Vec<DerivationIndex>,
}
impl InvocableOperation {
impl InvocableOperation for CLIOperation {
fn invoke(&self, input: &HashMap<String, Value>, derived_keys: &[Vec<u8>]) -> OperationResult {
let (command, args) = get_command(&self.binary);
@ -87,204 +54,148 @@ impl InvocableOperation {
let result = child.wait_with_output().unwrap();
if !result.status.success() {
panic!("Bad exit ({}: {}): {}", &self.binary, &self.operation.name, String::from_utf8_lossy(&result.stderr));
panic!(
"Bad exit ({}: {}): {}",
&self.binary,
&self.operation.name,
String::from_utf8_lossy(&result.stderr)
);
}
let output = result.stdout;
let json: OperationResult = serde_json::from_slice(&output).expect("valid json");
json
}
}
impl Workflow {
/// Generate a [`clap::Command`] for a [`Workflow`], where the inputs can be defined either by
/// command-line arguments or via a JSON input file.
pub fn generate_command(&self) -> clap::Command {
let mut command = clap::Command::new(&self.name).arg(clap::arg!(
--"input-file" [FILE]
"A file containing any inputs not passed on the command line"
));
for input in &self.inputs {
let arg = clap::Arg::new(input)
.required(false)
.long(input.replace('_', "-"))
.value_name(input.to_uppercase());
command = command.arg(arg);
}
command
fn name(&self) -> &String {
&self.name
}
fn load_inputs(&self, matches: &clap::ArgMatches) -> StringMap {
let mut map = StringMap::default();
let input_file: Option<StringMap> = matches
.get_one::<std::path::PathBuf>("input-file")
.and_then(|p| std::fs::File::open(p).ok())
.and_then(|f| serde_json::from_reader(f).ok());
for input in &self.inputs {
match matches.get_one::<String>(input) {
Some(value) => {
map.insert(input.clone(), value.clone());
fn argument_names(&self) -> impl Iterator<Item = &String> {
self.operation.arguments.iter().map(|i| &i.name)
}
fn derivation_configuration(&self) -> Option<(&DerivationAlgorithm, &DerivationPath)> {
self.derivation_algorithm
.as_ref()
.zip(self.derivation_prefix.as_ref())
}
}
pub fn generate_command(workflow: &Workflow) -> clap::Command {
let mut command = clap::Command::new(&workflow.name).arg(clap::arg!(
--"input-file" [FILE]
"A file containing any inputs not passed on the command line"
));
for input in &workflow.inputs {
// can also be included in the JSON file, so we won't mark this as required.
let arg = clap::Arg::new(input)
.required(false)
.long(input.replace('_', "-"))
.value_name(input.to_uppercase());
command = command.arg(arg);
}
for input in &workflow.optional_inputs {
let arg = clap::Arg::new(input)
.required(false)
.long(input.replace('_', "-"))
.value_name(input.to_uppercase());
command = command.arg(arg);
}
command
}
fn load_inputs<T: AsRef<str> + Into<String> + std::fmt::Display>(
inputs: impl IntoIterator<Item = T>,
optional_inputs: impl IntoIterator<Item = T>,
matches: &clap::ArgMatches,
) -> StringMap {
let mut map = StringMap::default();
let input_file: Option<StringMap> = matches
.get_one::<std::path::PathBuf>("input-file")
.and_then(|p| std::fs::File::open(p).ok())
.and_then(|f| serde_json::from_reader(f).ok());
for input in inputs {
match matches.get_one::<String>(input.as_ref()) {
Some(value) => {
map.insert(input.into(), value.clone());
continue;
}
None => {
if let Some(value) = input_file.as_ref().and_then(|f| f.get(input.as_ref())) {
map.insert(input.into(), value.clone());
continue;
}
None => {
if let Some(value) = input_file.as_ref().and_then(|f| f.get(input)) {
map.insert(input.clone(), value.clone());
continue;
}
}
}
panic!("Required workflow input was not found: {input}");
}
for input in optional_inputs {
match matches.get_one::<String>(input.as_ref()) {
Some(value) => {
map.insert(input.into(), value.clone());
continue;
}
None => {
if let Some(value) = input_file.as_ref().and_then(|f| f.get(input.as_ref())) {
map.insert(input.into(), value.clone());
continue;
}
}
panic!("Required workflow input was not found: {input}");
}
map
}
fn simulate_workflow(&self, mut data: HashSet<String>, operations: &[InvocableOperation]) {
// simulate the steps by using a HashSet to traverse the inputs and outputs and ensure
// there's no inconsistencies
for (i, step) in self.steps.iter().enumerate() {
// NOTE: overflow possible but unlikely
let step_index = i + 1;
let step_type = &step.r#type;
// Find the relevant Operation
let Some(invocable) = operations.iter().find(|op| op.name == *step_type) else {
panic!("Could not find operation: {step_type}");
};
// Check if we have the keys we want to pass into the module.
for in_memory_name in step.inputs.values() {
if !data.contains(in_memory_name) && !step.values.contains_key(in_memory_name) {
eprintln!("Failed simulation: step #{step_index} ({step_type}): missing value {in_memory_name}");
}
}
// Check that the module accepts those keys.
for module_input_name in step.inputs.keys() {
if !invocable
.operation
.arguments
.iter()
.any(|arg| *module_input_name == arg.name)
{
eprintln!("Simulation: step #{step_index} ({step_type}): input value {module_input_name} will be passed through as JSON input");
}
}
// Add the keys we get from the module.
for in_memory_name in step.outputs.values() {
data.insert(in_memory_name.clone());
}
}
}
fn run_workflow(
&self,
mut data: HashMap<String, Value>,
operations: &[InvocableOperation],
config: &[ModuleConfig],
) {
let mut derived_keys = vec![];
let mut derivation_accounts = vec![];
for step in &self.steps {
let operation = operations
.iter()
.find(|op| op.name == step.r#type)
.expect("operation matched step type");
// Load keys from Keyfork, from previously requested workflow
let config = config
.iter()
.find(|module| module.name == operation.module)
.expect("could not find module config");
let algo = &config.algorithm;
let path_prefix = &config.derivation_prefix;
if !derivation_accounts.is_empty() {
derived_keys.extend(derive_keys(
algo.as_ref()
.expect("a module requested keys but didn't provide algorithm"),
path_prefix
.as_ref()
.expect("a module requested keys but didn't provide prefix"),
&derivation_accounts,
));
}
derivation_accounts.clear();
// Prepare all inputs for the operation invocation
//
// NOTE: this could be .clone().into_iter() but it would create an extra allocation of
// the HashMap, and an unnecessary alloc of the key.
let inputs: HashMap<String, Value> = data
.iter()
.map(|(k, v)| (k, v.clone()))
.filter_map(|(k, v)| {
// We have our stored name, `k`, which matches with this inner loop's `v`. We
// need to return our desired name, rather than our stored name, and the value
// in our storage, our current `v`.
let (desired, _stored) = step.inputs.iter().find(|(_, v)| k == *v)?;
Some((desired.clone(), v))
})
.chain(
step.values
.iter()
.map(|(k, v)| (k.clone(), Value::String(v.clone()))),
)
.collect();
let OperationResult {
blob,
derivation_accounts: new_accounts,
} = operation.invoke(&inputs, &derived_keys);
derived_keys.clear();
derivation_accounts.extend(new_accounts);
data.extend(blob.into_iter().filter_map(|(k, v)| {
// We have our stored name, `k`, which matches with this inner loop's `v`. We
// need to return our desired name, rather than our stored name, and the value
// in our storage, our current `v`.
let (_given, stored) = step.outputs.iter().find(|(k1, _)| k == **k1)?;
Some((stored.clone(), v))
}));
}
let last_outputs = &self.steps.last().unwrap().outputs;
data.retain(|stored_name, _| {
last_outputs
.values()
.any(|storage_name| stored_name == storage_name)
});
let json_as_str = serde_json::to_string(&data).unwrap();
println!("{json_as_str}");
}
pub fn handle(&self, matches: &clap::ArgMatches, modules: Commands, config: &[ModuleConfig]) {
let inputs = self.load_inputs(matches);
let data: HashMap<String, Value> = inputs
.into_iter()
.map(|(k, v)| (k, Value::String(v)))
.collect();
let mut operations = vec![];
for (module_name, module_binary, module_operations) in modules {
for operation in module_operations {
let operation_name = &operation.name;
let io = InvocableOperation {
module: module_name.clone(),
name: format!("{module_name}-{operation_name}"),
binary: module_binary.clone(),
operation: operation.clone(),
};
operations.push(io);
}
}
if matches.get_flag("simulate-workflow") {
self.simulate_workflow(data.into_keys().collect(), &operations);
return;
}
self.run_workflow(data, &operations, config);
}
map
}
fn load_operations(commands: Commands, config: &[ModuleConfig]) -> Vec<CLIOperation> {
let mut operations = vec![];
for (module_name, module_binary, module_operations) in commands {
for operation in module_operations {
let operation_name = &operation.name;
let module_config = config.iter().find(|conf| conf.name == *module_name);
let io = CLIOperation {
name: format!("{module_name}-{operation_name}"),
binary: module_binary.clone(),
operation: operation.clone(),
derivation_algorithm: module_config.and_then(|m| m.algorithm.clone()),
derivation_prefix: module_config.and_then(|m| m.derivation_prefix.clone()),
};
operations.push(io);
}
}
operations
}
pub fn handle(
workflow: &Workflow,
matches: &clap::ArgMatches,
modules: Commands,
config: &[ModuleConfig],
) {
let inputs = load_inputs(&workflow.inputs, &workflow.optional_inputs, matches);
let data: HashMap<String, Value> = inputs
.into_iter()
.map(|(k, v)| (k, Value::String(v)))
.collect();
let operations = load_operations(modules, config);
if matches.get_flag("simulate-workflow") {
let reports = workflow
.simulate_workflow(data.into_keys().collect(), &operations)
.expect("Simulation failure");
for report in reports {
println!("{report}");
}
return;
}
let result = workflow
.run_workflow(data, &operations, &derive_keys)
.expect("Invocation failure");
println!("{}", serde_json::to_string(&result).expect("valid JSON"));
}

View File

@ -0,0 +1,20 @@
name: generate-address
inputs:
- chain_name
optional_inputs:
- account
step:
- type: cosmos-get-chain-info
inputs:
chain_name: chain_name
outputs:
blockchain_config: blockchain_config
- type: cosmos-generate-wallet
inputs:
account: account
blockchain_config: blockchain_config
- type: cosmos-get-wallet-address
inputs:
blockchain_config: blockchain_config
outputs:
pubkey: pubkey

View File

@ -0,0 +1,46 @@
name: stake
inputs:
- delegate_address
- validator_address
- chain_name
- asset_name
- asset_amount
optional_inputs:
- gas_factor
step:
- type: cosmos-get-chain-info
inputs:
chain_name: chain_name
outputs:
blockchain_config: blockchain_config
- type: internal-load-file
values:
filename: "account_info.json"
outputs:
account_number: account_number
sequence_number: sequence_number
- type: cosmos-stake
inputs:
delegate_address: delegate_address
validator_address: validator_address
amount: asset_amount
denom: asset_name
blockchain_config: blockchain_config
gas_factor: gas_factor
outputs:
fee: fee
tx_messages: tx_messages
- type: cosmos-sign
inputs:
fee: fee
tx_messages: tx_messages
account_number: account_number
sequence_number: sequence_number
blockchain_config: blockchain_config
outputs:
transaction: signed_transaction
- type: internal-save-file
values:
filename: "transaction.json"
inputs:
transaction: signed_transaction

View File

@ -0,0 +1,46 @@
name: withdraw
inputs:
- delegate_address
- validator_address
- chain_name
- asset_name
- asset_amount
optional_inputs:
- gas_factor
step:
- type: cosmos-get-chain-info
inputs:
chain_name: chain_name
outputs:
blockchain_config: blockchain_config
- type: internal-load-file
values:
filename: "account_info.json"
outputs:
account_number: account_number
sequence_number: sequence_number
- type: cosmos-withdraw
inputs:
delegate_address: delegate_address
validator_address: validator_address
amount: asset_amount
denom: asset_name
blockchain_config: blockchain_config
gas_factor: gas_factor
outputs:
fee: fee
tx_messages: tx_messages
- type: cosmos-sign
inputs:
fee: fee
tx_messages: tx_messages
account_number: account_number
sequence_number: sequence_number
blockchain_config: blockchain_config
outputs:
transaction: signed_transaction
- type: internal-save-file
values:
filename: "transaction.json"
inputs:
transaction: signed_transaction

View File

@ -0,0 +1,10 @@
name: generate-address
optional_inputs:
- account
step:
- type: sol-generate-wallet
inputs:
account: account
- type: sol-get-wallet-address
outputs:
pubkey: pubkey

View File

@ -0,0 +1,50 @@
name: transfer-token
inputs:
- from_address
- to_address
- token_name
- token_amount
step:
- type: sol-get-token-info
inputs:
token: token_name
outputs:
token_address: token_address
token_decimals: token_decimals
- type: internal-load-file
values:
filename: "nonce.json"
outputs:
nonce_authority: nonce_authority
nonce_data: nonce_data
nonce_address: nonce_address
- type: sol-transfer-token
inputs:
amount: token_amount
token_address: token_address
decimals: token_decimals
to_address: to_address
from_address: from_address
outputs:
instructions: instructions
derivation_accounts: derivation_accounts
- type: sol-compile
inputs:
instructions: instructions
derivation_accounts: derivation_accounts
nonce_address: nonce_address
nonce_authority: nonce_authority
nonce_data: nonce_data
outputs:
transaction: unsigned_transaction
- type: sol-sign
inputs:
transaction: unsigned_transaction
blockhash: nonce_data
outputs:
transaction: signed_transaction
- type: internal-save-file
values:
filename: "transaction.json"
inputs:
transaction: signed_transaction

View File

@ -3,102 +3,6 @@ name = "sol"
derivation_prefix = "m/44'/501'/0'"
algorithm = "Ed25519"
[[module.workflow]]
# The name of the workflow, which can be called by:
# `icepick workflow sol transfer-token`
name = "transfer-token"
# These values are used as inputs for other workflows, acquired from the CLI.
# These values can only be strings, but other values can be any value that can
# be serialized by serde_json::Value.
# These values can also be loaded using "internal-load-file", using some form
# of later-defined signature validation.
inputs = ["from_address", "to_address", "token_name", "token_amount"]
# Get the token address and token decimals for the given token
[[module.workflow.step]]
type = "sol-get-token-info"
# The key is the key that is passed to the program in the
# `values` field. The value is the item in storage. In this case,
# we read a `token-name` from our input, but the operation expects `token`.
inputs = { token = "token_name" }
# Because these two fields are currently unused in our storage, we can grab
# them from the outputs of our module. The key is the key of the output value
# we want to store, and the value is the name to be assigned in storage.
outputs = { token_address = "token_address", token_decimals = "token_decimals" }
# Load the transaction nonce from the SD card
[[module.workflow.step]]
type = "internal-load-file"
# Pre-defined values to be passed to the module.
# In this case, the `filename` field is reserved for marking which file to load.
values = { filename = "nonce.json" }
# This value is marked to be saved in-memory, and can be used as an input for
# later steps.
outputs = { nonce_authority = "nonce_authority", nonce_data = "nonce_data", nonce_address = "nonce_address" }
[[module.workflow.step]]
# Generate an unsigned Transaction
# This step MUST run immediately before sol-sign, as in the current version of
# Icepick, keys are only held in memory in-between a single module invocation.
type = "sol-transfer-token"
# If using a lot of inputs, it may be best to use a non-inline table.
# Non-inline tables _must_ come last within the step, as otherwise, `outputs` for
# example would be considered a member of `inputs`. In this case, we use a
# non-inline table for `outputs` even though it would fit on one line, to avoid
# the ambiguity.
[module.workflow.step.inputs]
amount = "token_amount"
token_address = "token_address"
decimals = "token_decimals"
to_address = "to_address"
from_address = "from_address"
[module.workflow.step.outputs]
instructions = "instructions"
derivation_accounts = "derivation_accounts"
[[module.workflow.step]]
type = "sol-compile"
[module.workflow.step.inputs]
instructions = "instructions"
derivation_accounts = "derivation_accounts"
nonce_address = "nonce_address"
nonce_authority = "nonce_authority"
nonce_data = "nonce_data"
[module.workflow.step.outputs]
transaction = "unsigned_transaction"
# Sign the transaction
[[module.workflow.step]]
type = "sol-sign"
[module.workflow.step.inputs]
transaction = "unsigned_transaction"
blockhash = "nonce_data"
[module.workflow.step.outputs]
transaction = "signed_transaction"
# Write the signed transaction to a file
[[module.workflow.step]]
type = "internal-save-file"
# We are using a static filename here, so we use `values` instead of `inputs`.
values = { filename = "transaction.json" }
# All fields in both `inputs` and `values`, other than `filename`, will be
# persisted to the file. In this case, the `transaction` field of the file will
# contain the signed transaction.
inputs = { transaction = "signed_transaction" }
# NOTE: To get a nonce address, the `generate-nonce-account` workflow should be
# run. It is the only workflow that uses a blockhash, which is why a
# `broadcast-with-blockhash` or similar is not, and should not be, implemented.