Compare commits

..

2 Commits

Author SHA1 Message Date
Ryan Heywood e8965ebcb3
spacemesh: derive(Clone) for Cluster 2025-02-13 20:34:42 -05:00
Ryan Heywood 685e4e0388
add base support for spacemesh 2025-02-09 01:35:37 -05:00
20 changed files with 323 additions and 497 deletions

2
Cargo.lock generated
View File

@ -1970,7 +1970,6 @@ dependencies = [
name = "icepick" name = "icepick"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"bincode",
"chrono", "chrono",
"clap", "clap",
"icepick-module", "icepick-module",
@ -1984,7 +1983,6 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"serde_yaml", "serde_yaml",
"smex",
"thiserror 2.0.11", "thiserror 2.0.11",
"toml 0.8.19", "toml 0.8.19",
] ]

View File

@ -705,8 +705,8 @@ impl Module for Solana {
"nonce_pubkey": keypair.pubkey().to_string(), "nonce_pubkey": keypair.pubkey().to_string(),
"nonce_privkey": [keypair.secret().to_bytes()], "nonce_privkey": [keypair.secret().to_bytes()],
"transaction": instructions, "transaction": instructions,
"derivation_accounts": [0u32 | 1 << 31],
}, },
"derivation_accounts": [0u32 | 1 << 31],
})) }))
} }
Operation::GetNonceAccountData(GetNonceAccountData { Operation::GetNonceAccountData(GetNonceAccountData {

View File

@ -1,7 +1,7 @@
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath}; use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use std::collections::{BTreeMap, HashSet}; use std::collections::{HashMap, HashSet};
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
pub enum SimulationError { pub enum SimulationError {
@ -24,51 +24,21 @@ pub enum WorkflowError {
InvocationError(String), InvocationError(String),
} }
/// An input for a workflow argument. When inputs are read, they should be referenced by the first
/// name. Additional names can be provided as aliases, to allow chaining workflows together when
/// names may not make sense - such as a Solana address then being used as an authorization
/// address.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Input {
/// An input with a single identifier.
/// The name of the input.
pub name: String,
/// A description of the input.
pub description: String,
/// Aliases used when loading inputs.
#[serde(default)]
pub aliases: Vec<String>,
/// Whether the workflow input is optional.
pub optional: Option<bool>,
}
impl Input {
pub fn identifiers(&self) -> impl Iterator<Item = &String> {
[&self.name].into_iter().chain(self.aliases.iter())
}
pub fn is_required(&self) -> bool {
self.optional.is_some_and(|o| o)
}
}
#[derive(Serialize, Deserialize, Clone, Debug)] #[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Workflow { pub struct Workflow {
pub name: String, pub name: String,
pub description: String, #[serde(default)]
pub inputs: Vec<String>,
#[serde(default)] #[serde(default)]
pub inputs: Vec<Input>, pub optional_inputs: Vec<String>,
#[serde(rename = "step")] #[serde(rename = "step")]
steps: Vec<WorkflowStep>, steps: Vec<WorkflowStep>,
} }
pub type StringMap<T = String> = BTreeMap<String, T>; pub type StringMap = HashMap<String, String>;
#[derive(Serialize, Deserialize, Clone, Debug)] #[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorkflowStep { pub struct WorkflowStep {
@ -90,7 +60,7 @@ pub struct WorkflowStep {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct OperationResult { pub struct OperationResult {
// All values returned from an operation. // All values returned from an operation.
blob: StringMap<Value>, blob: HashMap<String, Value>,
// Any requested accounts from an operation. // Any requested accounts from an operation.
// //
@ -146,10 +116,10 @@ impl Workflow {
pub fn run_workflow<T: InvocableOperation>( pub fn run_workflow<T: InvocableOperation>(
&self, &self,
mut data: StringMap<Value>, mut data: HashMap<String, Value>,
operations: &[T], operations: &[T],
derive_keys: DeriveKeys, derive_keys: DeriveKeys,
) -> Result<StringMap<Value>, WorkflowError> { ) -> Result<HashMap<String, Value>, WorkflowError> {
let mut derived_keys = vec![]; let mut derived_keys = vec![];
let mut derivation_accounts = vec![]; let mut derivation_accounts = vec![];
@ -160,7 +130,7 @@ impl Workflow {
}; };
// Prepare all inputs for the operation invocation // Prepare all inputs for the operation invocation
let inputs: StringMap<Value> = data let inputs: HashMap<String, Value> = data
.iter() .iter()
.map(|(k, v)| (k, v.clone())) .map(|(k, v)| (k, v.clone()))
.filter_map(|(k, v)| { .filter_map(|(k, v)| {
@ -221,7 +191,7 @@ pub trait WorkflowHandler {
/// within themselves. /// within themselves.
pub trait InvocableOperation { pub trait InvocableOperation {
/// Invoke the operation with the supplied inputs and derived keys. /// Invoke the operation with the supplied inputs and derived keys.
fn invoke(&self, input: &StringMap<Value>, derived_keys: &[Vec<u8>]) -> OperationResult; fn invoke(&self, input: &HashMap<String, Value>, derived_keys: &[Vec<u8>]) -> OperationResult;
/// The name of the operation. /// The name of the operation.
fn name(&self) -> &String; fn name(&self) -> &String;

View File

@ -4,7 +4,6 @@ version = "0.1.0"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
bincode = "1.3.3"
chrono = { version = "0.4.39", default-features = false, features = ["now", "serde", "std"] } chrono = { version = "0.4.39", default-features = false, features = ["now", "serde", "std"] }
clap = { version = "4.5.20", features = ["cargo", "derive", "string"] } clap = { version = "4.5.20", features = ["cargo", "derive", "string"] }
icepick-module = { version = "0.1.0", path = "../icepick-module" } icepick-module = { version = "0.1.0", path = "../icepick-module" }
@ -18,12 +17,9 @@ miniquorum = { version = "0.1.0", path = "../miniquorum", default-features = fal
serde = { workspace = true, features = ["derive"] } serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["arbitrary_precision"] } serde_json = { workspace = true, features = ["arbitrary_precision"] }
serde_yaml = "0.9.34" serde_yaml = "0.9.34"
smex = { version = "0.1.0", registry = "distrust" }
thiserror = "2.0.3" thiserror = "2.0.3"
toml = "0.8.19" toml = "0.8.19"
[build-dependencies] [build-dependencies]
bincode = "1.3.3"
icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" } icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" }
serde_yaml = "0.9.34" serde_yaml = "0.9.34"
smex = { version = "0.1.0", registry = "distrust" }

View File

@ -1,5 +1,5 @@
use icepick_workflow::Workflow; use icepick_workflow::Workflow;
use std::{collections::BTreeMap, path::{PathBuf, Path}}; use std::{collections::HashMap, path::{PathBuf, Path}};
fn env_var(var: &'static str) -> String { fn env_var(var: &'static str) -> String {
println!("cargo::rerun-if-env-changed={var}"); println!("cargo::rerun-if-env-changed={var}");
@ -11,16 +11,15 @@ fn track_path(path: &Path) {
} }
fn main() { fn main() {
let out_dir = env_var("OUT_DIR"); let out_dir = env_var("CARGO_TARGET_DIR");
let crate_dir = env_var("CARGO_MANIFEST_DIR"); let crate_dir = env_var("CARGO_MANIFEST_DIR");
let workflows_dir = PathBuf::from(crate_dir).join("workflows"); let workflows_dir = PathBuf::from(crate_dir).join("workflows");
track_path(&workflows_dir); track_path(&workflows_dir);
let mut workflows_by_module: BTreeMap<String, Vec<Workflow>> = Default::default(); let mut workflows_by_module: HashMap<String, Vec<Workflow>> = Default::default();
for module_dir in std::fs::read_dir(&workflows_dir).unwrap() { for module_dir in std::fs::read_dir(&workflows_dir).unwrap() {
let module_dir = module_dir.unwrap(); let module_dir = module_dir.unwrap();
dbg!(&module_dir);
let path = module_dir.path(); let path = module_dir.path();
if !path.is_dir() { if !path.is_dir() {
panic!("found unexpected file {}", path.to_string_lossy()); panic!("found unexpected file {}", path.to_string_lossy());
@ -29,7 +28,6 @@ fn main() {
let mut workflows = vec![]; let mut workflows = vec![];
for workflow_file in std::fs::read_dir(&path).unwrap() { for workflow_file in std::fs::read_dir(&path).unwrap() {
dbg!(&workflow_file);
let workflow_file = workflow_file.unwrap(); let workflow_file = workflow_file.unwrap();
let path = workflow_file.path(); let path = workflow_file.path();
if !path.is_file() { if !path.is_file() {
@ -41,15 +39,12 @@ fn main() {
workflows.push(workflow); workflows.push(workflow);
} }
workflows.sort_by(|a, b| a.name.cmp(&b.name));
workflows_by_module.insert( workflows_by_module.insert(
module_dir.file_name().to_str().unwrap().to_owned(), module_dir.file_name().to_str().unwrap().to_owned(),
workflows, workflows,
); );
} }
let out_path = PathBuf::from(out_dir).join("workflows.hex"); let out_path = PathBuf::from(out_dir).join("workflows.yaml");
let result = bincode::serialize(&workflows_by_module).unwrap(); let out_file = std::fs::File::create(&out_path).unwrap();
let hexed = smex::encode(&result); serde_yaml::to_writer(out_file, &workflows_by_module).unwrap();
std::fs::write(out_path, hexed).unwrap();
} }

View File

@ -3,7 +3,7 @@ use icepick_module::help::*;
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath}; use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{ use std::{
collections::{HashMap, BTreeMap}, collections::HashMap,
io::{IsTerminal, Write}, io::{IsTerminal, Write},
path::PathBuf, path::PathBuf,
process::{Command, Stdio}, process::{Command, Stdio},
@ -113,11 +113,6 @@ struct Config {
// command name, invocable binary, operations // command name, invocable binary, operations
type Commands<'a> = &'a [(String, String, Vec<Operation>)]; type Commands<'a> = &'a [(String, String, Vec<Operation>)];
fn default_workflows() -> HashMap<String, Vec<icepick_workflow::Workflow>> {
let workflows_hex = include_str!(concat!(env!("OUT_DIR"), "/workflows.hex"));
bincode::deserialize(&smex::decode(workflows_hex).unwrap()).unwrap()
}
pub fn do_cli_thing() { pub fn do_cli_thing() {
/* parse config file to get module names */ /* parse config file to get module names */
let config_file = std::env::vars().find_map(|(k, v)| { let config_file = std::env::vars().find_map(|(k, v)| {
@ -126,7 +121,7 @@ pub fn do_cli_thing() {
} }
None None
}); });
let config_path = config_file.unwrap_or_else(|| "/etc/icepick/icepick.toml".to_string()); let config_path = config_file.unwrap_or_else(|| "icepick.toml".to_string());
let config_content = std::fs::read_to_string(config_path).expect("can't read config file"); let config_content = std::fs::read_to_string(config_path).expect("can't read config file");
let mut config: Config = match toml::from_str(&config_content) { let mut config: Config = match toml::from_str(&config_content) {
Ok(config) => config, Ok(config) => config,
@ -148,13 +143,6 @@ pub fn do_cli_thing() {
workflows: Default::default(), workflows: Default::default(),
}); });
let workflows = default_workflows();
for module in &mut config.modules {
if let Some(module_workflows) = workflows.get(&module.name) {
module.workflows.extend(module_workflows.iter().cloned());
}
}
let workflows_file = std::env::vars().find_map(|(k, v)| { let workflows_file = std::env::vars().find_map(|(k, v)| {
if k == "ICEPICK_WORKFLOWS_FILE" { if k == "ICEPICK_WORKFLOWS_FILE" {
return Some(v); return Some(v);
@ -162,14 +150,13 @@ pub fn do_cli_thing() {
None None
}); });
let workflows_path = workflows_file.unwrap_or_else(|| "workflows.yaml".to_string()); let workflows_path = workflows_file.unwrap_or_else(|| "workflows.yaml".to_string());
let workflows_content = std::fs::read(&workflows_path).expect("can't read workflows from file");
let workflows: HashMap<String, Vec<icepick_workflow::Workflow>> =
serde_yaml::from_slice(&workflows_content).unwrap();
if let Ok(content) = std::fs::read(&workflows_path) { for module in &mut config.modules {
let workflows: HashMap<String, Vec<icepick_workflow::Workflow>> = if let Some(module_workflows) = workflows.get(&module.name) {
serde_yaml::from_slice(&content).unwrap(); module.workflows.extend(module_workflows.iter().cloned());
for module in &mut config.modules {
if let Some(module_workflows) = workflows.get(&module.name) {
module.workflows.extend(module_workflows.iter().cloned());
}
} }
} }
@ -376,7 +363,7 @@ pub fn do_cli_thing() {
} }
}; };
let inputs: BTreeMap<String, serde_json::Value> = let inputs: HashMap<String, serde_json::Value> =
serde_json::from_value(inputs).unwrap(); serde_json::from_value(inputs).unwrap();
let workflow = workflows let workflow = workflows

View File

@ -1,9 +1,10 @@
use icepick_workflow::{Input, InvocableOperation, OperationResult, StringMap, Workflow}; use icepick_workflow::{InvocableOperation, OperationResult, Workflow};
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationPath}; use keyfork_derive_util::{request::DerivationAlgorithm, DerivationPath};
use keyfork_shard::{openpgp::OpenPGP, Format}; use keyfork_shard::{openpgp::OpenPGP, Format};
use miniquorum::{Payload, PayloadVerification}; use miniquorum::{Payload, PayloadVerification};
use serde_json::Value; use serde_json::Value;
use std::{ use std::{
collections::HashMap,
io::Write, io::Write,
process::{Command, Stdio}, process::{Command, Stdio},
}; };
@ -19,6 +20,8 @@ pub enum Purpose {
RunQuorum, RunQuorum,
} }
pub type StringMap = std::collections::HashMap<String, String>;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
struct CLIOperation { struct CLIOperation {
/// The name of the operation (i.e. `transfer-token`). /// The name of the operation (i.e. `transfer-token`).
@ -38,7 +41,7 @@ struct CLIOperation {
} }
impl InvocableOperation for CLIOperation { impl InvocableOperation for CLIOperation {
fn invoke(&self, input: &StringMap<Value>, derived_keys: &[Vec<u8>]) -> OperationResult { fn invoke(&self, input: &HashMap<String, Value>, derived_keys: &[Vec<u8>]) -> OperationResult {
let (command, args) = get_command(&self.binary); let (command, args) = get_command(&self.binary);
let json = serde_json::json!({ let json = serde_json::json!({
@ -91,33 +94,31 @@ impl InvocableOperation for CLIOperation {
} }
pub fn generate_command(workflow: &Workflow) -> clap::Command { pub fn generate_command(workflow: &Workflow) -> clap::Command {
let mut command = clap::Command::new(&workflow.name).about(&workflow.description); let mut command = clap::Command::new(&workflow.name).arg(clap::arg!(
// NOTE: all required inputs are still marked as .required(false) since they could be included --"input-file" [FILE]
// in the `--input-file` argument. "A file containing any inputs not passed on the command line"
for input in workflow.inputs.iter() { ));
for arg in input.identifiers() { for input in &workflow.inputs {
let arg = clap::Arg::new(arg) // can also be included in the JSON file, so we won't mark this as required.
.required(false) let arg = clap::Arg::new(input)
.help(&input.description) .required(false)
.long(arg.replace('_', "-")) .long(input.replace('_', "-"))
.value_name(arg.to_uppercase()) .value_name(input.to_uppercase());
.conflicts_with_all( command = command.arg(arg);
input
.identifiers()
.filter(|name| *name != arg)
.collect::<Vec<_>>(),
);
command = command.arg(arg);
}
} }
command.arg(clap::arg!( for input in &workflow.optional_inputs {
--"input-file" [FILE] let arg = clap::Arg::new(input)
"A file containing any inputs not passed on the command line" .required(false)
)) .long(input.replace('_', "-"))
.value_name(input.to_uppercase());
command = command.arg(arg);
}
command
} }
fn load_inputs<'a>( fn load_inputs<T: AsRef<str> + Into<String> + std::fmt::Display>(
inputs: impl IntoIterator<Item = &'a Input>, inputs: impl IntoIterator<Item = T>,
optional_inputs: impl IntoIterator<Item = T>,
matches: &clap::ArgMatches, matches: &clap::ArgMatches,
) -> StringMap { ) -> StringMap {
let mut map = StringMap::default(); let mut map = StringMap::default();
@ -126,25 +127,33 @@ fn load_inputs<'a>(
.and_then(|p| std::fs::File::open(p).ok()) .and_then(|p| std::fs::File::open(p).ok())
.and_then(|f| serde_json::from_reader(f).ok()); .and_then(|f| serde_json::from_reader(f).ok());
for input in inputs { for input in inputs {
let identifier = &input.name; match matches.get_one::<String>(input.as_ref()) {
match input
.identifiers()
.filter_map(|name| matches.get_one::<String>(name))
.next()
{
Some(value) => { Some(value) => {
map.insert(identifier.clone(), value.clone()); map.insert(input.into(), value.clone());
continue; continue;
} }
None => { None => {
if let Some(value) = input_file.as_ref().and_then(|f| f.get(identifier)) { if let Some(value) = input_file.as_ref().and_then(|f| f.get(input.as_ref())) {
map.insert(identifier.clone(), value.clone()); map.insert(input.into(), value.clone());
continue; continue;
} }
} }
} }
if input.is_required() { panic!("Required workflow input was not found: {input}");
panic!("Required workflow input was not found: {identifier}"); }
for input in optional_inputs {
match matches.get_one::<String>(input.as_ref()) {
Some(value) => {
map.insert(input.into(), value.clone());
continue;
}
None => {
if let Some(value) = input_file.as_ref().and_then(|f| f.get(input.as_ref())) {
map.insert(input.into(), value.clone());
continue;
}
}
} }
} }
@ -182,10 +191,13 @@ pub fn parse_quorum_file(
let threshold = threshold.unwrap_or(u8::try_from(certs.len()).expect("too many certs!")); let threshold = threshold.unwrap_or(u8::try_from(certs.len()).expect("too many certs!"));
let policy = match purpose { let policy = match purpose {
Purpose::AddSignature => { Purpose::AddSignature => {
// All signatures must be valid, but we don't require a minimum. // All signatures must be valid, but we don't require a minimum.
PayloadVerification::new().with_threshold(0) PayloadVerification::new().with_threshold(0)
} }
Purpose::RunQuorum => PayloadVerification::new().with_threshold(threshold), Purpose::RunQuorum => {
PayloadVerification::new().with_threshold(threshold)
},
}; };
payload.verify_signatures(&certs, &policy, None).unwrap(); payload.verify_signatures(&certs, &policy, None).unwrap();
@ -201,19 +213,20 @@ pub fn parse_quorum_with_shardfile(
let payload: Payload = serde_json::from_reader(payload_file).unwrap(); let payload: Payload = serde_json::from_reader(payload_file).unwrap();
let opgp = OpenPGP; let opgp = OpenPGP;
let (threshold, certs) = opgp let (threshold, certs) = opgp.decrypt_metadata_from_file(
.decrypt_metadata_from_file( None::<&std::path::Path>,
None::<&std::path::Path>, std::fs::File::open(shardfile_path).unwrap(),
std::fs::File::open(shardfile_path).unwrap(), keyfork_prompt::default_handler().unwrap(),
keyfork_prompt::default_handler().unwrap(), ).unwrap();
)
.unwrap();
let policy = match purpose { let policy = match purpose {
Purpose::AddSignature => { Purpose::AddSignature => {
// All signatures must be valid, but we don't require a minimum. // All signatures must be valid, but we don't require a minimum.
PayloadVerification::new().with_threshold(0) PayloadVerification::new().with_threshold(0)
} }
Purpose::RunQuorum => PayloadVerification::new().with_threshold(threshold), Purpose::RunQuorum => {
PayloadVerification::new().with_threshold(threshold)
},
}; };
payload.verify_signatures(&certs, &policy, None).unwrap(); payload.verify_signatures(&certs, &policy, None).unwrap();
@ -223,7 +236,7 @@ pub fn parse_quorum_with_shardfile(
pub fn handle_payload( pub fn handle_payload(
workflow: &Workflow, workflow: &Workflow,
inputs: StringMap<Value>, inputs: HashMap<String, Value>,
modules: Commands, modules: Commands,
config: &[ModuleConfig], config: &[ModuleConfig],
) { ) {
@ -241,8 +254,8 @@ pub fn handle(
modules: Commands, modules: Commands,
config: &[ModuleConfig], config: &[ModuleConfig],
) { ) {
let inputs = load_inputs(&workflow.inputs, matches); let inputs = load_inputs(&workflow.inputs, &workflow.optional_inputs, matches);
let data: StringMap<Value> = inputs let data: HashMap<String, Value> = inputs
.into_iter() .into_iter()
.map(|(k, v)| (k, Value::String(v))) .map(|(k, v)| (k, Value::String(v)))
.collect(); .collect();

View File

@ -1,43 +0,0 @@
name: "broadcast"
description: |-
Broadcast a transaction on a Cosmos-based blockchain.
inputs:
- name: "nonce_address"
description: >-
The address of the account used for the transaction nonce.
- name: "chain_name"
description: >-
The name of the Cosmos chain to broadcast a transaction on.
step:
- type: "cosmos-get-chain-info"
inputs:
chain_name: "chain_name"
outputs:
blockchain_config: "blockchain_config"
- type: "cosmos-get-account-data"
inputs:
account_id: "nonce_address"
blockchain_config: "blockchain_config"
outputs:
account_number: "account_number"
sequence_number: "sequence_number"
- type: "internal-save-file"
values:
filename: "account_info.json"
inputs:
account_number: "account_number"
sequence_number: "sequence_number"
- type: "internal-load-file"
values:
filename: "transaction.json"
outputs:
transaction: "transaction"
- type: "cosmos-broadcast"
inputs:
blockchain_config: "blockchain_config"
transaction: "transaction"
outputs:
status: "status"
url: "url"
error: "error"
error_code: "error_code"

View File

@ -1,14 +1,8 @@
name: generate-address name: generate-address
description: |-
Generate an address on a given Cosmos-based blockchain.
inputs: inputs:
- name: chain_name - chain_name
description: >- optional_inputs:
The name of the Cosmos chain you'd like to generate an address for. - account
- name: account
description: >-
The account to use, if not the default account.
optional: true
step: step:
- type: cosmos-get-chain-info - type: cosmos-get-chain-info
inputs: inputs:

View File

@ -1,27 +1,12 @@
name: stake name: stake
description: |-
Stake coins on the provided chain.
inputs: inputs:
- name: delegate_address - delegate_address
description: >- - validator_address
Address holding the coins to be staked to a validator. - chain_name
- name: validator_address - asset_name
description: >- - asset_amount
Address of the validator operator. optional_inputs:
- name: chain_name - gas_factor
description: >-
The name of the Cosmos-based chain.
- name: asset_name
description: >-
The name of the asset to stake.
- name: asset_amount
description: >-
The amount of the asset to stake.
- name: gas_factor
description: >-
An amount to multiply the required gas by; necessary if a chain requires
more gas for a specific operation.
optional: true
step: step:
- type: cosmos-get-chain-info - type: cosmos-get-chain-info
inputs: inputs:

View File

@ -1,55 +0,0 @@
name: "transfer"
description: |-
Transfer a Cosmos coin.
inputs:
- name: "from_address"
description: >-
The address from which to send coin.
- name: "to_address"
description: >-
The address to send coins to.
- name: "asset_name"
description: >-
The name of the asset to send.
- name: "chain_name"
description: >-
The name of the Cosmos chain the asset lives on.
- name: "asset_amount"
description: >-
The amount of the asset to send.
step:
- type: "cosmos-get-chain-info"
inputs:
chain_name: "chain_name"
outputs:
blockchain_config: "blockchain_config"
- type: "internal-load-file"
values:
filename: "account_info.json"
outputs:
account_number: "account_number"
sequence_number: "sequence_number"
- type: "cosmos-transfer"
inputs:
from_address: "from_address"
to_address: "to_address"
amount: "asset_amount"
denom: "asset_name"
blockchain_config: "blockchain_config"
outputs:
fee: "fee"
tx_messages: "tx_messages"
- type: "cosmos-sign"
inputs:
fee: "fee"
tx_messages: "tx_messages"
account_number: "account_number"
sequence_number: "sequence_number"
blockchain_config: "blockchain_config"
outputs:
transaction: "signed_transaction"
- type: "internal-save-file"
values:
filename: "transaction.json"
inputs:
transaction: "signed_transaction"

View File

@ -1,21 +1,10 @@
name: withdraw-rewards name: withdraw-rewards
description: |-
Withdraw rewards gained from staking to a validator.
inputs: inputs:
- name: delegate_address - delegate_address
description: >- - validator_address
The owner of the staked coins; also, the recipient of rewards. - chain_name
- name: validator_address optional_inputs:
description: >- - gas_factor
The validator from whom coins are staked.
- name: chain_name
description: >-
The name of the Cosmos-based chain.
- name: gas_factor
description: >-
An amount to multiply the required gas by; necessary if a chain requires
more gas for a specific operation.
optional: true
step: step:
- type: cosmos-get-chain-info - type: cosmos-get-chain-info
inputs: inputs:

View File

@ -1,30 +1,12 @@
name: withdraw name: withdraw
description: |-
Withdraw staked coins from a validator.
Staked coins may be held for an unbonding period, depending on the chain upon
which they are staked.
inputs: inputs:
- name: delegate_address - delegate_address
description: >- - validator_address
The owner of the staked coins. - chain_name
- name: validator_address - asset_name
description: >- - asset_amount
The validator from whom coins are staked. optional_inputs:
- name: chain_name - gas_factor
description: >-
The name of the Cosmos-based chain.
- name: asset_name
description: >-
The name of the asset to withdraw.
- name: asset_amount
description: >-
The amount of the asset to withdraw.
- name: gas_factor
description: >-
An amount to multiply the required gas by; necessary if a chain requires
more gas for a specific operation.
optional: true
step: step:
- type: cosmos-get-chain-info - type: cosmos-get-chain-info
inputs: inputs:

View File

@ -1,40 +0,0 @@
name: "broadcast"
description: |-
Broadcast a transaction on the Solana blockchain.
inputs:
- name: "nonce_address"
description: >-
The address of the nonce account.
- name: "cluster"
description: >-
The name of the Solana cluster to broadcast the transaction on, if not
mainnet-beta.
optional: true
step:
- type: "sol-get-nonce-account-data"
inputs:
nonce_address: "nonce_address"
cluster: "cluster"
outputs:
authority: "nonce_authority"
durable_nonce: "nonce"
- type: "internal-save-file"
values:
filename: "nonce.json"
inputs:
nonce_authority: "nonce_authority"
nonce_data: "nonce"
nonce_address: "nonce_address"
- type: "internal-load-file"
values:
filename: "transaction.json"
outputs:
transaction: "transaction"
- type: "sol-broadcast"
inputs:
cluster: "cluster"
transaction: "transaction"
outputs:
status: "status"
url: "url"
error: "error"

View File

@ -1,11 +1,6 @@
name: generate-address name: generate-address
description: |- optional_inputs:
Generate a Solana address. - account
inputs:
- name: account
description: >-
The account to use, if not the default account.
optional: true
step: step:
- type: sol-generate-wallet - type: sol-generate-wallet
inputs: inputs:

View File

@ -1,75 +0,0 @@
name: "generate-nonce-account"
description: |-
Using a temporary Keyfork instance, generate a nonce address for the given
authorization address.
inputs:
- name: "cluster"
description: >-
Name of the Solana cluster to generate the nonce account on, if not
mainnet-beta.
- name: "authorization_address"
description: >-
The address used to authorize advancing the nonce.
The authorization address (also called "address" or "pubkey" in other
workflows) is required to be a signer of the transaction, so the
authorization address is often the principal address - the one performing
the transaction.
step:
- type: "sol-generate-wallet"
- type: "sol-get-wallet-address"
outputs:
pubkey: "wallet_pubkey"
- type: "sol-await-funds"
inputs:
address: "wallet_pubkey"
cluster: "cluster"
values:
lamports: "1510000"
- type: "sol-get-blockhash"
inputs:
cluster: "cluster"
outputs:
blockhash: "blockhash"
- type: "sol-create-nonce-account-and-signing-key"
inputs:
from_address: "wallet_pubkey"
authorization_address: "authorization_address"
outputs:
transaction: "instructions"
nonce_pubkey: "nonce_pubkey"
nonce_privkey: "private_keys"
derivation_accounts: "derivation_accounts"
- type: "sol-compile"
inputs:
instructions: "instructions"
derivation_accounts: "derivation_accounts"
blockhash: "blockhash"
outputs:
transaction: "unsigned_transaction"
- type: "sol-sign"
inputs:
blockhash: "blockhash"
signing_keys: "private_keys"
transaction: "unsigned_transaction"
outputs:
transaction: "signed_transaction"
- type: "sol-broadcast"
inputs:
cluster: "cluster"
transaction: "signed_transaction"
outputs:
status: "status"
url: "url"
error: "error"
- type: "internal-cat"
inputs:
status: "status"
url: "url"
nonce_account: "nonce_pubkey"
error: "error"
outputs:
status: "status"
url: "url"
nonce_account: "nonce_account"
error: "error"

View File

@ -1,19 +1,9 @@
name: transfer-token name: transfer-token
description: |-
Transfer SPL tokens held on the Solana blockchain.
inputs: inputs:
- name: from_address - from_address
description: >- - to_address
The address from which to send tokens. - token_name
- name: to_address - token_amount
description: >-
The address to send coins to.
- name: token_name
description: >-
The name of the token to transfer.
- name: token_amount
description: >-
The amount of the token to transfer.
step: step:
- type: sol-get-token-info - type: sol-get-token-info
inputs: inputs:

View File

@ -1,49 +0,0 @@
name: "transfer"
description: |-
Transfer SOL from one address to another.
inputs:
- name: "to_address"
description: >-
The address to send SOL to.
- name: "from_address"
description: >-
The address to send SOL from.
- name: "amount"
description: >-
The amount of SOL to send.
step:
- type: "internal-load-file"
values:
filename: "nonce.json"
outputs:
nonce_authority: "nonce_authority"
nonce_data: "nonce_data"
nonce_address: "nonce_address"
- type: "sol-transfer"
inputs:
from_address: "from_address"
to_address: "to_address"
amount: "amount"
outputs:
instructions: "instructions"
derivation_accounts: "derivation_accounts"
- type: "sol-compile"
inputs:
instructions: "instructions"
derivation_accounts: "derivation_accounts"
nonce_address: "nonce_address"
nonce_authority: "nonce_authority"
nonce_data: "nonce_data"
outputs:
transaction: "unsigned_transaction"
- type: "sol-sign"
inputs:
blockhash: "nonce_data"
transaction: "unsigned_transaction"
outputs:
transaction: "signed_transaction"
- type: "internal-save-file"
values:
filename: "transaction.json"
inputs:
transaction: "signed_transaction"

View File

@ -1,15 +1,7 @@
name: generate-address name: generate-address
description: |- optional_inputs:
Generate a Spacemesh address - account
inputs: - cluster
- name: account
description: >-
The account to use, if not the default account.
optional: true
- name: cluster
description: >-
The Spacemesh cluster to use, if not the mainnet.
optional: true
step: step:
- type: spacemesh-generate-wallet - type: spacemesh-generate-wallet
inputs: inputs:

View File

@ -3,11 +3,213 @@ name = "sol"
derivation_prefix = "m/44'/501'/0'" derivation_prefix = "m/44'/501'/0'"
algorithm = "Ed25519" algorithm = "Ed25519"
# NOTE: To get a nonce address, the `generate-nonce-account` workflow should be
# run. It is the only workflow that uses a blockhash, which is why a
# `broadcast-with-blockhash` or similar is not, and should not be, implemented.
[[module.workflow]]
name = "broadcast"
inputs = ["nonce_address", "cluster"]
[[module.workflow.step]]
type = "sol-get-nonce-account-data"
inputs = { nonce_address = "nonce_address", cluster = "cluster" }
outputs = { authority = "nonce_authority", durable_nonce = "nonce" }
[[module.workflow.step]]
type = "internal-save-file"
values = { filename = "nonce.json" }
inputs = { nonce_authority = "nonce_authority", nonce_data = "nonce", nonce_address = "nonce_address" }
[[module.workflow.step]]
type = "internal-load-file"
values = { filename = "transaction.json" }
outputs = { transaction = "transaction" }
[[module.workflow.step]]
type = "sol-broadcast"
inputs = { cluster = "cluster", transaction = "transaction" }
outputs = { status = "status", url = "url", error = "error" }
[[module.workflow]]
name = "generate-nonce-account"
inputs = ["cluster", "authorization_address"]
[[module.workflow.step]]
type = "sol-generate-wallet"
[[module.workflow.step]]
type = "sol-get-wallet-address"
outputs = { pubkey = "wallet_pubkey" }
[[module.workflow.step]]
type = "sol-await-funds"
inputs = { address = "wallet_pubkey", cluster = "cluster" }
# enough to cover two signatures and the 1_500_000 approx. rent fee
values = { lamports = "1510000" }
[[module.workflow.step]]
type = "sol-get-blockhash"
inputs = { cluster = "cluster" }
outputs = { blockhash = "blockhash" }
[[module.workflow.step]]
type = "sol-create-nonce-account-and-signing-key"
[module.workflow.step.inputs]
from_address = "wallet_pubkey"
authorization_address = "authorization_address"
[module.workflow.step.outputs]
transaction = "unsigned_transaction"
nonce_pubkey = "nonce_pubkey"
nonce_privkey = "private_keys"
[[module.workflow.step]]
type = "sol-sign"
[module.workflow.step.inputs]
blockhash = "blockhash"
signing_keys = "private_keys"
transaction = "unsigned_transaction"
[module.workflow.step.outputs]
transaction = "signed_transaction"
[[module.workflow.step]]
type = "sol-broadcast"
inputs = { cluster = "cluster", transaction = "signed_transaction" }
outputs = { status = "status", url = "url" }
[[module.workflow.step]]
type = "internal-cat"
inputs = { status = "status", url = "url", nonce_account = "nonce_pubkey" }
outputs = { status = "status", url = "url", nonce_account = "nonce_account" }
[[module.workflow]]
# Transfer SOL from one address to another.
name = "transfer"
inputs = ["to_address", "from_address", "amount"]
[[module.workflow.step]]
type = "internal-load-file"
values = { filename = "nonce.json" }
outputs = { nonce_authority = "nonce_authority", nonce_data = "nonce_data", nonce_address = "nonce_address" }
[[module.workflow.step]]
type = "sol-transfer"
inputs = { from_address = "from_address", to_address = "to_address", amount = "amount" }
outputs = { instructions = "instructions", derivation_accounts = "derivation_accounts" }
[[module.workflow.step]]
type = "sol-compile"
[module.workflow.step.inputs]
instructions = "instructions"
derivation_accounts = "derivation_accounts"
nonce_address = "nonce_address"
nonce_authority = "nonce_authority"
nonce_data = "nonce_data"
[module.workflow.step.outputs]
transaction = "unsigned_transaction"
[[module.workflow.step]]
type = "sol-sign"
inputs = { blockhash = "nonce_data", transaction = "unsigned_transaction" }
outputs = { transaction = "signed_transaction" }
[[module.workflow.step]]
type = "internal-save-file"
values = { filename = "transaction.json" }
inputs = { transaction = "signed_transaction" }
[[module]]
name = "cosmos"
derivation_prefix = "m/44'/118'/0'"
algorithm = "Secp256k1"
[[module.workflow]]
name = "transfer"
inputs = ["from_address", "to_address", "asset_name", "chain_name", "asset_amount"]
[[module.workflow.step]]
# NOTE: chain_name can't be discoverable by filtering from asset_name, since
# some asset devnets reuse the name. There's no difference between KYVE on Kyve
# or Korellia (devnet).
type = "cosmos-get-chain-info"
inputs = { chain_name = "chain_name" }
outputs = { blockchain_config = "blockchain_config" }
[[module.workflow.step]]
type = "internal-load-file"
values = { filename = "account_info.json" }
outputs = { account_number = "account_number", sequence_number = "sequence_number" }
[[module.workflow.step]]
type = "cosmos-transfer"
[module.workflow.step.inputs]
from_address = "from_address"
to_address = "to_address"
amount = "asset_amount"
denom = "asset_name"
blockchain_config = "blockchain_config"
[module.workflow.step.outputs]
fee = "fee"
tx_messages = "tx_messages"
[[module.workflow.step]]
type = "cosmos-sign"
[module.workflow.step.inputs]
fee = "fee"
tx_messages = "tx_messages"
account_number = "account_number"
sequence_number = "sequence_number"
blockchain_config = "blockchain_config"
[module.workflow.step.outputs]
transaction = "signed_transaction"
[[module.workflow.step]]
type = "internal-save-file"
values = { filename = "transaction.json" }
inputs = { transaction = "signed_transaction" }
[[module.workflow]]
name = "broadcast"
# NOTE: For the purpose of Cosmos, the nonce is a direct part of the signer's
# account.
inputs = ["nonce_address", "chain_name"]
[[module.workflow.step]]
type = "cosmos-get-chain-info"
inputs = { chain_name = "chain_name" }
outputs = { blockchain_config = "blockchain_config" }
[[module.workflow.step]]
type = "cosmos-get-account-data"
inputs = { account_id = "nonce_address", blockchain_config = "blockchain_config" }
outputs = { account_number = "account_number", sequence_number = "sequence_number" }
[[module.workflow.step]]
type = "internal-save-file"
values = { filename = "account_info.json" }
inputs = { account_number = "account_number", sequence_number = "sequence_number" }
[[module.workflow.step]]
type = "internal-load-file"
values = { filename = "transaction.json" }
outputs = { transaction = "transaction" }
[[module.workflow.step]]
type = "cosmos-broadcast"
inputs = { blockchain_config = "blockchain_config", transaction = "transaction" }
outputs = { status = "status", url = "url", error = "error", error_code = "error_code" }
[[module]]
name = "spacemesh"
derivation_prefix = "m/44'/540'/0'/0'"