Compare commits
ryansquare...main — 13 commits:

3e9490644a
79cef4d01a
af9babe526
25e8f9d6ee
42b6b6ad4e
4832300098
097bacbdea
4f5779c983
5224bc00a3
dcb9c50d29
e4756fd158
d4c1434d9a
e5d2cab12c

@@ -1970,6 +1970,7 @@ dependencies = [
name = "icepick"
version = "0.1.0"
dependencies = [
 "bincode",
 "chrono",
 "clap",
 "icepick-module",

@@ -1983,6 +1984,7 @@ dependencies = [
 "serde",
 "serde_json",
 "serde_yaml",
 "smex",
 "thiserror 2.0.11",
 "toml 0.8.19",
]

@@ -705,8 +705,8 @@ impl Module for Solana {
                "nonce_pubkey": keypair.pubkey().to_string(),
                "nonce_privkey": [keypair.secret().to_bytes()],
                "transaction": instructions,
                "derivation_accounts": [0u32 | 1 << 31],
            },
            "derivation_accounts": [0u32 | 1 << 31],
        }))
    }
    Operation::GetNonceAccountData(GetNonceAccountData {

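Reviewer note on the `derivation_accounts` value in this hunk: `0u32 | 1 << 31` sets the top bit of account index 0, the usual convention for a hardened BIP32-style derivation index. A minimal, self-contained sketch (not part of the diff):

// Hardened-index convention: with bit 31 set, index 0 becomes the hardened index 0'.
fn main() {
    let hardened_zero: u32 = 0u32 | 1 << 31;
    assert_eq!(hardened_zero, 0x8000_0000);
    println!("{hardened_zero:#010x}"); // prints 0x80000000
}
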
@@ -1,7 +1,7 @@
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::{HashMap, HashSet};
use std::collections::{BTreeMap, HashSet};

#[derive(thiserror::Error, Debug)]
pub enum SimulationError {

@@ -24,21 +24,51 @@ pub enum WorkflowError {
    InvocationError(String),
}

/// An input for a workflow argument. When inputs are read, they should be referenced by the first
/// name. Additional names can be provided as aliases, to allow chaining workflows together when
/// names may not make sense - such as a Solana address then being used as an authorization
/// address.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Input {
    /// An input with a single identifier.
    /// The name of the input.
    pub name: String,

    /// A description of the input.
    pub description: String,

    /// Aliases used when loading inputs.
    #[serde(default)]
    pub aliases: Vec<String>,

    /// Whether the workflow input is optional.
    pub optional: Option<bool>,
}

impl Input {
    pub fn identifiers(&self) -> impl Iterator<Item = &String> {
        [&self.name].into_iter().chain(self.aliases.iter())
    }

    pub fn is_required(&self) -> bool {
        self.optional.is_some_and(|o| o)
    }
}

#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Workflow {
    pub name: String,

    #[serde(default)]
    pub inputs: Vec<String>,
    pub description: String,

    #[serde(default)]
    pub optional_inputs: Vec<String>,
    pub inputs: Vec<Input>,

    #[serde(rename = "step")]
    steps: Vec<WorkflowStep>,
}

pub type StringMap = HashMap<String, String>;
pub type StringMap<T = String> = BTreeMap<String, T>;

#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorkflowStep {

@@ -60,7 +90,7 @@ pub struct WorkflowStep {
#[derive(Serialize, Deserialize)]
pub struct OperationResult {
    // All values returned from an operation.
    blob: HashMap<String, Value>,
    blob: StringMap<Value>,

    // Any requested accounts from an operation.
    //

@@ -116,10 +146,10 @@ impl Workflow {

    pub fn run_workflow<T: InvocableOperation>(
        &self,
        mut data: HashMap<String, Value>,
        mut data: StringMap<Value>,
        operations: &[T],
        derive_keys: DeriveKeys,
    ) -> Result<HashMap<String, Value>, WorkflowError> {
    ) -> Result<StringMap<Value>, WorkflowError> {
        let mut derived_keys = vec![];
        let mut derivation_accounts = vec![];

@@ -130,7 +160,7 @@ impl Workflow {
        };

        // Prepare all inputs for the operation invocation
        let inputs: HashMap<String, Value> = data
        let inputs: StringMap<Value> = data
            .iter()
            .map(|(k, v)| (k, v.clone()))
            .filter_map(|(k, v)| {

@@ -191,7 +221,7 @@ pub trait WorkflowHandler {
/// within themselves.
pub trait InvocableOperation {
    /// Invoke the operation with the supplied inputs and derived keys.
    fn invoke(&self, input: &HashMap<String, Value>, derived_keys: &[Vec<u8>]) -> OperationResult;
    fn invoke(&self, input: &StringMap<Value>, derived_keys: &[Vec<u8>]) -> OperationResult;

    /// The name of the operation.
    fn name(&self) -> &String;

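Reviewer note: the hunks above replace the plain string lists (`inputs`/`optional_inputs`) with a structured `Input` type. A minimal sketch of how its helpers behave, using only the definitions shown in the diff; the example values are illustrative, not taken from the repository:

// Sketch only; exercises Input::identifiers() and is_required() as added above.
use icepick_workflow::Input;

fn demo() {
    // The "pubkey" alias lets a prior workflow's output with that name satisfy
    // this input when workflows are chained together.
    let input = Input {
        name: "authorization_address".to_string(),
        description: "The address used to authorize advancing the nonce.".to_string(),
        aliases: vec!["pubkey".to_string()],
        optional: None,
    };
    let names: Vec<&str> = input.identifiers().map(String::as_str).collect();
    assert_eq!(names, ["authorization_address", "pubkey"]); // canonical name first, then aliases
    assert!(!input.is_required()); // under the definition above, an unset `optional` reads as not required
}
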
@@ -4,6 +4,7 @@ version = "0.1.0"
edition = "2021"

[dependencies]
bincode = "1.3.3"
chrono = { version = "0.4.39", default-features = false, features = ["now", "serde", "std"] }
clap = { version = "4.5.20", features = ["cargo", "derive", "string"] }
icepick-module = { version = "0.1.0", path = "../icepick-module" }

@@ -17,9 +18,12 @@ miniquorum = { version = "0.1.0", path = "../miniquorum", default-features = fal
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["arbitrary_precision"] }
serde_yaml = "0.9.34"
smex = { version = "0.1.0", registry = "distrust" }
thiserror = "2.0.3"
toml = "0.8.19"

[build-dependencies]
bincode = "1.3.3"
icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" }
serde_yaml = "0.9.34"
smex = { version = "0.1.0", registry = "distrust" }

@@ -1,5 +1,5 @@
use icepick_workflow::Workflow;
use std::{collections::HashMap, path::{PathBuf, Path}};
use std::{collections::BTreeMap, path::{PathBuf, Path}};

fn env_var(var: &'static str) -> String {
    println!("cargo::rerun-if-env-changed={var}");

@@ -11,15 +11,16 @@ fn track_path(path: &Path) {
}

fn main() {
    let out_dir = env_var("CARGO_TARGET_DIR");
    let out_dir = env_var("OUT_DIR");
    let crate_dir = env_var("CARGO_MANIFEST_DIR");
    let workflows_dir = PathBuf::from(crate_dir).join("workflows");
    track_path(&workflows_dir);

    let mut workflows_by_module: HashMap<String, Vec<Workflow>> = Default::default();
    let mut workflows_by_module: BTreeMap<String, Vec<Workflow>> = Default::default();

    for module_dir in std::fs::read_dir(&workflows_dir).unwrap() {
        let module_dir = module_dir.unwrap();
        dbg!(&module_dir);
        let path = module_dir.path();
        if !path.is_dir() {
            panic!("found unexpected file {}", path.to_string_lossy());

@@ -28,6 +29,7 @@ fn main() {
        let mut workflows = vec![];

        for workflow_file in std::fs::read_dir(&path).unwrap() {
            dbg!(&workflow_file);
            let workflow_file = workflow_file.unwrap();
            let path = workflow_file.path();
            if !path.is_file() {

@@ -39,12 +41,15 @@ fn main() {
            workflows.push(workflow);
        }

        workflows.sort_by(|a, b| a.name.cmp(&b.name));

        workflows_by_module.insert(
            module_dir.file_name().to_str().unwrap().to_owned(),
            workflows,
        );
    }
    let out_path = PathBuf::from(out_dir).join("workflows.yaml");
    let out_file = std::fs::File::create(&out_path).unwrap();
    serde_yaml::to_writer(out_file, &workflows_by_module).unwrap();
    let out_path = PathBuf::from(out_dir).join("workflows.hex");
    let result = bincode::serialize(&workflows_by_module).unwrap();
    let hexed = smex::encode(&result);
    std::fs::write(out_path, hexed).unwrap();
}

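Reviewer note: together with the CLI change further down, the build script above now embeds the workflow set into the binary as hex-encoded bincode written to OUT_DIR. A sketch of that round trip, assuming the same crates used in the diff (bincode, smex as the hex codec, and the Workflow type from icepick-workflow):

use std::collections::BTreeMap;
use icepick_workflow::Workflow;

// build.rs side: serialize the collected workflows and write them as hex into OUT_DIR.
fn write_embedded(workflows: &BTreeMap<String, Vec<Workflow>>, out_path: &std::path::Path) {
    let bytes = bincode::serialize(workflows).unwrap();
    std::fs::write(out_path, smex::encode(&bytes)).unwrap();
}

// icepick side: decode the copy embedded at compile time (analogous to `default_workflows` below).
fn read_embedded() -> BTreeMap<String, Vec<Workflow>> {
    let hex = include_str!(concat!(env!("OUT_DIR"), "/workflows.hex"));
    bincode::deserialize(&smex::decode(hex).unwrap()).unwrap()
}
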
@@ -3,7 +3,7 @@ use icepick_module::help::*;
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
use serde::{Deserialize, Serialize};
use std::{
    collections::HashMap,
    collections::{HashMap, BTreeMap},
    io::{IsTerminal, Write},
    path::PathBuf,
    process::{Command, Stdio},

@@ -113,6 +113,11 @@ struct Config {
// command name, invocable binary, operations
type Commands<'a> = &'a [(String, String, Vec<Operation>)];

fn default_workflows() -> HashMap<String, Vec<icepick_workflow::Workflow>> {
    let workflows_hex = include_str!(concat!(env!("OUT_DIR"), "/workflows.hex"));
    bincode::deserialize(&smex::decode(workflows_hex).unwrap()).unwrap()
}

pub fn do_cli_thing() {
    /* parse config file to get module names */
    let config_file = std::env::vars().find_map(|(k, v)| {

@@ -121,7 +126,7 @@ pub fn do_cli_thing() {
        }
        None
    });
    let config_path = config_file.unwrap_or_else(|| "icepick.toml".to_string());
    let config_path = config_file.unwrap_or_else(|| "/etc/icepick/icepick.toml".to_string());
    let config_content = std::fs::read_to_string(config_path).expect("can't read config file");
    let mut config: Config = match toml::from_str(&config_content) {
        Ok(config) => config,

@@ -143,6 +148,13 @@ pub fn do_cli_thing() {
        workflows: Default::default(),
    });

    let workflows = default_workflows();
    for module in &mut config.modules {
        if let Some(module_workflows) = workflows.get(&module.name) {
            module.workflows.extend(module_workflows.iter().cloned());
        }
    }

    let workflows_file = std::env::vars().find_map(|(k, v)| {
        if k == "ICEPICK_WORKFLOWS_FILE" {
            return Some(v);

@@ -150,13 +162,14 @@ pub fn do_cli_thing() {
        None
    });
    let workflows_path = workflows_file.unwrap_or_else(|| "workflows.yaml".to_string());
    let workflows_content = std::fs::read(&workflows_path).expect("can't read workflows from file");
    let workflows: HashMap<String, Vec<icepick_workflow::Workflow>> =
        serde_yaml::from_slice(&workflows_content).unwrap();

    for module in &mut config.modules {
        if let Some(module_workflows) = workflows.get(&module.name) {
            module.workflows.extend(module_workflows.iter().cloned());
    if let Ok(content) = std::fs::read(&workflows_path) {
        let workflows: HashMap<String, Vec<icepick_workflow::Workflow>> =
            serde_yaml::from_slice(&content).unwrap();
        for module in &mut config.modules {
            if let Some(module_workflows) = workflows.get(&module.name) {
                module.workflows.extend(module_workflows.iter().cloned());
            }
        }
    }

@@ -363,7 +376,7 @@ pub fn do_cli_thing() {
        }
    };

    let inputs: HashMap<String, serde_json::Value> =
    let inputs: BTreeMap<String, serde_json::Value> =
        serde_json::from_value(inputs).unwrap();

    let workflow = workflows

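Reviewer note: one behavioral consequence of the hunks above is that the YAML workflow file becomes optional. The workflows embedded at build time are always loaded, and the file named by ICEPICK_WORKFLOWS_FILE (defaulting to workflows.yaml) only extends them when it exists, where previously a missing file was a hard error. A condensed sketch of that order, not a drop-in copy of `do_cli_thing()`:

use std::collections::HashMap;

fn merged_workflows() -> HashMap<String, Vec<icepick_workflow::Workflow>> {
    // Embedded defaults are always present; the on-disk file only adds to them.
    let mut workflows = default_workflows();
    if let Ok(content) = std::fs::read("workflows.yaml") { // or the ICEPICK_WORKFLOWS_FILE override
        let extra: HashMap<String, Vec<icepick_workflow::Workflow>> =
            serde_yaml::from_slice(&content).unwrap();
        for (module, module_workflows) in extra {
            workflows.entry(module).or_default().extend(module_workflows);
        }
    }
    workflows
}
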
@@ -1,10 +1,9 @@
use icepick_workflow::{InvocableOperation, OperationResult, Workflow};
use icepick_workflow::{Input, InvocableOperation, OperationResult, StringMap, Workflow};
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationPath};
use keyfork_shard::{openpgp::OpenPGP, Format};
use miniquorum::{Payload, PayloadVerification};
use serde_json::Value;
use std::{
    collections::HashMap,
    io::Write,
    process::{Command, Stdio},
};

@@ -20,8 +19,6 @@ pub enum Purpose {
    RunQuorum,
}

pub type StringMap = std::collections::HashMap<String, String>;

#[derive(Clone, Debug)]
struct CLIOperation {
    /// The name of the operation (i.e. `transfer-token`).

@@ -41,7 +38,7 @@ struct CLIOperation {
}

impl InvocableOperation for CLIOperation {
    fn invoke(&self, input: &HashMap<String, Value>, derived_keys: &[Vec<u8>]) -> OperationResult {
    fn invoke(&self, input: &StringMap<Value>, derived_keys: &[Vec<u8>]) -> OperationResult {
        let (command, args) = get_command(&self.binary);

        let json = serde_json::json!({

@@ -94,31 +91,33 @@ impl InvocableOperation for CLIOperation {
}

pub fn generate_command(workflow: &Workflow) -> clap::Command {
    let mut command = clap::Command::new(&workflow.name).arg(clap::arg!(
        --"input-file" [FILE]
        "A file containing any inputs not passed on the command line"
    ));
    for input in &workflow.inputs {
        // can also be included in the JSON file, so we won't mark this as required.
        let arg = clap::Arg::new(input)
            .required(false)
            .long(input.replace('_', "-"))
            .value_name(input.to_uppercase());
        command = command.arg(arg);
    let mut command = clap::Command::new(&workflow.name).about(&workflow.description);
    // NOTE: all required inputs are still marked as .required(false) since they could be included
    // in the `--input-file` argument.
    for input in workflow.inputs.iter() {
        for arg in input.identifiers() {
            let arg = clap::Arg::new(arg)
                .required(false)
                .help(&input.description)
                .long(arg.replace('_', "-"))
                .value_name(arg.to_uppercase())
                .conflicts_with_all(
                    input
                        .identifiers()
                        .filter(|name| *name != arg)
                        .collect::<Vec<_>>(),
                );
            command = command.arg(arg);
        }
    }
    for input in &workflow.optional_inputs {
        let arg = clap::Arg::new(input)
            .required(false)
            .long(input.replace('_', "-"))
            .value_name(input.to_uppercase());
        command = command.arg(arg);
    }
    command
    command.arg(clap::arg!(
        --"input-file" [FILE]
        "A file containing any inputs not passed on the command line"
    ))
}

fn load_inputs<T: AsRef<str> + Into<String> + std::fmt::Display>(
    inputs: impl IntoIterator<Item = T>,
    optional_inputs: impl IntoIterator<Item = T>,
fn load_inputs<'a>(
    inputs: impl IntoIterator<Item = &'a Input>,
    matches: &clap::ArgMatches,
) -> StringMap {
    let mut map = StringMap::default();

@@ -127,33 +126,25 @@ fn load_inputs<T: AsRef<str> + Into<String> + std::fmt::Display>(
        .and_then(|p| std::fs::File::open(p).ok())
        .and_then(|f| serde_json::from_reader(f).ok());
    for input in inputs {
        match matches.get_one::<String>(input.as_ref()) {
        let identifier = &input.name;
        match input
            .identifiers()
            .filter_map(|name| matches.get_one::<String>(name))
            .next()
        {
            Some(value) => {
                map.insert(input.into(), value.clone());
                map.insert(identifier.clone(), value.clone());
                continue;
            }
            None => {
                if let Some(value) = input_file.as_ref().and_then(|f| f.get(input.as_ref())) {
                    map.insert(input.into(), value.clone());
                if let Some(value) = input_file.as_ref().and_then(|f| f.get(identifier)) {
                    map.insert(identifier.clone(), value.clone());
                    continue;
                }
            }
        }
        panic!("Required workflow input was not found: {input}");
    }

    for input in optional_inputs {
        match matches.get_one::<String>(input.as_ref()) {
            Some(value) => {
                map.insert(input.into(), value.clone());
                continue;
            }
            None => {
                if let Some(value) = input_file.as_ref().and_then(|f| f.get(input.as_ref())) {
                    map.insert(input.into(), value.clone());
                    continue;
                }
            }
        if input.is_required() {
            panic!("Required workflow input was not found: {identifier}");
        }
    }

@@ -191,13 +182,10 @@ pub fn parse_quorum_file(
    let threshold = threshold.unwrap_or(u8::try_from(certs.len()).expect("too many certs!"));
    let policy = match purpose {
        Purpose::AddSignature => {
            // All signatures must be valid, but we don't require a minimum.
            // All signatures must be valid, but we don't require a minimum.
            PayloadVerification::new().with_threshold(0)
        }
        Purpose::RunQuorum => {
            PayloadVerification::new().with_threshold(threshold)

        },
        Purpose::RunQuorum => PayloadVerification::new().with_threshold(threshold),
    };
    payload.verify_signatures(&certs, &policy, None).unwrap();

@@ -213,20 +201,19 @@ pub fn parse_quorum_with_shardfile(
    let payload: Payload = serde_json::from_reader(payload_file).unwrap();

    let opgp = OpenPGP;
    let (threshold, certs) = opgp.decrypt_metadata_from_file(
        None::<&std::path::Path>,
        std::fs::File::open(shardfile_path).unwrap(),
        keyfork_prompt::default_handler().unwrap(),
    ).unwrap();
    let (threshold, certs) = opgp
        .decrypt_metadata_from_file(
            None::<&std::path::Path>,
            std::fs::File::open(shardfile_path).unwrap(),
            keyfork_prompt::default_handler().unwrap(),
        )
        .unwrap();
    let policy = match purpose {
        Purpose::AddSignature => {
            // All signatures must be valid, but we don't require a minimum.
            // All signatures must be valid, but we don't require a minimum.
            PayloadVerification::new().with_threshold(0)
        }
        Purpose::RunQuorum => {
            PayloadVerification::new().with_threshold(threshold)

        },
        Purpose::RunQuorum => PayloadVerification::new().with_threshold(threshold),
    };

    payload.verify_signatures(&certs, &policy, None).unwrap();

@@ -236,7 +223,7 @@ pub fn parse_quorum_with_shardfile(

pub fn handle_payload(
    workflow: &Workflow,
    inputs: HashMap<String, Value>,
    inputs: StringMap<Value>,
    modules: Commands,
    config: &[ModuleConfig],
) {

@@ -254,8 +241,8 @@ pub fn handle(
    modules: Commands,
    config: &[ModuleConfig],
) {
    let inputs = load_inputs(&workflow.inputs, &workflow.optional_inputs, matches);
    let data: HashMap<String, Value> = inputs
    let inputs = load_inputs(&workflow.inputs, matches);
    let data: StringMap<Value> = inputs
        .into_iter()
        .map(|(k, v)| (k, Value::String(v)))
        .collect();

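Reviewer note: the rewritten `load_inputs` above resolves each workflow input in a fixed order: a CLI flag matching the input's name or any of its aliases wins, then the value stored under the canonical name in the `--input-file` JSON, and only a missing required input aborts. A condensed sketch of that precedence, not a drop-in copy of the function above:

use icepick_workflow::Input;
use serde_json::Value;

fn resolve(input: &Input, matches: &clap::ArgMatches, input_file: Option<&Value>) -> Option<String> {
    // 1. Any identifier (name or alias) supplied on the command line.
    if let Some(value) = input.identifiers().filter_map(|n| matches.get_one::<String>(n)).next() {
        return Some(value.clone());
    }
    // 2. The canonical name looked up in the optional --input-file document.
    if let Some(value) = input_file.and_then(|file| file.get(&input.name)) {
        return value.as_str().map(str::to_owned);
    }
    // 3. Nothing found: only a required input is an error.
    assert!(!input.is_required(), "Required workflow input was not found: {}", input.name);
    None
}
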
@@ -0,0 +1,43 @@
name: "broadcast"
description: |-
  Broadcast a transaction on a Cosmos-based blockchain.
inputs:
  - name: "nonce_address"
    description: >-
      The address of the account used for the transaction nonce.
  - name: "chain_name"
    description: >-
      The name of the Cosmos chain to broadcast a transaction on.
step:
  - type: "cosmos-get-chain-info"
    inputs:
      chain_name: "chain_name"
    outputs:
      blockchain_config: "blockchain_config"
  - type: "cosmos-get-account-data"
    inputs:
      account_id: "nonce_address"
      blockchain_config: "blockchain_config"
    outputs:
      account_number: "account_number"
      sequence_number: "sequence_number"
  - type: "internal-save-file"
    values:
      filename: "account_info.json"
    inputs:
      account_number: "account_number"
      sequence_number: "sequence_number"
  - type: "internal-load-file"
    values:
      filename: "transaction.json"
    outputs:
      transaction: "transaction"
  - type: "cosmos-broadcast"
    inputs:
      blockchain_config: "blockchain_config"
      transaction: "transaction"
    outputs:
      status: "status"
      url: "url"
      error: "error"
      error_code: "error_code"

@@ -1,8 +1,14 @@
name: generate-address
description: |-
  Generate an address on a given Cosmos-based blockchain.
inputs:
  - chain_name
optional_inputs:
  - account
  - name: chain_name
    description: >-
      The name of the Cosmos chain you'd like to generate an address for.
  - name: account
    description: >-
      The account to use, if not the default account.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:

@@ -1,12 +1,27 @@
name: stake
description: |-
  Stake coins on the provided chain.
inputs:
  - delegate_address
  - validator_address
  - chain_name
  - asset_name
  - asset_amount
optional_inputs:
  - gas_factor
  - name: delegate_address
    description: >-
      Address holding the coins to be staked to a validator.
  - name: validator_address
    description: >-
      Address of the validator operator.
  - name: chain_name
    description: >-
      The name of the Cosmos-based chain.
  - name: asset_name
    description: >-
      The name of the asset to stake.
  - name: asset_amount
    description: >-
      The amount of the asset to stake.
  - name: gas_factor
    description: >-
      An amount to multiply the required gas by; necessary if a chain requires
      more gas for a specific operation.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:

@@ -0,0 +1,55 @@
name: "transfer"
description: |-
  Transfer a Cosmos coin.
inputs:
  - name: "from_address"
    description: >-
      The address from which to send coin.
  - name: "to_address"
    description: >-
      The address to send coins to.
  - name: "asset_name"
    description: >-
      The name of the asset to send.
  - name: "chain_name"
    description: >-
      The name of the Cosmos chain the asset lives on.
  - name: "asset_amount"
    description: >-
      The amount of the asset to send.
step:
  - type: "cosmos-get-chain-info"
    inputs:
      chain_name: "chain_name"
    outputs:
      blockchain_config: "blockchain_config"
  - type: "internal-load-file"
    values:
      filename: "account_info.json"
    outputs:
      account_number: "account_number"
      sequence_number: "sequence_number"
  - type: "cosmos-transfer"
    inputs:
      from_address: "from_address"
      to_address: "to_address"
      amount: "asset_amount"
      denom: "asset_name"
      blockchain_config: "blockchain_config"
    outputs:
      fee: "fee"
      tx_messages: "tx_messages"
  - type: "cosmos-sign"
    inputs:
      fee: "fee"
      tx_messages: "tx_messages"
      account_number: "account_number"
      sequence_number: "sequence_number"
      blockchain_config: "blockchain_config"
    outputs:
      transaction: "signed_transaction"
  - type: "internal-save-file"
    values:
      filename: "transaction.json"
    inputs:
      transaction: "signed_transaction"

@@ -1,10 +1,21 @@
name: withdraw-rewards
description: |-
  Withdraw rewards gained from staking to a validator.
inputs:
  - delegate_address
  - validator_address
  - chain_name
optional_inputs:
  - gas_factor
  - name: delegate_address
    description: >-
      The owner of the staked coins; also, the recipient of rewards.
  - name: validator_address
    description: >-
      The validator from whom coins are staked.
  - name: chain_name
    description: >-
      The name of the Cosmos-based chain.
  - name: gas_factor
    description: >-
      An amount to multiply the required gas by; necessary if a chain requires
      more gas for a specific operation.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:

@@ -1,12 +1,30 @@
name: withdraw
description: |-
  Withdraw staked coins from a validator.

  Staked coins may be held for an unbonding period, depending on the chain upon
  which they are staked.
inputs:
  - delegate_address
  - validator_address
  - chain_name
  - asset_name
  - asset_amount
optional_inputs:
  - gas_factor
  - name: delegate_address
    description: >-
      The owner of the staked coins.
  - name: validator_address
    description: >-
      The validator from whom coins are staked.
  - name: chain_name
    description: >-
      The name of the Cosmos-based chain.
  - name: asset_name
    description: >-
      The name of the asset to withdraw.
  - name: asset_amount
    description: >-
      The amount of the asset to withdraw.
  - name: gas_factor
    description: >-
      An amount to multiply the required gas by; necessary if a chain requires
      more gas for a specific operation.
    optional: true
step:
  - type: cosmos-get-chain-info
    inputs:

@@ -0,0 +1,40 @@
name: "broadcast"
description: |-
  Broadcast a transaction on the Solana blockchain.
inputs:
  - name: "nonce_address"
    description: >-
      The address of the nonce account.
  - name: "cluster"
    description: >-
      The name of the Solana cluster to broadcast the transaction on, if not
      mainnet-beta.
    optional: true
step:
  - type: "sol-get-nonce-account-data"
    inputs:
      nonce_address: "nonce_address"
      cluster: "cluster"
    outputs:
      authority: "nonce_authority"
      durable_nonce: "nonce"
  - type: "internal-save-file"
    values:
      filename: "nonce.json"
    inputs:
      nonce_authority: "nonce_authority"
      nonce_data: "nonce"
      nonce_address: "nonce_address"
  - type: "internal-load-file"
    values:
      filename: "transaction.json"
    outputs:
      transaction: "transaction"
  - type: "sol-broadcast"
    inputs:
      cluster: "cluster"
      transaction: "transaction"
    outputs:
      status: "status"
      url: "url"
      error: "error"

@@ -1,6 +1,11 @@
name: generate-address
optional_inputs:
  - account
description: |-
  Generate a Solana address.
inputs:
  - name: account
    description: >-
      The account to use, if not the default account.
    optional: true
step:
  - type: sol-generate-wallet
    inputs:

@@ -0,0 +1,75 @@
name: "generate-nonce-account"
description: |-
  Using a temporary Keyfork instance, generate a nonce address for the given
  authorization address.
inputs:
  - name: "cluster"
    description: >-
      Name of the Solana cluster to generate the nonce account on, if not
      mainnet-beta.
  - name: "authorization_address"
    description: >-
      The address used to authorize advancing the nonce.

      The authorization address (also called "address" or "pubkey" in other
      workflows) is required to be a signer of the transaction, so the
      authorization address is often the principal address - the one performing
      the transaction.
step:
  - type: "sol-generate-wallet"
  - type: "sol-get-wallet-address"
    outputs:
      pubkey: "wallet_pubkey"
  - type: "sol-await-funds"
    inputs:
      address: "wallet_pubkey"
      cluster: "cluster"
    values:
      lamports: "1510000"
  - type: "sol-get-blockhash"
    inputs:
      cluster: "cluster"
    outputs:
      blockhash: "blockhash"
  - type: "sol-create-nonce-account-and-signing-key"
    inputs:
      from_address: "wallet_pubkey"
      authorization_address: "authorization_address"
    outputs:
      transaction: "instructions"
      nonce_pubkey: "nonce_pubkey"
      nonce_privkey: "private_keys"
      derivation_accounts: "derivation_accounts"
  - type: "sol-compile"
    inputs:
      instructions: "instructions"
      derivation_accounts: "derivation_accounts"
      blockhash: "blockhash"
    outputs:
      transaction: "unsigned_transaction"
  - type: "sol-sign"
    inputs:
      blockhash: "blockhash"
      signing_keys: "private_keys"
      transaction: "unsigned_transaction"
    outputs:
      transaction: "signed_transaction"
  - type: "sol-broadcast"
    inputs:
      cluster: "cluster"
      transaction: "signed_transaction"
    outputs:
      status: "status"
      url: "url"
      error: "error"
  - type: "internal-cat"
    inputs:
      status: "status"
      url: "url"
      nonce_account: "nonce_pubkey"
      error: "error"
    outputs:
      status: "status"
      url: "url"
      nonce_account: "nonce_account"
      error: "error"

@@ -1,9 +1,19 @@
name: transfer-token
description: |-
  Transfer SPL tokens held on the Solana blockchain.
inputs:
  - from_address
  - to_address
  - token_name
  - token_amount
  - name: from_address
    description: >-
      The address from which to send tokens.
  - name: to_address
    description: >-
      The address to send coins to.
  - name: token_name
    description: >-
      The name of the token to transfer.
  - name: token_amount
    description: >-
      The amount of the token to transfer.
step:
  - type: sol-get-token-info
    inputs:

@@ -0,0 +1,49 @@
name: "transfer"
description: |-
  Transfer SOL from one address to another.
inputs:
  - name: "to_address"
    description: >-
      The address to send SOL to.
  - name: "from_address"
    description: >-
      The address to send SOL from.
  - name: "amount"
    description: >-
      The amount of SOL to send.
step:
  - type: "internal-load-file"
    values:
      filename: "nonce.json"
    outputs:
      nonce_authority: "nonce_authority"
      nonce_data: "nonce_data"
      nonce_address: "nonce_address"
  - type: "sol-transfer"
    inputs:
      from_address: "from_address"
      to_address: "to_address"
      amount: "amount"
    outputs:
      instructions: "instructions"
      derivation_accounts: "derivation_accounts"
  - type: "sol-compile"
    inputs:
      instructions: "instructions"
      derivation_accounts: "derivation_accounts"
      nonce_address: "nonce_address"
      nonce_authority: "nonce_authority"
      nonce_data: "nonce_data"
    outputs:
      transaction: "unsigned_transaction"
  - type: "sol-sign"
    inputs:
      blockhash: "nonce_data"
      transaction: "unsigned_transaction"
    outputs:
      transaction: "signed_transaction"
  - type: "internal-save-file"
    values:
      filename: "transaction.json"
    inputs:
      transaction: "signed_transaction"

@@ -1,7 +1,15 @@
name: generate-address
optional_inputs:
  - account
  - cluster
description: |-
  Generate a Spacemesh address
inputs:
  - name: account
    description: >-
      The account to use, if not the default account.
    optional: true
  - name: cluster
    description: >-
      The Spacemesh cluster to use, if not the mainnet.
    optional: true
step:
  - type: spacemesh-generate-wallet
    inputs:

icepick.toml (202 lines changed)
@@ -3,213 +3,11 @@ name = "sol"
derivation_prefix = "m/44'/501'/0'"
algorithm = "Ed25519"

# NOTE: To get a nonce address, the `generate-nonce-account` workflow should be
# run. It is the only workflow that uses a blockhash, which is why a
# `broadcast-with-blockhash` or similar is not, and should not be, implemented.
[[module.workflow]]
name = "broadcast"
inputs = ["nonce_address", "cluster"]

[[module.workflow.step]]
type = "sol-get-nonce-account-data"
inputs = { nonce_address = "nonce_address", cluster = "cluster" }
outputs = { authority = "nonce_authority", durable_nonce = "nonce" }

[[module.workflow.step]]
type = "internal-save-file"
values = { filename = "nonce.json" }
inputs = { nonce_authority = "nonce_authority", nonce_data = "nonce", nonce_address = "nonce_address" }

[[module.workflow.step]]
type = "internal-load-file"
values = { filename = "transaction.json" }
outputs = { transaction = "transaction" }

[[module.workflow.step]]
type = "sol-broadcast"
inputs = { cluster = "cluster", transaction = "transaction" }
outputs = { status = "status", url = "url", error = "error" }

[[module.workflow]]
name = "generate-nonce-account"
inputs = ["cluster", "authorization_address"]

[[module.workflow.step]]
type = "sol-generate-wallet"

[[module.workflow.step]]
type = "sol-get-wallet-address"
outputs = { pubkey = "wallet_pubkey" }

[[module.workflow.step]]
type = "sol-await-funds"
inputs = { address = "wallet_pubkey", cluster = "cluster" }
# enough to cover two signatures and the 1_500_000 approx. rent fee
values = { lamports = "1510000" }

[[module.workflow.step]]
type = "sol-get-blockhash"
inputs = { cluster = "cluster" }
outputs = { blockhash = "blockhash" }

[[module.workflow.step]]
type = "sol-create-nonce-account-and-signing-key"

[module.workflow.step.inputs]
from_address = "wallet_pubkey"
authorization_address = "authorization_address"

[module.workflow.step.outputs]
transaction = "unsigned_transaction"
nonce_pubkey = "nonce_pubkey"
nonce_privkey = "private_keys"

[[module.workflow.step]]
type = "sol-sign"

[module.workflow.step.inputs]
blockhash = "blockhash"
signing_keys = "private_keys"
transaction = "unsigned_transaction"

[module.workflow.step.outputs]
transaction = "signed_transaction"

[[module.workflow.step]]
type = "sol-broadcast"
inputs = { cluster = "cluster", transaction = "signed_transaction" }
outputs = { status = "status", url = "url" }

[[module.workflow.step]]
type = "internal-cat"
inputs = { status = "status", url = "url", nonce_account = "nonce_pubkey" }
outputs = { status = "status", url = "url", nonce_account = "nonce_account" }

[[module.workflow]]
# Transfer SOL from one address to another.
name = "transfer"
inputs = ["to_address", "from_address", "amount"]

[[module.workflow.step]]
type = "internal-load-file"
values = { filename = "nonce.json" }
outputs = { nonce_authority = "nonce_authority", nonce_data = "nonce_data", nonce_address = "nonce_address" }

[[module.workflow.step]]
type = "sol-transfer"
inputs = { from_address = "from_address", to_address = "to_address", amount = "amount" }
outputs = { instructions = "instructions", derivation_accounts = "derivation_accounts" }

[[module.workflow.step]]
type = "sol-compile"

[module.workflow.step.inputs]
instructions = "instructions"
derivation_accounts = "derivation_accounts"
nonce_address = "nonce_address"
nonce_authority = "nonce_authority"
nonce_data = "nonce_data"

[module.workflow.step.outputs]
transaction = "unsigned_transaction"

[[module.workflow.step]]
type = "sol-sign"

inputs = { blockhash = "nonce_data", transaction = "unsigned_transaction" }
outputs = { transaction = "signed_transaction" }

[[module.workflow.step]]
type = "internal-save-file"

values = { filename = "transaction.json" }
inputs = { transaction = "signed_transaction" }

[[module]]
name = "cosmos"
derivation_prefix = "m/44'/118'/0'"
algorithm = "Secp256k1"

[[module.workflow]]
name = "transfer"
inputs = ["from_address", "to_address", "asset_name", "chain_name", "asset_amount"]

[[module.workflow.step]]
# NOTE: chain_name can't be discoverable by filtering from asset_name, since
# some asset devnets reuse the name. There's no difference between KYVE on Kyve
# or Korellia (devnet).
type = "cosmos-get-chain-info"
inputs = { chain_name = "chain_name" }
outputs = { blockchain_config = "blockchain_config" }

[[module.workflow.step]]
type = "internal-load-file"
values = { filename = "account_info.json" }
outputs = { account_number = "account_number", sequence_number = "sequence_number" }

[[module.workflow.step]]
type = "cosmos-transfer"

[module.workflow.step.inputs]
from_address = "from_address"
to_address = "to_address"
amount = "asset_amount"
denom = "asset_name"
blockchain_config = "blockchain_config"

[module.workflow.step.outputs]
fee = "fee"
tx_messages = "tx_messages"

[[module.workflow.step]]
type = "cosmos-sign"

[module.workflow.step.inputs]
fee = "fee"
tx_messages = "tx_messages"
account_number = "account_number"
sequence_number = "sequence_number"
blockchain_config = "blockchain_config"

[module.workflow.step.outputs]
transaction = "signed_transaction"

[[module.workflow.step]]
type = "internal-save-file"
values = { filename = "transaction.json" }
inputs = { transaction = "signed_transaction" }

[[module.workflow]]
name = "broadcast"
# NOTE: For the purpose of Cosmos, the nonce is a direct part of the signer's
# account.
inputs = ["nonce_address", "chain_name"]

[[module.workflow.step]]
type = "cosmos-get-chain-info"
inputs = { chain_name = "chain_name" }
outputs = { blockchain_config = "blockchain_config" }

[[module.workflow.step]]
type = "cosmos-get-account-data"
inputs = { account_id = "nonce_address", blockchain_config = "blockchain_config" }
outputs = { account_number = "account_number", sequence_number = "sequence_number" }

[[module.workflow.step]]
type = "internal-save-file"
values = { filename = "account_info.json" }
inputs = { account_number = "account_number", sequence_number = "sequence_number" }

[[module.workflow.step]]
type = "internal-load-file"
values = { filename = "transaction.json" }
outputs = { transaction = "transaction" }

[[module.workflow.step]]
type = "cosmos-broadcast"
inputs = { blockchain_config = "blockchain_config", transaction = "transaction" }
outputs = { status = "status", url = "url", error = "error", error_code = "error_code" }

[[module]]
name = "spacemesh"
derivation_prefix = "m/44'/540'/0'/0'"