diff --git a/crates/icepick/src/cli/mod.rs b/crates/icepick/src/cli/mod.rs
index 4eeb0e0..a324512 100644
--- a/crates/icepick/src/cli/mod.rs
+++ b/crates/icepick/src/cli/mod.rs
@@ -6,6 +6,8 @@ use std::{
     process::{Command, Stdio},
 };
 
+mod workflow;
+
 pub fn get_command(bin_name: &str) -> (&str, Vec<&str>) {
     if std::env::vars().any(|(k, _)| &k == "ICEPICK_USE_CARGO") {
         ("cargo", vec!["run", "-q", "--bin", bin_name, "--"])
@@ -19,8 +21,12 @@ struct ModuleConfig {
     name: String,
     command_name: Option<String>,
     algorithm: keyfork_derive_util::request::DerivationAlgorithm,
+    #[serde(with = "serde_derivation")]
     derivation_prefix: keyfork_derive_util::DerivationPath,
+
+    #[serde(rename = "workflow", default)]
+    workflows: Vec<workflow::Workflow>,
 }
 
 mod serde_derivation {
@@ -41,9 +47,8 @@ D: Deserializer<'de>,
     {
         use serde::de::Error;
-        String::deserialize(deserializer).and_then(|string| {
-            DerivationPath::from_str(&string).map_err(|e| Error::custom(e.to_string()))
-        })
+        String::deserialize(deserializer)
+            .and_then(|string| DerivationPath::from_str(&string).map_err(Error::custom))
     }
 }
 
@@ -94,6 +99,24 @@ pub fn do_cli_thing() {
             .expect("successful deserialization of operation");
         commands.push((module_name.clone(), bin, operations));
     }
+
+    let mut workflows = vec![];
+    for module in &config.modules {
+        workflows.push((module.name.clone(), module.workflows.clone()));
+    }
+    let workflows = workflows.leak();
+    let mut workflow_command =
+        clap::Command::new("workflow").about("Run a pre-defined Icepick workflow");
+    for module in workflows.iter() {
+        let mut module_subcommand = clap::Command::new(module.0.as_str());
+        for workflow in &module.1 {
+            module_subcommand = module_subcommand.subcommand(workflow.generate_command());
+        }
+        workflow_command = workflow_command.subcommand(module_subcommand);
+    }
+
+    icepick_command = icepick_command.subcommand(workflow_command);
+
     let commands = commands.leak();
     for command in commands.iter() {
         let mut subcommand = clap::Command::new(command.0.as_str());
diff --git a/crates/icepick/src/cli/workflow.rs b/crates/icepick/src/cli/workflow.rs
new file mode 100644
index 0000000..f1d5516
--- /dev/null
+++ b/crates/icepick/src/cli/workflow.rs
@@ -0,0 +1,77 @@
+use super::get_command;
+use serde::{Deserialize, Serialize};
+use std::process::{Command, Stdio};
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub struct Workflow {
+    pub name: String,
+    pub inputs: Vec<String>,
+
+    #[serde(rename = "step")]
+    steps: Vec<WorkflowStep>,
+}
+
+pub type StringMap = std::collections::HashMap<String, String>;
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub struct WorkflowStep {
+    r#type: String,
+
+    #[serde(default)]
+    values: StringMap,
+
+    #[serde(default)]
+    inputs: StringMap,
+
+    #[serde(default)]
+    outputs: StringMap,
+}
+
+impl Workflow {
+    /// Generate a [`clap::Command`] for a [`Workflow`], where the inputs can be defined either by
+    /// command-line arguments or via a JSON input file.
+    pub fn generate_command(&self) -> clap::Command {
+        let mut command = clap::Command::new(&self.name).arg(clap::arg!(
+            --"input-file" [FILE]
+            "A file containing any inputs not passed on the command line"
+        ));
+        for input in &self.inputs {
+            let arg = clap::Arg::new(input)
+                .required(false)
+                .long(input)
+                .value_name(input.to_uppercase());
+            command = command.arg(arg);
+        }
+        command
+    }
+
+    fn load_inputs(&self, matches: &clap::ArgMatches) -> StringMap {
+        let mut map = StringMap::default();
+        let input_file: Option<StringMap> = matches
+            .get_one::<String>("input-file")
+            .and_then(|p| std::fs::File::open(p).ok())
+            .and_then(|f| serde_json::from_reader(f).ok());
+        for input in &self.inputs {
+            match matches.get_one::<String>(input.as_str()) {
+                Some(value) => {
+                    map.insert(input.clone(), value.clone());
+                    continue;
+                }
+                None => {
+                    if let Some(value) = input_file.as_ref().and_then(|f| f.get(input)) {
+                        map.insert(input.clone(), value.clone());
+                        continue;
+                    }
+                }
+            }
+            panic!("Key was not found: {input}");
+        }
+
+        map
+    }
+
+    pub fn handle(self, matches: &clap::ArgMatches) {
+        let inputs = self.load_inputs(matches);
+        // step 2: run through commands
+    }
+}
diff --git a/icepick.toml b/icepick.toml
index 9fc346e..7e5c439 100644
--- a/icepick.toml
+++ b/icepick.toml
@@ -2,3 +2,81 @@
 name = "sol"
 derivation_prefix = "m/44'/501'/0'"
 algorithm = "Ed25519"
+
+[[module.workflow]]
+# The name of the workflow, which can be called by:
+# `icepick workflow sol transfer-token`
+name = "transfer-token"
+
+# These inputs are acquired from the CLI and are used as inputs for the
+# workflow's steps. They can only be strings, but values stored by steps can
+# be anything that can be serialized as a serde_json::Value.
+# Inputs can also be loaded using "internal-load-file", using some form of
+# later-defined signature validation.
+inputs = ["from_address", "to_address", "token_name", "token_amount"]
+
+# Load the Blockhash from the SD card
+[[module.workflow.step]]
+type = "internal-load-file"
+
+# Pre-defined values to be passed to the module
+values = { filename = "blockhash.json" }
+
+# This value is marked to be saved in-memory, and can be used as an input for
+# later steps.
+outputs = { blockhash = "blockhash" }
+
+# Get the token address and token decimals for the given token
+[[module.workflow.step]]
+type = "sol-get-token-info"
+
+# The key is the key that is passed to the program in the
+# `values` field. The value is the name of the item in storage. In this
+# case, they are the same, because we read a `token_name` from our inputs,
+# store it in our storage as `token_name`, and `sol-get-token-info` will
+# expect a `token_name`.
+inputs = { token_name = "token_name" }
+
+# Because these two fields are not yet present in our storage, we can grab
+# them from the outputs of our module. The key is the key of the output value
+# we want to store, and the value is the name to be assigned in storage.
+outputs = { token_address = "token_address", token_decimals = "token_decimals" }
+
+[[module.workflow.step]]
+# Generate an unsigned Transaction
+type = "sol-transfer-token"
+
+# If using a lot of inputs, it may be best to use a non-inline table.
+# Non-inline tables _must_ come at the end of a step, as otherwise keys such
+# as `outputs` would be considered members of `inputs`. In this case, we use a
+# non-inline table for `outputs` even though it would fit on one line, to avoid
+# the ambiguity.
+[module.workflow.step.inputs]
+amount = "token_amount"
+token_address = "token_address"
+token_decimals = "token_decimals"
+to_address = "to_address"
+from_address = "from_address"
+
+[module.workflow.step.outputs]
+transaction = "unsigned_transaction"
+
+# Sign the transaction
+[[module.workflow.step]]
+type = "sol-sign"
+
+inputs = { transaction = "unsigned_transaction", blockhash = "blockhash" }
+
+outputs = { transaction = "signed_transaction" }
+
+# Write the signed transaction to a file
+[[module.workflow.step]]
+type = "internal-save-file"
+
+# We are using a static filename here, so we use `values` instead of `inputs`.
+values = { filename = "transaction.json" }
+
+# All fields in both `inputs` and `values`, other than `filename`, will be
+# persisted to the file. In this case, the `transaction` field of the file will
+# contain the signed transaction.
+inputs = { transaction = "signed_transaction" }
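
Note on the remaining TODO: the new `Workflow::handle` above stops at the `// step 2: run through commands` comment. Below is a minimal sketch of what that step runner could look like, assuming each module binary reads a JSON request of the shape {"operation": ..., "values": {...}} on stdin and writes a flat JSON object of produced values to stdout. The `run_steps` helper, the local `Step` type, and both JSON shapes are illustrative assumptions rather than icepick's actual module protocol; a real implementation would presumably reuse `WorkflowStep` and resolve the module binary through `get_command`.

use std::collections::HashMap;
use std::process::{Command, Stdio};

type StringMap = HashMap<String, String>;

// Mirrors the fields of `WorkflowStep` from the patch, kept local so the
// sketch is self-contained.
struct Step {
    r#type: String,
    values: StringMap,
    inputs: StringMap,
    outputs: StringMap,
}

// Run each step by spawning the module binary, feeding it a JSON request on
// stdin, and copying the requested outputs back into the shared storage map.
fn run_steps(bin: &str, steps: &[Step], storage: &mut StringMap) -> std::io::Result<()> {
    for step in steps {
        // Static `values` are passed through as-is; `inputs` are resolved from
        // the in-memory storage populated by CLI inputs and earlier steps.
        let mut request_values = step.values.clone();
        for (key, storage_name) in &step.inputs {
            if let Some(value) = storage.get(storage_name) {
                request_values.insert(key.clone(), value.clone());
            }
        }

        // Hypothetical request shape; the real module protocol may differ.
        let request = serde_json::json!({
            "operation": step.r#type.clone(),
            "values": request_values,
        });

        let mut child = Command::new(bin)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .spawn()?;
        serde_json::to_writer(child.stdin.take().expect("piped stdin"), &request)?;
        let output = child.wait_with_output()?;

        // Hypothetical response shape: a flat JSON object of produced values,
        // keyed by the names named in the step's `outputs` map.
        let response: StringMap = serde_json::from_slice(&output.stdout)?;
        for (produced, storage_name) in &step.outputs {
            if let Some(value) = response.get(produced) {
                storage.insert(storage_name.clone(), value.clone());
            }
        }
    }
    Ok(())
}

Wiring this into `handle` would mean seeding `storage` with the map returned by `load_inputs` and handling the `internal-*` step types (such as `internal-load-file` and `internal-save-file` from icepick.toml) locally rather than by spawning a module.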