begin migration of workflows from icepick.toml to workflows.yaml

This commit is contained in:
Ryan Heywood 2025-01-23 03:03:02 -05:00
parent a473f74417
commit a32fc17e2c
Signed by: ryan
GPG Key ID: 8E401478A3FBEF72
10 changed files with 495 additions and 345 deletions

53
Cargo.lock generated
View File

@ -1659,12 +1659,14 @@ version = "0.1.0"
dependencies = [
"clap",
"icepick-module",
"icepick-workflow",
"keyfork-derive-util",
"keyforkd-client",
"keyforkd-models",
"serde",
"serde_json",
"thiserror 2.0.9",
"serde_yaml",
"thiserror 2.0.11",
"toml 0.8.19",
]
@ -1677,7 +1679,7 @@ dependencies = [
"icepick-module",
"serde",
"serde_json",
"thiserror 2.0.9",
"thiserror 2.0.11",
"tokio",
]
@ -1688,7 +1690,7 @@ dependencies = [
"icepick-module",
"serde",
"serde_json",
"thiserror 2.0.9",
"thiserror 2.0.11",
]
[[package]]
@ -1720,7 +1722,17 @@ dependencies = [
"spl-associated-token-account 6.0.0",
"spl-token 7.0.0",
"spl-token-2022 6.0.0",
"thiserror 2.0.9",
"thiserror 2.0.11",
]
[[package]]
name = "icepick-workflow"
version = "0.1.0"
dependencies = [
"keyfork-derive-util",
"serde",
"serde_json",
"thiserror 2.0.11",
]
[[package]]
@ -1993,9 +2005,9 @@ checksum = "fb5eae1e7471415b59f852ccb43b7858f0650a5d158ccbfb1d39088d0881f582"
[[package]]
name = "keyfork-derive-util"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://git.distrust.co/public/_cargo-index.git"
checksum = "12df1b3dbe1ac978fa53d86889156da010e290edb60ec308064906696d6a85b7"
checksum = "9205ca562716366941d69b16f0610b570811fe428b884879efbe68897d57edbc"
dependencies = [
"digest 0.10.7",
"ed25519-dalek 2.1.1",
@ -3095,6 +3107,19 @@ dependencies = [
"syn 2.0.92",
]
[[package]]
name = "serde_yaml"
version = "0.9.34+deprecated"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
dependencies = [
"indexmap",
"itoa",
"ryu",
"serde",
"unsafe-libyaml",
]
[[package]]
name = "sha2"
version = "0.9.9"
@ -4958,11 +4983,11 @@ dependencies = [
[[package]]
name = "thiserror"
version = "2.0.9"
version = "2.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc"
checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc"
dependencies = [
"thiserror-impl 2.0.9",
"thiserror-impl 2.0.11",
]
[[package]]
@ -4978,9 +5003,9 @@ dependencies = [
[[package]]
name = "thiserror-impl"
version = "2.0.9"
version = "2.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4"
checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2"
dependencies = [
"proc-macro2",
"quote",
@ -5205,6 +5230,12 @@ dependencies = [
"void",
]
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
[[package]]
name = "untrusted"
version = "0.9.0"

View File

@ -3,9 +3,11 @@
resolver = "2"
members = [
"crates/icepick",
"crates/icepick-workflow",
"crates/icepick-module",
"crates/builtins/icepick-internal",
"crates/by-chain/icepick-solana", "crates/by-chain/icepick-cosmos",
"crates/by-chain/icepick-solana",
"crates/by-chain/icepick-cosmos",
]
[workspace.dependencies]

View File

@ -0,0 +1,10 @@
[package]
name = "icepick-workflow"
version = "0.1.0"
edition = "2021"
[dependencies]
keyfork-derive-util = { version = "0.2.2", registry = "distrust", default-features = false }
serde.workspace = true
serde_json.workspace = true
thiserror = "2.0.11"

View File

@ -0,0 +1,202 @@
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::{HashMap, HashSet};
/// Errors produced while dry-running ("simulating") a workflow to verify,
/// before any module is invoked, that every step's inputs can be satisfied.
#[derive(thiserror::Error, Debug)]
pub enum SimulationError {
/// A step referenced an operation name that no loaded module provides.
#[error("Step not found: {0}")]
StepNotFound(String),
/// A step input was neither produced by an earlier step nor supplied as a
/// static value; fields are the step type and the missing value's name.
#[error("Expected input variable or static value not found in step {0}: {1}")]
ValueNotFound(String, String),
}
#[derive(thiserror::Error, Debug)]
pub enum WorkflowError {
#[error("Invocable operation could not be found: {0}")]
InvocableOperationNotFound(String),
#[error("Derivation configuration for operation not found: {0}")]
DerivationConfigurationNotFound(String),
#[error("An error was encountered while invoking an operation")]
InvocationError(String),
}
/// A named, ordered sequence of operation invocations ("steps") that thread
/// values between each other through a shared key/value store.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Workflow {
/// The workflow's name, used to select it (e.g. from the CLI).
pub name: String,
/// Names of values the caller must provide before the workflow runs.
#[serde(default)]
pub inputs: Vec<String>,
/// The ordered steps; serialized/deserialized under the key `step`.
#[serde(rename = "step")]
steps: Vec<WorkflowStep>,
}
/// Convenience alias: maps one set of string names to another (for example,
/// an operation's argument name to the name of a value in workflow storage).
pub type StringMap = HashMap<String, String>;

/// A single step of a [`Workflow`].
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorkflowStep {
/// The name of the operation to invoke for this step.
r#type: String,
/// Static values passed to the operation as-is (key = argument name).
#[serde(default)]
values: StringMap,
/// Maps operation argument names to names of values in workflow storage.
#[serde(default)]
inputs: StringMap,
/// Maps operation output names to the storage names they are saved under.
#[serde(default)]
outputs: StringMap,
}
// TODO: This should probably be migrated to an actual Result type, instead of
// currently just shoving everything in "blob". Probably done after derivation_accounts
// gets hoisted out of here.
/// The JSON payload returned by a single operation invocation.
#[derive(Serialize, Deserialize)]
pub struct OperationResult {
// All values returned from an operation.
blob: HashMap<String, Value>,
// Any requested accounts from an operation.
//
// TODO: Move this to its own step.
#[serde(default)]
derivation_accounts: Vec<DerivationIndex>,
}

/// Callback used to derive keys: given a derivation algorithm, a path prefix,
/// and the requested account indices, returns one derived key per account.
type DeriveKeys<'a> = &'a dyn Fn(&DerivationAlgorithm, &DerivationPath, &[DerivationIndex]) -> Vec<Vec<u8>>;
impl Workflow {
    /// Dry-run the workflow against a set of known value names, without
    /// invoking any operations.
    ///
    /// `data` holds the names of values available before the first step; each
    /// step's outputs are added as it is "executed". Returns human-readable
    /// reports about inputs that will not be interpreted as operation
    /// arguments (and are instead passed through as JSON input).
    ///
    /// # Errors
    /// Returns a [`SimulationError`] if a step names an unknown operation, or
    /// requires a value that is neither in `data` nor a static step value.
    pub fn simulate_workflow<T: InvocableOperation>(
        &self,
        mut data: HashSet<String>,
        operations: &[T],
    ) -> Result<Vec<String>, SimulationError> {
        let mut reports = vec![];
        for step in self.steps.iter() {
            let step_type = step.r#type.clone();
            let Some(invocable) = operations.iter().find(|op| *op.name() == step_type) else {
                return Err(SimulationError::StepNotFound(step_type));
            };

            // Check we have the values the module expects.
            for in_memory_name in step.inputs.values() {
                if !data.contains(in_memory_name) && !step.values.contains_key(in_memory_name) {
                    return Err(SimulationError::ValueNotFound(
                        step_type,
                        in_memory_name.to_owned(),
                    ));
                }
            }

            // Check whether the module expects the keys as arguments, or if
            // the keys will be passed as a "payload" variable.
            let mut inputs = step.inputs.keys().collect::<HashSet<_>>();
            for argument in invocable.argument_names() {
                inputs.remove(argument);
            }
            for remaining_input in inputs {
                // Fix: "as a argument" -> "as an argument".
                reports.push(format!(
                    "Step {step_type}: Input {remaining_input} is not interpreted as an argument"
                ));
            }

            // Add the return values from the module into memory.
            data.extend(step.outputs.values().cloned());
        }
        Ok(reports)
    }

    /// Execute the workflow's steps in order.
    ///
    /// `data` is the initial key/value storage (typically the caller-provided
    /// inputs); `derive_keys` is called whenever the previous step requested
    /// derivation accounts. After the final step, storage is trimmed to that
    /// step's outputs, which are returned.
    ///
    /// # Errors
    /// Returns a [`WorkflowError`] if a step names an unknown operation, or
    /// if keys were requested and the next operation provides no derivation
    /// configuration.
    pub fn run_workflow<T: InvocableOperation>(
        &self,
        mut data: HashMap<String, Value>,
        operations: &[T],
        derive_keys: DeriveKeys,
    ) -> Result<HashMap<String, Value>, WorkflowError> {
        let mut derived_keys = vec![];
        let mut derivation_accounts = vec![];

        for step in &self.steps {
            let step_type = step.r#type.clone();
            let Some(operation) = operations.iter().find(|op| *op.name() == step_type) else {
                return Err(WorkflowError::InvocableOperationNotFound(step_type));
            };

            // Derive any keys requested by the previous step, then clear the
            // request list so it is not re-derived on the next iteration.
            // (`algo`/`path_prefix` are already references; the previous
            // `&algo`/`&path_prefix` passed `&&T` and relied on deref
            // coercion.)
            if !derivation_accounts.is_empty() {
                let Some((algo, path_prefix)) = operation.derivation_configuration() else {
                    return Err(WorkflowError::DerivationConfigurationNotFound(step_type));
                };
                derived_keys.extend(derive_keys(algo, path_prefix, &derivation_accounts));
            }
            derivation_accounts.clear();

            // Prepare all inputs for the operation invocation.
            let inputs: HashMap<String, Value> = data
                .iter()
                .map(|(k, v)| (k, v.clone()))
                .filter_map(|(k, v)| {
                    // `k` is the stored name; find the argument name the
                    // operation expects ("desired") and pair it with the value.
                    let (desired, _stored) = step.inputs.iter().find(|(_, v)| k == *v)?;
                    Some((desired.clone(), v))
                })
                .chain(
                    step.values
                        .iter()
                        .map(|(k, v)| (k.clone(), Value::String(v.clone()))),
                )
                .collect();

            let OperationResult {
                blob,
                derivation_accounts: new_accounts,
            } = operation.invoke(&inputs, &derived_keys);
            // Keys are only held in memory for a single invocation.
            derived_keys.clear();
            derivation_accounts.extend(new_accounts);

            data.extend(blob.into_iter().filter_map(|(k, v)| {
                // `k` is the operation's output name; store the value under
                // the storage name the step mapped it to.
                let (_given, stored) = step.outputs.iter().find(|(k1, _)| k == **k1)?;
                Some((stored.clone(), v))
            }));
        }

        // Only the outputs of the final step are returned to the caller.
        // (`self.steps.last()` already yields an Option; the previous
        // `&self.steps.last()` borrowed the temporary Option needlessly.)
        if let Some(last_step) = self.steps.last() {
            let values = last_step.outputs.values().collect::<HashSet<_>>();
            data.retain(|stored_name, _| values.contains(stored_name));
        }
        Ok(data)
    }
}
/// A source of workflow inputs, such as CLI arguments or a JSON input file.
pub trait WorkflowHandler {
/// Load all inputs for the Workflow from some external source, such as CLI arguments or
/// JSON payloads. The inputs can then be used to simulate or perform a workflow.
fn load_inputs(&self) -> StringMap;
}
/// The configuration for an Icepick operation that can be invoked.
///
/// Implementors of this trait should include all necessary requirements to invoke the operation
/// within themselves.
pub trait InvocableOperation {
/// Invoke the operation with the supplied inputs and derived keys.
fn invoke(&self, input: &HashMap<String, Value>, derived_keys: &[Vec<u8>]) -> OperationResult;
/// The name of the operation.
fn name(&self) -> &String;
/// The names of arguments that can be passed to the function.
fn argument_names(&self) -> impl Iterator<Item = &String>;
/// The derivation algorithm and derivation path to be prefixed to all derivation requests.
/// `None` indicates the operation cannot satisfy key-derivation requests.
fn derivation_configuration(&self) -> Option<(&DerivationAlgorithm, &DerivationPath)>;
}

View File

@ -6,10 +6,16 @@ edition = "2021"
[dependencies]
clap = { version = "4.5.20", features = ["cargo", "derive", "string"] }
icepick-module = { version = "0.1.0", path = "../icepick-module" }
icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" }
keyfork-derive-util = { version = "0.2.1", registry = "distrust" }
keyforkd-client = { version = "0.2.1", registry = "distrust" }
keyforkd-models = { version = "0.2.0", registry = "distrust" }
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
serde_yaml = "0.9.34"
thiserror = "2.0.3"
toml = "0.8.19"
[build-dependencies]
icepick-workflow = { version = "0.1.0", path = "../icepick-workflow" }
serde_yaml = "0.9.34"

45
crates/icepick/build.rs Normal file
View File

@ -0,0 +1,45 @@
use icepick_workflow::Workflow;
use std::{collections::HashMap, path::PathBuf};
/// Read a required environment variable, registering it with Cargo so the
/// build script is re-run whenever the variable changes.
///
/// # Panics
/// Panics if the variable is unset or contains invalid Unicode. The panic
/// message now includes the underlying `VarError` — the previous
/// `expect(var)` printed only the variable's name, losing the cause.
fn env_var(var: &'static str) -> String {
    println!("cargo::rerun-if-env-changed={var}");
    std::env::var(var)
        .unwrap_or_else(|err| panic!("required environment variable {var}: {err}"))
}
/// Build-script entry point: bundles every workflow YAML file found under
/// `workflows/<module>/*.yaml` into a single `workflows.yaml` file mapping
/// module name -> list of workflows, which the icepick CLI loads at runtime.
fn main() {
// NOTE(review): build scripts conventionally write generated files to
// OUT_DIR; CARGO_TARGET_DIR is not guaranteed to be set by Cargo — confirm
// the intended output location before relying on this.
let out_dir = env_var("CARGO_TARGET_DIR");
let crate_dir = env_var("CARGO_MANIFEST_DIR");
let workflows_dir = PathBuf::from(crate_dir).join("workflows");
let mut workflows_by_module: HashMap<String, Vec<Workflow>> = Default::default();
// Each immediate child of `workflows/` must be a directory named after a
// module; anything else aborts the build.
for module_dir in std::fs::read_dir(&workflows_dir).unwrap() {
let module_dir = module_dir.unwrap();
let path = module_dir.path();
if !path.is_dir() {
panic!("found unexpected file {}", path.to_string_lossy());
}
let mut workflows = vec![];
// Each entry inside a module directory must be a parseable workflow file.
for workflow_file in std::fs::read_dir(&path).unwrap() {
let workflow_file = workflow_file.unwrap();
let path = workflow_file.path();
if !path.is_file() {
panic!("found unexpected non-file {}", path.to_string_lossy());
}
let file_content = std::fs::read(path).unwrap();
let workflow: Workflow = serde_yaml::from_slice(&file_content).unwrap();
workflows.push(workflow);
}
workflows_by_module.insert(
module_dir.file_name().to_str().unwrap().to_owned(),
workflows,
);
}
// Serialize the collected map as a single YAML document.
let out_path = PathBuf::from(out_dir).join("workflows.yaml");
let out_file = std::fs::File::create(out_path).unwrap();
serde_yaml::to_writer(out_file, &workflows_by_module).unwrap();
}

View File

@ -70,7 +70,7 @@ struct ModuleConfig {
/// All workflows for a module.
#[serde(rename = "workflow", default)]
workflows: Vec<workflow::Workflow>,
workflows: Vec<icepick_workflow::Workflow>,
}
mod serde_derivation {
@ -134,6 +134,23 @@ pub fn do_cli_thing() {
workflows: Default::default(),
});
let workflows_file = std::env::vars().find_map(|(k, v)| {
if k == "ICEPICK_WORKFLOWS_FILE" {
return Some(v);
}
None
});
let workflows_path = workflows_file.unwrap_or_else(|| "workflows.yaml".to_string());
let workflows_content = std::fs::read(&workflows_path).expect("can't read workflows from file");
let workflows: HashMap<String, Vec<icepick_workflow::Workflow>> =
serde_yaml::from_slice(&workflows_content).unwrap();
for module in &mut config.modules {
if let Some(module_workflows) = workflows.get(&module.name) {
module.workflows.extend(module_workflows.iter().cloned());
}
}
let mut commands = vec![];
let mut icepick_command = command!();
for module in &config.modules {
@ -174,7 +191,7 @@ pub fn do_cli_thing() {
for module in workflows.iter() {
let mut module_subcommand = clap::Command::new(module.0.as_str());
for workflow in &module.1 {
module_subcommand = module_subcommand.subcommand(workflow.generate_command());
module_subcommand = module_subcommand.subcommand(workflow::generate_command(workflow));
}
workflow_command = workflow_command.subcommand(module_subcommand);
}
@ -219,7 +236,7 @@ pub fn do_cli_thing() {
.find(|(module, _)| module == module_name)
.and_then(|(_, workflows)| workflows.iter().find(|x| x.name == workflow_name))
.expect("workflow from CLI should match config");
workflow.handle(matches, commands, &config.modules);
workflow::handle(workflow, matches, commands, &config.modules);
return;
}

View File

@ -1,68 +1,35 @@
use keyfork_derive_util::DerivationIndex;
use serde::{Deserialize, Serialize};
use icepick_workflow::{InvocableOperation, OperationResult, Workflow};
use keyfork_derive_util::{request::DerivationAlgorithm, DerivationPath};
use serde_json::Value;
use std::{
collections::{HashMap, HashSet},
collections::HashMap,
io::Write,
process::{Command, Stdio},
};
use super::{derive_keys, get_command, Commands, ModuleConfig, Operation};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Workflow {
pub name: String,
#[serde(default)]
pub inputs: Vec<String>,
#[serde(rename = "step")]
steps: Vec<WorkflowStep>,
}
pub type StringMap = std::collections::HashMap<String, String>;
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorkflowStep {
r#type: String,
#[serde(default)]
blob: StringMap,
#[serde(default)]
values: StringMap,
#[serde(default)]
inputs: StringMap,
#[serde(default)]
outputs: StringMap,
}
#[derive(Clone, Debug)]
struct InvocableOperation {
module: String,
struct CLIOperation {
/// The name of the operation (i.e. `transfer-token`).
name: String,
/// The binary to invoke the operation.
binary: String,
/// Information describing the invocation requirements of the operation.
operation: Operation,
/// The derivation algorithm used when deriving keys for the operation.
derivation_algorithm: Option<DerivationAlgorithm>,
/// The derivation prefix used when deriving keys for the operation.
derivation_prefix: Option<DerivationPath>,
}
// TODO: This should probably be migrated to an actual Result type, instead of
// currently just shoving everything in "blob". Probably done after derivation_accounts
// gets hoisted out of here.
#[derive(Serialize, Deserialize)]
struct OperationResult {
// All values returned from an operation.
blob: HashMap<String, Value>,
// Any requested accounts from an operation.
//
// TODO: Move this to its own step.
#[serde(default)]
derivation_accounts: Vec<DerivationIndex>,
}
impl InvocableOperation {
impl InvocableOperation for CLIOperation {
fn invoke(&self, input: &HashMap<String, Value>, derived_keys: &[Vec<u8>]) -> OperationResult {
let (command, args) = get_command(&self.binary);
@ -87,24 +54,40 @@ impl InvocableOperation {
let result = child.wait_with_output().unwrap();
if !result.status.success() {
panic!("Bad exit ({}: {}): {}", &self.binary, &self.operation.name, String::from_utf8_lossy(&result.stderr));
panic!(
"Bad exit ({}: {}): {}",
&self.binary,
&self.operation.name,
String::from_utf8_lossy(&result.stderr)
);
}
let output = result.stdout;
let json: OperationResult = serde_json::from_slice(&output).expect("valid json");
json
}
fn name(&self) -> &String {
&self.name
}
impl Workflow {
/// Generate a [`clap::Command`] for a [`Workflow`], where the inputs can be defined either by
/// command-line arguments or via a JSON input file.
pub fn generate_command(&self) -> clap::Command {
let mut command = clap::Command::new(&self.name).arg(clap::arg!(
fn argument_names(&self) -> impl Iterator<Item = &String> {
self.operation.arguments.iter().map(|i| &i.name)
}
fn derivation_configuration(&self) -> Option<(&DerivationAlgorithm, &DerivationPath)> {
self.derivation_algorithm
.as_ref()
.zip(self.derivation_prefix.as_ref())
}
}
pub fn generate_command(workflow: &Workflow) -> clap::Command {
let mut command = clap::Command::new(&workflow.name).arg(clap::arg!(
--"input-file" [FILE]
"A file containing any inputs not passed on the command line"
));
for input in &self.inputs {
for input in &workflow.inputs {
let arg = clap::Arg::new(input)
.required(false)
.long(input.replace('_', "-"))
@ -114,21 +97,24 @@ impl Workflow {
command
}
fn load_inputs(&self, matches: &clap::ArgMatches) -> StringMap {
fn load_inputs<T: AsRef<str> + Into<String> + std::fmt::Display>(
inputs: impl IntoIterator<Item = T>,
matches: &clap::ArgMatches,
) -> StringMap {
let mut map = StringMap::default();
let input_file: Option<StringMap> = matches
.get_one::<std::path::PathBuf>("input-file")
.and_then(|p| std::fs::File::open(p).ok())
.and_then(|f| serde_json::from_reader(f).ok());
for input in &self.inputs {
match matches.get_one::<String>(input) {
for input in inputs {
match matches.get_one::<String>(input.as_ref()) {
Some(value) => {
map.insert(input.clone(), value.clone());
map.insert(input.into(), value.clone());
continue;
}
None => {
if let Some(value) = input_file.as_ref().and_then(|f| f.get(input)) {
map.insert(input.clone(), value.clone());
if let Some(value) = input_file.as_ref().and_then(|f| f.get(input.as_ref())) {
map.insert(input.into(), value.clone());
continue;
}
}
@ -139,152 +125,49 @@ impl Workflow {
map
}
fn simulate_workflow(&self, mut data: HashSet<String>, operations: &[InvocableOperation]) {
// simulate the steps by using a HashSet to traverse the inputs and outputs and ensure
// there's no inconsistencies
for (i, step) in self.steps.iter().enumerate() {
// NOTE: overflow possible but unlikely
let step_index = i + 1;
let step_type = &step.r#type;
// Find the relevant Operation
let Some(invocable) = operations.iter().find(|op| op.name == *step_type) else {
panic!("Could not find operation: {step_type}");
};
// Check if we have the keys we want to pass into the module.
for in_memory_name in step.inputs.values() {
if !data.contains(in_memory_name) && !step.values.contains_key(in_memory_name) {
eprintln!("Failed simulation: step #{step_index} ({step_type}): missing value {in_memory_name}");
}
}
// Check that the module accepts those keys.
for module_input_name in step.inputs.keys() {
if !invocable
.operation
.arguments
.iter()
.any(|arg| *module_input_name == arg.name)
{
eprintln!("Simulation: step #{step_index} ({step_type}): input value {module_input_name} will be passed through as JSON input");
}
}
// Add the keys we get from the module.
for in_memory_name in step.outputs.values() {
data.insert(in_memory_name.clone());
}
}
}
fn run_workflow(
&self,
mut data: HashMap<String, Value>,
operations: &[InvocableOperation],
config: &[ModuleConfig],
) {
let mut derived_keys = vec![];
let mut derivation_accounts = vec![];
for step in &self.steps {
let operation = operations
.iter()
.find(|op| op.name == step.r#type)
.expect("operation matched step type");
// Load keys from Keyfork, from previously requested workflow
let config = config
.iter()
.find(|module| module.name == operation.module)
.expect("could not find module config");
let algo = &config.algorithm;
let path_prefix = &config.derivation_prefix;
if !derivation_accounts.is_empty() {
derived_keys.extend(derive_keys(
algo.as_ref()
.expect("a module requested keys but didn't provide algorithm"),
path_prefix
.as_ref()
.expect("a module requested keys but didn't provide prefix"),
&derivation_accounts,
));
}
derivation_accounts.clear();
// Prepare all inputs for the operation invocation
//
// NOTE: this could be .clone().into_iter() but it would create an extra allocation of
// the HashMap, and an unnecessary alloc of the key.
let inputs: HashMap<String, Value> = data
.iter()
.map(|(k, v)| (k, v.clone()))
.filter_map(|(k, v)| {
// We have our stored name, `k`, which matches with this inner loop's `v`. We
// need to return our desired name, rather than our stored name, and the value
// in our storage, our current `v`.
let (desired, _stored) = step.inputs.iter().find(|(_, v)| k == *v)?;
Some((desired.clone(), v))
})
.chain(
step.values
.iter()
.map(|(k, v)| (k.clone(), Value::String(v.clone()))),
)
.collect();
let OperationResult {
blob,
derivation_accounts: new_accounts,
} = operation.invoke(&inputs, &derived_keys);
derived_keys.clear();
derivation_accounts.extend(new_accounts);
data.extend(blob.into_iter().filter_map(|(k, v)| {
// We have our stored name, `k`, which matches with this inner loop's `v`. We
// need to return our desired name, rather than our stored name, and the value
// in our storage, our current `v`.
let (_given, stored) = step.outputs.iter().find(|(k1, _)| k == **k1)?;
Some((stored.clone(), v))
}));
}
let last_outputs = &self.steps.last().unwrap().outputs;
data.retain(|stored_name, _| {
last_outputs
.values()
.any(|storage_name| stored_name == storage_name)
});
let json_as_str = serde_json::to_string(&data).unwrap();
println!("{json_as_str}");
}
pub fn handle(&self, matches: &clap::ArgMatches, modules: Commands, config: &[ModuleConfig]) {
let inputs = self.load_inputs(matches);
let data: HashMap<String, Value> = inputs
.into_iter()
.map(|(k, v)| (k, Value::String(v)))
.collect();
fn load_operations(commands: Commands, config: &[ModuleConfig]) -> Vec<CLIOperation> {
let mut operations = vec![];
for (module_name, module_binary, module_operations) in modules {
for (module_name, module_binary, module_operations) in commands {
for operation in module_operations {
let operation_name = &operation.name;
let io = InvocableOperation {
module: module_name.clone(),
let module_config = config.iter().find(|conf| conf.name == *module_name);
let io = CLIOperation {
name: format!("{module_name}-{operation_name}"),
binary: module_binary.clone(),
operation: operation.clone(),
derivation_algorithm: module_config.and_then(|m| m.algorithm.clone()),
derivation_prefix: module_config.and_then(|m| m.derivation_prefix.clone()),
};
operations.push(io);
}
}
operations
}
pub fn handle(
workflow: &Workflow,
matches: &clap::ArgMatches,
modules: Commands,
config: &[ModuleConfig],
) {
let inputs = load_inputs(&workflow.inputs, matches);
let data: HashMap<String, Value> = inputs
.into_iter()
.map(|(k, v)| (k, Value::String(v)))
.collect();
let operations = load_operations(modules, config);
if matches.get_flag("simulate-workflow") {
self.simulate_workflow(data.into_keys().collect(), &operations);
let reports = workflow.simulate_workflow(data.into_keys().collect(), &operations).expect("Simulation failure");
for report in reports {
println!("{report}");
}
return;
}
self.run_workflow(data, &operations, config);
}
let result = workflow.run_workflow(data, &operations, &derive_keys).expect("Invocation failure");
println!("{}", serde_json::to_string(&result).expect("valid JSON"));
}

View File

@ -0,0 +1,50 @@
# Workflow: transfer a token between Solana addresses, signing offline with a
# durable nonce and writing the signed transaction to a file.
# Invoked as: `icepick workflow sol transfer-token`
name: transfer-token
inputs:
  - from_address
  - to_address
  - token_name
  - token_amount
step:
  # Resolve the token's mint address and decimal precision from its name.
  - type: sol-get-token-info
    inputs:
      token: token_name
    outputs:
      token_address: token_address
      token_decimals: token_decimals
  # Load the durable-nonce data previously saved to disk.
  - type: internal-load-file
    values:
      filename: "nonce.json"
    outputs:
      nonce_authority: nonce_authority
      nonce_data: nonce_data
      nonce_address: nonce_address
  # Build the unsigned transfer instructions.
  - type: sol-transfer-token
    inputs:
      amount: token_amount
      token_address: token_address
      decimals: token_decimals
      to_address: to_address
      from_address: from_address
    outputs:
      instructions: instructions
      derivation_accounts: derivation_accounts
  # Compile the instructions into an unsigned transaction using the nonce.
  - type: sol-compile
    inputs:
      instructions: instructions
      derivation_accounts: derivation_accounts
      nonce_address: nonce_address
      nonce_authority: nonce_authority
      nonce_data: nonce_data
    outputs:
      transaction: unsigned_transaction
  # Sign the transaction. Fix: store the result as `signed_transaction`
  # (matching the original icepick.toml workflow) — it was stored as
  # `transaction`, so the save step below could never find its input.
  - type: sol-sign
    inputs:
      transaction: unsigned_transaction
      blockhash: nonce_data
    outputs:
      transaction: signed_transaction
  # Persist the signed transaction for later broadcast.
  - type: internal-save-file
    values:
      filename: "transaction.json"
    inputs:
      transaction: signed_transaction

View File

@ -3,102 +3,6 @@ name = "sol"
derivation_prefix = "m/44'/501'/0'"
algorithm = "Ed25519"
[[module.workflow]]
# The name of the workflow, which can be called by:
# `icepick workflow sol transfer-token`
name = "transfer-token"
# These values are used as inputs for other workflows, acquired from the CLI.
# These values can only be strings, but other values can be any value that can
# be serialized by serde_json::Value.
# These values can also be loaded using "internal-load-file", using some form
# of later-defined signature validation.
inputs = ["from_address", "to_address", "token_name", "token_amount"]
# Get the token address and token decimals for the given token
[[module.workflow.step]]
type = "sol-get-token-info"
# The key is the key that is passed to the program in the
# `values` field. The value is the item in storage. In this case,
# we read a `token-name` from our input, but the operation expects `token`.
inputs = { token = "token_name" }
# Because these two fields are currently unused in our storage, we can grab
# them from the outputs of our module. The key is the key of the output value
# we want to store, and the value is the name to be assigned in storage.
outputs = { token_address = "token_address", token_decimals = "token_decimals" }
# Load the transaction nonce from the SD card
[[module.workflow.step]]
type = "internal-load-file"
# Pre-defined values to be passed to the module.
# In this case, the `filename` field is reserved for marking which file to load.
values = { filename = "nonce.json" }
# This value is marked to be saved in-memory, and can be used as an input for
# later steps.
outputs = { nonce_authority = "nonce_authority", nonce_data = "nonce_data", nonce_address = "nonce_address" }
[[module.workflow.step]]
# Generate an unsigned Transaction
# This step MUST run immediately before sol-sign, as in the current version of
# Icepick, keys are only held in memory in-between a single module invocation.
type = "sol-transfer-token"
# If using a lot of inputs, it may be best to use a non-inline table.
# Non-inline tables _must_ come last within a step's definition, as otherwise,
# `outputs` for example would be considered a member of `inputs`. In this case, we use a
# non-inline table for `outputs` even though it would fit on one line, to avoid
# the ambiguity.
[module.workflow.step.inputs]
amount = "token_amount"
token_address = "token_address"
decimals = "token_decimals"
to_address = "to_address"
from_address = "from_address"
[module.workflow.step.outputs]
instructions = "instructions"
derivation_accounts = "derivation_accounts"
[[module.workflow.step]]
type = "sol-compile"
[module.workflow.step.inputs]
instructions = "instructions"
derivation_accounts = "derivation_accounts"
nonce_address = "nonce_address"
nonce_authority = "nonce_authority"
nonce_data = "nonce_data"
[module.workflow.step.outputs]
transaction = "unsigned_transaction"
# Sign the transaction
[[module.workflow.step]]
type = "sol-sign"
[module.workflow.step.inputs]
transaction = "unsigned_transaction"
blockhash = "nonce_data"
[module.workflow.step.outputs]
transaction = "signed_transaction"
# Write the signed transaction to a file
[[module.workflow.step]]
type = "internal-save-file"
# We are using a static filename here, so we use `values` instead of `inputs`.
values = { filename = "transaction.json" }
# All fields in both `inputs` and `values`, other than `filename`, will be
# persisted to the file. In this case, the `transaction` field of the file will
# contain the signed transaction.
inputs = { transaction = "signed_transaction" }
# NOTE: To get a nonce address, the `generate-nonce-account` workflow should be
# run. It is the only workflow that uses a blockhash, which is why a
# `broadcast-with-blockhash` or similar is not, and should not be, implemented.