Compare commits
No commits in common. "main" and "merge-blob-and-values" have entirely different histories.
main ... merge-blob-and-values
@@ -72,7 +72,7 @@ const LAMPORTS_PER_SOL: u64 = 1_000_000_000;
 #[derive(thiserror::Error, Debug)]
 pub enum Error {}
 
-#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
+#[derive(Serialize, Deserialize, Debug)]
 #[serde(rename_all = "kebab-case")]
 pub enum Cluster {
     Devnet,
@@ -431,10 +431,7 @@ impl Module for Solana {
             Some((address, decimals)) => serde_json::json!({
                 "blob": {
                     "token_address": address,
-                    // forgive me father, for i have sinned
-                    // see: https://git.distrust.co/public/icepick/issues/26
-                    // TransferToken { decimals: String }
-                    "token_decimals": decimals.to_string(),
+                    "token_decimals": decimals,
                 }
             }),
             None => serde_json::json!({
@@ -626,10 +623,7 @@ impl Module for Solana {
                     "derivation_accounts": [0u32 | 1 << 31],
                 }))
             }
-            Operation::Sign(Sign {
-                blockhash,
-                mut transaction,
-            }) => {
+            Operation::Sign(Sign { blockhash, mut transaction }) => {
                 let keys = request
                     .derived_keys
                     .unwrap_or_default()
@@ -647,10 +641,7 @@ impl Module for Solana {
                     }
                 }))
             }
-            Operation::Broadcast(Broadcast {
-                cluster,
-                transaction,
-            }) => {
+            Operation::Broadcast(Broadcast { cluster, transaction }) => {
                 let cluster = cluster.unwrap_or(Cluster::MainnetBeta);
                 let cluster_url = format!("https://api.{cluster}.solana.com");
 
@@ -658,20 +649,12 @@ impl Module for Solana {
                 let client = solana_rpc_client::rpc_client::RpcClient::new(cluster_url);
                 let _simulated_response = client.simulate_transaction(&transaction).unwrap();
                 let response = client.send_and_confirm_transaction(&transaction);
-                let cluster_suffix = {
-                    if cluster == Cluster::MainnetBeta {
-                        String::new()
-                    } else {
-                        format!("?cluster={cluster}")
-                    }
-                };
                 Ok(match response {
                     Ok(s) => {
                         serde_json::json!({
                             "blob": {
                                 "status": "send_and_confirm",
                                 "succcess": s.to_string(),
-                                "url": format!("https://explorer.solana.com/tx/{s}{cluster_suffix}"),
                             }
                         })
                     }
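
The removed comment above points at icepick issue #26: the transfer-token operation apparently declares its `decimals` input as a `String`, so the module stringifies the number before handing it over. A minimal serde sketch of that mismatch; the `TransferToken` struct here is reconstructed from the removed comment, not taken from the repository:

use serde::Deserialize;

// Hypothetical shape referenced by the removed comment: `decimals` is a String,
// so a bare JSON number fails to deserialize, which `.to_string()` papers over.
#[derive(Deserialize, Debug)]
struct TransferToken {
    decimals: String,
}

fn main() {
    // A JSON number is rejected by the String field...
    assert!(serde_json::from_str::<TransferToken>(r#"{"decimals": 9}"#).is_err());
    // ...while the stringified form deserializes fine.
    let token: TransferToken = serde_json::from_str(r#"{"decimals": "9"}"#).unwrap();
    println!("{token:?}");
}
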
@@ -1,6 +1,5 @@
 use clap::command;
 use icepick_module::help::*;
-use keyfork_derive_util::{request::DerivationAlgorithm, DerivationIndex, DerivationPath};
 use serde::{Deserialize, Serialize};
 use std::{
     collections::HashMap,
@@ -18,35 +17,6 @@ pub fn get_command(bin_name: &str) -> (&str, Vec<&str>) {
     }
 }
 
-pub fn derive_keys(
-    algo: &DerivationAlgorithm,
-    path_prefix: &DerivationPath,
-    accounts: &[DerivationIndex],
-) -> Vec<Vec<u8>> {
-    if accounts.is_empty() {
-        return vec![];
-    }
-    let mut derived_keys = vec![];
-    let mut client = keyforkd_client::Client::discover_socket().expect("keyforkd started");
-    for account in accounts {
-        let request = keyfork_derive_util::request::DerivationRequest::new(
-            algo.clone(),
-            &path_prefix.clone().chain_push(account.clone()),
-        );
-        let request = keyforkd_models::Request::Derivation(request);
-        let response = client.request(&request).expect("valid derivation");
-        match response {
-            keyforkd_models::Response::Derivation(
-                keyfork_derive_util::request::DerivationResponse { data, .. },
-            ) => {
-                derived_keys.push(data.to_vec());
-            }
-            _ => panic!("Unexpected response"),
-        }
-    }
-    derived_keys
-}
-
 #[derive(Serialize, Deserialize, Debug)]
 struct ModuleConfig {
     name: String,
@@ -189,7 +159,7 @@ pub fn do_cli_thing() {
             .find(|(module, _)| module == module_name)
             .and_then(|(_, workflows)| workflows.iter().find(|x| x.name == workflow_name))
             .expect("workflow from CLI should match config");
-        workflow.handle(matches, commands, &config.modules);
+        workflow.handle(matches, commands);
         return;
     }
 
@@ -249,7 +219,24 @@ pub fn do_cli_thing() {
                 let accounts: Vec<keyfork_derive_util::DerivationIndex> =
                     serde_json::from_value(accounts.clone())
                         .expect("valid derivation_accounts");
-                derived_keys.extend(derive_keys(&algo, &path, &accounts));
+                let mut client =
+                    keyforkd_client::Client::discover_socket().expect("keyforkd started");
+                for account in accounts {
+                    let request = keyfork_derive_util::request::DerivationRequest::new(
+                        algo.clone(),
+                        &path.clone().chain_push(account),
+                    );
+                    let request = keyforkd_models::Request::Derivation(request);
+                    let response = client.request(&request).expect("valid derivation");
+                    match response {
+                        keyforkd_models::Response::Derivation(
+                            keyfork_derive_util::request::DerivationResponse { data, .. },
+                        ) => {
+                            derived_keys.push(data.to_vec());
+                        }
+                        _ => panic!("Unexpected response"),
+                    }
+                }
             }
 
             let json = serde_json::json!({
@@ -1,13 +1,8 @@
-use keyfork_derive_util::DerivationIndex;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
-use std::{
-    collections::{HashMap, HashSet},
-    io::Write,
-    process::{Command, Stdio},
-};
+use std::collections::{HashMap, HashSet};
 
-use super::{derive_keys, get_command, Commands, ModuleConfig, Operation};
+use super::{Commands, Operation};
 
 #[derive(Serialize, Deserialize, Clone, Debug)]
 pub struct Workflow {
@@ -39,61 +34,11 @@ pub struct WorkflowStep {
 
 #[derive(Clone, Debug)]
 struct InvocableOperation {
-    module: String,
     name: String,
     binary: String,
    operation: Operation,
 }
 
-// TODO: This should probably be migrated to an actual Result type, instead of
-// currently just shoving everything in "blob". Probably done after derivation_accounts
-// gets hoisted out of here.
-#[derive(Serialize, Deserialize)]
-struct OperationResult {
-    // All values returned from an operation.
-    blob: HashMap<String, Value>,
-
-    // Any requested accounts from an operation.
-    //
-    // TODO: Move this to its own step.
-    #[serde(default)]
-    derivation_accounts: Vec<DerivationIndex>,
-}
-
-impl InvocableOperation {
-    fn invoke(&self, input: &HashMap<String, Value>, derived_keys: &[Vec<u8>]) -> OperationResult {
-        let (command, args) = get_command(&self.binary);
-
-        let json = serde_json::json!({
-            "operation": self.operation.name,
-            "values": input,
-            "derived_keys": derived_keys,
-        });
-
-        let mut child = Command::new(command)
-            .args(args)
-            .stdin(Stdio::piped())
-            .stdout(Stdio::piped())
-            .spawn()
-            .unwrap();
-
-        let mut child_input = child.stdin.take().unwrap();
-        serde_json::to_writer(&mut child_input, &json).unwrap();
-        child_input
-            .write_all(b"\n{\"operation\": \"exit\"}\n")
-            .unwrap();
-
-        let result = child.wait_with_output().unwrap();
-        if !result.status.success() {
-            panic!("Bad exit: {}", String::from_utf8_lossy(&result.stderr));
-        }
-
-        let output = result.stdout;
-        let json: OperationResult = serde_json::from_slice(&output).expect("valid json");
-        json
-    }
-}
-
 impl Workflow {
     /// Generate a [`clap::Command`] for a [`Workflow`], where the inputs can be defined either by
     /// command-line arguments or via a JSON input file.
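
For context, the removed `invoke` above writes one JSON request to the module's stdin, appends an `exit` operation, and parses the module's stdout as an `OperationResult`. A minimal sketch of the two shapes involved; the operation name and values below are invented, only the key names come from the code above:

use serde_json::json;

fn main() {
    // Request written to a module's stdin (hypothetical operation and values).
    let request = json!({
        "operation": "transfer-token",
        "values": { "amount": "10" },
        "derived_keys": []
    });

    // Reply the parent expects back on the module's stdout: a "blob" of output
    // values plus any hardened account indices to derive before the next step.
    let reply = json!({
        "blob": { "transaction": "..." },
        "derivation_accounts": [0u32 | 1 << 31]
    });

    println!("{request}");
    println!("{reply}");

    // The parent then terminates the module with a final line:
    // {"operation": "exit"}
}
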
@@ -176,78 +121,7 @@ impl Workflow {
         }
     }
 
-    fn run_workflow(
-        &self,
-        mut data: HashMap<String, Value>,
-        operations: &[InvocableOperation],
-        config: &[ModuleConfig],
-    ) {
-        let mut derived_keys = vec![];
-        let mut derivation_accounts = vec![];
-
-        for step in &self.steps {
-            let operation = operations
-                .iter()
-                .find(|op| op.name == step.r#type)
-                .expect("operation matched step type");
-
-            // Load keys from Keyfork, from previously requested workflow
-            let config = config
-                .iter()
-                .find(|module| module.name == operation.module)
-                .expect("could not find module config");
-            let algo = &config.algorithm;
-            let path_prefix = &config.derivation_prefix;
-            derived_keys.extend(derive_keys(algo, path_prefix, &derivation_accounts));
-            derivation_accounts.clear();
-
-            // Prepare all inputs for the operation invocation
-            //
-            // NOTE: this could be .clone().into_iter() but it would create an extra allocation of
-            // the HashMap, and an unnecessary alloc of the key.
-            let inputs: HashMap<String, Value> = data
-                .iter()
-                .map(|(k, v)| (k, v.clone()))
-                .filter_map(|(k, v)| {
-                    // We have our stored name, `k`, which matches with this inner loop's `v`. We
-                    // need to return our desired name, rather than our stored name, and the value
-                    // in our storage, our current `v`.
-                    let (desired, _stored) = step.inputs.iter().find(|(_, v)| k == *v)?;
-                    Some((desired.clone(), v))
-                })
-                .chain(
-                    step.values
-                        .iter()
-                        .map(|(k, v)| (k.clone(), Value::String(v.clone()))),
-                )
-                .collect();
-            let OperationResult {
-                blob,
-                derivation_accounts: new_accounts,
-            } = operation.invoke(&inputs, &derived_keys);
-            derived_keys.clear();
-            derivation_accounts.extend(new_accounts);
-            data.extend(blob.into_iter().filter_map(|(k, v)| {
-                // We have our stored name, `k`, which matches with this inner loop's `v`. We
-                // need to return our desired name, rather than our stored name, and the value
-                // in our storage, our current `v`.
-                let (_given, stored) = step.outputs.iter().find(|(k1, _)| k == **k1)?;
-                Some((stored.clone(), v))
-            }));
-        }
-
-        let last_outputs = &self.steps.last().unwrap().outputs;
-        data.retain(|stored_name, _| {
-            last_outputs
-                .values()
-                .any(|storage_name| stored_name == storage_name)
-        });
-
-        let json_as_str = serde_json::to_string(&data).unwrap();
-        println!("{json_as_str}");
-    }
-
-    pub fn handle(&self, matches: &clap::ArgMatches, modules: Commands, config: &[ModuleConfig]) {
+    pub fn handle(&self, matches: &clap::ArgMatches, modules: Commands) {
         let inputs = self.load_inputs(matches);
         let data: HashMap<String, Value> = inputs
             .into_iter()
@@ -260,7 +134,6 @@ impl Workflow {
        for operation in module_operations {
            let operation_name = &operation.name;
            let io = InvocableOperation {
-               module: module_name.clone(),
                name: format!("{module_name}-{operation_name}"),
                binary: module_binary.clone(),
                operation: operation.clone(),
@@ -274,6 +147,6 @@ impl Workflow {
             return;
         }
 
-        self.run_workflow(data, &operations, config);
+        todo!("Unsimulated transaction!");
     }
 }
@@ -29,4 +29,4 @@ echo "Waiting for signed transaction..."
 while test ! -f /data/output.json; do sleep 1; done
 
 echo "Broadcasting transaction"
-icepick sol broadcast --cluster devnet < /data/output.json | jq .
+icepick sol broadcast --cluster devnet < /data/output.json
icepick.toml (16 changed lines)
@@ -33,23 +33,15 @@ type = "sol-get-token-info"
 # The key is the key that is passed to the program in the
 # `values` field. The value is the item in storage. In this case,
 # we read a `token-name` from our input, but the operation expects `token`.
-inputs = { token = "token_name" }
+inputs = { token= "token_name" }
 
 # Because these two fields are currently unused in our storage, we can grab
 # them from the outputs of our module. The key is the key of the output value
 # we want to store, and the value is the name to be assigned in storage.
 outputs = { token_address = "token_address", token_decimals = "token_decimals" }
 
-# Get a blockhash
-[[module.workflow.step]]
-type = "sol-get-blockhash"
-
-outputs = { blockhash = "blockhash" }
-
 [[module.workflow.step]]
 # Generate an unsigned Transaction
-# This step MUST run immediately before sol-sign, as in the current version of
-# Icepick, keys are only held in memory in-between a single module invocation.
 type = "sol-transfer-token"
 
 # If using a lot of inputs, it may be best to use a non-inline table.
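
The `inputs` and `outputs` tables above rename values between workflow storage and what a module expects: `inputs = { token = "token_name" }` passes the stored `token_name` to the module under the name `token`, and `outputs` maps a module's output keys back into storage names. A small stand-alone sketch of that renaming, with invented names; the real logic lives in the workflow runner shown earlier:

use std::collections::HashMap;

fn main() {
    // Workflow storage and one step's `inputs` table (desired name -> stored name).
    let mut storage = HashMap::from([("token_name".to_string(), "USDC".to_string())]);
    let step_inputs = HashMap::from([("token".to_string(), "token_name".to_string())]);

    // Rename stored values to the names the module expects.
    let module_values: HashMap<String, String> = storage
        .iter()
        .filter_map(|(stored, value)| {
            let (desired, _) = step_inputs.iter().find(|(_, v)| *v == stored)?;
            Some((desired.clone(), value.clone()))
        })
        .collect();
    assert_eq!(module_values.get("token").map(String::as_str), Some("USDC"));

    // `outputs = { blockhash = "blockhash" }` goes the other way: the module's
    // `blockhash` output is written back into storage under that name.
    storage.insert("blockhash".to_string(), "...".to_string());
}
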
@@ -67,6 +59,12 @@ from_address = "from_address"
 [module.workflow.step.outputs]
 transaction = "unsigned_transaction"
 
+# Get a blockhash
+[[module.workflow.step]]
+type = "sol-get-blockhash"
+
+outputs = { blockhash = "blockhash" }
+
 # Sign the transaction
 [[module.workflow.step]]
 type = "sol-sign"