convert from executable -> executable + library with macro and builder options (#9)

This commit is contained in:
Adam Leventhal 2021-10-17 10:40:22 -07:00 committed by GitHub
parent 62d2acf0aa
commit 97857c347c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 100148 additions and 871 deletions

833
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,21 +1,14 @@
[package] [workspace]
name = "progenitor" members = [
version = "0.0.0" "progenitor",
edition = "2018" "progenitor-macro",
license = "MPL-2.0" "progenitor-impl",
repository = "https://github.com/oxidecomputer/progenitor.git" "example-build",
description = "An OpenAPI client generator" "example-macro",
]
[dependencies] default-members = [
anyhow = "1" "progenitor",
getopts = "0.2" "progenitor-macro",
indexmap = "1.7.0" "progenitor-impl",
openapiv3 = "1.0.0-beta.2" ]
proc-macro2 = "1.0.29"
quote = "1.0.9"
regex = "1.5.4"
rustfmt-wrapper = "0.1.0"
schemars = "0.8.5"
serde = { version = "1", features = [ "derive" ] }
serde_json = "1.0.68"
typify = { git = "https://github.com/oxidecomputer/typify" }

17
example-build/Cargo.toml Normal file
View File

@ -0,0 +1,17 @@
[package]
name = "example-build"
version = "0.0.1"
authors = ["Adam H. Leventhal <ahl@oxidecomputer.com>"]
edition = "2018"
[dependencies]
anyhow = "1.0.44"
percent-encoding = "2.1.0"
serde = { version = "1.0.130", features = ["derive"] }
reqwest = { version = "0.11.5", features = ["json", "stream"] }
uuid = { version = "0.8.2", features = ["serde", "v4"] }
chrono = { version = "0.4.19", features = ["serde"] }
[build-dependencies]
progenitor = { path = "../progenitor" }
serde_json = "1.0.68"

22
example-build/build.rs Normal file
View File

@ -0,0 +1,22 @@
// Copyright 2021 Oxide Computer Company
use std::{
env,
fs::{self, File},
path::Path,
};
use progenitor::Generator;
/// Build script: generate a Rust client from the sample OpenAPI document and
/// write it to `$OUT_DIR/codegen.rs`, where `src/main.rs` includes it.
fn main() {
    // Parse the OpenAPI spec shipped with the repository.
    let spec_file = File::open("../sample_openapi/keeper.json").unwrap();
    let spec = serde_json::from_reader(spec_file).unwrap();

    // Run the generator to produce formatted Rust source text.
    let mut generator = Generator::new();
    let generated = generator.generate_text(&spec).unwrap();

    // Cargo supplies OUT_DIR; place the generated code there.
    let out_dir = env::var("OUT_DIR").unwrap();
    let dest = Path::new(&out_dir).join("codegen.rs");
    fs::write(dest, generated).unwrap();
}

12
example-build/src/main.rs Normal file
View File

@ -0,0 +1,12 @@
// Copyright 2021 Oxide Computer Company
// Include the generated code.
include!(concat!(env!("OUT_DIR"), "/codegen.rs"));
fn main() {
    // Construct the generated client and one of its generated request-body
    // types. The returned future is deliberately dropped without being
    // awaited: this example only demonstrates that the generated code
    // compiles and is callable.
    let client = Client::new("https://foo/bar");
    let body = types::EnrolBody {
        host: String::new(),
        key: String::new(),
    };
    let _ = client.enrol(&body);
}

14
example-macro/Cargo.toml Normal file
View File

@ -0,0 +1,14 @@
[package]
name = "example-macro"
version = "0.0.1"
authors = ["Adam H. Leventhal <ahl@oxidecomputer.com>"]
edition = "2018"
[dependencies]
progenitor = { path = "../progenitor" }
anyhow = "1.0.44"
percent-encoding = "2.1.0"
serde = { version = "1.0.130", features = ["derive"] }
reqwest = { version = "0.11.5", features = ["json", "stream"] }
uuid = { version = "0.8.2", features = ["serde", "v4"] }
chrono = { version = "0.4.19", features = ["serde"] }

View File

@ -0,0 +1,7 @@
// Copyright 2021 Oxide Computer Company
use progenitor::generate_api;

// Expand the entire generated client (the `types` module plus `Client` and
// its methods) at compile time from the referenced OpenAPI document.
generate_api!("../sample_openapi/keeper.json");

fn main() {}

View File

@ -0,0 +1,25 @@
[package]
name = "progenitor-impl"
version = "0.0.0"
edition = "2018"
license = "MPL-2.0"
repository = "https://github.com/oxidecomputer/progenitor.git"
description = "An OpenAPI client generator - core implementation"
[dependencies]
anyhow = "1"
getopts = "0.2"
indexmap = "1.7.0"
openapiv3 = "1.0.0-beta.2"
proc-macro2 = "1.0.29"
quote = "1.0.9"
regex = "1.5.4"
rustfmt-wrapper = "0.1.0"
schemars = "0.8.5"
serde = { version = "1", features = [ "derive" ] }
serde_json = "1.0.68"
typify = { git = "https://github.com/oxidecomputer/typify" }
thiserror = "1.0.30"
[dev-dependencies]
expectorate = "1.0.4"

651
progenitor-impl/src/lib.rs Normal file
View File

@ -0,0 +1,651 @@
// Copyright 2021 Oxide Computer Company
use std::cmp::Ordering;
use openapiv3::{OpenAPI, ReferenceOr};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use thiserror::Error;
use typify::TypeSpace;
use crate::to_schema::ToSchema;
mod template;
mod to_schema;
#[derive(Error, Debug)]
pub enum Error {
#[error("unexpected value type")]
BadValue(String, serde_json::Value),
#[error("type error")]
TypeError(#[from] typify::Error),
#[error("XXX")]
BadConversion(String),
#[error("invalid operation path")]
InvalidPath(String),
//#[error("unknown")]
//Unknown,
}
pub type Result<T> = std::result::Result<T, Error>;
/// Staged client generator: accumulates schema-derived type information in a
/// typify `TypeSpace` while operations are processed, then emits the client.
#[derive(Default)]
pub struct Generator {
    // All types discovered from the spec's schemas and operation
    // parameters/bodies; shared by every method on this struct.
    type_space: TypeSpace,
}
impl Generator {
    /// Create a generator with an empty type space.
    pub fn new() -> Self {
        Self::default()
    }

    /// Emit the complete client — a percent-encoding support module, a
    /// `types` module, and a `Client` with one async method per operation —
    /// as a raw `TokenStream`.
    pub fn generate_tokens(&mut self, spec: &OpenAPI) -> Result<TokenStream> {
        // Convert our components dictionary to schemars
        let schemas = spec
            .components
            .iter()
            .flat_map(|components| {
                components.schemas.iter().map(|(name, ref_or_schema)| {
                    (name.clone(), ref_or_schema.to_schema())
                })
            })
            .collect::<Vec<(String, _)>>();

        // Generated type names are referenced through the `types` module.
        self.type_space.set_type_mod("types");
        self.type_space.add_ref_types(schemas)?;

        // Where an operation parameter appears; also drives the
        // deterministic parameter ordering below.
        enum ParamType {
            Path,
            Query,
            Body,
        }

        let methods = spec
            .operations()
            .map(|(path, method, operation)| {
                // (name, is-optional) pairs used to assemble the query
                // string in the generated method body.
                let mut query: Vec<(String, bool)> = Vec::new();
                let mut raw_params = operation
                    .parameters
                    .iter()
                    .map(|parameter| {
                        match parameter.item()? {
                            openapiv3::Parameter::Path {
                                parameter_data,
                                style: openapiv3::PathStyle::Simple,
                            } => {
                                // Path parameters MUST be required.
                                assert!(parameter_data.required);
                                let nam = parameter_data.name.clone();
                                let schema =
                                    parameter_data.schema()?.to_schema();
                                let typ = self
                                    .type_space
                                    .add_type_details(&schema)?
                                    .parameter;
                                Ok((ParamType::Path, nam, typ))
                            }
                            openapiv3::Parameter::Query {
                                parameter_data,
                                allow_reserved: _,
                                style: openapiv3::QueryStyle::Form,
                                allow_empty_value,
                            } => {
                                if let Some(aev) = allow_empty_value {
                                    if *aev {
                                        todo!("allow empty value is a no go");
                                    }
                                }
                                let nam = parameter_data.name.clone();
                                let schema =
                                    parameter_data.schema()?.to_schema();
                                let mut typ = self
                                    .type_space
                                    .add_type_details(&schema)?
                                    .parameter;
                                // Optional query parameters are surfaced as
                                // Option<T> in the method signature.
                                if !parameter_data.required {
                                    typ = quote! { Option<#typ> };
                                }
                                query.push((
                                    nam.to_string(),
                                    !parameter_data.required,
                                ));
                                Ok((ParamType::Query, nam, typ))
                            }
                            x => todo!("unhandled parameter type: {:#?}", x),
                        }
                    })
                    .collect::<Result<Vec<_>>>()?;

                // Extra generic bounds on the generated method (currently
                // only used for binary request bodies).
                let mut bounds = Vec::new();
                let (body_param, body_func) = if let Some(b) =
                    &operation.request_body
                {
                    let b = b.item()?;
                    if b.is_binary()? {
                        // A binary body accepts anything convertible into a
                        // reqwest::Body.
                        bounds.push(quote! {B: Into<reqwest::Body>});
                        (Some(quote! {B}), Some(quote! { .body(body) }))
                    } else {
                        let mt = b.content_json()?;
                        if !mt.encoding.is_empty() {
                            todo!("media type encoding not empty: {:#?}", mt);
                        }
                        if let Some(s) = &mt.schema {
                            let schema = s.to_schema();
                            let typ = self
                                .type_space
                                .add_type_details(&schema)?
                                .parameter;
                            (Some(typ), Some(quote! { .json(body) }))
                        } else {
                            todo!("media type encoding, no schema: {:#?}", mt);
                        }
                    }
                } else {
                    (None, None)
                };

                if let Some(body) = body_param {
                    raw_params.push((
                        ParamType::Body,
                        "body".to_string(),
                        body,
                    ));
                }

                // Parse the path template (e.g. /v1/task/{task}) for both
                // the parameter names and the url-construction tokens.
                let tmp = template::parse(path)?;
                let names = tmp.names();
                let url_path = tmp.compile();

                // Put parameters in a deterministic order.
                raw_params.sort_by(|a, b| match (&a.0, &b.0) {
                    // Path params are first and are in positional order.
                    (ParamType::Path, ParamType::Path) => {
                        let aa = names.iter().position(|x| x == &a.1).unwrap();
                        let bb = names.iter().position(|x| x == &b.1).unwrap();
                        aa.cmp(&bb)
                    }
                    (ParamType::Path, ParamType::Query) => Ordering::Less,
                    (ParamType::Path, ParamType::Body) => Ordering::Less,
                    // Query params are in lexicographic order.
                    (ParamType::Query, ParamType::Body) => Ordering::Less,
                    (ParamType::Query, ParamType::Query) => a.1.cmp(&b.1),
                    (ParamType::Query, ParamType::Path) => Ordering::Greater,
                    // Body params are last and should be unique
                    (ParamType::Body, ParamType::Path) => Ordering::Greater,
                    (ParamType::Body, ParamType::Query) => Ordering::Greater,
                    (ParamType::Body, ParamType::Body) => {
                        panic!("should only be one body")
                    }
                });

                // Determine the method's return type and how to decode the
                // response. Only the single-2xx-response and no-response
                // shapes are supported so far.
                let (response_type, decode_response) = if operation
                    .responses
                    .responses
                    .len()
                    == 1
                {
                    let only = operation.responses.responses.first().unwrap();
                    if !matches!(only.0, openapiv3::StatusCode::Code(200..=299))
                    {
                        todo!("code? {:#?}", only);
                    }
                    let i = only.1.item()?;
                    if !i.headers.is_empty() {
                        todo!("no response headers for now");
                    }
                    if !i.links.is_empty() {
                        todo!("no response links for now");
                    }
                    // Look at the response content. For now, support a
                    // single JSON-formatted response.
                    let typ = match (
                        i.content.len(),
                        i.content.get("application/json"),
                    ) {
                        // No content at all: the method returns unit.
                        (0, _) => quote! { () },
                        (1, Some(mt)) => {
                            if !mt.encoding.is_empty() {
                                todo!(
                                    "media type encoding not empty: {:#?}",
                                    mt
                                );
                            }
                            if let Some(schema) = &mt.schema {
                                let schema = schema.to_schema();
                                self.type_space.add_type_details(&schema)?.ident
                            } else {
                                todo!(
                                    "media type encoding, no schema: {:#?}",
                                    mt
                                );
                            }
                        }
                        (1, None) => {
                            todo!(
                                "response content not JSON: {:#?}",
                                i.content
                            );
                        }
                        (_, _) => {
                            todo!(
                                "too many response contents: {:#?}",
                                i.content
                            );
                        }
                    };
                    (typ, quote! { res.json().await? })
                } else if operation.responses.responses.is_empty() {
                    // No declared responses: hand back the raw response.
                    (quote! { reqwest::Response }, quote! { res })
                } else {
                    todo!("responses? {:#?}", operation.responses);
                };

                let operation_id = format_ident!(
                    "{}",
                    operation.operation_id.as_deref().unwrap()
                );

                let bounds = if bounds.is_empty() {
                    quote! {}
                } else {
                    quote! {
                        < #(#bounds),* >
                    }
                };

                let params = raw_params.into_iter().map(|(_, name, typ)| {
                    let name = format_ident!("{}", name);
                    quote! {
                        #name: #typ
                    }
                });

                // Tokens that build the `query` Vec (and pass it to
                // reqwest); empty when the operation has no query params.
                let (query_build, query_use) = if query.is_empty() {
                    (quote! {}, quote! {})
                } else {
                    let query_items = query.iter().map(|(qn, opt)| {
                        if *opt {
                            let qn_ident = format_ident!("{}", qn);
                            quote! {
                                if let Some(v) = & #qn_ident {
                                    query.push((#qn, v.to_string()));
                                }
                            }
                        } else {
                            quote! {
                                query.push((#qn, #qn.to_string()));
                            }
                        }
                    });
                    let query_build = quote! {
                        let mut query = Vec::new();
                        #(#query_items)*
                    };
                    let query_use = quote! {
                        .query(&query)
                    };
                    (query_build, query_use)
                };

                // e.g. "task_get: GET /v1/task/{task}"
                let doc_comment = format!(
                    "{}: {} {}",
                    operation.operation_id.as_deref().unwrap(),
                    method.to_ascii_uppercase(),
                    path
                );

                let method_func = format_ident!("{}", method);

                let method = quote! {
                    #[doc = #doc_comment]
                    pub async fn #operation_id #bounds (
                        &self,
                        #(#params),*
                    ) -> Result<#response_type> {
                        #url_path
                        #query_build
                        let res = self.client
                            . #method_func (url)
                            #body_func
                            #query_use
                            .send()
                            .await?
                            .error_for_status()?;
                        Ok(#decode_response)
                    }
                };

                Ok(method)
            })
            .collect::<Result<Vec<_>>>()?;

        // Emit type definitions sorted by name so output is stable.
        let mut types = self
            .type_space
            .iter_types()
            .map(|type_entry| {
                (
                    type_entry.type_name(&self.type_space),
                    type_entry.output(&self.type_space),
                )
            })
            .collect::<Vec<_>>();
        types.sort_by(|a, b| a.0.cmp(&b.0));
        let types = types.into_iter().map(|(_, def)| def);

        let file = quote! {
            use anyhow::Result;

            mod progenitor_support {
                use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};

                #[allow(dead_code)]
                const PATH_SET: &AsciiSet = &CONTROLS
                    .add(b' ')
                    .add(b'"')
                    .add(b'#')
                    .add(b'<')
                    .add(b'>')
                    .add(b'?')
                    .add(b'`')
                    .add(b'{')
                    .add(b'}');

                #[allow(dead_code)]
                pub(crate) fn encode_path(pc: &str) -> String {
                    utf8_percent_encode(pc, PATH_SET).to_string()
                }
            }

            pub mod types {
                use serde::{Deserialize, Serialize};

                #(#types)*
            }

            #[derive(Clone)]
            pub struct Client {
                baseurl: String,
                client: reqwest::Client,
            }

            impl Client {
                pub fn new(baseurl: &str) -> Client {
                    let dur = std::time::Duration::from_secs(15);
                    let client = reqwest::ClientBuilder::new()
                        .connect_timeout(dur)
                        .timeout(dur)
                        .build()
                        .unwrap();

                    Client::new_with_client(baseurl, client)
                }

                pub fn new_with_client(
                    baseurl: &str,
                    client: reqwest::Client,
                ) -> Client {
                    Client {
                        baseurl: baseurl.to_string(),
                        client,
                    }
                }

                #(#methods)*
            }
        };

        Ok(file)
    }

    /// Like [`Self::generate_tokens`], but rendered to a `String`, run
    /// through rustfmt, and with blank lines inserted between items.
    pub fn generate_text(&mut self, spec: &OpenAPI) -> Result<String> {
        let output = self.generate_tokens(spec)?;

        // Format the file with rustfmt and some whitespace niceties.
        let content = rustfmt_wrapper::rustfmt(output).unwrap();

        // Insert an extra newline after a closing brace that is followed by
        // another item; the line ending differs on Windows.
        Ok(if cfg!(not(windows)) {
            let regex = regex::Regex::new(r#"(})(\n\s*[^} ])"#).unwrap();
            regex.replace_all(&content, "$1\n$2").to_string()
        } else {
            let regex = regex::Regex::new(r#"(})(\r\n\s*[^} ])"#).unwrap();
            regex.replace_all(&content, "$1\r\n$2").to_string()
        })
    }

    /// Cargo dependency lines the generated code requires; uuid and chrono
    /// are included only when the generated types use them.
    pub fn dependencies(&self) -> Vec<String> {
        let mut deps = vec![
            "anyhow = \"1.0.44\"",
            "percent-encoding = \"2.1.0\"",
            "serde = { version = \"1.0.130\", features = [\"derive\"] }",
            "reqwest = { version = \"0.11.5\", features = [\"json\", \"stream\"] }",
        ];
        if self.type_space.uses_uuid() {
            deps.push(
                "uuid = { version = \"0.8.2\", features = [\"serde\", \"v4\"] }",
            )
        }
        if self.type_space.uses_chrono() {
            deps.push(
                "chrono = { version = \"0.4.19\", features = [\"serde\"] }",
            )
        }
        deps.sort_unstable();
        deps.iter().map(ToString::to_string).collect()
    }

    /// Read-only access to the accumulated type space.
    pub fn get_type_space(&self) -> &TypeSpace {
        &self.type_space
    }
}
trait ParameterDataExt {
fn schema(&self) -> Result<&openapiv3::ReferenceOr<openapiv3::Schema>>;
}
impl ParameterDataExt for openapiv3::ParameterData {
fn schema(&self) -> Result<&openapiv3::ReferenceOr<openapiv3::Schema>> {
match &self.format {
openapiv3::ParameterSchemaOrContent::Schema(s) => Ok(s),
x => {
Err(Error::BadConversion(format!("XXX param format {:#?}", x)))
}
}
}
}
/// Extension trait shared by request bodies and responses: both expose a
/// `content` map of media types that we probe for JSON or binary payloads.
trait ExtractJsonMediaType {
    // True if the single content entry is a binary (octet-stream) string.
    fn is_binary(&self) -> Result<bool>;
    // Return the sole `application/json` media type entry.
    fn content_json(&self) -> Result<openapiv3::MediaType>;
}
impl ExtractJsonMediaType for openapiv3::Response {
fn content_json(&self) -> Result<openapiv3::MediaType> {
if self.content.len() != 1 {
todo!("expected one content entry, found {}", self.content.len());
}
if let Some(mt) = self.content.get("application/json") {
Ok(mt.clone())
} else {
todo!(
"could not find application/json, only found {}",
self.content.keys().next().unwrap()
);
}
}
fn is_binary(&self) -> Result<bool> {
if self.content.is_empty() {
/*
* XXX If there are no content types, I guess it is not binary?
*/
return Ok(false);
}
if self.content.len() != 1 {
todo!("expected one content entry, found {}", self.content.len());
}
if let Some(mt) = self.content.get("application/octet-stream") {
if !mt.encoding.is_empty() {
todo!("XXX encoding");
}
if let Some(s) = &mt.schema {
use openapiv3::{
SchemaKind, StringFormat, Type,
VariantOrUnknownOrEmpty::Item,
};
let s = s.item()?;
if s.schema_data.nullable {
todo!("XXX nullable binary?");
}
if s.schema_data.default.is_some() {
todo!("XXX default binary?");
}
if s.schema_data.discriminator.is_some() {
todo!("XXX binary discriminator?");
}
match &s.schema_kind {
SchemaKind::Type(Type::String(st)) => {
if st.min_length.is_some() || st.max_length.is_some() {
todo!("binary min/max length");
}
if !matches!(st.format, Item(StringFormat::Binary)) {
todo!(
"expected binary format string, got {:?}",
st.format
);
}
if st.pattern.is_some() {
todo!("XXX pattern");
}
if !st.enumeration.is_empty() {
todo!("XXX enumeration");
}
return Ok(true);
}
x => {
todo!("XXX schemakind type {:?}", x);
}
}
} else {
todo!("binary thing had no schema?");
}
}
Ok(false)
}
}
impl ExtractJsonMediaType for openapiv3::RequestBody {
fn content_json(&self) -> Result<openapiv3::MediaType> {
if self.content.len() != 1 {
todo!("expected one content entry, found {}", self.content.len());
}
if let Some(mt) = self.content.get("application/json") {
Ok(mt.clone())
} else {
todo!(
"could not find application/json, only found {}",
self.content.keys().next().unwrap()
);
}
}
fn is_binary(&self) -> Result<bool> {
if self.content.is_empty() {
/*
* XXX If there are no content types, I guess it is not binary?
*/
return Ok(false);
}
if self.content.len() != 1 {
todo!("expected one content entry, found {}", self.content.len());
}
if let Some(mt) = self.content.get("application/octet-stream") {
if !mt.encoding.is_empty() {
todo!("XXX encoding");
}
if let Some(s) = &mt.schema {
use openapiv3::{
SchemaKind, StringFormat, Type,
VariantOrUnknownOrEmpty::Item,
};
let s = s.item()?;
if s.schema_data.nullable {
todo!("XXX nullable binary?");
}
if s.schema_data.default.is_some() {
todo!("XXX default binary?");
}
if s.schema_data.discriminator.is_some() {
todo!("XXX binary discriminator?");
}
match &s.schema_kind {
SchemaKind::Type(Type::String(st)) => {
if st.min_length.is_some() || st.max_length.is_some() {
todo!("binary min/max length");
}
if !matches!(st.format, Item(StringFormat::Binary)) {
todo!(
"expected binary format string, got {:?}",
st.format
);
}
if st.pattern.is_some() {
todo!("XXX pattern");
}
if !st.enumeration.is_empty() {
todo!("XXX enumeration");
}
return Ok(true);
}
x => {
todo!("XXX schemakind type {:?}", x);
}
}
} else {
todo!("binary thing had no schema?");
}
}
Ok(false)
}
}
/// Extension trait: unwrap a `ReferenceOr`, treating an unresolved `$ref`
/// as a conversion error.
trait ReferenceOrExt<T> {
    fn item(&self) -> Result<&T>;
}

impl<T> ReferenceOrExt<T> for openapiv3::ReferenceOr<T> {
    fn item(&self) -> Result<&T> {
        if let ReferenceOr::Item(item) = self {
            Ok(item)
        } else {
            Err(Error::BadConversion("unexpected reference".to_string()))
        }
    }
}

View File

@ -1,7 +1,10 @@
use anyhow::{anyhow, bail, Context, Result}; // Copyright 2021 Oxide Computer Company
use proc_macro2::TokenStream; use proc_macro2::TokenStream;
use quote::{format_ident, quote}; use quote::{format_ident, quote};
use crate::{Error, Result};
#[derive(Eq, PartialEq, Clone, Debug)] #[derive(Eq, PartialEq, Clone, Debug)]
enum Component { enum Component {
Constant(String), Constant(String),
@ -53,11 +56,6 @@ impl Template {
} }
pub fn parse(t: &str) -> Result<Template> { pub fn parse(t: &str) -> Result<Template> {
parse_inner(t)
.with_context(|| anyhow!("parse failure for template {:?}", t))
}
fn parse_inner(t: &str) -> Result<Template> {
enum State { enum State {
Start, Start,
ConstantOrParameter, ConstantOrParameter,
@ -76,12 +74,16 @@ fn parse_inner(t: &str) -> Result<Template> {
if c == '/' { if c == '/' {
s = State::ConstantOrParameter; s = State::ConstantOrParameter;
} else { } else {
bail!("path must start with a slash"); return Err(Error::InvalidPath(
"path must start with a slash".to_string(),
));
} }
} }
State::ConstantOrParameter => { State::ConstantOrParameter => {
if c == '/' || c == '}' { if c == '/' || c == '}' {
bail!("expected a constant or parameter"); return Err(Error::InvalidPath(
"expected a constant or parameter".to_string(),
));
} else if c == '{' { } else if c == '{' {
s = State::Parameter; s = State::Parameter;
} else { } else {
@ -95,7 +97,9 @@ fn parse_inner(t: &str) -> Result<Template> {
a = String::new(); a = String::new();
s = State::ConstantOrParameter; s = State::ConstantOrParameter;
} else if c == '{' || c == '}' { } else if c == '{' || c == '}' {
bail!("unexpected parameter"); return Err(Error::InvalidPath(
"unexpected parameter".to_string(),
));
} else { } else {
a.push(c); a.push(c);
} }
@ -106,7 +110,9 @@ fn parse_inner(t: &str) -> Result<Template> {
a = String::new(); a = String::new();
s = State::ParameterSlash; s = State::ParameterSlash;
} else if c == '/' || c == '{' { } else if c == '/' || c == '{' {
bail!("expected parameter"); return Err(Error::InvalidPath(
"expected parameter".to_string(),
));
} else { } else {
a.push(c); a.push(c);
} }
@ -115,17 +121,25 @@ fn parse_inner(t: &str) -> Result<Template> {
if c == '/' { if c == '/' {
s = State::ConstantOrParameter; s = State::ConstantOrParameter;
} else { } else {
bail!("expected a slash after parameter"); return Err(Error::InvalidPath(
"expected a slash after parameter".to_string(),
));
} }
} }
} }
} }
match s { match s {
State::Start => bail!("empty path"), State::Start => {
return Err(Error::InvalidPath("empty path".to_string()))
}
State::ConstantOrParameter | State::ParameterSlash => (), State::ConstantOrParameter | State::ParameterSlash => (),
State::Constant => components.push(Component::Constant(a)), State::Constant => components.push(Component::Constant(a)),
State::Parameter => bail!("unterminated parameter"), State::Parameter => {
return Err(Error::InvalidPath(
"unterminated parameter".to_string(),
))
}
} }
Ok(Template { components }) Ok(Template { components })

View File

@ -1,3 +1,5 @@
// Copyright 2021 Oxide Computer Company
use indexmap::IndexMap; use indexmap::IndexMap;
use openapiv3::AnySchema; use openapiv3::AnySchema;
use serde_json::Value; use serde_json::Value;
@ -360,6 +362,74 @@ impl Convert<schemars::schema::Schema> for openapiv3::Schema {
schemars::schema::Schema::Bool(true).into_object() schemars::schema::Schema::Bool(true).into_object()
} }
// Malformed object.
openapiv3::SchemaKind::Any(AnySchema {
pattern: None,
multiple_of: None,
exclusive_minimum: None,
exclusive_maximum: None,
minimum: None,
maximum: None,
properties,
required,
additional_properties,
min_properties,
max_properties,
items: None,
min_items: None,
max_items: None,
unique_items: None,
format: None,
}) => {
let object = openapiv3::Schema {
schema_data: self.schema_data.clone(),
schema_kind: openapiv3::SchemaKind::Type(
openapiv3::Type::Object(openapiv3::ObjectType {
properties: properties.clone(),
required: required.clone(),
additional_properties: additional_properties
.clone(),
min_properties: *min_properties,
max_properties: *max_properties,
}),
),
};
object.convert().into()
}
// Malformed array.
openapiv3::SchemaKind::Any(AnySchema {
pattern: None,
multiple_of: None,
exclusive_minimum: None,
exclusive_maximum: None,
minimum: None,
maximum: None,
properties,
required,
additional_properties: None,
min_properties: None,
max_properties: None,
items: items @ Some(_),
min_items,
max_items,
unique_items,
format: None,
}) if properties.is_empty() && required.is_empty() => {
let array = openapiv3::Schema {
schema_data: self.schema_data.clone(),
schema_kind: openapiv3::SchemaKind::Type(
openapiv3::Type::Array(openapiv3::ArrayType {
items: items.clone(),
min_items: *min_items,
max_items: *max_items,
unique_items: unique_items.unwrap_or(false),
}),
),
};
array.convert().into()
}
openapiv3::SchemaKind::Any(_) => { openapiv3::SchemaKind::Any(_) => {
panic!("not clear what we could usefully do here {:#?}", self) panic!("not clear what we could usefully do here {:#?}", self)
} }

View File

@ -0,0 +1,414 @@
use anyhow::Result;
mod progenitor_support {
use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};
#[allow(dead_code)]
const PATH_SET: &AsciiSet = &CONTROLS
.add(b' ')
.add(b'"')
.add(b'#')
.add(b'<')
.add(b'>')
.add(b'?')
.add(b'`')
.add(b'{')
.add(b'}');
#[allow(dead_code)]
pub(crate) fn encode_path(pc: &str) -> String {
utf8_percent_encode(pc, PATH_SET).to_string()
}
}
pub mod types {
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Task {
pub id: String,
pub name: String,
pub output_rules: Vec<String>,
pub script: String,
pub state: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TaskEvent {
pub payload: String,
pub seq: u32,
pub stream: String,
pub time: chrono::DateTime<chrono::offset::Utc>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TaskOutput {
pub id: String,
pub path: String,
pub size: u64,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TaskSubmit {
pub name: String,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub output_rules: Vec<String>,
pub script: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TaskSubmitResult {
pub id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct UploadedChunk {
pub id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct UserCreate {
pub name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct UserCreateResult {
pub id: String,
pub name: String,
pub token: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WhoamiResult {
pub id: String,
pub name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Worker {
pub deleted: bool,
pub id: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub instance_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub lastping: Option<chrono::DateTime<chrono::offset::Utc>>,
pub recycle: bool,
pub tasks: Vec<WorkerTask>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorkerAddOutput {
pub chunks: Vec<String>,
pub path: String,
pub size: i64,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorkerAppendTask {
pub payload: String,
pub stream: String,
pub time: chrono::DateTime<chrono::offset::Utc>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorkerBootstrap {
pub bootstrap: String,
pub token: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorkerBootstrapResult {
pub id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorkerCompleteTask {
pub failed: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorkerPingResult {
pub poweroff: bool,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub task: Option<WorkerPingTask>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorkerPingTask {
pub id: String,
pub output_rules: Vec<String>,
pub script: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorkerTask {
pub id: String,
pub name: String,
pub owner: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorkersResult {
pub workers: Vec<Worker>,
}
}
#[derive(Clone)]
pub struct Client {
baseurl: String,
client: reqwest::Client,
}
impl Client {
pub fn new(baseurl: &str) -> Client {
let dur = std::time::Duration::from_secs(15);
let client = reqwest::ClientBuilder::new()
.connect_timeout(dur)
.timeout(dur)
.build()
.unwrap();
Client::new_with_client(baseurl, client)
}
pub fn new_with_client(baseurl: &str, client: reqwest::Client) -> Client {
Client {
baseurl: baseurl.to_string(),
client,
}
}
#[doc = "control_hold: POST /v1/control/hold"]
pub async fn control_hold(&self) -> Result<()> {
let url = format!("{}/v1/control/hold", self.baseurl,);
let res = self.client.post(url).send().await?.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "control_resume: POST /v1/control/resume"]
pub async fn control_resume(&self) -> Result<()> {
let url = format!("{}/v1/control/resume", self.baseurl,);
let res = self.client.post(url).send().await?.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "task_get: GET /v1/task/{task}"]
pub async fn task_get(&self, task: &str) -> Result<types::Task> {
let url = format!(
"{}/v1/task/{}",
self.baseurl,
progenitor_support::encode_path(&task.to_string()),
);
let res = self.client.get(url).send().await?.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "tasks_get: GET /v1/tasks"]
pub async fn tasks_get(&self) -> Result<Vec<types::Task>> {
let url = format!("{}/v1/tasks", self.baseurl,);
let res = self.client.get(url).send().await?.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "task_submit: POST /v1/tasks"]
pub async fn task_submit(&self, body: &types::TaskSubmit) -> Result<types::TaskSubmitResult> {
let url = format!("{}/v1/tasks", self.baseurl,);
let res = self
.client
.post(url)
.json(body)
.send()
.await?
.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "task_events_get: GET /v1/tasks/{task}/events"]
pub async fn task_events_get(
&self,
task: &str,
minseq: Option<u32>,
) -> Result<Vec<types::TaskEvent>> {
let url = format!(
"{}/v1/tasks/{}/events",
self.baseurl,
progenitor_support::encode_path(&task.to_string()),
);
let mut query = Vec::new();
if let Some(v) = &minseq {
query.push(("minseq", v.to_string()));
}
let res = self
.client
.get(url)
.query(&query)
.send()
.await?
.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "task_outputs_get: GET /v1/tasks/{task}/outputs"]
pub async fn task_outputs_get(&self, task: &str) -> Result<Vec<types::TaskOutput>> {
let url = format!(
"{}/v1/tasks/{}/outputs",
self.baseurl,
progenitor_support::encode_path(&task.to_string()),
);
let res = self.client.get(url).send().await?.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "task_output_download: GET /v1/tasks/{task}/outputs/{output}"]
pub async fn task_output_download(
&self,
task: &str,
output: &str,
) -> Result<reqwest::Response> {
let url = format!(
"{}/v1/tasks/{}/outputs/{}",
self.baseurl,
progenitor_support::encode_path(&task.to_string()),
progenitor_support::encode_path(&output.to_string()),
);
let res = self.client.get(url).send().await?.error_for_status()?;
Ok(res)
}
#[doc = "user_create: POST /v1/users"]
pub async fn user_create(&self, body: &types::UserCreate) -> Result<types::UserCreateResult> {
let url = format!("{}/v1/users", self.baseurl,);
let res = self
.client
.post(url)
.json(body)
.send()
.await?
.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "whoami: GET /v1/whoami"]
pub async fn whoami(&self) -> Result<types::WhoamiResult> {
let url = format!("{}/v1/whoami", self.baseurl,);
let res = self.client.get(url).send().await?.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "worker_bootstrap: POST /v1/worker/bootstrap"]
pub async fn worker_bootstrap(
&self,
body: &types::WorkerBootstrap,
) -> Result<types::WorkerBootstrapResult> {
let url = format!("{}/v1/worker/bootstrap", self.baseurl,);
let res = self
.client
.post(url)
.json(body)
.send()
.await?
.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "worker_ping: GET /v1/worker/ping"]
pub async fn worker_ping(&self) -> Result<types::WorkerPingResult> {
let url = format!("{}/v1/worker/ping", self.baseurl,);
let res = self.client.get(url).send().await?.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "worker_task_append: POST /v1/worker/task/{task}/append"]
pub async fn worker_task_append(
&self,
task: &str,
body: &types::WorkerAppendTask,
) -> Result<()> {
let url = format!(
"{}/v1/worker/task/{}/append",
self.baseurl,
progenitor_support::encode_path(&task.to_string()),
);
let res = self
.client
.post(url)
.json(body)
.send()
.await?
.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "worker_task_upload_chunk: POST /v1/worker/task/{task}/chunk"]
pub async fn worker_task_upload_chunk<B: Into<reqwest::Body>>(
&self,
task: &str,
body: B,
) -> Result<types::UploadedChunk> {
let url = format!(
"{}/v1/worker/task/{}/chunk",
self.baseurl,
progenitor_support::encode_path(&task.to_string()),
);
let res = self
.client
.post(url)
.body(body)
.send()
.await?
.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "worker_task_complete: POST /v1/worker/task/{task}/complete"]
pub async fn worker_task_complete(
&self,
task: &str,
body: &types::WorkerCompleteTask,
) -> Result<()> {
let url = format!(
"{}/v1/worker/task/{}/complete",
self.baseurl,
progenitor_support::encode_path(&task.to_string()),
);
let res = self
.client
.post(url)
.json(body)
.send()
.await?
.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "worker_task_add_output: POST /v1/worker/task/{task}/output"]
pub async fn worker_task_add_output(
&self,
task: &str,
body: &types::WorkerAddOutput,
) -> Result<()> {
let url = format!(
"{}/v1/worker/task/{}/output",
self.baseurl,
progenitor_support::encode_path(&task.to_string()),
);
let res = self
.client
.post(url)
.json(body)
.send()
.await?
.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "workers_list: GET /v1/workers"]
pub async fn workers_list(&self) -> Result<types::WorkersResult> {
let url = format!("{}/v1/workers", self.baseurl,);
let res = self.client.get(url).send().await?.error_for_status()?;
Ok(res.json().await?)
}
#[doc = "workers_recycle: POST /v1/workers/recycle"]
pub async fn workers_recycle(&self) -> Result<()> {
let url = format!("{}/v1/workers/recycle", self.baseurl,);
let res = self.client.post(url).send().await?.error_for_status()?;
Ok(res.json().await?)
}
}

View File

@ -0,0 +1,188 @@
use anyhow::Result;
mod progenitor_support {
use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};
#[allow(dead_code)]
const PATH_SET: &AsciiSet = &CONTROLS
.add(b' ')
.add(b'"')
.add(b'#')
.add(b'<')
.add(b'>')
.add(b'?')
.add(b'`')
.add(b'{')
.add(b'}');
#[allow(dead_code)]
pub(crate) fn encode_path(pc: &str) -> String {
utf8_percent_encode(pc, PATH_SET).to_string()
}
}
/// Request and response body types for the keeper API.
pub mod types {
    use serde::{Deserialize, Serialize};
    /// Request body for `POST /enrol` (see [`super::Client::enrol`]).
    #[derive(Serialize, Deserialize, Debug, Clone)]
    pub struct EnrolBody {
        pub host: String,
        pub key: String,
    }
    /// Response body for `GET /global/jobs`.
    #[derive(Serialize, Deserialize, Debug, Clone)]
    pub struct GlobalJobsResult {
        pub summary: Vec<ReportSummary>,
    }
    /// One captured line of job output, referenced from [`ReportOutputBody`].
    #[derive(Serialize, Deserialize, Debug, Clone)]
    pub struct OutputRecord {
        pub msg: String,
        pub stream: String,
        pub time: chrono::DateTime<chrono::offset::Utc>,
    }
    /// Response body for `GET /ping`.
    #[derive(Serialize, Deserialize, Debug, Clone)]
    pub struct PingResult {
        pub host: String,
        pub ok: bool,
    }
    /// Request body for `POST /report/finish`.
    #[derive(Serialize, Deserialize, Debug, Clone)]
    pub struct ReportFinishBody {
        pub duration_millis: i32,
        pub end_time: chrono::DateTime<chrono::offset::Utc>,
        pub exit_status: i32,
        pub id: ReportId,
    }
    /// Identifies a single job execution; embedded in the report bodies.
    #[derive(Serialize, Deserialize, Debug, Clone)]
    pub struct ReportId {
        pub host: String,
        pub job: String,
        pub pid: u64,
        pub time: chrono::DateTime<chrono::offset::Utc>,
        pub uuid: String,
    }
    /// Request body for `POST /report/output`.
    #[derive(Serialize, Deserialize, Debug, Clone)]
    pub struct ReportOutputBody {
        pub id: ReportId,
        pub record: OutputRecord,
    }
    /// Common response for the report endpoints.
    #[derive(Serialize, Deserialize, Debug, Clone)]
    pub struct ReportResult {
        pub existed_already: bool,
    }
    /// Request body for `POST /report/start`.
    #[derive(Serialize, Deserialize, Debug, Clone)]
    pub struct ReportStartBody {
        pub id: ReportId,
        pub script: String,
        pub start_time: chrono::DateTime<chrono::offset::Utc>,
    }
    /// Per-job summary entry inside [`GlobalJobsResult`].
    #[derive(Serialize, Deserialize, Debug, Clone)]
    pub struct ReportSummary {
        pub age_seconds: i32,
        pub duration_seconds: i32,
        pub host: String,
        pub job: String,
        pub status: i32,
        pub when: chrono::DateTime<chrono::offset::Utc>,
    }
}
/// Generated client for the keeper API; all methods issue requests
/// relative to `baseurl` using the shared `reqwest::Client`.
#[derive(Clone)]
pub struct Client {
    baseurl: String,
    client: reqwest::Client,
}
impl Client {
    /// Create a client rooted at `baseurl`, using a default
    /// `reqwest::Client` configured with 15-second connect and request
    /// timeouts.
    pub fn new(baseurl: &str) -> Client {
        let dur = std::time::Duration::from_secs(15);
        let client = reqwest::ClientBuilder::new()
            .connect_timeout(dur)
            .timeout(dur)
            .build()
            .unwrap();
        Client::new_with_client(baseurl, client)
    }

    /// Create a client using a caller-supplied `reqwest::Client`, e.g. to
    /// customize timeouts or TLS settings.
    pub fn new_with_client(baseurl: &str, client: reqwest::Client) -> Client {
        Client {
            baseurl: baseurl.to_string(),
            client,
        }
    }

    #[doc = "enrol: POST /enrol"]
    pub async fn enrol(&self, body: &types::EnrolBody) -> Result<()> {
        let url = format!("{}/enrol", self.baseurl);
        // The server responds 201 with an empty body; deserializing the
        // empty body as JSON `()` fails (serde_json reports EOF), so check
        // the status and discard the response.
        self.client
            .post(url)
            .json(body)
            .send()
            .await?
            .error_for_status()?;
        Ok(())
    }

    #[doc = "global_jobs: GET /global/jobs"]
    pub async fn global_jobs(&self) -> Result<types::GlobalJobsResult> {
        let url = format!("{}/global/jobs", self.baseurl);
        let res = self.client.get(url).send().await?.error_for_status()?;
        Ok(res.json().await?)
    }

    #[doc = "ping: GET /ping"]
    pub async fn ping(&self) -> Result<types::PingResult> {
        let url = format!("{}/ping", self.baseurl);
        let res = self.client.get(url).send().await?.error_for_status()?;
        Ok(res.json().await?)
    }

    #[doc = "report_finish: POST /report/finish"]
    pub async fn report_finish(
        &self,
        body: &types::ReportFinishBody,
    ) -> Result<types::ReportResult> {
        let url = format!("{}/report/finish", self.baseurl);
        let res = self
            .client
            .post(url)
            .json(body)
            .send()
            .await?
            .error_for_status()?;
        Ok(res.json().await?)
    }

    #[doc = "report_output: POST /report/output"]
    pub async fn report_output(
        &self,
        body: &types::ReportOutputBody,
    ) -> Result<types::ReportResult> {
        let url = format!("{}/report/output", self.baseurl);
        let res = self
            .client
            .post(url)
            .json(body)
            .send()
            .await?
            .error_for_status()?;
        Ok(res.json().await?)
    }

    #[doc = "report_start: POST /report/start"]
    pub async fn report_start(&self, body: &types::ReportStartBody) -> Result<types::ReportResult> {
        let url = format!("{}/report/start", self.baseurl);
        let res = self
            .client
            .post(url)
            .json(body)
            .send()
            .await?
            .error_for_status()?;
        Ok(res.json().await?)
    }
}

View File

@ -0,0 +1,39 @@
// Copyright 2021 Oxide Computer Company
use std::{fs::File, path::PathBuf};
use progenitor_impl::Generator;
/// Generate a client from `../sample_openapi/{openapi_file}.json` and
/// compare the output against the checked-in expectation under
/// `tests/output/`.
#[track_caller]
fn verify_file(openapi_file: &str) {
    // Load and parse the named sample OpenAPI document.
    let in_path =
        PathBuf::from("../sample_openapi").join(format!("{}.json", openapi_file));
    let spec = serde_json::from_reader(File::open(in_path).unwrap()).unwrap();

    // Generate the client text and diff it against the expected output.
    let mut generator = Generator::new();
    let output = generator.generate_text(&spec).unwrap();
    expectorate::assert_contents(
        format!("tests/output/{}.out", openapi_file),
        &output,
    )
}
// Small, well-formed spec: basic happy path.
#[test]
fn test_keeper() {
    verify_file("keeper");
}
// Exercises path and query parameters plus an octet-stream request body.
#[test]
fn test_buildomat() {
    verify_file("buildomat");
}
// TODO this file is full of inconsistencies and incorrectly specified types.
// It's an interesting test to consider whether we try to do our best to
// interpret the intent or just fail.
#[ignore]
#[test]
fn test_github() {
    verify_file("api.github.com");
}

View File

@ -0,0 +1,17 @@
# progenitor-macro: procedural-macro front end for progenitor. It wraps
# progenitor-impl so clients can be generated directly at a macro call site.
[package]
name = "progenitor-macro"
version = "0.0.0"
edition = "2018"
license = "MPL-2.0"
repository = "https://github.com/oxidecomputer/progenitor.git"
description = "An OpenAPI client generator - macros"
[dependencies]
openapiv3 = "1.0.0-beta.2"
progenitor-impl = { path = "../progenitor-impl" }
quote = "1.0.10"
serde_json = "1.0.68"
syn = "1.0.80"
# This crate exports a proc macro, so it must be built as a proc-macro lib.
[lib]
proc-macro = true

View File

@ -0,0 +1,50 @@
// Copyright 2021 Oxide Computer Company
use std::path::Path;
use openapiv3::OpenAPI;
use proc_macro::TokenStream;
use progenitor_impl::Generator;
use syn::LitStr;
/// Proc-macro entry point: expands to a generated client for the OpenAPI
/// document named by the macro argument.
#[proc_macro]
pub fn generate_api(item: TokenStream) -> TokenStream {
    // Surface any failure as a compile error spanned at the call site.
    do_generate_api(item).unwrap_or_else(|err| err.to_compile_error().into())
}
/// Implementation of `generate_api`: resolves the path argument, reads and
/// parses the OpenAPI document, and produces the generated client tokens.
///
/// All errors are reported as `syn::Error`s spanned to the macro argument
/// so the compiler points at the offending invocation.
fn do_generate_api(item: TokenStream) -> Result<TokenStream, syn::Error> {
    let arg = syn::parse::<LitStr>(item)?;

    // Resolve the argument relative to the invoking crate's manifest
    // directory, falling back to the current directory (e.g. when invoked
    // outside of cargo).
    let dir = std::env::var("CARGO_MANIFEST_DIR").map_or_else(
        |_| std::env::current_dir().unwrap(),
        |s| Path::new(&s).to_path_buf(),
    );
    let path = dir.join(arg.value());

    // Errors implement Display, so they can be formatted directly; the
    // previous explicit `.to_string()` calls were redundant.
    let content = std::fs::read_to_string(&path).map_err(|e| {
        syn::Error::new(
            arg.span(),
            format!("couldn't read file {}: {}", arg.value(), e),
        )
    })?;
    let spec = serde_json::from_str::<OpenAPI>(&content).map_err(|e| {
        syn::Error::new(
            arg.span(),
            format!("failed to parse {}: {}", arg.value(), e),
        )
    })?;
    let mut builder = Generator::new();
    let ret = builder.generate_tokens(&spec).map_err(|e| {
        syn::Error::new(
            arg.span(),
            format!("generation error for {}: {}", arg.value(), e),
        )
    })?;
    Ok(ret.into())
}

23
progenitor/Cargo.toml Normal file
View File

@ -0,0 +1,23 @@
[package]
name = "progenitor"
version = "0.0.0"
edition = "2018"
license = "MPL-2.0"
repository = "https://github.com/oxidecomputer/progenitor.git"
description = "An OpenAPI client generator"

[dependencies]
progenitor-macro = { path = "../progenitor-macro" }
progenitor-impl = { path = "../progenitor-impl" }
rustfmt-wrapper = "0.1.0"
anyhow = "1"
getopts = "0.2"
indexmap = "1.7.0"
openapiv3 = "1.0.0-beta.2"
regex = "1.5.4"
serde = { version = "1", features = [ "derive" ] }
serde_json = "1.0.68"

7
progenitor/src/lib.rs Normal file
View File

@ -0,0 +1,7 @@
// Copyright 2021 Oxide Computer Company
//! Progenitor is an OpenAPI client generator. This crate re-exports the
//! pieces needed to generate a client either programmatically (via
//! [`Generator`], e.g. from a build script) or inline at compile time via
//! the [`generate_api`] procedural macro.
pub use progenitor_impl::Error;
pub use progenitor_impl::Generator;
pub use progenitor_macro::generate_api;

249
progenitor/src/main.rs Normal file
View File

@ -0,0 +1,249 @@
// Copyright 2021 Oxide Computer Company
use std::{
collections::HashSet,
fs::{File, OpenOptions},
io::Write,
path::{Path, PathBuf},
};
use anyhow::{bail, Result};
use openapiv3::OpenAPI;
use progenitor::Generator;
use serde::Deserialize;
fn save<P>(p: P, data: &str) -> Result<()>
where
P: AsRef<Path>,
{
let p = p.as_ref();
let mut f = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(p)?;
f.write_all(data.as_bytes())?;
f.flush()?;
Ok(())
}
/// CLI entry point: read an OpenAPI document and emit a complete generated
/// client crate (Cargo.toml + src/lib.rs) into the output directory.
fn main() -> Result<()> {
    // All four options are mandatory (reqopt); parsing stops at the first
    // free argument so stray positionals are reported below.
    let mut opts = getopts::Options::new();
    opts.parsing_style(getopts::ParsingStyle::StopAtFirstFree);
    opts.reqopt("i", "", "OpenAPI definition document (JSON)", "INPUT");
    opts.reqopt("o", "", "Generated Rust crate directory", "OUTPUT");
    opts.reqopt("n", "", "Target Rust crate name", "CRATE");
    opts.reqopt("v", "", "Target Rust crate version", "VERSION");
    let args = match opts.parse(std::env::args().skip(1)) {
        Ok(args) => {
            if !args.free.is_empty() {
                eprintln!("{}", opts.usage("progenitor"));
                bail!("unexpected positional arguments");
            }
            args
        }
        Err(e) => {
            eprintln!("{}", opts.usage("progenitor"));
            bail!(e);
        }
    };
    // load_api validates the document and rejects unsupported constructs.
    let api = load_api(&args.opt_str("i").unwrap())?;
    let mut builder = Generator::new();
    let fail = match builder.generate_text(&api) {
        Ok(out) => {
            // Dump the generator's type space as a diagnostic aid.
            let type_space = builder.get_type_space();
            println!("-----------------------------------------------------");
            println!(" TYPE SPACE");
            println!("-----------------------------------------------------");
            for (idx, type_entry) in type_space.iter_types().enumerate() {
                let n = type_entry.describe();
                println!("{:>4} {}", idx, n);
            }
            println!("-----------------------------------------------------");
            println!();
            let name = args.opt_str("n").unwrap();
            let version = args.opt_str("v").unwrap();
            /*
             * Create the top-level crate directory:
             */
            let root = PathBuf::from(args.opt_str("o").unwrap());
            std::fs::create_dir_all(&root)?;
            /*
             * Write the Cargo.toml file:
             */
            let mut toml = root.clone();
            toml.push("Cargo.toml");
            // The trailing `\` in the string literal skips the newline and
            // the next line's leading whitespace, producing a flat TOML file.
            let tomlout = format!(
                "[package]\n\
                name = \"{}\"\n\
                version = \"{}\"\n\
                edition = \"2018\"\n\
                \n\
                [dependencies]\n\
                {}",
                name,
                version,
                builder.dependencies().join("\n"),
            );
            save(&toml, tomlout.as_str())?;
            /*
             * Create the src/ directory:
             */
            let mut src = root;
            src.push("src");
            std::fs::create_dir_all(&src)?;
            /*
             * Create the Rust source file containing the generated client:
             */
            let mut librs = src;
            librs.push("lib.rs");
            save(librs, out.as_str())?;
            false
        }
        Err(e) => {
            println!("gen fail: {:?}", e);
            true
        }
    };
    if fail {
        bail!("generation experienced errors");
    }
    Ok(())
}
/// Deserialize the JSON file at `p` into any `serde`-deserializable type.
fn load<P, T>(p: P) -> Result<T>
where
    P: AsRef<Path>,
    for<'de> T: Deserialize<'de>,
{
    let file = File::open(p.as_ref())?;
    Ok(serde_json::from_reader(file)?)
}
/// Load and validate an OpenAPI document from `p`.
///
/// Validation is deliberately strict: any construct the generator does not
/// yet handle (servers, security, tags, `$ref` path items, missing or
/// duplicate operation IDs, default responses) is rejected outright.
pub fn load_api<P>(p: P) -> Result<OpenAPI>
where
    P: AsRef<Path>,
{
    let api: OpenAPI = load(p)?;
    if api.openapi != "3.0.3" {
        /*
         * XXX During development we are being very strict, but this should
         * probably be relaxed.
         */
        bail!("unexpected version {}", api.openapi);
    }
    if !api.servers.is_empty() {
        bail!("servers not presently supported");
    }
    if api.security.is_some() {
        bail!("security not presently supported");
    }
    if !api.tags.is_empty() {
        bail!("tags not presently supported");
    }
    // Components other than schemas are not supported; schemas are allowed
    // through and handled by the type-space machinery.
    if let Some(components) = api.components.as_ref() {
        if !components.security_schemes.is_empty() {
            bail!("component security schemes not supported");
        }
        if !components.responses.is_empty() {
            bail!("component responses not supported");
        }
        if !components.parameters.is_empty() {
            bail!("component parameters not supported");
        }
        if !components.request_bodies.is_empty() {
            bail!("component request bodies not supported");
        }
        if !components.headers.is_empty() {
            bail!("component headers not supported");
        }
        if !components.links.is_empty() {
            bail!("component links not supported");
        }
        if !components.callbacks.is_empty() {
            bail!("component callbacks not supported");
        }
        /*
         * XXX Ignoring "examples" and "extensions" for now.
         * Explicitly allowing "schemas" through.
         */
    }
    /*
     * XXX Ignoring "external_docs" and "extensions" for now, as they seem not
     * to immediately affect our code generation.
     */
    let mut opids = HashSet::new();
    for p in api.paths.paths.iter() {
        match p.1 {
            openapiv3::ReferenceOr::Reference { reference: _ } => {
                bail!("path {} uses reference, unsupported", p.0);
            }
            openapiv3::ReferenceOr::Item(item) => {
                /*
                 * Make sure every operation has an operation ID, and that each
                 * operation ID is only used once in the document.
                 */
                item.iter().try_for_each(|(_, o)| {
                    if let Some(oid) = o.operation_id.as_ref() {
                        // HashSet::insert returns false on duplicates.
                        if !opids.insert(oid.to_string()) {
                            bail!("duplicate operation ID: {}", oid);
                        }
                        if !o.tags.is_empty() {
                            bail!("op {}: tags, unsupported", oid);
                        }
                        if !o.servers.is_empty() {
                            bail!("op {}: servers, unsupported", oid);
                        }
                        if o.security.is_some() {
                            bail!("op {}: security, unsupported", oid);
                        }
                        if o.responses.default.is_some() {
                            bail!("op {}: has response default", oid);
                        }
                    } else {
                        bail!("path {} is missing operation ID", p.0);
                    }
                    Ok(())
                })?;
                if !item.servers.is_empty() {
                    bail!("path {} has servers; unsupported", p.0);
                }
            }
        }
    }
    Ok(api)
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,801 @@
{
"openapi": "3.0.3",
"info": {
"title": "Buildomat",
"version": "1.0"
},
"paths": {
"/v1/control/hold": {
"post": {
"operationId": "control_hold",
"responses": {
"200": {
"description": "successful operation"
}
}
}
},
"/v1/control/resume": {
"post": {
"operationId": "control_resume",
"responses": {
"200": {
"description": "successful operation"
}
}
}
},
"/v1/task/{task}": {
"get": {
"operationId": "task_get",
"parameters": [
{
"in": "path",
"name": "task",
"required": true,
"schema": {
"type": "string"
},
"style": "simple"
}
],
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Task"
}
}
}
}
}
}
},
"/v1/tasks": {
"get": {
"operationId": "tasks_get",
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"title": "Array_of_Task",
"type": "array",
"items": {
"$ref": "#/components/schemas/Task"
}
}
}
}
}
}
},
"post": {
"operationId": "task_submit",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/TaskSubmit"
}
}
},
"required": true
},
"responses": {
"201": {
"description": "successful creation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/TaskSubmitResult"
}
}
}
}
}
}
},
"/v1/tasks/{task}/events": {
"get": {
"operationId": "task_events_get",
"parameters": [
{
"in": "path",
"name": "task",
"required": true,
"schema": {
"type": "string"
},
"style": "simple"
},
{
"in": "query",
"name": "minseq",
"schema": {
"type": "integer",
"format": "uint",
"minimum": 0
},
"style": "form"
}
],
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"title": "Array_of_TaskEvent",
"type": "array",
"items": {
"$ref": "#/components/schemas/TaskEvent"
}
}
}
}
}
}
}
},
"/v1/tasks/{task}/outputs": {
"get": {
"operationId": "task_outputs_get",
"parameters": [
{
"in": "path",
"name": "task",
"required": true,
"schema": {
"type": "string"
},
"style": "simple"
}
],
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"title": "Array_of_TaskOutput",
"type": "array",
"items": {
"$ref": "#/components/schemas/TaskOutput"
}
}
}
}
}
}
}
},
"/v1/tasks/{task}/outputs/{output}": {
"get": {
"operationId": "task_output_download",
"parameters": [
{
"in": "path",
"name": "output",
"required": true,
"schema": {
"type": "string"
},
"style": "simple"
},
{
"in": "path",
"name": "task",
"required": true,
"schema": {
"type": "string"
},
"style": "simple"
}
],
"responses": {}
}
},
"/v1/users": {
"post": {
"operationId": "user_create",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/UserCreate"
}
}
},
"required": true
},
"responses": {
"201": {
"description": "successful creation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/UserCreateResult"
}
}
}
}
}
}
},
"/v1/whoami": {
"get": {
"operationId": "whoami",
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WhoamiResult"
}
}
}
}
}
}
},
"/v1/worker/bootstrap": {
"post": {
"operationId": "worker_bootstrap",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WorkerBootstrap"
}
}
},
"required": true
},
"responses": {
"201": {
"description": "successful creation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WorkerBootstrapResult"
}
}
}
}
}
}
},
"/v1/worker/ping": {
"get": {
"operationId": "worker_ping",
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WorkerPingResult"
}
}
}
}
}
}
},
"/v1/worker/task/{task}/append": {
"post": {
"operationId": "worker_task_append",
"parameters": [
{
"in": "path",
"name": "task",
"required": true,
"schema": {
"type": "string"
},
"style": "simple"
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WorkerAppendTask"
}
}
},
"required": true
},
"responses": {
"201": {
"description": "successful creation"
}
}
}
},
"/v1/worker/task/{task}/chunk": {
"post": {
"operationId": "worker_task_upload_chunk",
"parameters": [
{
"in": "path",
"name": "task",
"required": true,
"schema": {
"type": "string"
},
"style": "simple"
}
],
"requestBody": {
"content": {
"application/octet-stream": {
"schema": {
"type": "string",
"format": "binary"
}
}
},
"required": true
},
"responses": {
"201": {
"description": "successful creation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/UploadedChunk"
}
}
}
}
}
}
},
"/v1/worker/task/{task}/complete": {
"post": {
"operationId": "worker_task_complete",
"parameters": [
{
"in": "path",
"name": "task",
"required": true,
"schema": {
"type": "string"
},
"style": "simple"
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WorkerCompleteTask"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "successful operation"
}
}
}
},
"/v1/worker/task/{task}/output": {
"post": {
"operationId": "worker_task_add_output",
"parameters": [
{
"in": "path",
"name": "task",
"required": true,
"schema": {
"type": "string"
},
"style": "simple"
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WorkerAddOutput"
}
}
},
"required": true
},
"responses": {
"201": {
"description": "successful creation"
}
}
}
},
"/v1/workers": {
"get": {
"operationId": "workers_list",
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WorkersResult"
}
}
}
}
}
}
},
"/v1/workers/recycle": {
"post": {
"operationId": "workers_recycle",
"responses": {
"200": {
"description": "successful operation"
}
}
}
}
},
"components": {
"schemas": {
"Task": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"name": {
"type": "string"
},
"output_rules": {
"type": "array",
"items": {
"type": "string"
}
},
"script": {
"type": "string"
},
"state": {
"type": "string"
}
},
"required": [
"id",
"name",
"output_rules",
"script",
"state"
]
},
"TaskEvent": {
"type": "object",
"properties": {
"payload": {
"type": "string"
},
"seq": {
"type": "integer",
"format": "uint",
"minimum": 0
},
"stream": {
"type": "string"
},
"time": {
"type": "string",
"format": "date-time"
}
},
"required": [
"payload",
"seq",
"stream",
"time"
]
},
"TaskOutput": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"path": {
"type": "string"
},
"size": {
"type": "integer",
"format": "uint64",
"minimum": 0
}
},
"required": [
"id",
"path",
"size"
]
},
"TaskSubmit": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"output_rules": {
"type": "array",
"items": {
"type": "string"
}
},
"script": {
"type": "string"
}
},
"required": [
"name",
"script"
]
},
"TaskSubmitResult": {
"type": "object",
"properties": {
"id": {
"type": "string"
}
},
"required": [
"id"
]
},
"UploadedChunk": {
"type": "object",
"properties": {
"id": {
"type": "string"
}
},
"required": [
"id"
]
},
"UserCreate": {
"type": "object",
"properties": {
"name": {
"type": "string"
}
},
"required": [
"name"
]
},
"UserCreateResult": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"name": {
"type": "string"
},
"token": {
"type": "string"
}
},
"required": [
"id",
"name",
"token"
]
},
"WhoamiResult": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"name": {
"type": "string"
}
},
"required": [
"id",
"name"
]
},
"Worker": {
"type": "object",
"properties": {
"deleted": {
"type": "boolean"
},
"id": {
"type": "string"
},
"instance_id": {
"type": "string"
},
"lastping": {
"type": "string",
"format": "date-time"
},
"recycle": {
"type": "boolean"
},
"tasks": {
"type": "array",
"items": {
"$ref": "#/components/schemas/WorkerTask"
}
}
},
"required": [
"deleted",
"id",
"recycle",
"tasks"
]
},
"WorkerAddOutput": {
"type": "object",
"properties": {
"chunks": {
"type": "array",
"items": {
"type": "string"
}
},
"path": {
"type": "string"
},
"size": {
"type": "integer",
"format": "int64"
}
},
"required": [
"chunks",
"path",
"size"
]
},
"WorkerAppendTask": {
"type": "object",
"properties": {
"payload": {
"type": "string"
},
"stream": {
"type": "string"
},
"time": {
"type": "string",
"format": "date-time"
}
},
"required": [
"payload",
"stream",
"time"
]
},
"WorkerBootstrap": {
"type": "object",
"properties": {
"bootstrap": {
"type": "string"
},
"token": {
"type": "string"
}
},
"required": [
"bootstrap",
"token"
]
},
"WorkerBootstrapResult": {
"type": "object",
"properties": {
"id": {
"type": "string"
}
},
"required": [
"id"
]
},
"WorkerCompleteTask": {
"type": "object",
"properties": {
"failed": {
"type": "boolean"
}
},
"required": [
"failed"
]
},
"WorkerPingResult": {
"type": "object",
"properties": {
"poweroff": {
"type": "boolean"
},
"task": {
"$ref": "#/components/schemas/WorkerPingTask"
}
},
"required": [
"poweroff"
]
},
"WorkerPingTask": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"output_rules": {
"type": "array",
"items": {
"type": "string"
}
},
"script": {
"type": "string"
}
},
"required": [
"id",
"output_rules",
"script"
]
},
"WorkerTask": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"name": {
"type": "string"
},
"owner": {
"type": "string"
}
},
"required": [
"id",
"name",
"owner"
]
},
"WorkersResult": {
"type": "object",
"properties": {
"workers": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Worker"
}
}
},
"required": [
"workers"
]
}
}
}
}

358
sample_openapi/keeper.json Normal file
View File

@ -0,0 +1,358 @@
{
"openapi": "3.0.3",
"info": {
"title": "Keeper API",
"description": "report execution of cron jobs through a mechanism other than mail",
"contact": {
"name": "Joshua M. Clulow",
"url": "https://github.com/jclulow/keeper"
},
"version": "1.0"
},
"paths": {
"/enrol": {
"post": {
"operationId": "enrol",
"requestBody": {
"content": {
"application/json": {
"schema": {
"title": "EnrolBody",
"type": "object",
"properties": {
"host": {
"type": "string"
},
"key": {
"type": "string"
}
},
"required": [
"host",
"key"
]
}
}
},
"required": true
},
"responses": {
"201": {
"description": "successful creation"
}
}
}
},
"/global/jobs": {
"get": {
"operationId": "global_jobs",
"responses": {
"201": {
"description": "successful creation",
"content": {
"application/json": {
"schema": {
"title": "GlobalJobsResult",
"type": "object",
"properties": {
"summary": {
"type": "array",
"items": {
"$ref": "#/components/schemas/ReportSummary"
}
}
},
"required": [
"summary"
]
}
}
}
}
}
}
},
"/ping": {
"get": {
"operationId": "ping",
"responses": {
"201": {
"description": "successful creation",
"content": {
"application/json": {
"schema": {
"title": "PingResult",
"type": "object",
"properties": {
"host": {
"type": "string"
},
"ok": {
"type": "boolean"
}
},
"required": [
"host",
"ok"
]
}
}
}
}
}
}
},
"/report/finish": {
"post": {
"operationId": "report_finish",
"requestBody": {
"content": {
"application/json": {
"schema": {
"title": "ReportFinishBody",
"type": "object",
"properties": {
"duration_millis": {
"type": "integer",
"format": "int32"
},
"end_time": {
"type": "string",
"format": "date-time"
},
"exit_status": {
"type": "integer",
"format": "int32"
},
"id": {
"$ref": "#/components/schemas/ReportId"
}
},
"required": [
"duration_millis",
"end_time",
"exit_status",
"id"
]
}
}
},
"required": true
},
"responses": {
"201": {
"description": "successful creation",
"content": {
"application/json": {
"schema": {
"title": "ReportResult",
"type": "object",
"properties": {
"existed_already": {
"type": "boolean"
}
},
"required": [
"existed_already"
]
}
}
}
}
}
}
},
"/report/output": {
"post": {
"operationId": "report_output",
"requestBody": {
"content": {
"application/json": {
"schema": {
"title": "ReportOutputBody",
"type": "object",
"properties": {
"id": {
"$ref": "#/components/schemas/ReportId"
},
"record": {
"$ref": "#/components/schemas/OutputRecord"
}
},
"required": [
"id",
"record"
]
}
}
},
"required": true
},
"responses": {
"201": {
"description": "successful creation",
"content": {
"application/json": {
"schema": {
"title": "ReportResult",
"type": "object",
"properties": {
"existed_already": {
"type": "boolean"
}
},
"required": [
"existed_already"
]
}
}
}
}
}
}
},
"/report/start": {
"post": {
"operationId": "report_start",
"requestBody": {
"content": {
"application/json": {
"schema": {
"title": "ReportStartBody",
"type": "object",
"properties": {
"id": {
"$ref": "#/components/schemas/ReportId"
},
"script": {
"type": "string"
},
"start_time": {
"type": "string",
"format": "date-time"
}
},
"required": [
"id",
"script",
"start_time"
]
}
}
},
"required": true
},
"responses": {
"201": {
"description": "successful creation",
"content": {
"application/json": {
"schema": {
"title": "ReportResult",
"type": "object",
"properties": {
"existed_already": {
"type": "boolean"
}
},
"required": [
"existed_already"
]
}
}
}
}
}
}
}
},
"components": {
"schemas": {
"OutputRecord": {
"type": "object",
"properties": {
"msg": {
"type": "string"
},
"stream": {
"type": "string"
},
"time": {
"type": "string",
"format": "date-time"
}
},
"required": [
"msg",
"stream",
"time"
]
},
"ReportId": {
"type": "object",
"properties": {
"host": {
"type": "string"
},
"job": {
"type": "string"
},
"pid": {
"type": "integer",
"format": "uint64",
"minimum": 0
},
"time": {
"type": "string",
"format": "date-time"
},
"uuid": {
"type": "string"
}
},
"required": [
"host",
"job",
"pid",
"time",
"uuid"
]
},
"ReportSummary": {
"type": "object",
"properties": {
"age_seconds": {
"type": "integer",
"format": "int32"
},
"duration_seconds": {
"type": "integer",
"format": "int32"
},
"host": {
"type": "string"
},
"job": {
"type": "string"
},
"status": {
"type": "integer",
"format": "int32"
},
"when": {
"type": "string",
"format": "date-time"
}
},
"required": [
"age_seconds",
"duration_seconds",
"host",
"job",
"status",
"when"
]
}
}
}
}

View File

@ -1,827 +0,0 @@
#![allow(unused_imports)]
#![allow(clippy::single_match)]
use anyhow::{anyhow, bail, Context, Result};
use openapiv3::{OpenAPI, ReferenceOr, Schema, SchemaData, SchemaKind};
use serde::Deserialize;
use std::cell::Ref;
use std::cmp::Ordering;
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::fs::{File, OpenOptions};
use std::io::Write;
use std::path::{Path, PathBuf};
use quote::{format_ident, quote};
use typify::{TypeEntryIdentifier, TypeSpace};
use crate::to_schema::ToSchema;
mod template;
mod to_schema;
fn save<P>(p: P, data: &str) -> Result<()>
where
P: AsRef<Path>,
{
let p = p.as_ref();
let mut f = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(p)?;
f.write_all(data.as_bytes())?;
f.flush()?;
Ok(())
}
/// Deserialize the JSON file at `p` into any `serde`-deserializable type.
fn load<P, T>(p: P) -> Result<T>
where
    P: AsRef<Path>,
    for<'de> T: Deserialize<'de>,
{
    let file = File::open(p.as_ref())?;
    Ok(serde_json::from_reader(file)?)
}
/// Load and validate an OpenAPI document from `p`, rejecting constructs
/// the generator does not yet handle.
fn load_api<P>(p: P) -> Result<OpenAPI>
where
    P: AsRef<Path>,
{
    let api: OpenAPI = load(p)?;
    if api.openapi != "3.0.3" {
        /*
         * XXX During development we are being very strict, but this should
         * probably be relaxed.
         */
        bail!("unexpected version {}", api.openapi);
    }
    if !api.servers.is_empty() {
        bail!("servers not presently supported");
    }
    if api.security.is_some() {
        bail!("security not presently supported");
    }
    if !api.tags.is_empty() {
        bail!("tags not presently supported");
    }
    // Components other than schemas are unsupported; schemas pass through.
    if let Some(components) = api.components.as_ref() {
        if !components.security_schemes.is_empty() {
            bail!("component security schemes not supported");
        }
        if !components.responses.is_empty() {
            bail!("component responses not supported");
        }
        if !components.parameters.is_empty() {
            bail!("component parameters not supported");
        }
        if !components.request_bodies.is_empty() {
            bail!("component request bodies not supported");
        }
        if !components.headers.is_empty() {
            bail!("component headers not supported");
        }
        if !components.links.is_empty() {
            bail!("component links not supported");
        }
        if !components.callbacks.is_empty() {
            bail!("component callbacks not supported");
        }
        /*
         * XXX Ignoring "examples" and "extensions" for now.
         * Explicitly allowing "schemas" through.
         */
    }
    /*
     * XXX Ignoring "external_docs" and "extensions" for now, as they seem not
     * to immediately affect our code generation.
     */
    let mut opids = HashSet::new();
    for p in api.paths.paths.iter() {
        match p.1 {
            openapiv3::ReferenceOr::Reference { reference: _ } => {
                bail!("path {} uses reference, unsupported", p.0);
            }
            openapiv3::ReferenceOr::Item(item) => {
                /*
                 * Make sure every operation has an operation ID, and that
                 * each operation ID is only used once in the document.
                 * `PathItem::iter()` yields every populated HTTP method,
                 * replacing the previous hand-written closure that was
                 * invoked once per method (get, put, post, ...).
                 */
                item.iter().try_for_each(|(_, o)| {
                    if let Some(oid) = o.operation_id.as_ref() {
                        if !opids.insert(oid.to_string()) {
                            bail!("duplicate operation ID: {}", oid);
                        }
                        if !o.tags.is_empty() {
                            bail!("op {}: tags, unsupported", oid);
                        }
                        if !o.servers.is_empty() {
                            bail!("op {}: servers, unsupported", oid);
                        }
                        if o.security.is_some() {
                            bail!("op {}: security, unsupported", oid);
                        }
                        if o.responses.default.is_some() {
                            bail!("op {}: has response default", oid);
                        }
                    } else {
                        bail!("path {} is missing operation ID", p.0);
                    }
                    Ok(())
                })?;
                if !item.servers.is_empty() {
                    bail!("path {} has servers; unsupported", p.0);
                }
            }
        }
    }
    Ok(api)
}
/// Extension helper for `openapiv3::ParameterData`.
trait ParameterDataExt {
    /// Return the parameter's schema, failing for content-style parameters
    /// (which are not supported here).
    fn schema(&self) -> Result<&openapiv3::ReferenceOr<openapiv3::Schema>>;
}
impl ParameterDataExt for openapiv3::ParameterData {
    fn schema(&self) -> Result<&openapiv3::ReferenceOr<openapiv3::Schema>> {
        match &self.format {
            openapiv3::ParameterSchemaOrContent::Schema(s) => Ok(s),
            // Include the raw value in the error to aid debugging.
            x => bail!("XXX param format {:#?}", x),
        }
    }
}
/// Helpers for pulling media-type information out of request/response
/// bodies.
trait ExtractJsonMediaType {
    /// True if the body is an `application/octet-stream` binary string.
    fn is_binary(&self) -> Result<bool>;
    /// Return the `application/json` media type; fail if it is absent or
    /// not the sole content entry.
    fn content_json(&self) -> Result<openapiv3::MediaType>;
}
// NOTE(review): this impl is byte-for-byte identical to the RequestBody
// impl below; the shared logic could be factored into a helper that takes
// the content map.
impl ExtractJsonMediaType for openapiv3::Response {
    fn content_json(&self) -> Result<openapiv3::MediaType> {
        // Exactly one content entry is expected, and it must be JSON.
        if self.content.len() != 1 {
            bail!("expected one content entry, found {}", self.content.len());
        }
        if let Some(mt) = self.content.get("application/json") {
            Ok(mt.clone())
        } else {
            bail!(
                "could not find application/json, only found {}",
                self.content.keys().next().unwrap()
            );
        }
    }
    fn is_binary(&self) -> Result<bool> {
        if self.content.is_empty() {
            /*
             * XXX If there are no content types, I guess it is not binary?
             */
            return Ok(false);
        }
        if self.content.len() != 1 {
            bail!("expected one content entry, found {}", self.content.len());
        }
        if let Some(mt) = self.content.get("application/octet-stream") {
            if !mt.encoding.is_empty() {
                bail!("XXX encoding");
            }
            if let Some(s) = &mt.schema {
                use openapiv3::{
                    SchemaKind, StringFormat, Type,
                    VariantOrUnknownOrEmpty::Item,
                };
                let s = s.item()?;
                // A binary body must be a plain, unconstrained string with
                // format "binary"; anything else is rejected.
                if s.schema_data.nullable {
                    bail!("XXX nullable binary?");
                }
                if s.schema_data.default.is_some() {
                    bail!("XXX default binary?");
                }
                if s.schema_data.discriminator.is_some() {
                    bail!("XXX binary discriminator?");
                }
                match &s.schema_kind {
                    SchemaKind::Type(Type::String(st)) => {
                        if st.min_length.is_some() || st.max_length.is_some() {
                            bail!("binary min/max length");
                        }
                        if !matches!(st.format, Item(StringFormat::Binary)) {
                            bail!(
                                "expected binary format string, got {:?}",
                                st.format
                            );
                        }
                        if st.pattern.is_some() {
                            bail!("XXX pattern");
                        }
                        if !st.enumeration.is_empty() {
                            bail!("XXX enumeration");
                        }
                        return Ok(true);
                    }
                    x => {
                        bail!("XXX schemakind type {:?}", x);
                    }
                }
            } else {
                bail!("binary thing had no schema?");
            }
        }
        Ok(false)
    }
}
// NOTE(review): duplicate of the Response impl above; candidate for a
// shared helper over the content map.
impl ExtractJsonMediaType for openapiv3::RequestBody {
    fn content_json(&self) -> Result<openapiv3::MediaType> {
        // Exactly one content entry is expected, and it must be JSON.
        if self.content.len() != 1 {
            bail!("expected one content entry, found {}", self.content.len());
        }
        if let Some(mt) = self.content.get("application/json") {
            Ok(mt.clone())
        } else {
            bail!(
                "could not find application/json, only found {}",
                self.content.keys().next().unwrap()
            );
        }
    }
    fn is_binary(&self) -> Result<bool> {
        if self.content.is_empty() {
            /*
             * XXX If there are no content types, I guess it is not binary?
             */
            return Ok(false);
        }
        if self.content.len() != 1 {
            bail!("expected one content entry, found {}", self.content.len());
        }
        if let Some(mt) = self.content.get("application/octet-stream") {
            if !mt.encoding.is_empty() {
                bail!("XXX encoding");
            }
            if let Some(s) = &mt.schema {
                use openapiv3::{
                    SchemaKind, StringFormat, Type,
                    VariantOrUnknownOrEmpty::Item,
                };
                let s = s.item()?;
                // A binary body must be a plain, unconstrained string with
                // format "binary"; anything else is rejected.
                if s.schema_data.nullable {
                    bail!("XXX nullable binary?");
                }
                if s.schema_data.default.is_some() {
                    bail!("XXX default binary?");
                }
                if s.schema_data.discriminator.is_some() {
                    bail!("XXX binary discriminator?");
                }
                match &s.schema_kind {
                    SchemaKind::Type(Type::String(st)) => {
                        if st.min_length.is_some() || st.max_length.is_some() {
                            bail!("binary min/max length");
                        }
                        if !matches!(st.format, Item(StringFormat::Binary)) {
                            bail!(
                                "expected binary format string, got {:?}",
                                st.format
                            );
                        }
                        if st.pattern.is_some() {
                            bail!("XXX pattern");
                        }
                        if !st.enumeration.is_empty() {
                            bail!("XXX enumeration");
                        }
                        return Ok(true);
                    }
                    x => {
                        bail!("XXX schemakind type {:?}", x);
                    }
                }
            } else {
                bail!("binary thing had no schema?");
            }
        }
        Ok(false)
    }
}
/// Convenience accessor for `openapiv3::ReferenceOr` values.
trait ReferenceOrExt<T> {
    /// Return the inline item; `$ref` references are treated as errors.
    fn item(&self) -> Result<&T>;
}
impl<T> ReferenceOrExt<T> for openapiv3::ReferenceOr<T> {
    /// Return the inline item, or an error for a `$ref` reference
    /// (references are not supported at this position).
    fn item(&self) -> Result<&T> {
        match self {
            openapiv3::ReferenceOr::Reference { .. } => {
                bail!("reference not supported here")
            }
            openapiv3::ReferenceOr::Item(item) => Ok(item),
        }
    }
}
/// Where an operation parameter appears in the request; used to sort
/// generated method arguments into a deterministic order (path params
/// first, then query params, then the body).
enum ParamType {
    Path,
    Query,
    Body,
}
/// Generate the complete Rust source for a client from the given
/// OpenAPI document, adding any schema-derived types to `ts`.
///
/// Emits a `Client` struct with one async method per operation, a
/// `types` module containing the generated type definitions, and a
/// `progenitor_support` module with a percent-encoding helper. The
/// result is formatted with rustfmt before being returned. Returns an
/// error for any OpenAPI construct the generator does not yet handle.
fn generate(api: &OpenAPI, ts: &mut TypeSpace) -> Result<String> {
    // Build one client method per (path, method, operation) triple.
    let methods = api
        .operations()
        .map(|(path, method, operation)| {
            // Query parameter names paired with whether each is
            // optional; consumed below when building the query-string
            // construction code.
            let mut query: Vec<(String, bool)> = Vec::new();
            // (location, name, parameter type) for every declared
            // path/query parameter.
            let mut raw_params = operation
                .parameters
                .iter()
                .map(|parameter| {
                    match parameter.item()? {
                        openapiv3::Parameter::Path {
                            parameter_data,
                            style: openapiv3::PathStyle::Simple,
                        } => {
                            /*
                             * Path parameters MUST be required.
                             */
                            assert!(parameter_data.required);
                            let nam = parameter_data.name.clone();
                            let schema = parameter_data.schema()?.to_schema();
                            let typ = ts.add_type_details(&schema)?.parameter;
                            Ok((ParamType::Path, nam, typ))
                        }
                        openapiv3::Parameter::Query {
                            parameter_data,
                            allow_reserved: _,
                            style: openapiv3::QueryStyle::Form,
                            allow_empty_value,
                        } => {
                            if let Some(aev) = allow_empty_value {
                                if *aev {
                                    bail!("allow empty value is a no go");
                                }
                            }
                            let nam = parameter_data.name.clone();
                            let schema = parameter_data.schema()?.to_schema();
                            // Optional query parameters become
                            // Option<T> arguments.
                            let mut typ =
                                ts.add_type_details(&schema)?.parameter;
                            if !parameter_data.required {
                                typ = quote! { Option<#typ> };
                            }
                            query.push((
                                nam.to_string(),
                                !parameter_data.required,
                            ));
                            Ok((ParamType::Query, nam, typ))
                        }
                        x => bail!("unhandled parameter type: {:#?}", x),
                    }
                })
                .collect::<Result<Vec<_>>>()?;
            // Generic bounds for the method; currently only used when
            // the request body is raw binary (B: Into<reqwest::Body>).
            let mut bounds = Vec::new();
            // The optional body argument type and the reqwest builder
            // call (.body(...) or .json(...)) that sends it.
            let (body_param, body_func) =
                if let Some(b) = &operation.request_body {
                    let b = b.item()?;
                    if b.is_binary()? {
                        bounds.push(quote! {B: Into<reqwest::Body>});
                        (Some(quote! {B}), Some(quote! { .body(body) }))
                    } else {
                        let mt = b
                            .content_json()
                            .with_context(|| anyhow!("{} {}", method, path))?;
                        if !mt.encoding.is_empty() {
                            bail!("media type encoding not empty: {:#?}", mt);
                        }
                        if let Some(s) = &mt.schema {
                            let schema = s.to_schema();
                            let typ = ts.add_type_details(&schema)?.parameter;
                            (Some(typ), Some(quote! { .json(body) }))
                        } else {
                            bail!("media type encoding, no schema: {:#?}", mt);
                        }
                    }
                } else {
                    (None, None)
                };
            if let Some(body) = body_param {
                raw_params.push((ParamType::Body, "body".to_string(), body));
            }
            // Compile the URL path template into code and collect the
            // path parameter names in positional order.
            let tmp = template::parse(path)?;
            let names = tmp.names();
            let url_path = tmp.compile();
            // Put parameters in a deterministic order.
            raw_params.sort_by(|a, b| match (&a.0, &b.0) {
                // Path params are first and are in positional order.
                (ParamType::Path, ParamType::Path) => {
                    let aa = names.iter().position(|x| x == &a.1).unwrap();
                    let bb = names.iter().position(|x| x == &b.1).unwrap();
                    aa.cmp(&bb)
                }
                (ParamType::Path, ParamType::Query) => Ordering::Less,
                (ParamType::Path, ParamType::Body) => Ordering::Less,
                // Query params are in lexicographic order.
                (ParamType::Query, ParamType::Body) => Ordering::Less,
                (ParamType::Query, ParamType::Query) => a.1.cmp(&b.1),
                (ParamType::Query, ParamType::Path) => Ordering::Greater,
                // Body params are last and should be unique
                (ParamType::Body, ParamType::Path) => Ordering::Greater,
                (ParamType::Body, ParamType::Query) => Ordering::Greater,
                (ParamType::Body, ParamType::Body) => {
                    panic!("should only be one body")
                }
            });
            // Work out the method's return type and how to decode the
            // response. Only a single 2xx response (JSON or empty) is
            // supported; with no declared responses the raw
            // reqwest::Response is returned.
            let (response_type, decode_response) = if operation
                .responses
                .responses
                .len()
                == 1
            {
                let only = operation.responses.responses.first().unwrap();
                if !matches!(only.0, openapiv3::StatusCode::Code(200..=299)) {
                    bail!("code? {:#?}", only);
                }
                let i = only.1.item()?;
                if !i.headers.is_empty() {
                    bail!("no response headers for now");
                }
                if !i.links.is_empty() {
                    bail!("no response links for now");
                }
                /*
                 * Look at the response content. For now, support a single
                 * JSON-formatted response.
                 */
                let typ = match (
                    i.content.len(),
                    i.content.get("application/json"),
                ) {
                    (0, _) => quote! { () },
                    (1, Some(mt)) => {
                        if !mt.encoding.is_empty() {
                            bail!("media type encoding not empty: {:#?}", mt);
                        }
                        if let Some(schema) = &mt.schema {
                            let schema = schema.to_schema();
                            ts.add_type_details(&schema)?.ident
                        } else {
                            bail!("media type encoding, no schema: {:#?}", mt);
                        }
                    }
                    (1, None) => {
                        bail!("response content not JSON: {:#?}", i.content);
                    }
                    (_, _) => {
                        bail!("too many response contents: {:#?}", i.content);
                    }
                };
                (typ, quote! { res.json().await? })
            } else if operation.responses.responses.is_empty() {
                (quote! { reqwest::Response }, quote! { res })
            } else {
                bail!("responses? {:#?}", operation.responses);
            };
            // The generated method is named after the operationId,
            // which is therefore required.
            let operation_id =
                format_ident!("{}", operation.operation_id.as_deref().unwrap());
            let bounds = if bounds.is_empty() {
                quote! {}
            } else {
                quote! {
                    < #(#bounds),* >
                }
            };
            let params = raw_params.into_iter().map(|(_, name, typ)| {
                let name = format_ident!("{}", name);
                quote! {
                    #name: #typ
                }
            });
            // Code that builds the query-pair Vec and the .query(...)
            // call; both empty when the operation has no query params.
            let (query_build, query_use) = if query.is_empty() {
                (quote! {}, quote! {})
            } else {
                let query_items = query.iter().map(|(qn, opt)| {
                    if *opt {
                        // Optional parameters are only appended when
                        // Some.
                        let qn_ident = format_ident!("{}", qn);
                        quote! {
                            if let Some(v) = & #qn_ident {
                                query.push((#qn, v.to_string()));
                            }
                        }
                    } else {
                        quote! {
                            query.push((#qn, #qn.to_string()));
                        }
                    }
                });
                let query_build = quote! {
                    let mut query = Vec::new();
                    #(#query_items)*
                };
                let query_use = quote! {
                    .query(&query)
                };
                (query_build, query_use)
            };
            let doc_comment = format!(
                "{}: {} {}",
                operation.operation_id.as_deref().unwrap(),
                method.to_ascii_uppercase(),
                path
            );
            let method_func = format_ident!("{}", method);
            // Assemble the final method: build the URL, issue the
            // request, fail on non-success status, decode the body.
            let method = quote! {
                #[doc = #doc_comment]
                pub async fn #operation_id #bounds (
                    &self,
                    #(#params),*
                ) -> Result<#response_type> {
                    #url_path
                    #query_build
                    let res = self.client
                        . #method_func (url)
                        #body_func
                        #query_use
                        .send()
                        .await?
                        .error_for_status()?;
                    Ok(#decode_response)
                }
            };
            Ok(method)
        })
        .collect::<Result<Vec<_>>>()?;
    // Emit type definitions in a stable (name-sorted) order.
    let mut types = ts
        .iter_types()
        .map(|type_entry| (type_entry.type_name(ts), type_entry.output(ts)))
        .collect::<Vec<_>>();
    types.sort_by(|a, b| a.0.cmp(&b.0));
    let types = types.into_iter().map(|(_, def)| def);
    // Debug output: dump the contents of the type space to stdout.
    println!("-----------------------------------------------------");
    println!(" TYPE SPACE");
    println!("-----------------------------------------------------");
    for (idx, type_entry) in ts.iter_types().enumerate() {
        let n = type_entry.describe();
        println!("{:>4} {}", idx, n);
    }
    println!("-----------------------------------------------------");
    println!();
    // The full generated file: support module, types module, and the
    // Client with its per-operation methods.
    let file = quote! {
        use anyhow::Result;
        mod progenitor_support {
            use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};
            #[allow(dead_code)]
            const PATH_SET: &AsciiSet = &CONTROLS
                .add(b' ')
                .add(b'"')
                .add(b'#')
                .add(b'<')
                .add(b'>')
                .add(b'?')
                .add(b'`')
                .add(b'{')
                .add(b'}');
            #[allow(dead_code)]
            pub(crate) fn encode_path(pc: &str) -> String {
                utf8_percent_encode(pc, PATH_SET).to_string()
            }
        }
        pub mod types {
            use serde::{Deserialize, Serialize};
            #(#types)*
        }
        #[derive(Clone)]
        pub struct Client {
            baseurl: String,
            client: reqwest::Client,
        }
        impl Client {
            pub fn new(baseurl: &str) -> Client {
                let dur = std::time::Duration::from_secs(15);
                let client = reqwest::ClientBuilder::new()
                    .connect_timeout(dur)
                    .timeout(dur)
                    .build()
                    .unwrap();
                Client::new_with_client(baseurl, client)
            }
            pub fn new_with_client(
                baseurl: &str,
                client: reqwest::Client,
            ) -> Client {
                Client {
                    baseurl: baseurl.to_string(),
                    client,
                }
            }
            #(#methods)*
        }
    };
    // rustfmt tends to pack items together; insert a blank line after
    // a closing brace when the next line starts a new item, to keep
    // the generated source readable.
    let regex = regex::Regex::new(r#"(})(\n\s*[^} ])"#).unwrap();
    let file = rustfmt_wrapper::rustfmt(file).unwrap();
    let file = regex.replace_all(&file, "$1\n$2").to_string();
    Ok(file)
}
/// Command-line entry point: parse arguments, load the OpenAPI
/// document, and write a complete client crate (Cargo.toml plus
/// src/lib.rs) into the requested output directory.
fn main() -> Result<()> {
    let mut opts = getopts::Options::new();
    // Stop option parsing at the first free argument so stray
    // positional arguments can be detected and rejected below.
    opts.parsing_style(getopts::ParsingStyle::StopAtFirstFree);
    opts.reqopt("i", "", "OpenAPI definition document (JSON)", "INPUT");
    opts.reqopt("o", "", "Generated Rust crate directory", "OUTPUT");
    opts.reqopt("n", "", "Target Rust crate name", "CRATE");
    opts.reqopt("v", "", "Target Rust crate version", "VERSION");
    let args = match opts.parse(std::env::args().skip(1)) {
        Ok(args) => {
            if !args.free.is_empty() {
                eprintln!("{}", opts.usage("progenitor"));
                bail!("unexpected positional arguments");
            }
            args
        }
        Err(e) => {
            eprintln!("{}", opts.usage("progenitor"));
            bail!(e);
        }
    };
    let api = load_api(&args.opt_str("i").unwrap())?;
    // Convert our components dictionary to schemars
    let schemas = api
        .components
        .iter()
        .flat_map(|components| {
            components.schemas.iter().map(|(name, ref_or_schema)| {
                (name.clone(), ref_or_schema.to_schema())
            })
        })
        .collect::<Vec<(String, _)>>();
    // Create a new type space, prepopulated with our referenced schemas.
    let mut ts = TypeSpace::default();
    ts.set_type_mod("types");
    ts.add_ref_types(schemas)?;
    // Generation failures are reported but deferred so we can exit
    // with an error after printing the cause.
    let fail = match generate(&api, &mut ts) {
        Ok(out) => {
            let name = args.opt_str("n").unwrap();
            let version = args.opt_str("v").unwrap();
            /*
             * Create the top-level crate directory:
             */
            let root = PathBuf::from(args.opt_str("o").unwrap());
            std::fs::create_dir_all(&root)?;
            /*
             * Write the Cargo.toml file:
             */
            let mut toml = root.clone();
            toml.push("Cargo.toml");
            // Only emit the chrono/uuid dependencies when the
            // generated types actually use them.
            let chrono = if ts.uses_chrono() {
                "chrono = { version = \"0.4\", features = [\"serde\"] }\n"
            } else {
                ""
            };
            let uuid = if ts.uses_uuid() {
                "uuid = { version = \"0.8\", features = [\"serde\", \"v4\"] }\n"
            } else {
                ""
            };
            // NOTE(review): the serde_json dependency is gated on
            // uses_uuid(); this looks like a copy-paste of the uuid
            // check above — confirm whether TypeSpace exposes a
            // predicate that reflects serde_json usage instead.
            let serde_json = if ts.uses_uuid() {
                "serde_json = \"1\"\n"
            } else {
                ""
            };
            let tomlout = format!(
                "[package]\n\
                name = \"{}\"\n\
                version = \"{}\"\n\
                edition = \"2018\"\n\
                \n\
                [dependencies]\n\
                anyhow = \"1\"\n\
                {}\
                {}\
                percent-encoding = \"2.1\"\n\
                reqwest = {{ version = \"0.11\", features = [\"json\", \
                \"stream\"] }}\n\
                serde = {{ version = \"1\", features = [\"derive\"] }}\n\
                {}",
                name, version, chrono, uuid, serde_json,
            );
            save(&toml, tomlout.as_str())?;
            /*
             * Create the src/ directory:
             */
            let mut src = root;
            src.push("src");
            std::fs::create_dir_all(&src)?;
            /*
             * Create the Rust source file containing the generated client:
             */
            let mut librs = src;
            librs.push("lib.rs");
            save(librs, out.as_str())?;
            false
        }
        Err(e) => {
            println!("gen fail: {:?}", e);
            true
        }
    };
    if fail {
        bail!("generation experienced errors");
    }
    Ok(())
}