// Copyright 2021 Oxide Computer Company

use std::{cmp::Ordering, collections::HashMap};

use convert_case::{Case, Casing};
use indexmap::IndexMap;
use openapiv3::{
    Components, OpenAPI, Parameter, ReferenceOr, RequestBody, Response, Schema,
    StatusCode,
};
use proc_macro2::TokenStream;
use quote::{format_ident, quote, ToTokens};
use template::PathTemplate;
use thiserror::Error;
use typify::{TypeId, TypeSpace};

use crate::to_schema::ToSchema;

mod template;
mod to_schema;
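
/// Errors that can arise while generating a client from an OpenAPI document.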
#[derive(Error, Debug)]
pub enum Error {
    #[error("unexpected value type")]
    BadValue(String, serde_json::Value),
    #[error("type error")]
    TypeError(#[from] typify::Error),
    #[error("XXX")]
    BadConversion(String),
    #[error("invalid operation path")]
    InvalidPath(String),
    //#[error("unknown")]
    //Unknown,
}

pub type Result<T> = std::result::Result<T, Error>;
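
/// Generates a client (and its support types) from an OpenAPI document,
/// accumulating schema types in a `TypeSpace` as it goes.
///
/// A minimal usage sketch (assuming `spec` is an already-deserialized
/// `openapiv3::OpenAPI` value):
///
/// ```ignore
/// let mut generator = Generator::new();
/// let source = generator.generate_text(&spec)?;
/// let cargo_deps = generator.dependencies();
/// ```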
#[derive(Default)]
pub struct Generator {
    type_space: TypeSpace,
    inner_type: Option<TokenStream>,
    pre_hook: Option<TokenStream>,
    post_hook: Option<TokenStream>,
    uses_futures: bool,
}
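
/// A single operation (HTTP method + path) distilled from the OpenAPI
/// document into the pieces needed to emit a client method.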
struct OperationMethod {
    operation_id: String,
    method: String,
    path: PathTemplate,
    doc_comment: Option<String>,
    params: Vec<OperationParameter>,
    responses: Vec<OperationResponse>,
    dropshot_paginated: Option<DropshotPagination>,
}

struct DropshotPagination {
    item: TypeId,
}

#[derive(Debug, PartialEq, Eq)]
enum OperationParameterKind {
    Path,
    Query(bool),
    Body,
}

struct OperationParameter {
    name: String,
    typ: OperationParameterType,
    kind: OperationParameterKind,
}

enum OperationParameterType {
    Type(TypeId),
    RawBody,
}

#[derive(Debug)]
struct OperationResponse {
    status_code: StatusCode,
    typ: OperationResponseType,
}

#[derive(Debug)]
enum OperationResponseType {
    Type(TypeId),
    None,
    Raw,
}

impl Generator {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn with_inner_type(&mut self, inner_type: TokenStream) -> &mut Self {
        self.inner_type = Some(inner_type);
        self
    }

    pub fn with_pre_hook(&mut self, pre_hook: TokenStream) -> &mut Self {
        self.pre_hook = Some(pre_hook);
        self
    }

    pub fn with_post_hook(&mut self, post_hook: TokenStream) -> &mut Self {
        self.post_hook = Some(post_hook);
        self
    }
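
    /// Emit a `TokenStream` for the generated client: a `types` module with
    /// every referenced schema, percent-encoding support, the `Client`
    /// struct, and client methods for each operation.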
    pub fn generate_tokens(&mut self, spec: &OpenAPI) -> Result<TokenStream> {
        // Convert our components dictionary to schemars
        let schemas = spec
            .components
            .iter()
            .flat_map(|components| {
                components.schemas.iter().map(|(name, ref_or_schema)| {
                    (name.clone(), ref_or_schema.to_schema())
                })
            })
            .collect::<Vec<(String, _)>>();

        self.type_space.set_type_mod("types");
        self.type_space.add_ref_types(schemas)?;

        let raw_methods = spec
            .paths
            .iter()
            .flat_map(|(path, ref_or_item)| {
                // Exclude externally defined path items.
                let item = ref_or_item.as_item().unwrap();
                // TODO punt on parameters that apply to all path items for now.
                assert!(item.parameters.is_empty());
                item.iter().map(move |(method, operation)| {
                    (path.as_str(), method, operation)
                })
            })
            .map(|(path, method, operation)| {
                self.process_operation(
                    operation,
                    &spec.components,
                    path,
                    method,
                )
            })
            .collect::<Result<Vec<_>>>()?;

        let methods = raw_methods
            .iter()
            .map(|method| self.process_method(method))
            .collect::<Result<Vec<_>>>()?;

        let mut types = self
            .type_space
            .iter_types()
            .map(|t| (t.name(), t.definition()))
            .collect::<Vec<_>>();
        types.sort_by(|(a_name, _), (b_name, _)| a_name.cmp(b_name));
        let types = types.into_iter().map(|(_, def)| def);

        let inner_property = self.inner_type.as_ref().map(|inner| {
            quote! {
                inner: #inner,
            }
        });
        let inner_value = self.inner_type.as_ref().map(|_| {
            quote! {
                inner
            }
        });

        let file = quote! {
            use anyhow::Result;

            mod progenitor_support {
                use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};

                #[allow(dead_code)]
                const PATH_SET: &AsciiSet = &CONTROLS
                    .add(b' ')
                    .add(b'"')
                    .add(b'#')
                    .add(b'<')
                    .add(b'>')
                    .add(b'?')
                    .add(b'`')
                    .add(b'{')
                    .add(b'}');

                #[allow(dead_code)]
                pub(crate) fn encode_path(pc: &str) -> String {
                    utf8_percent_encode(pc, PATH_SET).to_string()
                }
            }

            pub mod types {
                use serde::{Deserialize, Serialize};
                #(#types)*
            }

            #[derive(Clone)]
            pub struct Client {
                baseurl: String,
                client: reqwest::Client,
                #inner_property
            }

            impl Client {
                pub fn new(
                    baseurl: &str,
                    #inner_property
                ) -> Self {
                    let dur = std::time::Duration::from_secs(15);
                    let client = reqwest::ClientBuilder::new()
                        .connect_timeout(dur)
                        .timeout(dur)
                        .build()
                        .unwrap();
                    Self::new_with_client(baseurl, client, #inner_value)
                }

                pub fn new_with_client(
                    baseurl: &str,
                    client: reqwest::Client,
                    #inner_property
                ) -> Self {
                    Self {
                        baseurl: baseurl.to_string(),
                        client,
                        #inner_value
                    }
                }

                pub fn baseurl(&self) -> &String {
                    &self.baseurl
                }

                pub fn client(&self) -> &reqwest::Client {
                    &self.client
                }

                #(#methods)*
            }
        };

        Ok(file)
    }
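
    /// Distill a single operation into an `OperationMethod`: resolve its
    /// parameters, request body, and responses, registering any new schema
    /// types in the type space along the way.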
    fn process_operation(
        &mut self,
        operation: &openapiv3::Operation,
        components: &Option<Components>,
        path: &str,
        method: &str,
    ) -> Result<OperationMethod> {
        let operation_id = operation.operation_id.as_ref().unwrap();

        let mut query: Vec<(String, bool)> = Vec::new();

        let mut raw_params = operation
            .parameters
            .iter()
            .map(|parameter| {
                match parameter.item(components)? {
                    openapiv3::Parameter::Path {
                        parameter_data,
                        style: openapiv3::PathStyle::Simple,
                    } => {
                        // Path parameters MUST be required.
                        assert!(parameter_data.required);

                        let nam = parameter_data.name.clone();
                        let schema = parameter_data.schema()?.to_schema();
                        let name = format!(
                            "{}{}",
                            sanitize(operation_id, Case::Pascal),
                            sanitize(&nam, Case::Pascal),
                        );
                        let typ = self
                            .type_space
                            .add_type_with_name(&schema, Some(name))?;

                        Ok(OperationParameter {
                            name: sanitize(&parameter_data.name, Case::Snake),
                            typ: OperationParameterType::Type(typ),
                            kind: OperationParameterKind::Path,
                        })
                    }
                    openapiv3::Parameter::Query {
                        parameter_data,
                        allow_reserved: _,
                        style: openapiv3::QueryStyle::Form,
                        allow_empty_value,
                    } => {
                        if let Some(true) = allow_empty_value {
                            todo!("allow empty value is a no go");
                        }

                        let nam = parameter_data.name.clone();
                        let mut schema = parameter_data.schema()?.to_schema();
                        let name = format!(
                            "{}{}",
                            sanitize(
                                operation.operation_id.as_ref().unwrap(),
                                Case::Pascal
                            ),
                            sanitize(&nam, Case::Pascal),
                        );

                        if !parameter_data.required {
                            schema = make_optional(schema);
                        }

                        let typ = self
                            .type_space
                            .add_type_with_name(&schema, Some(name))?;

                        query.push((nam, !parameter_data.required));
                        Ok(OperationParameter {
                            name: sanitize(&parameter_data.name, Case::Snake),
                            typ: OperationParameterType::Type(typ),
                            kind: OperationParameterKind::Query(
                                parameter_data.required,
                            ),
                        })
                    }
                    x => todo!("unhandled parameter type: {:#?}", x),
                }
            })
            .collect::<Result<Vec<_>>>()?;

        if let Some(b) = &operation.request_body {
            let b = b.item(components)?;
            let typ = if b.is_binary(components)? {
                OperationParameterType::RawBody
            } else {
                let mt = b.content_json()?;
                if !mt.encoding.is_empty() {
                    todo!("media type encoding not empty: {:#?}", mt);
                }

                if let Some(s) = &mt.schema {
                    let schema = s.to_schema();
                    let name = format!(
                        "{}Body",
                        sanitize(
                            operation.operation_id.as_ref().unwrap(),
                            Case::Pascal
                        )
                    );
                    let typ = self
                        .type_space
                        .add_type_with_name(&schema, Some(name))?;
                    OperationParameterType::Type(typ)
                } else {
                    todo!("media type encoding, no schema: {:#?}", mt);
                }
            };

            raw_params.push(OperationParameter {
                name: "body".to_string(),
                typ,
                kind: OperationParameterKind::Body,
            });
        }

        let tmp = template::parse(path)?;
        let names = tmp.names();
        raw_params.sort_by(
            |OperationParameter {
                 kind: a_kind,
                 name: a_name,
                 ..
             },
             OperationParameter {
                 kind: b_kind,
                 name: b_name,
                 ..
             }| {
                match (a_kind, b_kind) {
                    // Path params are first and are in positional order.
                    (
                        OperationParameterKind::Path,
                        OperationParameterKind::Path,
                    ) => {
                        let a_index =
                            names.iter().position(|x| x == a_name).unwrap();
                        let b_index =
                            names.iter().position(|x| x == b_name).unwrap();
                        a_index.cmp(&b_index)
                    }
                    (
                        OperationParameterKind::Path,
                        OperationParameterKind::Query(_),
                    ) => Ordering::Less,
                    (
                        OperationParameterKind::Path,
                        OperationParameterKind::Body,
                    ) => Ordering::Less,

                    // Query params are in lexicographic order.
                    (
                        OperationParameterKind::Query(_),
                        OperationParameterKind::Body,
                    ) => Ordering::Less,
                    (
                        OperationParameterKind::Query(_),
                        OperationParameterKind::Query(_),
                    ) => a_name.cmp(b_name),
                    (
                        OperationParameterKind::Query(_),
                        OperationParameterKind::Path,
                    ) => Ordering::Greater,

                    // Body params are last and should be unique.
                    (
                        OperationParameterKind::Body,
                        OperationParameterKind::Path,
                    ) => Ordering::Greater,
                    (
                        OperationParameterKind::Body,
                        OperationParameterKind::Query(_),
                    ) => Ordering::Greater,
                    (
                        OperationParameterKind::Body,
                        OperationParameterKind::Body,
                    ) => {
                        panic!("should only be one body")
                    }
                }
            },
        );

        let mut success = false;

        let mut responses = operation
            .responses
            .responses
            .iter()
            .map(|(status_code, response_or_ref)| {
                let response = response_or_ref.item(components)?;

                let typ = if let Some(mt) =
                    response.content.get("application/json")
                {
                    assert!(mt.encoding.is_empty());

                    let typ = if let Some(schema) = &mt.schema {
                        let schema = schema.to_schema();
                        let name = format!(
                            "{}Response",
                            sanitize(
                                operation.operation_id.as_ref().unwrap(),
                                Case::Pascal
                            )
                        );
                        self.type_space
                            .add_type_with_name(&schema, Some(name))?
                    } else {
                        todo!("media type encoding, no schema: {:#?}", mt);
                    };

                    OperationResponseType::Type(typ)
                } else if response.content.first().is_some() {
                    OperationResponseType::Raw
                } else {
                    OperationResponseType::None
                };

                if matches!(
                    status_code,
                    StatusCode::Code(200..=299) | StatusCode::Range(2)
                ) {
                    success = true;
                }

                Ok(OperationResponse {
                    status_code: status_code.clone(),
                    typ,
                })
            })
            .collect::<Result<Vec<_>>>()?;

        // If the API has declined to specify the characteristics of a
        // successful response, we cons up a generic one. Note that this is
        // technically permissible within OpenAPI, but advised against in the
        // spec.
        if !success {
            responses.push(OperationResponse {
                status_code: StatusCode::Range(2),
                typ: OperationResponseType::Raw,
            });
        }

        let dropshot_paginated =
            self.dropshot_pagination_data(operation, &raw_params, &responses);

        Ok(OperationMethod {
            operation_id: sanitize(operation_id, Case::Snake),
            method: method.to_string(),
            path: tmp,
            doc_comment: operation.description.clone(),
            params: raw_params,
            responses,
            dropshot_paginated,
        })
    }
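
    /// Generate the tokens for a single client method, plus a companion
    /// `*_stream` method for operations that use Dropshot pagination.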
    fn process_method(
        &mut self,
        method: &OperationMethod,
    ) -> Result<TokenStream> {
        let operation_id = format_ident!("{}", method.operation_id);
        let mut bounds_items: Vec<TokenStream> = Vec::new();
        let typed_params = method
            .params
            .iter()
            .map(|param| {
                let name = format_ident!("{}", param.name);
                let typ = match &param.typ {
                    OperationParameterType::Type(type_id) => self
                        .type_space
                        .get_type(type_id)
                        .unwrap()
                        .parameter_ident_with_lifetime("a"),
                    OperationParameterType::RawBody => {
                        bounds_items.push(quote! { B: Into<reqwest::Body> });
                        quote! { B }
                    }
                };
                (
                    param,
                    quote! {
                        #name: #typ
                    },
                )
            })
            .collect::<Vec<_>>();

        let params = typed_params.iter().map(|(_, stream)| stream);

        let bounds = quote! { < 'a, #(#bounds_items),* > };

        let query_items = method
            .params
            .iter()
            .filter_map(|param| match &param.kind {
                OperationParameterKind::Query(required) => {
                    let qn = &param.name;
                    Some(if *required {
                        quote! {
                            query.push((#qn, #qn.to_string()));
                        }
                    } else {
                        let qn_ident = format_ident!("{}", qn);
                        quote! {
                            if let Some(v) = & #qn_ident {
                                query.push((#qn, v.to_string()));
                            }
                        }
                    })
                }
                _ => None,
            })
            .collect::<Vec<_>>();
        let (query_build, query_use) = if query_items.is_empty() {
            (quote! {}, quote! {})
        } else {
            let query_build = quote! {
                let mut query = Vec::new();
                #(#query_items)*
            };
            let query_use = quote! {
                .query(&query)
            };

            (query_build, query_use)
        };

        let url_path = method.path.compile();

        let body_func =
            method.params.iter().filter_map(|param| match &param.kind {
                OperationParameterKind::Body => match &param.typ {
                    OperationParameterType::Type(_) => {
                        Some(quote! { .json(body) })
                    }
                    OperationParameterType::RawBody => {
                        Some(quote! { .body(body) })
                    }
                },
                _ => None,
            });

        assert!(body_func.clone().count() <= 1);

        let mut success_response_items =
            method.responses.iter().filter(|response| {
                matches!(
                    response.status_code,
                    StatusCode::Code(200..=299) | StatusCode::Range(2)
                )
            });

        assert_eq!(success_response_items.clone().count(), 1);

        let (response_type, decode_response) = success_response_items
            .next()
            .map(|response| match &response.typ {
                OperationResponseType::Type(type_id) => (
                    self.type_space.get_type(type_id).unwrap().ident(),
                    quote! { res.json().await? },
                ),
                OperationResponseType::None => {
                    (quote! { reqwest::Response }, quote! { res })
                }
                OperationResponseType::Raw => {
                    (quote! { reqwest::Response }, quote! { res })
                }
            })
            .unwrap();

        // TODO document parameters
        let doc_comment = format!(
            "{}{}: {} {}",
            method
                .doc_comment
                .as_ref()
                .map(|s| format!("{}\n\n", s))
                .unwrap_or_else(String::new),
            method.operation_id,
            method.method.to_ascii_uppercase(),
            method.path.to_string(),
        );

        let pre_hook = self.pre_hook.as_ref().map(|hook| {
            quote! {
                (#hook)(&self.inner, &request);
            }
        });
        let post_hook = self.post_hook.as_ref().map(|hook| {
            quote! {
                (#hook)(&self.inner, &result);
            }
        });

        // TODO validate that method is one of the expected methods.
        let method_func = format_ident!("{}", method.method.to_lowercase());

        let method_impl = quote! {
            #[doc = #doc_comment]
            pub async fn #operation_id #bounds (
                &'a self,
                #(#params),*
            ) -> Result<#response_type> {
                #url_path
                #query_build

                let request = self.client
                    . #method_func (url)
                    #(#body_func)*
                    #query_use
                    .build()?;
                #pre_hook
                let result = self.client
                    .execute(request)
                    .await;
                #post_hook

                // TODO we should do a match here for result?.status().as_u16()
                let res = result?.error_for_status()?;

                Ok(#decode_response)
            }
        };

        let stream_impl = method.dropshot_paginated.as_ref().map(|page_data| {
            // We're now using futures.
            self.uses_futures = true;

            let stream_id = format_ident!("{}_stream", method.operation_id);

            // The parameters are the same as those to the paged method, but
            // without "page_token".
            let stream_params =
                typed_params.iter().filter_map(|(param, stream)| {
                    if param.name.as_str() == "page_token" {
                        None
                    } else {
                        Some(stream)
                    }
                });

            // The values passed to get the first page are the inputs to the
            // stream method with "None" for the page_token.
            let first_params = typed_params.iter().map(|(param, _)| {
                if param.name.as_str() == "page_token" {
                    // The page_token is None when getting the first page.
                    quote! { None }
                } else {
                    // All other parameters are passed through directly.
                    format_ident!("{}", param.name).to_token_stream()
                }
            });

            // The values passed to get subsequent pages are...
            // - the state variable for the page_token
            // - None for all other query parameters
            // - The method inputs for non-query parameters
            let step_params = typed_params.iter().map(|(param, _)| {
                if param.name.as_str() == "page_token" {
                    quote! { state.as_deref() }
                } else if let OperationParameterKind::Query(_) = param.kind {
                    // Query parameters are None; having page_token as Some(_)
                    // is mutually exclusive with other query parameters.
                    quote! { None }
                } else {
                    // Non-query parameters are passed in; this is necessary
                    // e.g. to specify the right path. (We don't really expect
                    // to see a body parameter here, but we pass it through
                    // regardless.)
                    format_ident!("{}", param.name).to_token_stream()
                }
            });

            // The item type that we've saved (by picking apart the original
            // function's return type) will be the Item type parameter for the
            // Stream type we return.
            let item = self.type_space.get_type(&page_data.item).unwrap();
            let item_type = item.ident();

            // TODO document parameters
            let doc_comment = format!(
                "{}returns a Stream<Item = {}> by making successive calls to {}",
                method
                    .doc_comment
                    .as_ref()
                    .map(|s| format!("{}\n\n", s))
                    .unwrap_or_else(String::new),
                item.name(),
                method.operation_id,
            );

            quote! {
                #[doc = #doc_comment]
                pub fn #stream_id #bounds (
                    &'a self,
                    #(#stream_params),*
                ) -> impl futures::Stream<Item = Result<#item_type>> + Unpin + '_ {
                    use futures::StreamExt;
                    use futures::TryFutureExt;
                    use futures::TryStreamExt;

                    // Execute the operation with the basic parameters
                    // (omitting page_token) to get the first page.
                    self.#operation_id(
                        #(#first_params,)*
                    )
                    .map_ok(move |page| {
                        // The first page is just an iter
                        let first = futures::stream::iter(
                            page.items.into_iter().map(Ok)
                        );

                        // We unfold subsequent pages using page.next_page as
                        // the seed value. Each iteration returns its items and
                        // the next page token.
                        let rest = futures::stream::try_unfold(
                            page.next_page,
                            move |state| async move {
                                if state.is_none() {
                                    // The page_token was None so we've reached
                                    // the end.
                                    Ok(None)
                                } else {
                                    // Get the next page; here we set all query
                                    // parameters to None (except for the
                                    // page_token), and all other parameters as
                                    // specified at the start of this method.
                                    self.#operation_id(
                                        #(#step_params,)*
                                    )
                                    .map_ok(|page| {
                                        Some((
                                            futures::stream::iter(
                                                page
                                                    .items
                                                    .into_iter()
                                                    .map(Ok),
                                            ),
                                            page.next_page,
                                        ))
                                    })
                                    .await
                                }
                            },
                        )
                        .try_flatten();

                        first.chain(rest)
                    })
                    .try_flatten_stream()
                    .boxed()
                }
            }
        });

        let all = quote! {
            #method_impl
            #stream_impl
        };

        Ok(all)
    }

    // Validates all the necessary conditions for Dropshot pagination. Returns
    // the paginated item type data if all conditions are met.
    fn dropshot_pagination_data(
        &self,
        operation: &openapiv3::Operation,
        parameters: &[OperationParameter],
        responses: &[OperationResponse],
    ) -> Option<DropshotPagination> {
        if operation
            .extensions
            .get("x-dropshot-pagination")
            .and_then(|v| v.as_bool())
            != Some(true)
        {
            return None;
        }

        // We expect to see at least "page_token" and "limit" parameters.
        if parameters
            .iter()
            .filter(|param| {
                matches!(
                    (param.name.as_str(), &param.kind),
                    ("page_token", OperationParameterKind::Query(_))
                        | ("limit", OperationParameterKind::Query(_))
                )
            })
            .count()
            != 2
        {
            return None;
        }

        // All query parameters must be optional since page_token may not be
        // specified in conjunction with other query parameters.
        if !parameters.iter().all(|param| match &param.kind {
            OperationParameterKind::Query(required) => !required,
            _ => true,
        }) {
            return None;
        }

        // There must be exactly one successful response type.
        let mut success_response_items =
            responses.iter().filter_map(|response| {
                match (&response.status_code, &response.typ) {
                    (
                        StatusCode::Code(200..=299) | StatusCode::Range(2),
                        OperationResponseType::Type(type_id),
                    ) => Some(type_id),
                    _ => None,
                }
            });

        let success_response = match (
            success_response_items.next(),
            success_response_items.next(),
        ) {
            (None, _) | (_, Some(_)) => return None,
            (Some(success), None) => success,
        };

        let typ = self.type_space.get_type(success_response).ok()?;
        let details = match typ.details() {
            typify::TypeDetails::Struct(details) => details,
            _ => return None,
        };

        let properties = details.properties().collect::<HashMap<_, _>>();

        // There should be exactly two properties: items and next_page
        if properties.len() != 2 {
            return None;
        }

        // We need a next_page property that's an Option<String>.
        if let typify::TypeDetails::Option(ref opt_id) = self
            .type_space
            .get_type(properties.get("next_page")?)
            .ok()?
            .details()
        {
            if !matches!(
                self.type_space.get_type(opt_id).ok()?.details(),
                typify::TypeDetails::Builtin("String")
            ) {
                return None;
            }
        } else {
            return None;
        }

        match self
            .type_space
            .get_type(properties.get("items")?)
            .ok()?
            .details()
        {
            typify::TypeDetails::Array(item) => {
                Some(DropshotPagination { item })
            }
            _ => None,
        }
    }

    pub fn generate_text(&mut self, spec: &OpenAPI) -> Result<String> {
        let output = self.generate_tokens(spec)?;

        // Format the file with rustfmt.
        let content = rustfmt_wrapper::rustfmt(output).unwrap();

        // Add newlines after end-braces at <= two levels of indentation.
        Ok(if cfg!(not(windows)) {
            let regex = regex::Regex::new(r#"(})(\n\s{0,8}[^} ])"#).unwrap();
            regex.replace_all(&content, "$1\n$2").to_string()
        } else {
            let regex = regex::Regex::new(r#"(})(\r\n\s{0,8}[^} ])"#).unwrap();
            regex.replace_all(&content, "$1\r\n$2").to_string()
        })
    }
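
    /// Cargo dependencies required by the generated code, as strings suitable
    /// for the `[dependencies]` section of a Cargo.toml.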
    pub fn dependencies(&self) -> Vec<String> {
        let mut deps = vec![
            "anyhow = \"1.0\"",
            "percent-encoding = \"2.1\"",
            "serde = { version = \"1.0\", features = [\"derive\"] }",
            "reqwest = { version = \"0.11\", features = [\"json\", \"stream\"] }",
        ];
        if self.type_space.uses_uuid() {
            deps.push(
                "uuid = { version = \"0.8\", features = [\"serde\", \"v4\"] }",
            )
        }
        if self.type_space.uses_chrono() {
            deps.push("chrono = { version = \"0.4\", features = [\"serde\"] }")
        }
        if self.uses_futures {
            deps.push("futures = \"0.3\"")
        }
        if self.type_space.uses_serde_json() {
            deps.push("serde_json = \"1.0\"")
        }
        deps.sort_unstable();
        deps.iter().map(ToString::to_string).collect()
    }

    pub fn get_type_space(&self) -> &TypeSpace {
        &self.type_space
    }
}

/// Make the schema optional if it isn't already.
pub fn make_optional(
    schema: schemars::schema::Schema,
) -> schemars::schema::Schema {
    match &schema {
        // If the instance_type already includes Null then this is already
        // optional.
        schemars::schema::Schema::Object(schemars::schema::SchemaObject {
            instance_type: Some(schemars::schema::SingleOrVec::Vec(types)),
            ..
        }) if types.contains(&schemars::schema::InstanceType::Null) => schema,

        // Otherwise, create a oneOf where one of the branches is the null
        // type. We could potentially check to see if the schema already
        // conforms to this pattern as well, but it doesn't hurt as typify
        // will already reduce nested Options to a single Option.
        _ => {
            let null_schema = schemars::schema::Schema::Object(
                schemars::schema::SchemaObject {
                    instance_type: Some(schemars::schema::SingleOrVec::Single(
                        Box::new(schemars::schema::InstanceType::Null),
                    )),
                    ..Default::default()
                },
            );
            schemars::schema::Schema::Object(schemars::schema::SchemaObject {
                subschemas: Some(Box::new(
                    schemars::schema::SubschemaValidation {
                        one_of: Some(vec![schema, null_schema]),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            })
        }
    }
}
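
/// Extension trait for pulling the schema out of `openapiv3::ParameterData`;
/// parameters expressed as content (rather than a schema) are rejected.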
trait ParameterDataExt {
    fn schema(&self) -> Result<&openapiv3::ReferenceOr<openapiv3::Schema>>;
}

impl ParameterDataExt for openapiv3::ParameterData {
    fn schema(&self) -> Result<&openapiv3::ReferenceOr<openapiv3::Schema>> {
        match &self.format {
            openapiv3::ParameterSchemaOrContent::Schema(s) => Ok(s),
            x => {
                Err(Error::BadConversion(format!("XXX param format {:#?}", x)))
            }
        }
    }
}
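
/// Helpers for pulling the JSON media type out of request bodies and
/// responses, and for detecting raw binary (octet-stream) payloads.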
trait ExtractJsonMediaType {
    fn is_binary(&self, components: &Option<Components>) -> Result<bool>;
    fn content_json(&self) -> Result<openapiv3::MediaType>;
}

impl ExtractJsonMediaType for openapiv3::Response {
    fn content_json(&self) -> Result<openapiv3::MediaType> {
        if self.content.len() != 1 {
            todo!("expected one content entry, found {}", self.content.len());
        }

        if let Some(mt) = self.content.get("application/json") {
            Ok(mt.clone())
        } else {
            todo!(
                "could not find application/json, only found {}",
                self.content.keys().next().unwrap()
            );
        }
    }

    fn is_binary(&self, _components: &Option<Components>) -> Result<bool> {
        if self.content.is_empty() {
            /*
             * XXX If there are no content types, I guess it is not binary?
             */
            return Ok(false);
        }

        if self.content.len() != 1 {
            todo!("expected one content entry, found {}", self.content.len());
        }

        if let Some(mt) = self.content.get("application/octet-stream") {
            if !mt.encoding.is_empty() {
                todo!("XXX encoding");
            }

            if let Some(s) = &mt.schema {
                use openapiv3::{
                    SchemaKind, StringFormat, Type,
                    VariantOrUnknownOrEmpty::Item,
                };

                let s = s.item(&None)?;
                if s.schema_data.nullable {
                    todo!("XXX nullable binary?");
                }
                if s.schema_data.default.is_some() {
                    todo!("XXX default binary?");
                }
                if s.schema_data.discriminator.is_some() {
                    todo!("XXX binary discriminator?");
                }
                match &s.schema_kind {
                    SchemaKind::Type(Type::String(st)) => {
                        if st.min_length.is_some() || st.max_length.is_some() {
                            todo!("binary min/max length");
                        }
                        if !matches!(st.format, Item(StringFormat::Binary)) {
                            todo!(
                                "expected binary format string, got {:?}",
                                st.format
                            );
                        }
                        if st.pattern.is_some() {
                            todo!("XXX pattern");
                        }
                        if !st.enumeration.is_empty() {
                            todo!("XXX enumeration");
                        }
                        return Ok(true);
                    }
                    x => {
                        todo!("XXX schemakind type {:?}", x);
                    }
                }
            } else {
                todo!("binary thing had no schema?");
            }
        }

        Ok(false)
    }
}

impl ExtractJsonMediaType for openapiv3::RequestBody {
    fn content_json(&self) -> Result<openapiv3::MediaType> {
        if self.content.len() != 1 {
            todo!("expected one content entry, found {}", self.content.len());
        }

        if let Some(mt) = self.content.get("application/json") {
            Ok(mt.clone())
        } else {
            todo!(
                "could not find application/json, only found {}",
                self.content.keys().next().unwrap()
            );
        }
    }

    fn is_binary(&self, components: &Option<Components>) -> Result<bool> {
        if self.content.is_empty() {
            /*
             * XXX If there are no content types, I guess it is not binary?
             */
            return Ok(false);
        }

        if self.content.len() != 1 {
            todo!("expected one content entry, found {}", self.content.len());
        }

        if let Some(mt) = self.content.get("application/octet-stream") {
            if !mt.encoding.is_empty() {
                todo!("XXX encoding");
            }

            if let Some(s) = &mt.schema {
                use openapiv3::{
                    SchemaKind, StringFormat, Type,
                    VariantOrUnknownOrEmpty::Item,
                };

                let s = s.item(components)?;
                if s.schema_data.nullable {
                    todo!("XXX nullable binary?");
                }
                if s.schema_data.default.is_some() {
                    todo!("XXX default binary?");
                }
                if s.schema_data.discriminator.is_some() {
                    todo!("XXX binary discriminator?");
                }
                match &s.schema_kind {
                    SchemaKind::Type(Type::String(st)) => {
                        if st.min_length.is_some() || st.max_length.is_some() {
                            todo!("binary min/max length");
                        }
                        if !matches!(st.format, Item(StringFormat::Binary)) {
                            todo!(
                                "expected binary format string, got {:?}",
                                st.format
                            );
                        }
                        if st.pattern.is_some() {
                            todo!("XXX pattern");
                        }
                        if !st.enumeration.is_empty() {
                            todo!("XXX enumeration");
                        }
                        return Ok(true);
                    }
                    x => {
                        todo!("XXX schemakind type {:?}", x);
                    }
                }
            } else {
                todo!("binary thing had no schema?");
            }
        }

        Ok(false)
    }
}
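
/// Resolve a `ReferenceOr<T>` to the underlying item, following
/// `#/components/...` references through the appropriate components map.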
trait ReferenceOrExt<T: ComponentLookup> {
    fn item<'a>(&'a self, components: &'a Option<Components>) -> Result<&'a T>;
}
trait ComponentLookup: Sized {
    fn get_components(
        components: &Components,
    ) -> &IndexMap<String, ReferenceOr<Self>>;
}

impl<T: ComponentLookup> ReferenceOrExt<T> for openapiv3::ReferenceOr<T> {
    fn item<'a>(&'a self, components: &'a Option<Components>) -> Result<&'a T> {
        match self {
            ReferenceOr::Item(item) => Ok(item),
            ReferenceOr::Reference { reference } => {
                let idx = reference.rfind('/').unwrap();
                let key = &reference[idx + 1..];
                let parameters =
                    T::get_components(components.as_ref().unwrap());
                parameters.get(key).unwrap().item(components)
            }
        }
    }
}

impl ComponentLookup for Parameter {
    fn get_components(
        components: &Components,
    ) -> &IndexMap<String, ReferenceOr<Self>> {
        &components.parameters
    }
}

impl ComponentLookup for RequestBody {
    fn get_components(
        components: &Components,
    ) -> &IndexMap<String, ReferenceOr<Self>> {
        &components.request_bodies
    }
}

impl ComponentLookup for Response {
    fn get_components(
        components: &Components,
    ) -> &IndexMap<String, ReferenceOr<Self>> {
        &components.responses
    }
}

impl ComponentLookup for Schema {
    fn get_components(
        components: &Components,
    ) -> &IndexMap<String, ReferenceOr<Self>> {
        &components.schemas
    }
}
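
/// Convert a name from the OpenAPI document into an identifier in the
/// requested case, mapping '/' to '-' before the case conversion.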
fn sanitize(input: &str, case: Case) -> String {
    input.replace('/', "-").to_case(case)
}