generate iterators over dropshot paginated interfaces (#19)

* Adds support for iterating with Streams over interfaces that are tagged
with the x-dropshot-pagination extension. This requires clients to use
the futures crate.

Adds tests that compile generated clients.

Updates nexus.json to reflect a more recent omicron API.

Changes all generated methods to have a lifetime ('a) bound on all
references. This isn't necessary for most methods, but greatly
simplifies generation of the associated paginated interface.
This commit is contained in:
Adam Leventhal 2022-01-05 12:02:46 -08:00 committed by GitHub
parent e58ebd18fa
commit e47fc93748
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 2314 additions and 608 deletions

85
Cargo.lock generated
View File

@ -185,43 +185,92 @@ dependencies = [
] ]
[[package]] [[package]]
name = "futures-channel" name = "futures"
version = "0.3.17" version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5da6ba8c3bb3c165d3c7319fc1cc8304facf1fb8db99c5de877183c08a273888" checksum = "28560757fe2bb34e79f907794bb6b22ae8b0e5c669b638a1132f2592b19035b4"
dependencies = [
"futures-channel",
"futures-core",
"futures-executor",
"futures-io",
"futures-sink",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-channel"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b"
dependencies = [ dependencies = [
"futures-core", "futures-core",
"futures-sink",
] ]
[[package]] [[package]]
name = "futures-core" name = "futures-core"
version = "0.3.17" version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88d1c26957f23603395cd326b0ffe64124b818f4449552f960d815cfba83a53d" checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7"
[[package]]
name = "futures-executor"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29d6d2ff5bb10fb95c85b8ce46538a2e5f5e7fdc755623a7d4529ab8a4ed9d2a"
dependencies = [
"futures-core",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-io"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f9d34af5a1aac6fb380f735fe510746c38067c5bf16c7fd250280503c971b2"
[[package]]
name = "futures-macro"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "futures-sink" name = "futures-sink"
version = "0.3.17" version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36ea153c13024fe480590b3e3d4cad89a0cfacecc24577b68f86c6ced9c2bc11" checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508"
[[package]] [[package]]
name = "futures-task" name = "futures-task"
version = "0.3.17" version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d3d00f4eddb73e498a54394f228cd55853bdf059259e8e7bc6e69d408892e99" checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72"
[[package]] [[package]]
name = "futures-util" name = "futures-util"
version = "0.3.17" version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36568465210a3a6ee45e1f165136d68671471a501e632e9a98d96872222b5481" checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164"
dependencies = [ dependencies = [
"autocfg", "futures-channel",
"futures-core", "futures-core",
"futures-io",
"futures-macro",
"futures-sink",
"futures-task", "futures-task",
"memchr",
"pin-project-lite", "pin-project-lite",
"pin-utils", "pin-utils",
"slab",
] ]
[[package]] [[package]]
@ -608,12 +657,17 @@ name = "progenitor"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"chrono",
"futures",
"getopts", "getopts",
"openapiv3", "openapiv3",
"percent-encoding",
"progenitor-impl", "progenitor-impl",
"progenitor-macro", "progenitor-macro",
"reqwest",
"serde", "serde",
"serde_json", "serde_json",
"uuid",
] ]
[[package]] [[package]]
@ -1104,7 +1158,7 @@ checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642"
[[package]] [[package]]
name = "typify" name = "typify"
version = "0.0.6-dev" version = "0.0.6-dev"
source = "git+https://github.com/oxidecomputer/typify#58bfcd02a2cd74bff047e9e8ad6e4f2b4f84f3af" source = "git+https://github.com/oxidecomputer/typify#df983c2981fc055efeba3fc360e724221703d4bd"
dependencies = [ dependencies = [
"typify-impl", "typify-impl",
"typify-macro", "typify-macro",
@ -1113,9 +1167,10 @@ dependencies = [
[[package]] [[package]]
name = "typify-impl" name = "typify-impl"
version = "0.0.6-dev" version = "0.0.6-dev"
source = "git+https://github.com/oxidecomputer/typify#58bfcd02a2cd74bff047e9e8ad6e4f2b4f84f3af" source = "git+https://github.com/oxidecomputer/typify#df983c2981fc055efeba3fc360e724221703d4bd"
dependencies = [ dependencies = [
"convert_case", "convert_case",
"log",
"proc-macro2", "proc-macro2",
"quote", "quote",
"rustfmt-wrapper", "rustfmt-wrapper",
@ -1128,7 +1183,7 @@ dependencies = [
[[package]] [[package]]
name = "typify-macro" name = "typify-macro"
version = "0.0.6-dev" version = "0.0.6-dev"
source = "git+https://github.com/oxidecomputer/typify#58bfcd02a2cd74bff047e9e8ad6e4f2b4f84f3af" source = "git+https://github.com/oxidecomputer/typify#df983c2981fc055efeba3fc360e724221703d4bd"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",

View File

@ -1,6 +1,6 @@
// Copyright 2021 Oxide Computer Company // Copyright 2021 Oxide Computer Company
use std::cmp::Ordering; use std::{cmp::Ordering, collections::HashMap};
use convert_case::{Case, Casing}; use convert_case::{Case, Casing};
use indexmap::IndexMap; use indexmap::IndexMap;
@ -10,10 +10,10 @@ use openapiv3::{
}; };
use proc_macro2::TokenStream; use proc_macro2::TokenStream;
use quote::{format_ident, quote}; use quote::{format_ident, quote, ToTokens};
use template::PathTemplate; use template::PathTemplate;
use thiserror::Error; use thiserror::Error;
use typify::TypeSpace; use typify::{TypeId, TypeSpace};
use crate::to_schema::ToSchema; use crate::to_schema::ToSchema;
@ -42,6 +42,7 @@ pub struct Generator {
inner_type: Option<TokenStream>, inner_type: Option<TokenStream>,
pre_hook: Option<TokenStream>, pre_hook: Option<TokenStream>,
post_hook: Option<TokenStream>, post_hook: Option<TokenStream>,
uses_futures: bool,
} }
struct OperationMethod { struct OperationMethod {
@ -51,6 +52,11 @@ struct OperationMethod {
doc_comment: Option<String>, doc_comment: Option<String>,
params: Vec<OperationParameter>, params: Vec<OperationParameter>,
responses: Vec<OperationResponse>, responses: Vec<OperationResponse>,
dropshot_paginated: Option<DropshotPagination>,
}
struct DropshotPagination {
item: TypeId,
} }
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
@ -66,7 +72,7 @@ struct OperationParameter {
} }
enum OperationParameterType { enum OperationParameterType {
TokenStream(TokenStream), Type(TypeId),
RawBody, RawBody,
} }
#[derive(Debug)] #[derive(Debug)]
@ -77,7 +83,7 @@ struct OperationResponse {
#[derive(Debug)] #[derive(Debug)]
enum OperationResponseType { enum OperationResponseType {
TokenStream(TokenStream), Type(TypeId),
None, None,
Raw, Raw,
} }
@ -271,12 +277,11 @@ impl Generator {
); );
let typ = self let typ = self
.type_space .type_space
.add_type_with_name(&schema, Some(name))? .add_type_with_name(&schema, Some(name))?;
.parameter_ident();
Ok(OperationParameter { Ok(OperationParameter {
name: sanitize(&parameter_data.name, Case::Snake), name: sanitize(&parameter_data.name, Case::Snake),
typ: OperationParameterType::TokenStream(typ), typ: OperationParameterType::Type(typ),
kind: OperationParameterKind::Path, kind: OperationParameterKind::Path,
}) })
} }
@ -307,13 +312,12 @@ impl Generator {
let typ = self let typ = self
.type_space .type_space
.add_type_with_name(&schema, Some(name))? .add_type_with_name(&schema, Some(name))?;
.parameter_ident();
query.push((nam, !parameter_data.required)); query.push((nam, !parameter_data.required));
Ok(OperationParameter { Ok(OperationParameter {
name: sanitize(&parameter_data.name, Case::Snake), name: sanitize(&parameter_data.name, Case::Snake),
typ: OperationParameterType::TokenStream(typ), typ: OperationParameterType::Type(typ),
kind: OperationParameterKind::Query( kind: OperationParameterKind::Query(
parameter_data.required, parameter_data.required,
), ),
@ -344,9 +348,8 @@ impl Generator {
); );
let typ = self let typ = self
.type_space .type_space
.add_type_with_name(&schema, Some(name))? .add_type_with_name(&schema, Some(name))?;
.parameter_ident(); OperationParameterType::Type(typ)
OperationParameterType::TokenStream(typ)
} else { } else {
todo!("media type encoding, no schema: {:#?}", mt); todo!("media type encoding, no schema: {:#?}", mt);
} }
@ -450,12 +453,11 @@ impl Generator {
); );
self.type_space self.type_space
.add_type_with_name(&schema, Some(name))? .add_type_with_name(&schema, Some(name))?
.ident()
} else { } else {
todo!("media type encoding, no schema: {:#?}", mt); todo!("media type encoding, no schema: {:#?}", mt);
}; };
OperationResponseType::TokenStream(typ) OperationResponseType::Type(typ)
} else if response.content.first().is_some() { } else if response.content.first().is_some() {
OperationResponseType::Raw OperationResponseType::Raw
} else { } else {
@ -477,7 +479,9 @@ impl Generator {
.collect::<Result<Vec<_>>>()?; .collect::<Result<Vec<_>>>()?;
// If the API has declined to specify the characteristics of a // If the API has declined to specify the characteristics of a
// successful response, we cons up a generic one. // successful response, we cons up a generic one. Note that this is
// technically permissible within OpenAPI, but advised against in the
// spec.
if !success { if !success {
responses.push(OperationResponse { responses.push(OperationResponse {
status_code: StatusCode::Range(2), status_code: StatusCode::Range(2),
@ -485,6 +489,9 @@ impl Generator {
}); });
} }
let dropshot_paginated =
self.dropshot_pagination_data(operation, &raw_params, &responses);
Ok(OperationMethod { Ok(OperationMethod {
operation_id: sanitize(operation_id, Case::Snake), operation_id: sanitize(operation_id, Case::Snake),
method: method.to_string(), method: method.to_string(),
@ -492,36 +499,44 @@ impl Generator {
doc_comment: operation.description.clone(), doc_comment: operation.description.clone(),
params: raw_params, params: raw_params,
responses, responses,
dropshot_paginated,
}) })
} }
fn process_method(&self, method: &OperationMethod) -> Result<TokenStream> { fn process_method(
let operation_id = format_ident!("{}", method.operation_id,); &mut self,
method: &OperationMethod,
) -> Result<TokenStream> {
let operation_id = format_ident!("{}", method.operation_id);
let mut bounds_items: Vec<TokenStream> = Vec::new(); let mut bounds_items: Vec<TokenStream> = Vec::new();
let params = method let typed_params = method
.params .params
.iter() .iter()
.map(|param| { .map(|param| {
let name = format_ident!("{}", param.name); let name = format_ident!("{}", param.name);
let typ = match &param.typ { let typ = match &param.typ {
OperationParameterType::TokenStream(t) => t.clone(), OperationParameterType::Type(type_id) => self
.type_space
.get_type(type_id)
.unwrap()
.parameter_ident_with_lifetime("a"),
OperationParameterType::RawBody => { OperationParameterType::RawBody => {
bounds_items.push(quote! { B: Into<reqwest::Body>}); bounds_items.push(quote! { B: Into<reqwest::Body> });
quote! {B} quote! { B }
} }
}; };
quote! { (
#name: #typ param,
} quote! {
#name: #typ
},
)
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let bounds = if bounds_items.is_empty() {
quote! {} let params = typed_params.iter().map(|(_, stream)| stream);
} else {
quote! { let bounds = quote! { < 'a, #(#bounds_items),* > };
< #(#bounds_items),* >
}
};
let query_items = method let query_items = method
.params .params
@ -564,11 +579,11 @@ impl Generator {
let body_func = let body_func =
method.params.iter().filter_map(|param| match &param.kind { method.params.iter().filter_map(|param| match &param.kind {
OperationParameterKind::Body => match &param.typ { OperationParameterKind::Body => match &param.typ {
OperationParameterType::TokenStream(_) => { OperationParameterType::Type(_) => {
Some(quote! { .json(body) }) Some(quote! { .json(body) })
} }
OperationParameterType::RawBody => { OperationParameterType::RawBody => {
Some(quote! { .body(body )}) Some(quote! { .body(body) })
} }
}, },
_ => None, _ => None,
@ -589,12 +604,12 @@ impl Generator {
let (response_type, decode_response) = success_response_items let (response_type, decode_response) = success_response_items
.next() .next()
.map(|response| match &response.typ { .map(|response| match &response.typ {
OperationResponseType::TokenStream(typ) => { OperationResponseType::Type(type_id) => (
(typ.clone(), quote! {res.json().await?}) self.type_space.get_type(type_id).unwrap().ident(),
} quote! { res.json().await? },
),
OperationResponseType::None => { OperationResponseType::None => {
// TODO this doesn't seem quite right; I think we still want to return the raw response structure here. (quote! { reqwest::Response }, quote! { res })
(quote! { () }, quote! { () })
} }
OperationResponseType::Raw => { OperationResponseType::Raw => {
(quote! { reqwest::Response }, quote! { res }) (quote! { reqwest::Response }, quote! { res })
@ -632,7 +647,7 @@ impl Generator {
let method_impl = quote! { let method_impl = quote! {
#[doc = #doc_comment] #[doc = #doc_comment]
pub async fn #operation_id #bounds ( pub async fn #operation_id #bounds (
&self, &'a self,
#(#params),* #(#params),*
) -> Result<#response_type> { ) -> Result<#response_type> {
#url_path #url_path
@ -655,20 +670,264 @@ impl Generator {
Ok(#decode_response) Ok(#decode_response)
} }
}; };
Ok(method_impl)
let stream_impl = method.dropshot_paginated.as_ref().map(|page_data| {
// We're now using futures.
self.uses_futures = true;
let stream_id = format_ident!("{}_stream", method.operation_id);
// The parameters are the same as those to the paged method, but
// without "page_token"
let stream_params =
typed_params.iter().filter_map(|(param, stream)| {
if param.name.as_str() == "page_token" {
None
} else {
Some(stream)
}
});
// The values passed to get the first page are the inputs to the
// stream method with "None" for the page_token.
let first_params = typed_params.iter().map(|(param, _)| {
if param.name.as_str() == "page_token" {
// The page_token is None when getting the first page.
quote! { None }
} else {
// All other parameters are passed through directly.
format_ident!("{}", param.name).to_token_stream()
}
});
// The values passed to get subsequent pages are...
// - the state variable for the page_token
// - None for all other query parameters
// - The method inputs for non-query parameters
let step_params = typed_params.iter().map(|(param, _)| {
if param.name.as_str() == "page_token" {
quote! { state.as_deref() }
} else if let OperationParameterKind::Query(_) = param.kind {
// Query parameters are None; having page_token as Some(_)
// is mutually exclusive with other query parameters.
quote! { None }
} else {
// Non-query parameters are passed in; this is necessary
// e.g. to specify the right path. (We don't really expect
// to see a body parameter here, but we pass it through
// regardless.)
format_ident!("{}", param.name).to_token_stream()
}
});
// The item type that we've saved (by picking apart the original
// function's return type) will be the Item type parameter for the
// Stream type we return.
let item = self.type_space.get_type(&page_data.item).unwrap();
let item_type = item.ident();
// TODO document parameters
let doc_comment = format!(
"{}returns a Stream<Item = {}> by making successive calls to {}",
method
.doc_comment
.as_ref()
.map(|s| format!("{}\n\n", s))
.unwrap_or_else(String::new),
item.name(),
method.operation_id,
);
quote! {
#[doc = #doc_comment]
pub fn #stream_id #bounds (
&'a self,
#(#stream_params),*
) -> impl futures::Stream<Item = Result<#item_type>> + Unpin + '_ {
use futures::StreamExt;
use futures::TryFutureExt;
use futures::TryStreamExt;
// Execute the operation with the basic parameters
// (omitting page_token) to get the first page.
self.#operation_id(
#(#first_params,)*
)
.map_ok(move |page| {
// The first page is just an iter
let first = futures::stream::iter(
page.items.into_iter().map(Ok)
);
// We unfold subsequent pages using page.next_page as
// the seed value. Each iteration returns its items and
// the next page token.
let rest = futures::stream::try_unfold(
page.next_page,
move |state| async move {
if state.is_none() {
// The page_token was None so we've reached
// the end.
Ok(None)
} else {
// Get the next page; here we set all query
// parameters to None (except for the
// page_token), and all other parameters as
// specified at the start of this method.
self.#operation_id(
#(#step_params,)*
)
.map_ok(|page| {
Some((
futures::stream::iter(
page
.items
.into_iter()
.map(Ok),
),
page.next_page,
))
})
.await
}
},
)
.try_flatten();
first.chain(rest)
})
.try_flatten_stream()
.boxed()
}
}
});
let all = quote! {
#method_impl
#stream_impl
};
Ok(all)
}
// Validates all the necessary conditions for Dropshot pagination. Returns
// the paginated item type data if all conditions are met.
fn dropshot_pagination_data(
&self,
operation: &openapiv3::Operation,
parameters: &[OperationParameter],
responses: &[OperationResponse],
) -> Option<DropshotPagination> {
if operation
.extensions
.get("x-dropshot-pagination")
.and_then(|v| v.as_bool())
!= Some(true)
{
return None;
}
// We expect to see at least "page_token" and "limit" parameters.
if parameters
.iter()
.filter(|param| {
matches!(
(param.name.as_str(), &param.kind),
("page_token", OperationParameterKind::Query(_))
| ("limit", OperationParameterKind::Query(_))
)
})
.count()
!= 2
{
return None;
}
// All query parameters must be optional since page_token may not be
// specified in conjunction with other query parameters.
if !parameters.iter().all(|param| match &param.kind {
OperationParameterKind::Query(required) => !required,
_ => true,
}) {
return None;
}
// There must be exactly one successful response type.
let mut success_response_items =
responses.iter().filter_map(|response| {
match (&response.status_code, &response.typ) {
(
StatusCode::Code(200..=299) | StatusCode::Range(2),
OperationResponseType::Type(type_id),
) => Some(type_id),
_ => None,
}
});
let success_response = match (
success_response_items.next(),
success_response_items.next(),
) {
(None, _) | (_, Some(_)) => return None,
(Some(success), None) => success,
};
let typ = self.type_space.get_type(success_response).ok()?;
let details = match typ.details() {
typify::TypeDetails::Struct(details) => details,
_ => return None,
};
let properties = details.properties().collect::<HashMap<_, _>>();
// There should be exactly two properties: items and next_page
if properties.len() != 2 {
return None;
}
// We need a next_page property that's an Option<String>.
if let typify::TypeDetails::Option(ref opt_id) = self
.type_space
.get_type(properties.get("next_page")?)
.ok()?
.details()
{
if !matches!(
self.type_space.get_type(opt_id).ok()?.details(),
typify::TypeDetails::Builtin("String")
) {
return None;
}
} else {
return None;
}
match self
.type_space
.get_type(properties.get("items")?)
.ok()?
.details()
{
typify::TypeDetails::Array(item) => {
Some(DropshotPagination { item })
}
_ => None,
}
} }
pub fn generate_text(&mut self, spec: &OpenAPI) -> Result<String> { pub fn generate_text(&mut self, spec: &OpenAPI) -> Result<String> {
let output = self.generate_tokens(spec)?; let output = self.generate_tokens(spec)?;
// Format the file with rustfmt and some whitespace niceties. // Format the file with rustfmt.
let content = rustfmt_wrapper::rustfmt(output).unwrap(); let content = rustfmt_wrapper::rustfmt(output).unwrap();
// Add newlines after end-braces at <= two levels of indentation.
Ok(if cfg!(not(windows)) { Ok(if cfg!(not(windows)) {
let regex = regex::Regex::new(r#"(})(\n\s*[^} ])"#).unwrap(); let regex = regex::Regex::new(r#"(})(\n\s{0,8}[^} ])"#).unwrap();
regex.replace_all(&content, "$1\n$2").to_string() regex.replace_all(&content, "$1\n$2").to_string()
} else { } else {
let regex = regex::Regex::new(r#"(})(\r\n\s*[^} ])"#).unwrap(); let regex = regex::Regex::new(r#"(})(\r\n\s{0,8}[^} ])"#).unwrap();
regex.replace_all(&content, "$1\r\n$2").to_string() regex.replace_all(&content, "$1\r\n$2").to_string()
}) })
} }
@ -688,6 +947,9 @@ impl Generator {
if self.type_space.uses_chrono() { if self.type_space.uses_chrono() {
deps.push("chrono = { version = \"0.4\", features = [\"serde\"] }") deps.push("chrono = { version = \"0.4\", features = [\"serde\"] }")
} }
if self.uses_futures {
deps.push("futures = \"0.3\"")
}
if self.type_space.uses_serde_json() { if self.type_space.uses_serde_json() {
deps.push("serde_json = \"1.0\"") deps.push("serde_json = \"1.0\"")
} }

View File

@ -182,7 +182,7 @@ impl Client {
} }
#[doc = "control_hold: POST /v1/control/hold"] #[doc = "control_hold: POST /v1/control/hold"]
pub async fn control_hold(&self) -> Result<()> { pub async fn control_hold<'a>(&'a self) -> Result<()> {
let url = format!("{}/v1/control/hold", self.baseurl,); let url = format!("{}/v1/control/hold", self.baseurl,);
let request = self.client.post(url).build()?; let request = self.client.post(url).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
@ -191,16 +191,16 @@ impl Client {
} }
#[doc = "control_resume: POST /v1/control/resume"] #[doc = "control_resume: POST /v1/control/resume"]
pub async fn control_resume(&self) -> Result<()> { pub async fn control_resume<'a>(&'a self) -> Result<reqwest::Response> {
let url = format!("{}/v1/control/resume", self.baseurl,); let url = format!("{}/v1/control/resume", self.baseurl,);
let request = self.client.post(url).build()?; let request = self.client.post(url).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
let res = result?.error_for_status()?; let res = result?.error_for_status()?;
Ok(()) Ok(res)
} }
#[doc = "task_get: GET /v1/task/{task}"] #[doc = "task_get: GET /v1/task/{task}"]
pub async fn task_get(&self, task: &str) -> Result<types::Task> { pub async fn task_get<'a>(&'a self, task: &'a str) -> Result<types::Task> {
let url = format!( let url = format!(
"{}/v1/task/{}", "{}/v1/task/{}",
self.baseurl, self.baseurl,
@ -213,7 +213,7 @@ impl Client {
} }
#[doc = "tasks_get: GET /v1/tasks"] #[doc = "tasks_get: GET /v1/tasks"]
pub async fn tasks_get(&self) -> Result<Vec<types::Task>> { pub async fn tasks_get<'a>(&'a self) -> Result<Vec<types::Task>> {
let url = format!("{}/v1/tasks", self.baseurl,); let url = format!("{}/v1/tasks", self.baseurl,);
let request = self.client.get(url).build()?; let request = self.client.get(url).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
@ -222,7 +222,10 @@ impl Client {
} }
#[doc = "task_submit: POST /v1/tasks"] #[doc = "task_submit: POST /v1/tasks"]
pub async fn task_submit(&self, body: &types::TaskSubmit) -> Result<types::TaskSubmitResult> { pub async fn task_submit<'a>(
&'a self,
body: &'a types::TaskSubmit,
) -> Result<types::TaskSubmitResult> {
let url = format!("{}/v1/tasks", self.baseurl,); let url = format!("{}/v1/tasks", self.baseurl,);
let request = self.client.post(url).json(body).build()?; let request = self.client.post(url).json(body).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
@ -231,9 +234,9 @@ impl Client {
} }
#[doc = "task_events_get: GET /v1/tasks/{task}/events"] #[doc = "task_events_get: GET /v1/tasks/{task}/events"]
pub async fn task_events_get( pub async fn task_events_get<'a>(
&self, &'a self,
task: &str, task: &'a str,
minseq: Option<u32>, minseq: Option<u32>,
) -> Result<Vec<types::TaskEvent>> { ) -> Result<Vec<types::TaskEvent>> {
let url = format!( let url = format!(
@ -253,7 +256,7 @@ impl Client {
} }
#[doc = "task_outputs_get: GET /v1/tasks/{task}/outputs"] #[doc = "task_outputs_get: GET /v1/tasks/{task}/outputs"]
pub async fn task_outputs_get(&self, task: &str) -> Result<Vec<types::TaskOutput>> { pub async fn task_outputs_get<'a>(&'a self, task: &'a str) -> Result<Vec<types::TaskOutput>> {
let url = format!( let url = format!(
"{}/v1/tasks/{}/outputs", "{}/v1/tasks/{}/outputs",
self.baseurl, self.baseurl,
@ -266,10 +269,10 @@ impl Client {
} }
#[doc = "task_output_download: GET /v1/tasks/{task}/outputs/{output}"] #[doc = "task_output_download: GET /v1/tasks/{task}/outputs/{output}"]
pub async fn task_output_download( pub async fn task_output_download<'a>(
&self, &'a self,
task: &str, task: &'a str,
output: &str, output: &'a str,
) -> Result<reqwest::Response> { ) -> Result<reqwest::Response> {
let url = format!( let url = format!(
"{}/v1/tasks/{}/outputs/{}", "{}/v1/tasks/{}/outputs/{}",
@ -284,7 +287,10 @@ impl Client {
} }
#[doc = "user_create: POST /v1/users"] #[doc = "user_create: POST /v1/users"]
pub async fn user_create(&self, body: &types::UserCreate) -> Result<types::UserCreateResult> { pub async fn user_create<'a>(
&'a self,
body: &'a types::UserCreate,
) -> Result<types::UserCreateResult> {
let url = format!("{}/v1/users", self.baseurl,); let url = format!("{}/v1/users", self.baseurl,);
let request = self.client.post(url).json(body).build()?; let request = self.client.post(url).json(body).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
@ -293,7 +299,7 @@ impl Client {
} }
#[doc = "whoami: GET /v1/whoami"] #[doc = "whoami: GET /v1/whoami"]
pub async fn whoami(&self) -> Result<types::WhoamiResult> { pub async fn whoami<'a>(&'a self) -> Result<types::WhoamiResult> {
let url = format!("{}/v1/whoami", self.baseurl,); let url = format!("{}/v1/whoami", self.baseurl,);
let request = self.client.get(url).build()?; let request = self.client.get(url).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
@ -302,9 +308,9 @@ impl Client {
} }
#[doc = "worker_bootstrap: POST /v1/worker/bootstrap"] #[doc = "worker_bootstrap: POST /v1/worker/bootstrap"]
pub async fn worker_bootstrap( pub async fn worker_bootstrap<'a>(
&self, &'a self,
body: &types::WorkerBootstrap, body: &'a types::WorkerBootstrap,
) -> Result<types::WorkerBootstrapResult> { ) -> Result<types::WorkerBootstrapResult> {
let url = format!("{}/v1/worker/bootstrap", self.baseurl,); let url = format!("{}/v1/worker/bootstrap", self.baseurl,);
let request = self.client.post(url).json(body).build()?; let request = self.client.post(url).json(body).build()?;
@ -314,7 +320,7 @@ impl Client {
} }
#[doc = "worker_ping: GET /v1/worker/ping"] #[doc = "worker_ping: GET /v1/worker/ping"]
pub async fn worker_ping(&self) -> Result<types::WorkerPingResult> { pub async fn worker_ping<'a>(&'a self) -> Result<types::WorkerPingResult> {
let url = format!("{}/v1/worker/ping", self.baseurl,); let url = format!("{}/v1/worker/ping", self.baseurl,);
let request = self.client.get(url).build()?; let request = self.client.get(url).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
@ -323,11 +329,11 @@ impl Client {
} }
#[doc = "worker_task_append: POST /v1/worker/task/{task}/append"] #[doc = "worker_task_append: POST /v1/worker/task/{task}/append"]
pub async fn worker_task_append( pub async fn worker_task_append<'a>(
&self, &'a self,
task: &str, task: &'a str,
body: &types::WorkerAppendTask, body: &'a types::WorkerAppendTask,
) -> Result<()> { ) -> Result<reqwest::Response> {
let url = format!( let url = format!(
"{}/v1/worker/task/{}/append", "{}/v1/worker/task/{}/append",
self.baseurl, self.baseurl,
@ -336,13 +342,13 @@ impl Client {
let request = self.client.post(url).json(body).build()?; let request = self.client.post(url).json(body).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
let res = result?.error_for_status()?; let res = result?.error_for_status()?;
Ok(()) Ok(res)
} }
#[doc = "worker_task_upload_chunk: POST /v1/worker/task/{task}/chunk"] #[doc = "worker_task_upload_chunk: POST /v1/worker/task/{task}/chunk"]
pub async fn worker_task_upload_chunk<B: Into<reqwest::Body>>( pub async fn worker_task_upload_chunk<'a, B: Into<reqwest::Body>>(
&self, &'a self,
task: &str, task: &'a str,
body: B, body: B,
) -> Result<types::UploadedChunk> { ) -> Result<types::UploadedChunk> {
let url = format!( let url = format!(
@ -357,11 +363,11 @@ impl Client {
} }
#[doc = "worker_task_complete: POST /v1/worker/task/{task}/complete"] #[doc = "worker_task_complete: POST /v1/worker/task/{task}/complete"]
pub async fn worker_task_complete( pub async fn worker_task_complete<'a>(
&self, &'a self,
task: &str, task: &'a str,
body: &types::WorkerCompleteTask, body: &'a types::WorkerCompleteTask,
) -> Result<()> { ) -> Result<reqwest::Response> {
let url = format!( let url = format!(
"{}/v1/worker/task/{}/complete", "{}/v1/worker/task/{}/complete",
self.baseurl, self.baseurl,
@ -370,15 +376,15 @@ impl Client {
let request = self.client.post(url).json(body).build()?; let request = self.client.post(url).json(body).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
let res = result?.error_for_status()?; let res = result?.error_for_status()?;
Ok(()) Ok(res)
} }
#[doc = "worker_task_add_output: POST /v1/worker/task/{task}/output"] #[doc = "worker_task_add_output: POST /v1/worker/task/{task}/output"]
pub async fn worker_task_add_output( pub async fn worker_task_add_output<'a>(
&self, &'a self,
task: &str, task: &'a str,
body: &types::WorkerAddOutput, body: &'a types::WorkerAddOutput,
) -> Result<()> { ) -> Result<reqwest::Response> {
let url = format!( let url = format!(
"{}/v1/worker/task/{}/output", "{}/v1/worker/task/{}/output",
self.baseurl, self.baseurl,
@ -387,11 +393,11 @@ impl Client {
let request = self.client.post(url).json(body).build()?; let request = self.client.post(url).json(body).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
let res = result?.error_for_status()?; let res = result?.error_for_status()?;
Ok(()) Ok(res)
} }
#[doc = "workers_list: GET /v1/workers"] #[doc = "workers_list: GET /v1/workers"]
pub async fn workers_list(&self) -> Result<types::WorkersResult> { pub async fn workers_list<'a>(&'a self) -> Result<types::WorkersResult> {
let url = format!("{}/v1/workers", self.baseurl,); let url = format!("{}/v1/workers", self.baseurl,);
let request = self.client.get(url).build()?; let request = self.client.get(url).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
@ -400,11 +406,11 @@ impl Client {
} }
#[doc = "workers_recycle: POST /v1/workers/recycle"] #[doc = "workers_recycle: POST /v1/workers/recycle"]
pub async fn workers_recycle(&self) -> Result<()> { pub async fn workers_recycle<'a>(&'a self) -> Result<reqwest::Response> {
let url = format!("{}/v1/workers/recycle", self.baseurl,); let url = format!("{}/v1/workers/recycle", self.baseurl,);
let request = self.client.post(url).build()?; let request = self.client.post(url).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
let res = result?.error_for_status()?; let res = result?.error_for_status()?;
Ok(()) Ok(res)
} }
} }

View File

@ -123,16 +123,16 @@ impl Client {
} }
#[doc = "enrol: POST /enrol"] #[doc = "enrol: POST /enrol"]
pub async fn enrol(&self, body: &types::EnrolBody) -> Result<()> { pub async fn enrol<'a>(&'a self, body: &'a types::EnrolBody) -> Result<reqwest::Response> {
let url = format!("{}/enrol", self.baseurl,); let url = format!("{}/enrol", self.baseurl,);
let request = self.client.post(url).json(body).build()?; let request = self.client.post(url).json(body).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
let res = result?.error_for_status()?; let res = result?.error_for_status()?;
Ok(()) Ok(res)
} }
#[doc = "global_jobs: GET /global/jobs"] #[doc = "global_jobs: GET /global/jobs"]
pub async fn global_jobs(&self) -> Result<types::GlobalJobsResult> { pub async fn global_jobs<'a>(&'a self) -> Result<types::GlobalJobsResult> {
let url = format!("{}/global/jobs", self.baseurl,); let url = format!("{}/global/jobs", self.baseurl,);
let request = self.client.get(url).build()?; let request = self.client.get(url).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
@ -141,7 +141,7 @@ impl Client {
} }
#[doc = "ping: GET /ping"] #[doc = "ping: GET /ping"]
pub async fn ping(&self) -> Result<types::PingResult> { pub async fn ping<'a>(&'a self) -> Result<types::PingResult> {
let url = format!("{}/ping", self.baseurl,); let url = format!("{}/ping", self.baseurl,);
let request = self.client.get(url).build()?; let request = self.client.get(url).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;
@ -150,9 +150,9 @@ impl Client {
} }
#[doc = "report_finish: POST /report/finish"] #[doc = "report_finish: POST /report/finish"]
pub async fn report_finish( pub async fn report_finish<'a>(
&self, &'a self,
body: &types::ReportFinishBody, body: &'a types::ReportFinishBody,
) -> Result<types::ReportResult> { ) -> Result<types::ReportResult> {
let url = format!("{}/report/finish", self.baseurl,); let url = format!("{}/report/finish", self.baseurl,);
let request = self.client.post(url).json(body).build()?; let request = self.client.post(url).json(body).build()?;
@ -162,9 +162,9 @@ impl Client {
} }
#[doc = "report_output: POST /report/output"] #[doc = "report_output: POST /report/output"]
pub async fn report_output( pub async fn report_output<'a>(
&self, &'a self,
body: &types::ReportOutputBody, body: &'a types::ReportOutputBody,
) -> Result<types::ReportResult> { ) -> Result<types::ReportResult> {
let url = format!("{}/report/output", self.baseurl,); let url = format!("{}/report/output", self.baseurl,);
let request = self.client.post(url).json(body).build()?; let request = self.client.post(url).json(body).build()?;
@ -174,7 +174,10 @@ impl Client {
} }
#[doc = "report_start: POST /report/start"] #[doc = "report_start: POST /report/start"]
pub async fn report_start(&self, body: &types::ReportStartBody) -> Result<types::ReportResult> { pub async fn report_start<'a>(
&'a self,
body: &'a types::ReportStartBody,
) -> Result<types::ReportResult> {
let url = format!("{}/report/start", self.baseurl,); let url = format!("{}/report/start", self.baseurl,);
let request = self.client.post(url).json(body).build()?; let request = self.client.post(url).json(body).build()?;
let result = self.client.execute(request).await; let result = self.client.execute(request).await;

File diff suppressed because it is too large Load Diff

View File

@ -14,3 +14,10 @@ getopts = "0.2"
openapiv3 = "1.0.0-beta.5" openapiv3 = "1.0.0-beta.5"
serde = { version = "1.0", features = [ "derive" ] } serde = { version = "1.0", features = [ "derive" ] }
serde_json = "1.0" serde_json = "1.0"
[dev-dependencies]
chrono = { version = "0.4", features = ["serde"] }
futures = "0.3"
percent-encoding = "2.1"
reqwest = { version = "0.11", features = ["json", "stream"] }
uuid = { version = "0.8", features = ["serde", "v4"] }

View File

@ -0,0 +1,3 @@
// Copyright 2021 Oxide Computer Company
progenitor::generate_api!("../sample_openapi/buildomat.json");

View File

@ -0,0 +1,3 @@
// Copyright 2021 Oxide Computer Company
progenitor::generate_api!("../sample_openapi/keeper.json");

View File

@ -0,0 +1,3 @@
// Copyright 2021 Oxide Computer Company
progenitor::generate_api!("../sample_openapi/nexus.json");

View File

@ -187,13 +187,21 @@
}, },
"required": true "required": true
}, },
"responses": {} "responses": {
"default": {
"description": ""
}
}
} }
}, },
"/logout": { "/logout": {
"post": { "post": {
"operationId": "logout", "operationId": "logout",
"responses": {} "responses": {
"default": {
"description": ""
}
}
} }
}, },
"/organizations": { "/organizations": {
@ -974,6 +982,36 @@
"description": "List disks attached to this instance.", "description": "List disks attached to this instance.",
"operationId": "instance_disks_get", "operationId": "instance_disks_get",
"parameters": [ "parameters": [
{
"in": "query",
"name": "limit",
"schema": {
"nullable": true,
"description": "Maximum number of items returned by a single call",
"type": "integer",
"format": "uint32",
"minimum": 1
},
"style": "form"
},
{
"in": "query",
"name": "page_token",
"schema": {
"nullable": true,
"description": "Token returned by previous call to retreive the subsequent page",
"type": "string"
},
"style": "form"
},
{
"in": "query",
"name": "sort_by",
"schema": {
"$ref": "#/components/schemas/NameSortMode"
},
"style": "form"
},
{ {
"in": "path", "in": "path",
"name": "instance_name", "name": "instance_name",
@ -1008,11 +1046,64 @@
"content": { "content": {
"application/json": { "application/json": {
"schema": { "schema": {
"title": "Array_of_DiskAttachment", "$ref": "#/components/schemas/DiskResultsPage"
"type": "array", }
"items": { }
"$ref": "#/components/schemas/DiskAttachment" }
} }
},
"x-dropshot-pagination": true
}
},
"/organizations/{organization_name}/projects/{project_name}/instances/{instance_name}/disks/attach": {
"post": {
"operationId": "instance_disks_attach",
"parameters": [
{
"in": "path",
"name": "instance_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "organization_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "project_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/DiskIdentifier"
}
}
},
"required": true
},
"responses": {
"202": {
"description": "successfully enqueued operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Disk"
} }
} }
} }
@ -1020,20 +1111,10 @@
} }
} }
}, },
"/organizations/{organization_name}/projects/{project_name}/instances/{instance_name}/disks/{disk_name}": { "/organizations/{organization_name}/projects/{project_name}/instances/{instance_name}/disks/detach": {
"get": { "post": {
"description": "Fetch a description of the attachment of this disk to this instance.", "operationId": "instance_disks_detach",
"operationId": "instance_disks_get_disk",
"parameters": [ "parameters": [
{
"in": "path",
"name": "disk_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{ {
"in": "path", "in": "path",
"name": "instance_name", "name": "instance_name",
@ -1062,119 +1143,28 @@
"style": "simple" "style": "simple"
} }
], ],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/DiskIdentifier"
}
}
},
"required": true
},
"responses": { "responses": {
"200": { "202": {
"description": "successful operation", "description": "successfully enqueued operation",
"content": { "content": {
"application/json": { "application/json": {
"schema": { "schema": {
"$ref": "#/components/schemas/DiskAttachment" "$ref": "#/components/schemas/Disk"
} }
} }
} }
} }
} }
},
"put": {
"description": "Attach a disk to this instance.",
"operationId": "instance_disks_put_disk",
"parameters": [
{
"in": "path",
"name": "disk_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "instance_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "organization_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "project_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
}
],
"responses": {
"201": {
"description": "successful creation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/DiskAttachment"
}
}
}
}
}
},
"delete": {
"description": "Detach a disk from this instance.",
"operationId": "instance_disks_delete_disk",
"parameters": [
{
"in": "path",
"name": "disk_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "instance_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "organization_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "project_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
}
],
"responses": {
"204": {
"description": "successful deletion"
}
}
} }
}, },
"/organizations/{organization_name}/projects/{project_name}/instances/{instance_name}/reboot": { "/organizations/{organization_name}/projects/{project_name}/instances/{instance_name}/reboot": {
@ -1522,8 +1512,8 @@
"required": true "required": true
}, },
"responses": { "responses": {
"200": { "204": {
"description": "successful operation" "description": "resource updated"
} }
} }
}, },
@ -1939,8 +1929,8 @@
"required": true "required": true
}, },
"responses": { "responses": {
"200": { "204": {
"description": "successful operation" "description": "resource updated"
} }
} }
}, },
@ -2268,8 +2258,8 @@
"required": true "required": true
}, },
"responses": { "responses": {
"200": { "204": {
"description": "successful operation" "description": "resource updated"
} }
} }
}, },
@ -2570,8 +2560,8 @@
"required": true "required": true
}, },
"responses": { "responses": {
"200": { "204": {
"description": "successful operation" "description": "resource updated"
} }
} }
}, },
@ -2623,6 +2613,166 @@
} }
} }
}, },
"/organizations/{organization_name}/projects/{project_name}/vpcs/{vpc_name}/subnets/{subnet_name}/ips": {
"get": {
"description": "List IP addresses on a VPC subnet.",
"operationId": "subnets_ips_get",
"parameters": [
{
"in": "query",
"name": "limit",
"schema": {
"nullable": true,
"description": "Maximum number of items returned by a single call",
"type": "integer",
"format": "uint32",
"minimum": 1
},
"style": "form"
},
{
"in": "query",
"name": "page_token",
"schema": {
"nullable": true,
"description": "Token returned by previous call to retreive the subsequent page",
"type": "string"
},
"style": "form"
},
{
"in": "query",
"name": "sort_by",
"schema": {
"$ref": "#/components/schemas/NameSortMode"
},
"style": "form"
},
{
"in": "path",
"name": "organization_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "project_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "subnet_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
},
{
"in": "path",
"name": "vpc_name",
"required": true,
"schema": {
"$ref": "#/components/schemas/Name"
},
"style": "simple"
}
],
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NetworkInterfaceResultsPage"
}
}
}
}
},
"x-dropshot-pagination": true
}
},
"/roles": {
"get": {
"description": "List the built-in roles",
"operationId": "roles_get",
"parameters": [
{
"in": "query",
"name": "limit",
"schema": {
"nullable": true,
"description": "Maximum number of items returned by a single call",
"type": "integer",
"format": "uint32",
"minimum": 1
},
"style": "form"
},
{
"in": "query",
"name": "page_token",
"schema": {
"nullable": true,
"description": "Token returned by previous call to retreive the subsequent page",
"type": "string"
},
"style": "form"
}
],
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/RoleResultsPage"
}
}
}
}
},
"x-dropshot-pagination": true
}
},
"/roles/{role_name}": {
"get": {
"description": "Fetch a specific built-in role",
"operationId": "roles_get_role",
"parameters": [
{
"in": "path",
"name": "role_name",
"required": true,
"schema": {
"description": "The built-in role's unique name.",
"type": "string"
},
"style": "simple"
}
],
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Role"
}
}
}
}
}
}
},
"/sagas": { "/sagas": {
"get": { "get": {
"description": "List all sagas (for debugging)", "description": "List all sagas (for debugging)",
@ -2704,6 +2854,49 @@
} }
} }
}, },
"/timeseries/schema": {
"get": {
"description": "List all timeseries schema",
"operationId": "timeseries_schema_get",
"parameters": [
{
"in": "query",
"name": "limit",
"schema": {
"nullable": true,
"description": "Maximum number of items returned by a single call",
"type": "integer",
"format": "uint32",
"minimum": 1
},
"style": "form"
},
{
"in": "query",
"name": "page_token",
"schema": {
"nullable": true,
"description": "Token returned by previous call to retreive the subsequent page",
"type": "string"
},
"style": "form"
}
],
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/TimeseriesSchemaResultsPage"
}
}
}
}
},
"x-dropshot-pagination": true
}
},
"/users": { "/users": {
"get": { "get": {
"description": "List the built-in system users", "description": "List the built-in system users",
@ -2793,6 +2986,21 @@
"format": "uint64", "format": "uint64",
"minimum": 0 "minimum": 0
}, },
"DatumType": {
"description": "The type of an individual datum of a metric.",
"type": "string",
"enum": [
"Bool",
"I64",
"F64",
"String",
"Bytes",
"CumulativeI64",
"CumulativeF64",
"HistogramI64",
"HistogramF64"
]
},
"Disk": { "Disk": {
"description": "Client view of an [`Disk`]", "description": "Client view of an [`Disk`]",
"type": "object", "type": "object",
@ -2855,32 +3063,6 @@
"timeModified" "timeModified"
] ]
}, },
"DiskAttachment": {
"description": "Describes a Disk's attachment to an Instance",
"type": "object",
"properties": {
"diskId": {
"type": "string",
"format": "uuid"
},
"diskName": {
"$ref": "#/components/schemas/Name"
},
"diskState": {
"$ref": "#/components/schemas/DiskState"
},
"instanceId": {
"type": "string",
"format": "uuid"
}
},
"required": [
"diskId",
"diskName",
"diskState",
"instanceId"
]
},
"DiskCreate": { "DiskCreate": {
"description": "Create-time parameters for a [`Disk`]", "description": "Create-time parameters for a [`Disk`]",
"type": "object", "type": "object",
@ -2912,6 +3094,18 @@
"size" "size"
] ]
}, },
"DiskIdentifier": {
"description": "Parameters for the [`Disk`] to be attached or detached to an instance",
"type": "object",
"properties": {
"disk": {
"$ref": "#/components/schemas/Name"
}
},
"required": [
"disk"
]
},
"DiskResultsPage": { "DiskResultsPage": {
"description": "A single page of results", "description": "A single page of results",
"type": "object", "type": "object",
@ -3058,6 +3252,45 @@
} }
] ]
}, },
"FieldSchema": {
"description": "The name and type information for a field of a timeseries schema.",
"type": "object",
"properties": {
"name": {
"type": "string"
},
"source": {
"$ref": "#/components/schemas/FieldSource"
},
"ty": {
"$ref": "#/components/schemas/FieldType"
}
},
"required": [
"name",
"source",
"ty"
]
},
"FieldSource": {
"description": "The source from which a field is derived, the target or metric.",
"type": "string",
"enum": [
"Target",
"Metric"
]
},
"FieldType": {
"description": "The `FieldType` identifies the data type of a target or metric field.",
"type": "string",
"enum": [
"String",
"I64",
"IpAddr",
"Uuid",
"Bool"
]
},
"IdentityMetadata": { "IdentityMetadata": {
"description": "Identity-related metadata that's included in nearly all public API objects", "description": "Identity-related metadata that's included in nearly all public API objects",
"type": "object", "type": "object",
@ -3279,6 +3512,14 @@
"username" "username"
] ]
}, },
"MacAddr": {
"title": "A MAC address",
"description": "A Media Access Control address, in EUI-48 format",
"type": "string",
"pattern": "^([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$",
"minLength": 17,
"maxLength": 17
},
"Name": { "Name": {
"title": "A name used in the API", "title": "A name used in the API",
"description": "Names must begin with a lower case ASCII letter, be composed exclusively of lowercase ASCII, uppercase ASCII, numbers, and '-', and may not end with a '-'.", "description": "Names must begin with a lower case ASCII letter, be composed exclusively of lowercase ASCII, uppercase ASCII, numbers, and '-', and may not end with a '-'.",
@ -3286,6 +3527,77 @@
"pattern": "[a-z](|[a-zA-Z0-9-]*[a-zA-Z0-9])", "pattern": "[a-z](|[a-zA-Z0-9-]*[a-zA-Z0-9])",
"maxLength": 63 "maxLength": 63
}, },
"NetworkInterface": {
"description": "A `NetworkInterface` represents a virtual network interface device.",
"type": "object",
"properties": {
"identity": {
"description": "common identifying metadata",
"allOf": [
{
"$ref": "#/components/schemas/IdentityMetadata"
}
]
},
"instance_id": {
"description": "The Instance to which the interface belongs.",
"type": "string",
"format": "uuid"
},
"ip": {
"description": "The IP address assigned to this interface.",
"type": "string",
"format": "ip"
},
"mac": {
"description": "The MAC address assigned to this interface.",
"allOf": [
{
"$ref": "#/components/schemas/MacAddr"
}
]
},
"subnet_id": {
"description": "The subnet to which the interface belongs.",
"type": "string",
"format": "uuid"
},
"vpc_id": {
"description": "The VPC to which the interface belongs.",
"type": "string",
"format": "uuid"
}
},
"required": [
"identity",
"instance_id",
"ip",
"mac",
"subnet_id",
"vpc_id"
]
},
"NetworkInterfaceResultsPage": {
"description": "A single page of results",
"type": "object",
"properties": {
"items": {
"description": "list of items on this page of results",
"type": "array",
"items": {
"$ref": "#/components/schemas/NetworkInterface"
}
},
"next_page": {
"nullable": true,
"description": "token used to fetch the next page of results (if any)",
"type": "string"
}
},
"required": [
"items"
]
},
"Organization": { "Organization": {
"description": "Client view of an [`Organization`]", "description": "Client view of an [`Organization`]",
"type": "object", "type": "object",
@ -3514,6 +3826,50 @@
"items" "items"
] ]
}, },
"Role": {
"description": "Client view of a [`Role`]",
"type": "object",
"properties": {
"description": {
"type": "string"
},
"name": {
"$ref": "#/components/schemas/RoleName"
}
},
"required": [
"description",
"name"
]
},
"RoleName": {
"title": "A name for a built-in role",
"description": "Role names consist of two string components separated by dot (\".\").",
"type": "string",
"pattern": "[a-z-]+\\.[a-z-]+",
"maxLength": 63
},
"RoleResultsPage": {
"description": "A single page of results",
"type": "object",
"properties": {
"items": {
"description": "list of items on this page of results",
"type": "array",
"items": {
"$ref": "#/components/schemas/Role"
}
},
"next_page": {
"nullable": true,
"description": "token used to fetch the next page of results (if any)",
"type": "string"
}
},
"required": [
"items"
]
},
"RouteDestination": { "RouteDestination": {
"description": "A subset of [`NetworkTarget`], `RouteDestination` specifies the kind of network traffic that will be matched to be forwarded to the [`RouteTarget`].", "description": "A subset of [`NetworkTarget`], `RouteDestination` specifies the kind of network traffic that will be matched to be forwarded to the [`RouteTarget`].",
"oneOf": [ "oneOf": [
@ -4037,6 +4393,61 @@
"items" "items"
] ]
}, },
"TimeseriesName": {
"title": "The name of a timeseries",
"description": "Names are constructed by concatenating the target and metric names with ':'. Target and metric names must be lowercase alphanumeric characters with '_' separating words.",
"type": "string",
"pattern": "(([a-z]+[a-z0-9]*)(_([a-z0-9]+))*):(([a-z]+[a-z0-9]*)(_([a-z0-9]+))*)"
},
"TimeseriesSchema": {
"description": "The schema for a timeseries.\n\nThis includes the name of the timeseries, as well as the datum type of its metric and the schema for each field.",
"type": "object",
"properties": {
"created": {
"type": "string",
"format": "date-time"
},
"datum_type": {
"$ref": "#/components/schemas/DatumType"
},
"field_schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/FieldSchema"
}
},
"timeseries_name": {
"$ref": "#/components/schemas/TimeseriesName"
}
},
"required": [
"created",
"datum_type",
"field_schema",
"timeseries_name"
]
},
"TimeseriesSchemaResultsPage": {
"description": "A single page of results",
"type": "object",
"properties": {
"items": {
"description": "list of items on this page of results",
"type": "array",
"items": {
"$ref": "#/components/schemas/TimeseriesSchema"
}
},
"next_page": {
"nullable": true,
"description": "token used to fetch the next page of results (if any)",
"type": "string"
}
},
"required": [
"items"
]
},
"User": { "User": {
"description": "Client view of a [`User`]", "description": "Client view of a [`User`]",
"type": "object", "type": "object",