Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

CLI plugin #1

Merged
merged 13 commits into from
Mar 15, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
155 changes: 90 additions & 65 deletions Cargo.lock

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@ members = [
"crates/dc-api",
"crates/dc-api-types",
"crates/dc-api-test-helpers",
"crates/ndc-test-helpers"
"crates/ndc-test-helpers",
"crates/cli"
]
resolver = "2"

Expand Down
21 changes: 21 additions & 0 deletions connector-definition/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Packages the connector definition tarball consumed by the Hasura CLI.
# `make` (or `make build`) produces dist/connector-definition.tgz.
.DEFAULT_GOAL := build
# bash is required for `shopt` in the tar recipe below.
SHELL = /usr/bin/env bash

.PHONY: build
build: dist/connector-definition.tgz

.PHONY: clean
clean:
	rm -rf dist

# Create the output directory layout expected by the packaging format.
dist dist/.hasura-connector:
	mkdir dist
	mkdir dist/.hasura-connector

# DOCKER_IMAGE must be provided on the command line, e.g.
#   make DOCKER_IMAGE=org/image:tag
# The $(error ...) fires only when this target actually runs without it.
dist/.hasura-connector/connector-metadata.yaml: DOCKER_IMAGE ?= $(error The DOCKER_IMAGE variable must be defined)
dist/.hasura-connector/connector-metadata.yaml: connector-metadata.yaml dist/.hasura-connector
	cp -f connector-metadata.yaml dist/.hasura-connector/
	yq -i '.packagingDefinition.dockerImage = "$(DOCKER_IMAGE)"' dist/.hasura-connector/connector-metadata.yaml

# `shopt -s dotglob` makes `*` include the .hasura-connector dotdir in the tarball.
dist/connector-definition.tgz: dist/.hasura-connector/connector-metadata.yaml
	shopt -s dotglob && cd dist && tar -czvf connector-definition.tgz *
15 changes: 15 additions & 0 deletions connector-definition/connector-metadata.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Connector metadata consumed by the Hasura CLI when installing this connector.
packagingDefinition:
  type: PrebuiltDockerImage
  # Intentionally left empty here; the Makefile injects the concrete image tag
  # via `yq '.packagingDefinition.dockerImage = ...'` at build time.
  dockerImage:
supportedEnvironmentVariables:
  - name: MONGODB_DATABASE_URI
    description: The URI for the MongoDB database
commands:
  # Maps the CLI `update` command to the plugin binary's `update` subcommand.
  update: hasura-mongodb update
cliPlugin:
  name: hasura-mongodb
  version: "0.0.1"
dockerComposeWatch:
  # Sync local configuration changes into the running container and restart it.
  - path: ./
    target: /etc/connector
    action: sync+restart
23 changes: 23 additions & 0 deletions crates/cli/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
[package]
name = "mongodb-cli-plugin"
edition = "2021"
# Version is inherited from the workspace root Cargo.toml.
version.workspace = true

# The binary is named `hasura-mongodb` so the Hasura CLI can discover it as a plugin.
[[bin]]
name = "hasura-mongodb"
path = "./src/main.rs"

[dependencies]
# Sibling workspace crates.
configuration = { path = "../configuration" }
mongodb-agent-common = { path = "../mongodb-agent-common" }
mongodb = "2.8"
mongodb-support = { path = "../mongodb-support" }

anyhow = "1.0.80"
clap = { version = "4.5.1", features = ["derive", "env"] }
futures-util = "0.3.28"
indexmap = { version = "1", features = ["serde"] } # must match the version that ndc-client uses
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0.113", features = ["raw_value"] }
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Something I often ask people to do is not to set exact dependency versions in Cargo.toml - that's what Cargo.lock is for. But it's less of a problem in a program vs a library.

thiserror = "1.0.57"
tokio = { version = "1.36.0", features = ["full"] }
180 changes: 180 additions & 0 deletions crates/cli/src/introspection.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,180 @@
use configuration::{
metadata::{Collection, ObjectField, ObjectType, Type},
Metadata,
};
use futures_util::{StreamExt, TryStreamExt};
use indexmap::IndexMap;
use mongodb::bson::from_bson;
use mongodb_agent_common::schema::{get_property_description, Property, ValidatorSchema};
use mongodb_support::{BsonScalarType, BsonType};

use mongodb_agent_common::interface_types::{MongoAgentError, MongoConfig};

/// Build connector [Metadata] by introspecting the MongoDB database named in `config`.
///
/// Lists every collection in the database. For each collection that declares a
/// `$jsonSchema` validator, the validator is deserialized into a [ValidatorSchema];
/// collections without a validator fall back to an empty object schema, so they
/// still appear in the metadata (with just the implicit `_id` field added by
/// `make_collection`).
///
/// # Errors
///
/// Returns a [MongoAgentError] if listing collections fails, or
/// [MongoAgentError::BadCollectionSchema] if a validator's `$jsonSchema`
/// cannot be deserialized.
pub async fn get_metadata_from_validation_schema(
    config: &MongoConfig,
) -> Result<Metadata, MongoAgentError> {
    let db = config.client.database(&config.database);
    let collections_cursor = db.list_collections(None, None).await?;

    // Each collection maps to (its generated object types, its Collection entry);
    // try_collect unzips the stream into the two vectors, short-circuiting on error.
    let (object_types, collections) = collections_cursor
        .into_stream()
        .map(
            |collection_spec| -> Result<(Vec<ObjectType>, Collection), MongoAgentError> {
                let collection_spec_value = collection_spec?;
                let name = &collection_spec_value.name;
                // The validator document, if present, nests its JSON Schema
                // under the "$jsonSchema" key.
                let schema_bson_option = collection_spec_value
                    .options
                    .validator
                    .as_ref()
                    .and_then(|x| x.get("$jsonSchema"));

                match schema_bson_option {
                    Some(schema_bson) => {
                        // Report a malformed validator together with the collection
                        // name and the offending BSON for easier diagnosis.
                        from_bson::<ValidatorSchema>(schema_bson.clone()).map_err(|err| {
                            MongoAgentError::BadCollectionSchema(
                                name.to_owned(),
                                schema_bson.clone(),
                                err,
                            )
                        })
                    }
                    // No validator: synthesize an empty object schema so the
                    // collection is still represented in the generated metadata.
                    None => Ok(ValidatorSchema {
                        bson_type: BsonType::Object,
                        description: None,
                        required: Vec::new(),
                        properties: IndexMap::new(),
                    }),
                }
                .map(|validator_schema| make_collection(name, &validator_schema))
            },
        )
        .try_collect::<(Vec<Vec<ObjectType>>, Vec<Collection>)>()
        .await?;

    Ok(Metadata {
        collections,
        // Flatten the per-collection batches of object types into one list.
        object_types: object_types.concat(),
    })
}

/// Translate one collection's validator schema into its [Collection] entry plus
/// every [ObjectType] definition generated along the way (one for the collection
/// itself, plus one per nested object property, recursively).
fn make_collection(
    collection_name: &str,
    validator_schema: &ValidatorSchema,
) -> (Vec<ObjectType>, Collection) {
    // Nested object types are namespaced under the collection name.
    let type_prefix = format!("{collection_name}_");

    // Convert each declared property into a field, accumulating any nested
    // object type definitions produced on the way.
    let mut object_type_defs: Vec<ObjectType> = Vec::new();
    let mut object_fields: Vec<ObjectField> = Vec::new();
    for prop in validator_schema.properties.iter() {
        let (nested_otds, field) =
            make_object_field(&type_prefix, &validator_schema.required, prop);
        object_type_defs.extend(nested_otds);
        object_fields.push(field);
    }

    // MongoDB always supplies an `_id` primary key, so add the field unless the
    // validator already declared it explicitly.
    if !object_fields.iter().any(|info| info.name == "_id") {
        object_fields.push(ObjectField {
            name: "_id".to_string(),
            description: Some("primary key _id".to_string()),
            r#type: Type::Scalar(BsonScalarType::ObjectId),
        });
    }

    // The collection's own object type comes after any nested type definitions.
    object_type_defs.push(ObjectType {
        name: collection_name.to_string(),
        description: Some(format!("Object type for collection {collection_name}")),
        fields: object_fields,
    });

    let collection_info = Collection {
        name: collection_name.to_string(),
        description: validator_schema.description.clone(),
        r#type: collection_name.to_string(),
    };

    (object_type_defs, collection_info)
}

/// Produce the [ObjectField] for a single schema property, together with any
/// object type definitions generated for nested object structures.
///
/// A property that does not appear in `required_labels` is wrapped in a
/// nullable type.
fn make_object_field(
    type_prefix: &str,
    required_labels: &[String],
    (prop_name, prop_schema): (&String, &Property),
) -> (Vec<ObjectType>, ObjectField) {
    // Name any nested object type by prefixing the property name.
    let nested_type_name = format!("{type_prefix}{prop_name}");
    let (collected_otds, field_type) = make_field_type(&nested_type_name, prop_schema);

    let is_required = required_labels.contains(prop_name);
    let object_field = ObjectField {
        name: prop_name.clone(),
        description: get_property_description(prop_schema),
        r#type: maybe_nullable(field_type, !is_required),
    };

    (collected_otds, object_field)
}

/// Wrap `t` in a nullable type when `is_nullable` is set; otherwise return it
/// unchanged.
fn maybe_nullable(
    t: configuration::metadata::Type,
    is_nullable: bool,
) -> configuration::metadata::Type {
    match is_nullable {
        true => configuration::metadata::Type::Nullable(Box::new(t)),
        false => t,
    }
}

/// Map one validator [Property] onto a metadata [Type], collecting any
/// [ObjectType] definitions generated for nested objects along the way.
///
/// * `Object` properties produce a new object type named `object_type_name`
///   (plus the types of their own nested properties, recursively).
/// * `Array` properties recurse into the item schema and wrap the result in
///   [Type::ArrayOf].
/// * `Scalar` properties map directly to [Type::Scalar].
fn make_field_type(object_type_name: &str, prop_schema: &Property) -> (Vec<ObjectType>, Type) {
    let mut collected_otds: Vec<ObjectType> = vec![];

    match prop_schema {
        Property::Object {
            bson_type: _,
            description: _,
            required,
            properties,
        } => {
            // Nested types of this object are namespaced under its own name.
            let type_prefix = format!("{object_type_name}_");
            let (otds, otd_fields): (Vec<Vec<ObjectType>>, Vec<ObjectField>) = properties
                .iter()
                .map(|prop| make_object_field(&type_prefix, required, prop))
                .unzip();

            let object_type_definition = ObjectType {
                name: object_type_name.to_string(),
                description: Some("generated from MongoDB validation schema".to_string()),
                fields: otd_fields,
            };

            collected_otds.append(&mut otds.concat());
            collected_otds.push(object_type_definition);

            (collected_otds, Type::Object(object_type_name.to_string()))
        }
        Property::Array {
            bson_type: _,
            description: _,
            items,
        } => {
            // Borrow the boxed item schema directly (deref coercion from
            // &Box<Property> to &Property) instead of deep-cloning the whole
            // sub-schema as the previous `*items.clone()` did.
            let (mut otds, element_type) = make_field_type(object_type_name, items);
            let field_type = Type::ArrayOf(Box::new(element_type));

            collected_otds.append(&mut otds);

            (collected_otds, field_type)
        }
        Property::Scalar {
            bson_type,
            description: _,
        } => (collected_otds, Type::Scalar(bson_type.to_owned())),
    }
}
40 changes: 40 additions & 0 deletions crates/cli/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
//! The interpretation of the commands that the CLI can handle.

mod introspection;

use std::path::PathBuf;

use clap::Subcommand;

use configuration::Configuration;
use mongodb_agent_common::interface_types::MongoConfig;

/// The command invoked by the user.
///
/// Parsed as a clap subcommand; extend this enum to add new CLI commands.
#[derive(Debug, Clone, Subcommand)]
pub enum Command {
    /// Update the configuration by introspecting the database, using the configuration options.
    Update,
}

/// Everything a command needs to run: where the configuration lives and how to
/// reach the database.
pub struct Context {
    // Directory containing (or to receive) the connector configuration.
    pub path: PathBuf,
    // Initialized MongoDB connection state used for introspection.
    pub mongo_config: MongoConfig,
}

/// Run a command in a given directory.
///
/// Dispatches on [Command]; each arm returns the subcommand's result directly.
pub async fn run(command: Command, context: &Context) -> anyhow::Result<()> {
    match command {
        Command::Update => update(context).await,
    }
}

/// Update the configuration in the current directory by introspecting the database.
///
/// Introspects the database reachable via `context.mongo_config` and writes the
/// resulting configuration to `context.path`.
async fn update(context: &Context) -> anyhow::Result<()> {
    let configuration = Configuration {
        metadata: introspection::get_metadata_from_validation_schema(&context.mongo_config)
            .await?,
    };
    configuration::write_directory(&context.path, &configuration).await?;
    Ok(())
}
54 changes: 54 additions & 0 deletions crates/cli/src/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
//! The CLI application. This is used to configure a deployment of mongo-agent-v3.
//!
//! This is intended to be automatically downloaded and invoked via the Hasura CLI, as a plugin.
//! It is unlikely that end-users will use it directly.

use anyhow::anyhow;
use std::env;
use std::path::PathBuf;

use clap::Parser;
use mongodb_agent_common::state::{try_init_state_from_uri, DATABASE_URI_ENV_VAR};
use mongodb_cli_plugin::{run, Command, Context};

/// The command-line arguments.
#[derive(Debug, Parser)]
pub struct Args {
    /// The path to the configuration. Defaults to the current directory.
    #[arg(
        long = "context-path",
        env = "HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH",
        value_name = "DIRECTORY"
    )]
    pub context_path: Option<PathBuf>,

    // MongoDB connection URI; may also be supplied via the environment variable
    // named by DATABASE_URI_ENV_VAR. Kept as a `//` comment (not `///`) so the
    // clap-generated --help text is unchanged.
    #[arg(
        long = "connection-uri",
        env = DATABASE_URI_ENV_VAR,
        required = true,
        value_name = "URI"
    )]
    pub connection_uri: String,

    /// The command to invoke.
    #[command(subcommand)]
    pub subcommand: Command,
}

/// The application entrypoint. It pulls information from the environment and then calls the [run]
/// function. The library remains unaware of the environment, so that we can more easily test it.
#[tokio::main]
pub async fn main() -> anyhow::Result<()> {
let args = Args::parse();
// Default the context path to the current directory.
let path = match args.context_path {
Some(path) => path,
None => env::current_dir()?,
};
let mongo_config = try_init_state_from_uri(&args.connection_uri)
.await
.map_err(|e| anyhow!("Error initializing MongoDB state {}", e))?;
let context = Context { path, mongo_config };
run(args.subcommand, &context).await?;
Ok(())
}
4 changes: 2 additions & 2 deletions crates/configuration/src/configuration.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
use std::{io, path::Path};

use schemars::JsonSchema;
use serde::Deserialize;
use serde::{Deserialize, Serialize};

use crate::{read_directory, Metadata};

#[derive(Clone, Debug, Default, Deserialize, JsonSchema)]
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct Configuration {
pub metadata: Metadata,
Expand Down
Loading
Loading