feat: generate with serializers

Signed-off-by: kjuulh <contact@kjuulh.io>
Kasper Juul Hermansen 2023-09-24 21:09:40 +02:00
parent fb20207593
commit c32aab5630
Signed by: kjuulh
GPG Key ID: 9AA7BC13CE474394
11 changed files with 242 additions and 126 deletions

Cargo.lock (generated)
View File

@@ -194,6 +194,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "crunch",
+ "prost 0.12.1",
  "tokio",
  "tracing",
  "tracing-subscriber",
@@ -560,6 +561,7 @@ dependencies = [
  "prost 0.12.1",
  "prost-build",
  "prost-types 0.12.1",
+ "regex",
  "tempfile",
  "tokio",
  "tracing",

View File

@@ -31,6 +31,6 @@ bytes = {version = "0.4"}
 tempfile = {version = "3.8.0"}
 genco = {version = "0.17.5"}
 walkdir = {version = "2.4.0"}
+regex = {version = "1.9.5"}
 pretty_assertions = "1.4.0"

View File

@@ -20,6 +20,7 @@ bytes.workspace = true
 tempfile.workspace = true
 genco.workspace = true
 walkdir.workspace = true
+regex.workspace = true

 [dev-dependencies]
 pretty_assertions.workspace = true

View File

@@ -1,9 +1,125 @@
 use anyhow::anyhow;
 use genco::prelude::*;
-use std::path::{Path, PathBuf};
+use regex::Regex;
+use std::{
+    collections::HashMap,
+    path::{Path, PathBuf},
+};
 use tokio::io::AsyncWriteExt;
 use walkdir::WalkDir;

+#[derive(Debug)]
+struct Node {
+    file: Option<String>,
+    messages: Option<Vec<String>>,
+    segment: String,
+    children: HashMap<String, Node>,
+}
+
+impl Node {
+    fn new(segment: String, file: Option<String>, messages: Option<Vec<String>>) -> Self {
+        Node {
+            file,
+            messages,
+            segment,
+            children: HashMap::new(),
+        }
+    }
+
+    fn insert(&mut self, file_name: &str, messages: Vec<String>) {
+        let mut node = self;
+        let file_name_content = PathBuf::from(file_name);
+        let file_name_content = file_name_content.file_stem().unwrap();
+        let file_name_content = file_name_content.to_string_lossy().to_lowercase();
+        let segments = file_name_content.split(".").collect::<Vec<_>>();
+        for (i, segment) in segments.iter().enumerate() {
+            node = node.children.entry(segment.to_string()).or_insert_with(|| {
+                Node::new(
+                    segment.to_string(),
+                    if i + 1 == segments.len() {
+                        Some(file_name.into())
+                    } else {
+                        None
+                    },
+                    if i + 1 == segments.len() {
+                        Some(messages.clone())
+                    } else {
+                        None
+                    },
+                )
+            });
+        }
+    }
+
+    fn traverse(&self) -> genco::lang::rust::Tokens {
+        for (_, node) in self.children.iter() {
+            return node.traverse_indent(0);
+        }
+        self.traverse_indent(0)
+    }
+
+    fn traverse_indent(&self, indent: usize) -> genco::lang::rust::Tokens {
+        let padding = " ".repeat(indent * 4);
+        let mut message_tokens = Vec::new();
+        if let Some(file) = &self.file {
+            if let Some(messages) = &self.messages {
+                for message in messages.iter() {
+                    let tokens: genco::lang::rust::Tokens = quote! {
+                        $['\r']$(&padding)impl ::crunch::Serializer for $(message) {
+                        $['\r']$(&padding) fn serialize(&self) -> Result<Vec<u8>, ::crunch::errors::SerializeError> {
+                        $['\r']$(&padding) todo!()
+                        $['\r']$(&padding) }
+                        $['\r']$(&padding)}
+                        $['\r']$(&padding)impl ::crunch::Deserializer for $(message) {
+                        $['\r']$(&padding) fn deserialize(_raw: Vec<u8>) -> Result<Self, ::crunch::errors::DeserializeError>
+                        $['\r']$(&padding) where
+                        $['\r']$(&padding) Self: Sized,
+                        $['\r']$(&padding) {
+                        $['\r']$(&padding) todo!()
+                        $['\r']$(&padding) }
+                        $['\r']$(&padding)}
+                        $['\r']$(&padding)
+                        $['\r']$(&padding)impl Event for $(message) {
+                        $['\r']$(&padding) fn event_info() -> ::crunch::traits::EventInfo {
+                        $['\r']$(&padding) EventInfo {
+                        $['\r']$(&padding) domain: "my-domain",
+                        $['\r']$(&padding) entity_type: "my-entity-type",
+                        $['\r']$(&padding) event_name: "my-event-name",
+                        $['\r']$(&padding) }
+                        $['\r']$(&padding) }
+                        $['\r']$(&padding)}
+                    };
+                    message_tokens.push(tokens);
+                }
+            }
+            quote! {
+                $['\r']$(&padding)pub mod $(&self.segment) {
+                $['\r']$(&padding)include!($(quoted(file)));
+                $['\r']$(&padding)$(for tokens in message_tokens join ($['\r']) => $tokens)
+                $['\r']$(&padding)}
+            }
+        } else {
+            let mut child_tokens = Vec::new();
+            for (_children, nodes) in &self.children {
+                let tokens = nodes.traverse_indent(indent + 1);
+                child_tokens.push(tokens);
+            }
+            quote! {
+                $['\r']$(&padding)pub mod $(&self.segment) {
+                $(&padding)$(for tokens in child_tokens join ($['\r']) => $tokens)
+                $['\r']$(&padding)}
+            }
+        }
+    }
+}
+
 pub struct Codegen {}

 impl Codegen {
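As a rough sketch of how the Node tree above is meant to be used (the file and message names here are illustrative, not part of the commit): each `.`-separated segment of a generated file's stem becomes one nested module, and the leaf module includes the file and gains the serializer stubs for its messages.

    let mut root = Node::new("root".into(), None, None);
    root.insert("basic.my_event.rs", vec!["MyEvent".into()]);

    // traverse() skips the synthetic "root" node and renders roughly:
    //
    //   pub mod basic {
    //       pub mod my_event {
    //           include!("basic.my_event.rs");
    //           impl ::crunch::Serializer for MyEvent { /* todo!() stub */ }
    //           impl ::crunch::Deserializer for MyEvent { /* todo!() stub */ }
    //           impl Event for MyEvent { /* placeholder event_info() */ }
    //       }
    //   }
    let rendered = root.traverse().to_file_string().expect("render mod.rs contents");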
@@ -12,6 +128,10 @@ impl Codegen {
     }

     pub async fn generate_rust(&self, input_path: &Path, output_path: &Path) -> anyhow::Result<()> {
+        if output_path.exists() {
+            tokio::fs::remove_dir_all(output_path).await?;
+        }
+
         let input_protos = self.discover_files(input_path, "proto")?;
         let (input_proto_paths, input_dir) = self.copy_protos(input_protos, input_path).await?;
         let (output_proto_paths, temp_output_dir) = self
@@ -110,30 +230,26 @@
     ) -> anyhow::Result<PathBuf> {
         let mod_path = output_tempdir_path.join("mod.rs");
         let mut mod_file = tokio::fs::File::create(&mod_path).await?;
-        let mut includes: Vec<genco::lang::rust::Tokens> = Vec::new();
-
+        let mut node = Node::new("root".into(), None, None);
+        let regex = Regex::new(r"pub struct (?P<eventName>[a-zA-Z0-9-_]+)")
+            .expect("regex to be well formed");
         for generated_file in output_paths {
             if let Some(name) = generated_file.file_name() {
-                let mod_name = generated_file
-                    .file_stem()
-                    .unwrap()
-                    .to_ascii_lowercase()
-                    .to_string_lossy()
-                    .replace(".", "_")
-                    .replace("-", "_");
                 let file_name = name.to_str().unwrap();
+                let file = tokio::fs::read_to_string(generated_file).await?;
+                let messages = regex
+                    .captures_iter(&file)
+                    .map(|m| m.name("eventName").unwrap())
+                    .map(|m| m.as_str().to_string())
+                    .collect();

-                includes.push(genco::quote! {
-                    pub mod $(mod_name) {
-                        include!($(quoted(file_name)));
-                    }
-                });
+                node.insert(file_name, messages);
             }
         }

         let mod_tokens: genco::lang::rust::Tokens = genco::quote! {
-            $(for tokens in includes join($['\n']) => $tokens)
+            $(node.traverse())
         };

         let mod_contents = mod_tokens.to_file_string()?;
         mod_file.write_all(mod_contents.as_bytes()).await?;
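Message discovery itself is just a regex scan over each prost-generated file: every message becomes a `pub struct <Name>` declaration (see the generated files further down), and the capture group collects the names that later receive Serializer/Deserializer/Event impls. A self-contained sketch of that step, reusing the commit's pattern (the function name is hypothetical):

    use regex::Regex;

    fn message_names(generated_rust: &str) -> Vec<String> {
        // Same pattern as the codegen above: capture the identifier following `pub struct`.
        let regex = Regex::new(r"pub struct (?P<eventName>[a-zA-Z0-9-_]+)")
            .expect("regex to be well formed");
        regex
            .captures_iter(generated_rust)
            .map(|c| c.name("eventName").unwrap().as_str().to_string())
            .collect()
    }

    // message_names("pub struct MyEvent { ... }") yields ["MyEvent"].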
@@ -178,109 +294,22 @@ impl Default for Codegen {
 #[cfg(test)]
 mod tests {
-    use genco::prelude::*;
-    use tokio::io::AsyncWriteExt;
+    use super::*;

-    #[tokio::test]
-    async fn test_can_generate_output_rust() -> anyhow::Result<()> {
-        // Generate from protobuf
-        let proto_spec = r#"
-syntax = "proto3";
-
-import "includes/test_include.proto";
-
-package test.can.generate.output.rust;
-
-message MyEvent {
-  string name = 1;
-}
-        "#;
-        let proto_include_spec = r#"
-syntax = "proto3";
-
-package test.can.generate.output.rust.include.test_include;
-
-message MyInclude {
-  string name = 1;
-}
-        "#;
-
-        let out_tempdir = tempfile::TempDir::new()?;
-        let in_tempdir = tempfile::TempDir::new()?;
-
-        let proto_path = in_tempdir.path().join("test.proto");
-        let mut proto_file = tokio::fs::File::create(&proto_path).await?;
-        proto_file.write_all(proto_spec.as_bytes()).await?;
-        proto_file.sync_all().await?;
-
-        tokio::fs::create_dir_all(in_tempdir.path().join("includes")).await?;
-        let proto_include_path = in_tempdir.path().join("includes/test_include.proto");
-        let mut proto_file = tokio::fs::File::create(&proto_include_path).await?;
-        proto_file.write_all(proto_include_spec.as_bytes()).await?;
-        proto_file.sync_all().await?;
-
-        let out_tempdir_path = out_tempdir.into_path();
-        let handle = tokio::task::spawn_blocking({
-            let out_tempdir_path = out_tempdir_path.clone();
-            move || {
-                prost_build::Config::new()
-                    .out_dir(out_tempdir_path)
-                    .compile_protos(&[proto_path, proto_include_path], &[in_tempdir.into_path()])?;
-
-                Ok(())
-            }
-        });
-        let result: anyhow::Result<()> = handle.await?;
-        result?;
-
-        let mut entries = tokio::fs::read_dir(&out_tempdir_path).await?;
-        let mut file_paths = Vec::new();
-        while let Some(entry) = entries.next_entry().await? {
-            if let Some(ext) = entry.path().extension().and_then(|e| e.to_str()) {
-                if ext == "rs" {
-                    file_paths.push(entry.path());
-                }
-            }
-        }
-
-        // Generate mod.rs
-        let mod_path = out_tempdir_path.join("mod.rs");
-        let mut mod_file = tokio::fs::File::create(&mod_path).await?;
-        let mut includes: Vec<genco::lang::rust::Tokens> = Vec::new();
-        for generated_file in &file_paths {
-            if let Some(name) = generated_file.file_name() {
-                let mod_name = generated_file
-                    .file_stem()
-                    .unwrap()
-                    .to_ascii_lowercase()
-                    .to_string_lossy()
-                    .replace(".", "_")
-                    .replace("-", "_");
-                let file_name = name.to_str().unwrap();
-
-                includes.push(genco::quote! {
-                    pub mod $(mod_name) {
-                        include!($(quoted(file_name)));
-                    }
-                });
-            }
-        }
-
-        let mod_tokens: genco::lang::rust::Tokens = genco::quote! {
-            $(for tokens in includes join($['\n']) => $tokens)
-        };
-        let mod_contents = mod_tokens.to_file_string()?;
-        pretty_assertions::assert_eq!("", mod_contents);
-        mod_file.write_all(mod_contents.as_bytes()).await?;
-
-        assert_eq!(1, file_paths.len());
-
-        Ok(())
+    #[test]
+    fn test_node() {
+        let mut root = Node::new("root".into(), None, None);
+
+        root.insert("basic.my_event.rs", vec!["One".into(), "Two".into()]);
+        root.insert("basic.includes.includes.rs", vec!["Three".into()]);
+        root.insert("basic.includes.includes-two.rs", Vec::new());
+
+        let res = root
+            .traverse()
+            .to_file_string()
+            .expect("to generate rust code");
+
+        pretty_assertions::assert_eq!(res, r#""#);
+
+        panic!();
     }
 }

View File

@@ -12,3 +12,4 @@ tracing.workspace = true
 tokio.workspace = true
 tracing-subscriber.workspace = true
 anyhow.workspace = true
+prost.workspace = true

View File

@@ -0,0 +1,7 @@
+syntax = "proto3";
+
+package basic.includes.my_include;
+
+message MyInclude {
+  string name = 1;
+}

View File

@@ -1,7 +1,10 @@
 syntax = "proto3";

-package test.can.generate.output.rust;
+import "includes/my_include.proto";
+
+package basic.my_event;

 message MyEvent {
   string name = 1;
+  basic.includes.my_include.MyInclude include = 2;
 }

View File

@@ -1,5 +1,5 @@
 #[derive(Clone, PartialEq, ::prost::Message)]
-pub struct MyEvent {
+pub struct MyInclude {
     #[prost(string, tag="1")]
     pub name: std::string::String,
 }

View File

@@ -0,0 +1,7 @@
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct MyEvent {
+    #[prost(string, tag="1")]
+    pub name: std::string::String,
+    #[prost(message, optional, tag="2")]
+    pub include: ::std::option::Option<super::includes::my_include::MyInclude>,
+}

View File

@@ -1 +1,58 @@
-pub mod test_can_generate_output_rust { include!("test.can.generate.output.rust.rs"); }
+pub mod basic {
+    pub mod my_event {
+        include!("basic.my_event.rs");
+        impl ::crunch::Serializer for MyEvent {
+            fn serialize(&self) -> Result<Vec<u8>, ::crunch::errors::SerializeError> {
+                todo!()
+            }
+        }
+        impl ::crunch::Deserializer for MyEvent {
+            fn deserialize(_raw: Vec<u8>) -> Result<Self, ::crunch::errors::DeserializeError>
+            where
+                Self: Sized,
+            {
+                todo!()
+            }
+        }
+
+        impl Event for MyEvent {
+            fn event_info() -> ::crunch::traits::EventInfo {
+                EventInfo {
+                    domain: "my-domain",
+                    entity_type: "my-entity-type",
+                    event_name: "my-event-name",
+                }
+            }
+        }
+    }
+    pub mod includes {
+        pub mod my_include {
+            include!("basic.includes.my_include.rs");
+            impl ::crunch::Serializer for MyInclude {
+                fn serialize(&self) -> Result<Vec<u8>, ::crunch::errors::SerializeError> {
+                    todo!()
+                }
+            }
+            impl ::crunch::Deserializer for MyInclude {
+                fn deserialize(_raw: Vec<u8>) -> Result<Self, ::crunch::errors::DeserializeError>
+                where
+                    Self: Sized,
+                {
+                    todo!()
+                }
+            }
+
+            impl Event for MyInclude {
+                fn event_info() -> ::crunch::traits::EventInfo {
+                    EventInfo {
+                        domain: "my-domain",
+                        entity_type: "my-entity-type",
+                        event_name: "my-event-name",
+                    }
+                }
+            }
+        }
+    }
+}

View File

@@ -1,14 +1,16 @@
-use crunch::traits::{Deserializer, Event, EventInfo, Serializer};
+mod crunch;
+
+use ::crunch::traits::{Deserializer, Event, EventInfo, Serializer};

 struct MyEvent {}

 impl Serializer for MyEvent {
-    fn serialize(&self) -> Result<Vec<u8>, crunch::errors::SerializeError> {
+    fn serialize(&self) -> Result<Vec<u8>, ::crunch::errors::SerializeError> {
         todo!()
     }
 }

 impl Deserializer for MyEvent {
-    fn deserialize(_raw: Vec<u8>) -> Result<Self, crunch::errors::DeserializeError>
+    fn deserialize(_raw: Vec<u8>) -> Result<Self, ::crunch::errors::DeserializeError>
     where
         Self: Sized,
     {
@@ -17,7 +19,7 @@ impl Deserializer for MyEvent {
 }

 impl Event for MyEvent {
-    fn event_info() -> crunch::traits::EventInfo {
+    fn event_info() -> ::crunch::traits::EventInfo {
         EventInfo {
             domain: "my-domain",
             entity_type: "my-entity-type",
@@ -28,7 +30,14 @@ impl Event for MyEvent {
 #[tokio::main]
 async fn main() -> anyhow::Result<()> {
-    let crunch = crunch::builder::Builder::default().build()?;
+    crunch::basic::my_event::MyEvent {
+        name: "some-name".into(),
+        include: Some(crunch::basic::includes::my_include::MyInclude {
+            name: "some-name".into(),
+        }),
+    };
+
+    let crunch = ::crunch::builder::Builder::default().build()?;
     crunch
         .subscribe(|_item: MyEvent| async move { Ok(()) })