feat: with beginning of omnia

Signed-off-by: kjuulh <contact@kjuulh.io>

parent 1f88524c16
commit c718124e85
@@ -1,3 +0,0 @@
[target.x86_64-unknown-linux-gnu]
linker = "/usr/bin/clang"
rustflags = ["-C", "link-arg=--ld-path=/usr/bin/mold"]
@@ -1,3 +0,0 @@
kind: template
load: drone-template.yaml
name: como
.gitignore (vendored)
@@ -1,4 +0,0 @@
/target
.cuddle/
node_modules/
.env
@@ -1,8 +0,0 @@
branches:
- "main"

plugins:
- "@semantic-release/commit-analyzer"
- "@semantic-release/release-notes-generator"
- "@semantic-release/changelog"
- "@semantic-release/git"
Cargo.lock (generated, 5515 lines changed)
File diff suppressed because it is too large.
Cargo.toml
@@ -1,53 +1,2 @@
 [workspace]
-members = [
-  "crates/*",
-  "ci"
-]
-resolver = "2"
-
-[workspace.dependencies]
-como_bin = { path = "./crates/como_bin/" }
-como_core = { path = "./crates/como_core/" }
-como_domain = { path = "./crates/como_domain/" }
-como_infrastructure = { path = "./crates/como_infrastructure/" }
-como_gql = { path = "./crates/como_gql/" }
-como_api = { path = "./crates/como_api/" }
-como_auth = { path = "./crates/como_auth/" }
-
-async-trait = "0.1.68"
-async-graphql = { version = "5.0.9", features = ["uuid"] }
-async-graphql-axum = "5.0.9"
-
-axum = { version = "0.6.18", features = ["headers", "macros"] }
-axum-extra = { version = "0.7.4", features = ["cookie", "cookie-private"] }
-axum-sessions = { version = "0.5.0" }
-async-sqlx-session = { version = "0.4.0", features = ["pg"] }
-
-serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0.68"
-
-sqlx = { version = "0.6.2", features = [
-  "runtime-tokio-rustls",
-  "postgres",
-  "migrate",
-  "uuid",
-  "offline",
-  "time",
-  "chrono",
-] }
-chrono = { version = "0.4.26", features = ["serde"] }
-
-tokio = { version = "1.28.2", features = ["full"] }
-
-uuid = { version = "1.3.3", features = ["v4", "fast-rng", "serde"] }
-anyhow = "1.0.71"
-dotenv = "0.15.0"
-tracing = "0.1.37"
-tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
-clap = { version = "4.3.0", features = ["derive", "env"] }
-
-argon2 = { version = "0.5.0" }
-rand_core = { version = "0.6.4" }
-
-pretty_assertions = "1.4.0"
-sealed_test = "1.0.0"
+members = [ "crates/*" ]
README.md
@@ -1,3 +1,76 @@
-# Cibus Backend
-
-Some text
+# Omnia
+
+`Omnia` is a tool to provide a common platform for everything. It is a tool
+built to support note-taking, handling personal relationships, project
+development, todos, research, upkeep of external work, and much more.
+
+This may seem like a large list of tasks, and it is. However, Omnia is designed
+to be opinionated, and to provide a minimalistic approach to each of the above.
+Omnia is not a general-purpose text-editing tool; it is designed to fit
+into your existing toolstack, with its opinionated project structure and
+workflow.
+
+Alternatives to this tool:
+
+- Notion: with second-brain templates etc.
+- Obsidian: with zen garden templates etc.
+
+Omnia is a command-line tool, which functions through a terminal UI or pure CLI
+commands, called `interactive` for the former and `prompt` for the latter. It
+is designed to work with your favorite EDITOR; to keep the scope of this project
+reasonable, `Omnia` doesn't try to bundle an editor. Instead it will
+respect your `OMNIA_EDITOR` or `EDITOR` environment variables, and launch the
+files using those.
+
+## How to use `Omnia`
+
+To launch the fully interactive view, simply type `omnia` in your shell of
+choice. This will boot up the TUI; press `?` for help and it will show a brief
+menu as well as the most common keybinds.
+
+In the following, each command is shown separately. These are available in the
+UI as well; just follow along in the menu, or use the command key `:` to open
+the command palette.
+
+### Commands
+
+In Omnia everything is designed to use Markdown files; even the templates are
+Markdown files, though with some special syntax to make prompting easier. This
+also means that you can open your local ~/omnia directory using your favorite
+editor, as everything is just Markdown files.
+
+Projects are the cornerstone of how Omnia functions. Every navigation item is a
+project, be it a todo list, a research item, an external site, etc. Projects can
+be nested, and projects can contain pages. External apps can be configured as a
+project, and will need a certain interface to be functional.
+
+This means that when you type a command:
+
+`omnia --help`, each subcommand will be a project. Some keywords are reserved
+(todo, inbox, project, etc.), each project decides which commands are available,
+and this is fully customizable through our plugin system. Note, though, that we
+ship the default view with a set of preconfigured plugins:
+
+- todo
+- inbox
+- projects
+- areas
+
+Typing each of the commands above will open the fully interactive TUI:
+`omnia todo`. Todo has a set of commands available to it, `omnia todo create` as
+an example. This will open the prompt view, which will interactively ask the user
+to fill out a form. These questions will also be available as command-line
+flags.
+
+### Views
+
+Some of the projects are built as views; this may be a list of recent
+notifications on GitHub, APIs of interest, metrics, and whatnot.
+
+## Remote first
+
+Omnia will sync remote-first in nearly all cases. The only exception is in-progress
+forms and whatnot. This is to keep complexity down, as well as making
+sure Omnia is as easily cross-platform as possible. You should be able to use
+`Omnia` from all your terminal-capable devices. This sprung out of my own need
+for having my notes available everywhere, without having conflicts.
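The README above only describes the `interactive` and `prompt` flows in prose; the following shell sketch illustrates what those invocations could look like. It is a hypothetical session based on that description — the `--title` and `--project` flags are assumptions, not options confirmed anywhere in this commit.

```sh
# Fully interactive TUI (press `?` inside for the help menu).
omnia

# Open the todo project directly in the TUI.
omnia todo

# Prompt flow: create a todo without entering the TUI.
# These flags are illustrative; the README only states that the form
# questions are also available as command-line flags.
omnia todo create --title "Write release notes" --project inbox
```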
crates/como/.gitignore (vendored, new file)
@@ -0,0 +1 @@
/target
@@ -1,5 +1,5 @@
 [package]
-name = "ci"
+name = "como"
 version = "0.1.0"
 edition = "2021"

@@ -1,34 +0,0 @@
[package]
name = "como_api"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
como_gql.workspace = true
como_core.workspace = true
como_domain.workspace = true
como_infrastructure.workspace = true
como_auth.workspace = true

async-trait.workspace = true
async-graphql.workspace = true
async-graphql-axum.workspace = true
axum.workspace = true
axum-extra.workspace = true
axum-sessions.workspace = true
serde.workspace = true
serde_json.workspace = true
tokio.workspace = true
uuid.workspace = true
sqlx.workspace = true
anyhow.workspace = true
tracing.workspace = true
async-sqlx-session.workspace = true

zitadel = { version = "3.3.1", features = ["axum"] }
tower = "0.4.13"
tower-http = { version = "0.4.0", features = ["cors", "trace"] }
oauth2 = "4.4.0"
openidconnect = "3.0.0"
@@ -1,160 +0,0 @@
use std::fmt::Display;

use crate::router::AppState;

use axum::extract::{FromRef, FromRequestParts, Query, State};

use axum::headers::authorization::Basic;
use axum::headers::{Authorization, Cookie};
use axum::http::request::Parts;
use axum::http::StatusCode;

use axum::response::{ErrorResponse, IntoResponse, Redirect};
use axum::routing::get;
use axum::{async_trait, Json, RequestPartsExt, Router, TypedHeader};

use como_domain::users::User;
use como_infrastructure::register::ServiceRegister;

use serde::Deserialize;
use serde_json::json;

#[derive(Debug, Deserialize)]
pub struct ZitadelAuthParams {
    #[allow(dead_code)]
    return_url: Option<String>,
}

trait AnyhowExtensions<T, E>
where
    E: Display,
{
    fn into_response(self) -> Result<T, ErrorResponse>;
}
impl<T, E> AnyhowExtensions<T, E> for anyhow::Result<T, E>
where
    E: Display,
{
    fn into_response(self) -> Result<T, ErrorResponse> {
        match self {
            Ok(o) => Ok(o),
            Err(e) => {
                tracing::error!("failed with anyhow error: {}", e);
                Err(ErrorResponse::from((
                    StatusCode::INTERNAL_SERVER_ERROR,
                    Json(json!({
                        "status": "something",
                    })),
                )))
            }
        }
    }
}

pub async fn zitadel_auth(
    State(services): State<ServiceRegister>,
) -> Result<impl IntoResponse, ErrorResponse> {
    let url = services.auth_service.login().await.into_response()?;

    Ok(Redirect::to(&url.to_string()))
}

#[derive(Debug, Deserialize)]
#[allow(dead_code)]
pub struct AuthRequest {
    code: String,
    state: String,
}

pub async fn login_authorized(
    Query(query): Query<AuthRequest>,
    State(services): State<ServiceRegister>,
) -> Result<impl IntoResponse, ErrorResponse> {
    let (headers, url) = services
        .auth_service
        .login_authorized(&query.code, &query.state)
        .await
        .into_response()?;

    Ok((headers, Redirect::to(url.as_str())))
}

pub struct AuthController;

impl AuthController {
    pub async fn new_router(
        _service_register: ServiceRegister,
        app_state: AppState,
    ) -> anyhow::Result<Router> {
        Ok(Router::new()
            .route("/zitadel", get(zitadel_auth))
            .route("/authorized", get(login_authorized))
            .with_state(app_state))
    }
}

pub struct UserFromSession {
    pub user: User,
}

pub static COOKIE_NAME: &str = "SESSION";

#[async_trait]
impl<S> FromRequestParts<S> for UserFromSession
where
    ServiceRegister: FromRef<S>,
    S: Send + Sync,
{
    type Rejection = (StatusCode, &'static str);

    async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
        let services = ServiceRegister::from_ref(state);

        let cookie: Option<TypedHeader<Cookie>> = parts.extract().await.unwrap();
        let session_cookie = cookie.as_ref().and_then(|cookie| cookie.get(COOKIE_NAME));
        if let None = session_cookie {
            let basic: Option<TypedHeader<Authorization<Basic>>> = parts.extract().await.unwrap();

            if let Some(basic) = basic {
                let token = services
                    .auth_service
                    .login_token(basic.username(), basic.password())
                    .await
                    .into_response()
                    .map_err(|_| {
                        (
                            StatusCode::INTERNAL_SERVER_ERROR,
                            "could not get token from basic",
                        )
                    })?;

                return Ok(UserFromSession {
                    user: User { id: token },
                });
            }

            return Err(anyhow::anyhow!("No session was found"))
                .into_response()
                .map_err(|_| (StatusCode::INTERNAL_SERVER_ERROR, "did not find a cookie"))?;
        }

        let session_cookie = session_cookie.unwrap();

        // continue to decode the session cookie
        let user = services
            .auth_service
            .get_user_from_session(session_cookie)
            .await
            .into_response()
            .map_err(|_| {
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    "failed to decode session cookie",
                )
            })?;

        Ok(UserFromSession {
            user: User { id: user.id },
        })
    }
}
@@ -1,48 +0,0 @@
use super::auth::UserFromSession;
use crate::router::AppState;

use async_graphql::http::{playground_source, GraphQLPlaygroundConfig};
use async_graphql::{EmptySubscription, Schema};
use async_graphql_axum::{GraphQLRequest, GraphQLResponse};
use axum::response::Html;
use axum::{http::StatusCode, response::IntoResponse, routing::get, Extension, Router};

use como_domain::user::ContextUserExt;
use como_domain::Context;
use como_gql::graphql::{ComoSchema, MutationRoot, QueryRoot};
use como_infrastructure::register::ServiceRegister;
use tower::ServiceBuilder;

pub struct GraphQLController;

impl GraphQLController {
    pub fn new_router(service_register: ServiceRegister, state: AppState) -> Router {
        let schema = Schema::build(QueryRoot, MutationRoot, EmptySubscription)
            .data(service_register)
            .finish();

        Router::new()
            .route("/", get(graphql_playground).post(graphql_handler))
            .layer(ServiceBuilder::new().layer(Extension(schema)))
            .with_state(state)
    }
}

pub async fn graphql_handler(
    user: UserFromSession,
    schema: Extension<ComoSchema>,
    req: GraphQLRequest,
) -> Result<GraphQLResponse, StatusCode> {
    let req = req.into_inner();
    let req = req.data(user.user.clone());

    let context = Context::new();
    let context = context.set_user_id(user.user.id.clone());
    let req = req.data(context);

    Ok(schema.execute(req).await.into())
}

pub async fn graphql_playground() -> impl IntoResponse {
    Html(playground_source(GraphQLPlaygroundConfig::new("/graphql")))
}
@@ -1,2 +0,0 @@
pub mod auth;
pub mod graphql;
@@ -1,3 +0,0 @@
mod controllers;
pub mod router;
pub mod zitadel;
@@ -1,73 +0,0 @@
use std::env;

use anyhow::Context;
use axum::extract::FromRef;
use axum::http::{HeaderValue, Method};
use axum::Router;
use como_infrastructure::register::ServiceRegister;
use tower::ServiceBuilder;
use tower_http::{cors::CorsLayer, trace::TraceLayer};

use crate::controllers::auth::AuthController;
use crate::controllers::graphql::GraphQLController;

pub struct Api;

impl Api {
    pub async fn new(
        port: u32,
        cors_origin: &str,
        service_register: ServiceRegister,
    ) -> anyhow::Result<()> {
        let app_state = AppState {
            service_register: service_register.clone(),
        };

        let router = Router::new()
            .nest(
                "/auth",
                AuthController::new_router(service_register.clone(), app_state.clone()).await?,
            )
            .nest(
                "/graphql",
                GraphQLController::new_router(service_register.clone(), app_state.clone()),
            )
            .layer(
                ServiceBuilder::new()
                    .layer(TraceLayer::new_for_http())
                    .layer(
                        CorsLayer::new()
                            .allow_origin(
                                cors_origin
                                    .parse::<HeaderValue>()
                                    .context("could not parse cors origin as header")?,
                            )
                            .allow_headers([axum::http::header::CONTENT_TYPE])
                            .allow_methods([Method::GET, Method::POST, Method::OPTIONS])
                            .allow_credentials(true),
                    ),
            );

        let host = env::var("HOST").unwrap_or("0.0.0.0".to_string());

        tracing::info!("running on: {host}:{}", port);

        axum::Server::bind(&format!("{host}:{}", port).parse().unwrap())
            .serve(router.into_make_service())
            .await
            .context("error while starting API")?;

        Ok(())
    }
}

#[derive(Clone)]
pub struct AppState {
    service_register: ServiceRegister,
}

impl FromRef<AppState> for ServiceRegister {
    fn from_ref(input: &AppState) -> Self {
        input.service_register.clone()
    }
}
@@ -1,67 +0,0 @@
use async_trait::async_trait;
use oauth2::{basic::BasicClient, AuthUrl, ClientId, ClientSecret, RedirectUrl, TokenUrl};
use std::{env, ops::Deref, sync::Arc};

#[async_trait]
pub trait OAuthClient {
    async fn get_token(&self) -> anyhow::Result<()>;
}

pub struct OAuth(Arc<dyn OAuthClient + Send + Sync + 'static>);

impl OAuth {
    pub fn new_zitadel() -> Self {
        Self(Arc::new(ZitadelOAuthClient {
            client: oauth_client(),
        }))
    }
    pub fn new_noop() -> Self {
        Self(Arc::new(NoopOAuthClient {}))
    }
}

impl Deref for OAuth {
    type Target = Arc<dyn OAuthClient + Send + Sync + 'static>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

pub struct NoopOAuthClient;

#[async_trait]
impl OAuthClient for NoopOAuthClient {
    async fn get_token(&self) -> anyhow::Result<()> {
        Ok(())
    }
}

pub struct ZitadelOAuthClient {
    client: BasicClient,
}

#[async_trait]
impl OAuthClient for ZitadelOAuthClient {
    async fn get_token(&self) -> anyhow::Result<()> {
        Ok(())
    }
}

fn oauth_client() -> BasicClient {
    let client_id = env::var("CLIENT_ID").expect("Missing CLIENT_ID!");
    let client_secret = env::var("CLIENT_SECRET").expect("Missing CLIENT_SECRET!");
    let redirect_url = env::var("REDIRECT_URL").expect("missing REDIRECT_URL");

    let auth_url = env::var("AUTH_URL").expect("missing AUTH_URL");

    let token_url = env::var("TOKEN_URL").expect("missing TOKEN_URL");

    BasicClient::new(
        ClientId::new(client_id),
        Some(ClientSecret::new(client_secret)),
        AuthUrl::new(auth_url).unwrap(),
        Some(TokenUrl::new(token_url).unwrap()),
    )
    .set_redirect_uri(RedirectUrl::new(redirect_url).unwrap())
}
@@ -1 +0,0 @@

@@ -1,30 +0,0 @@
[package]
name = "como_auth"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
clap.workspace = true
async-trait.workspace = true
axum.workspace = true
axum-extra.workspace = true
axum-sessions.workspace = true
serde.workspace = true
uuid.workspace = true
sqlx.workspace = true
anyhow.workspace = true
tracing.workspace = true
async-sqlx-session.workspace = true

zitadel = { version = "3.3.1", features = ["axum"] }
tower = "0.4.13"
tower-http = { version = "0.4.0", features = ["cors", "trace"] }
oauth2 = "4.4.0"
openidconnect = "3.0.0"

[dev-dependencies]
tokio.workspace = true
pretty_assertions.workspace = true
sealed_test.workspace = true
@@ -1,128 +0,0 @@
use std::{ops::Deref, sync::Arc};

use anyhow::Context;
use async_trait::async_trait;
use axum::http::{header::SET_COOKIE, HeaderMap};
use oauth2::url::Url;

use crate::{
    introspection::IntrospectionService,
    oauth::{OAuth, ZitadelConfig},
    session::{SessionService, User},
    AuthClap, AuthEngine,
};

#[async_trait]
pub trait Auth {
    async fn login(&self) -> anyhow::Result<Url>;
    async fn login_token(&self, user: &str, password: &str) -> anyhow::Result<String>;
    async fn login_authorized(&self, code: &str, state: &str) -> anyhow::Result<(HeaderMap, Url)>;
    async fn get_user_from_session(&self, cookie: &str) -> anyhow::Result<User>;
}

#[derive(Clone)]
pub struct AuthService(Arc<dyn Auth + Send + Sync + 'static>);

impl AuthService {
    pub async fn new(config: &AuthClap, session: SessionService) -> anyhow::Result<Self> {
        match config.engine {
            AuthEngine::Noop => Ok(Self::new_noop()),
            AuthEngine::Zitadel => {
                let oauth: OAuth = ZitadelConfig::try_from(config.zitadel.clone())?.into();
                let introspection: IntrospectionService =
                    IntrospectionService::new_zitadel(config).await?;

                Ok(Self::new_zitadel(oauth, introspection, session))
            }
        }
    }

    pub fn new_zitadel(
        oauth: OAuth,
        introspection: IntrospectionService,
        session: SessionService,
    ) -> Self {
        Self(Arc::new(ZitadelAuthService {
            oauth,
            introspection,
            session,
        }))
    }

    pub fn new_noop() -> Self {
        Self(Arc::new(NoopAuthService {}))
    }
}

impl Deref for AuthService {
    type Target = Arc<dyn Auth + Send + Sync + 'static>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

pub struct ZitadelAuthService {
    oauth: OAuth,
    introspection: IntrospectionService,
    session: SessionService,
}
pub static COOKIE_NAME: &str = "SESSION";

#[async_trait]
impl Auth for ZitadelAuthService {
    async fn login(&self) -> anyhow::Result<Url> {
        let authorize_url = self.oauth.authorize_url().await?;

        Ok(authorize_url)
    }
    async fn login_authorized(&self, code: &str, _state: &str) -> anyhow::Result<(HeaderMap, Url)> {
        let token = self.oauth.exchange(code).await?;
        let user_id = self.introspection.get_id_token(token.as_str()).await?;
        let cookie_value = self.session.insert_user("user", user_id.as_str()).await?;

        let cookie = format!("{}={}; SameSite=Lax; Path=/", COOKIE_NAME, cookie_value);

        let mut headers = HeaderMap::new();
        headers.insert(SET_COOKIE, cookie.parse().unwrap());

        Ok((
            headers,
            Url::parse("http://localhost:3000/dash/home")
                .context("failed to parse login_authorized zitadel return url")?,
        ))
    }
    async fn login_token(&self, _user: &str, password: &str) -> anyhow::Result<String> {
        self.introspection.get_id_token(password).await
    }
    async fn get_user_from_session(&self, cookie: &str) -> anyhow::Result<User> {
        match self.session.get_user(cookie).await? {
            Some(u) => Ok(User { id: u }),
            None => Err(anyhow::anyhow!("failed to find user")),
        }
    }
}

pub struct NoopAuthService {}

#[async_trait]
impl Auth for NoopAuthService {
    async fn login(&self) -> anyhow::Result<Url> {
        todo!()
    }
    async fn login_authorized(
        &self,
        _code: &str,
        _state: &str,
    ) -> anyhow::Result<(HeaderMap, Url)> {
        todo!()
    }

    async fn login_token(&self, _user: &str, _password: &str) -> anyhow::Result<String> {
        todo!()
    }

    async fn get_user_from_session(&self, _cookie: &str) -> anyhow::Result<User> {
        todo!()
    }
}
@@ -1,159 +0,0 @@
use std::{ops::Deref, sync::Arc};

use async_trait::async_trait;
use axum::extract::FromRef;
use oauth2::TokenIntrospectionResponse;
use openidconnect::IntrospectionUrl;
use zitadel::{
    axum::introspection::IntrospectionStateBuilderError,
    credentials::Application,
    oidc::{
        discovery::discover,
        introspection::{introspect, AuthorityAuthentication},
    },
};

use crate::AuthClap;

#[async_trait]
pub trait Introspection {
    async fn get_user(&self) -> anyhow::Result<()>;
    async fn get_id_token(&self, token: &str) -> anyhow::Result<String>;
}

pub struct IntrospectionService(Arc<dyn Introspection + Send + Sync + 'static>);
impl IntrospectionService {
    pub async fn new_zitadel(config: &AuthClap) -> anyhow::Result<Self> {
        let res = IntrospectionStateBuilder::new(&config.zitadel.authority_url.clone().unwrap())
            .with_basic_auth(
                &config.zitadel.client_id.clone().unwrap(),
                &config.zitadel.client_secret.clone().unwrap(),
            )
            .build()
            .await?;

        Ok(IntrospectionService(Arc::new(ZitadelIntrospection::new(
            res,
        ))))
    }
}

impl Deref for IntrospectionService {
    type Target = Arc<dyn Introspection + Send + Sync + 'static>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

pub struct ZitadelIntrospection {
    state: IntrospectionState,
}

impl ZitadelIntrospection {
    pub fn new(state: IntrospectionState) -> Self {
        Self { state }
    }
}

#[async_trait]
impl Introspection for ZitadelIntrospection {
    async fn get_user(&self) -> anyhow::Result<()> {
        Ok(())
    }
    async fn get_id_token(&self, token: &str) -> anyhow::Result<String> {
        let config = &self.state.config;
        let res = introspect(
            &config.introspection_uri,
            &config.authority,
            &config.authentication,
            token,
        )
        .await?;

        Ok(res
            .sub()
            .ok_or(anyhow::anyhow!("could not find a userid (sub) in token"))?
            .to_string())
    }
}

#[derive(Clone, Debug)]
pub struct IntrospectionState {
    pub(crate) config: IntrospectionConfig,
}

/// Configuration that must be inject into the axum application state. Used by the
/// [IntrospectionStateBuilder](super::IntrospectionStateBuilder). This struct is also used to create the [IntrospectionState](IntrospectionState)
#[derive(Debug, Clone)]
pub struct IntrospectionConfig {
    pub authority: String,
    pub authentication: AuthorityAuthentication,
    pub introspection_uri: IntrospectionUrl,
}

impl FromRef<IntrospectionState> for IntrospectionConfig {
    fn from_ref(input: &IntrospectionState) -> Self {
        input.config.clone()
    }
}

pub struct IntrospectionStateBuilder {
    authority: String,
    authentication: Option<AuthorityAuthentication>,
}

/// Builder for [IntrospectionConfig]
impl IntrospectionStateBuilder {
    pub fn new(authority: &str) -> Self {
        Self {
            authority: authority.to_string(),
            authentication: None,
        }
    }

    pub fn with_basic_auth(
        &mut self,
        client_id: &str,
        client_secret: &str,
    ) -> &mut IntrospectionStateBuilder {
        self.authentication = Some(AuthorityAuthentication::Basic {
            client_id: client_id.to_string(),
            client_secret: client_secret.to_string(),
        });

        self
    }

    #[allow(dead_code)]
    pub fn with_jwt_profile(&mut self, application: Application) -> &mut IntrospectionStateBuilder {
        self.authentication = Some(AuthorityAuthentication::JWTProfile { application });

        self
    }

    pub async fn build(&mut self) -> Result<IntrospectionState, IntrospectionStateBuilderError> {
        let authentication = self
            .authentication
            .clone()
            .ok_or(IntrospectionStateBuilderError::NoAuthSchema)?;

        let metadata = discover(&self.authority)
            .await
            .map_err(|source| IntrospectionStateBuilderError::Discovery { source })?;

        let introspection_uri = metadata
            .additional_metadata()
            .introspection_endpoint
            .clone()
            .ok_or(IntrospectionStateBuilderError::NoIntrospectionUrl)?;

        Ok(IntrospectionState {
            config: IntrospectionConfig {
                authority: self.authority.clone(),
                introspection_uri: introspection_uri,
                authentication: authentication,
            },
        })
    }
}
@@ -1,242 +0,0 @@
use oauth::{OAuth, ZitadelConfig};
use serde::{Deserialize, Serialize};

mod auth;
mod introspection;
mod oauth;
mod session;

pub use auth::{Auth, AuthService};
use session::SessionClap;
pub use session::SessionService;

#[derive(clap::ValueEnum, Clone, PartialEq, Eq, Debug)]
pub enum AuthEngine {
    Noop,
    Zitadel,
}

#[derive(clap::ValueEnum, Clone, PartialEq, Eq, Debug)]
pub enum SessionBackend {
    InMemory,
    Postgresql,
}

#[derive(clap::Args, Clone, PartialEq, Eq, Debug)]
pub struct AuthClap {
    #[arg(
        env = "AUTH_ENGINE",
        long = "auth-engine",
        requires_ifs = [
            ( "zitadel", "ZitadelClap" )
        ],
        default_value = "noop" )
    ]
    pub engine: AuthEngine,

    #[arg(
        env = "SESSION_BACKEND",
        long = "session-backend",
        requires_ifs = [
            ( "postgresql", "PostgresqlSessionClap" )
        ],
        default_value = "in-memory" )
    ]
    pub session_backend: SessionBackend,

    #[clap(flatten)]
    pub zitadel: ZitadelClap,

    #[clap(flatten)]
    pub session: SessionClap,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct AuthConfigFile {
    zitadel: Option<ZitadelClap>,
}

#[derive(clap::Args, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[group(requires_all = ["auth_url", "client_id", "client_secret", "redirect_url", "token_url", "authority_url"])]
pub struct ZitadelClap {
    #[arg(env = "ZITADEL_AUTH_URL", long = "zitadel-auth-url")]
    pub auth_url: Option<String>,

    #[arg(env = "ZITADEL_CLIENT_ID", long = "zitadel-client-id")]
    pub client_id: Option<String>,

    #[arg(env = "ZITADEL_CLIENT_SECRET", long = "zitadel-client-secret")]
    pub client_secret: Option<String>,

    #[arg(env = "ZITADEL_REDIRECT_URL", long = "zitadel-redirect-url")]
    pub redirect_url: Option<String>,

    #[arg(env = "ZITADEL_AUTHORITY_URL", long = "zitadel-authority-url")]
    pub authority_url: Option<String>,

    #[arg(env = "ZITADEL_TOKEN_URL", long = "zitadel-token-url")]
    pub token_url: Option<String>,
}

impl TryFrom<AuthClap> for OAuth {
    type Error = anyhow::Error;

    fn try_from(value: AuthClap) -> Result<Self, Self::Error> {
        match value.engine {
            AuthEngine::Noop => Ok(OAuth::new_noop()),
            AuthEngine::Zitadel => Ok(OAuth::from(ZitadelConfig::try_from(value.zitadel)?)),
        }
    }
}

impl AuthClap {
    pub fn merge(&mut self, config: AuthConfigFile) -> &mut Self {
        if let Some(zitadel) = config.zitadel {
            if let Some(auth_url) = zitadel.auth_url {
                if let Some(_) = self.zitadel.auth_url {
                    _ = self.zitadel.auth_url.replace(auth_url);
                }
            }
            if let Some(client_id) = zitadel.client_id {
                if let Some(_) = self.zitadel.client_id {
                    _ = self.zitadel.client_id.replace(client_id);
                }
            }
            if let Some(client_secret) = zitadel.client_secret {
                if let Some(_) = self.zitadel.client_secret {
                    _ = self.zitadel.client_secret.replace(client_secret);
                }
            }
            if let Some(redirect_url) = zitadel.redirect_url {
                if let Some(_) = self.zitadel.redirect_url {
                    _ = self.zitadel.redirect_url.replace(redirect_url);
                }
            }
            if let Some(authority_url) = zitadel.authority_url {
                if let Some(_) = self.zitadel.authority_url {
                    _ = self.zitadel.authority_url.replace(authority_url);
                }
            }
            if let Some(token_url) = zitadel.token_url {
                if let Some(_) = self.zitadel.token_url {
                    _ = self.zitadel.token_url.replace(token_url);
                }
            }
        }

        self
    }
}

#[cfg(test)]
mod test {
    use crate::{
        session::{PostgresqlSessionClap, SessionClap},
        AuthClap, AuthEngine, SessionBackend, ZitadelClap,
    };
    use clap::Parser;
    use pretty_assertions::assert_eq;

    #[derive(Parser)]
    #[command(author, version, about, long_about = None)]
    pub struct Cli {
        #[command(subcommand)]
        command: Commands,
    }

    #[derive(clap::Subcommand, Clone, Debug, Eq, PartialEq)]
    pub enum Commands {
        One {
            #[clap(flatten)]
            options: AuthClap,
        },
    }

    #[test]
    fn test_command_parse_as_default_noop() {
        let cli: Cli = Cli::parse_from(&["base", "one"]);

        assert_eq!(
            cli.command,
            Commands::One {
                options: AuthClap {
                    engine: AuthEngine::Noop,
                    zitadel: ZitadelClap {
                        auth_url: None,
                        client_id: None,
                        client_secret: None,
                        redirect_url: None,
                        token_url: None,
                        authority_url: None,
                    },
                    session_backend: SessionBackend::InMemory,
                    session: SessionClap {
                        postgresql: PostgresqlSessionClap { conn: None }
                    }
                }
            }
        );
    }

    #[test]
    fn test_command_parse_as_noop() {
        let cli: Cli = Cli::parse_from(&["base", "one", "--auth-engine", "noop"]);

        assert_eq!(
            cli.command,
            Commands::One {
                options: AuthClap {
                    engine: AuthEngine::Noop,
                    zitadel: ZitadelClap {
                        auth_url: None,
                        client_id: None,
                        client_secret: None,
                        redirect_url: None,
                        token_url: None,
                        authority_url: None,
                    },
                    session_backend: crate::SessionBackend::InMemory,
                    session: crate::SessionClap {
                        postgresql: PostgresqlSessionClap { conn: None }
                    }
                }
            }
        );
    }

    #[test]
    fn test_command_parse_as_zitadel() {
        let cli: Cli = Cli::parse_from(&[
            "base",
            "one",
            "--auth-engine=zitadel",
            "--zitadel-client-id=something",
            "--zitadel-client-secret=something",
            "--zitadel-auth-url=https://something",
            "--zitadel-redirect-url=https://something",
            "--zitadel-token-url=https://something",
            "--zitadel-authority-url=https://something",
        ]);

        assert_eq!(
            cli.command,
            Commands::One {
                options: AuthClap {
                    engine: AuthEngine::Zitadel,
                    zitadel: ZitadelClap {
                        auth_url: Some("https://something".into()),
                        client_id: Some("something".into()),
                        client_secret: Some("something".into()),
                        redirect_url: Some("https://something".into()),
                        token_url: Some("https://something".into()),
                        authority_url: Some("https://something".into()),
                    },
                    session_backend: crate::SessionBackend::InMemory,
                    session: crate::SessionClap {
                        postgresql: PostgresqlSessionClap { conn: None }
                    }
                },
            }
        );
    }
}
@@ -1,288 +0,0 @@
use async_trait::async_trait;
use oauth2::reqwest::async_http_client;
use oauth2::url::Url;
use oauth2::{basic::BasicClient, AuthUrl, ClientId, ClientSecret, RedirectUrl, TokenUrl};
use oauth2::{AuthorizationCode, CsrfToken, Scope, TokenResponse};
use std::ops::Deref;
use std::sync::Arc;

use crate::ZitadelClap;

#[async_trait]
pub trait OAuthClient {
    async fn get_token(&self) -> anyhow::Result<()>;
    async fn authorize_url(&self) -> anyhow::Result<Url>;
    async fn exchange(&self, code: &str) -> anyhow::Result<String>;
}

pub struct OAuth(Arc<dyn OAuthClient + Send + Sync + 'static>);

impl OAuth {
    pub fn new_zitadel(config: ZitadelConfig) -> Self {
        Self(Arc::new(ZitadelOAuthClient::from(config)))
    }
    pub fn new_noop() -> Self {
        Self(Arc::new(NoopOAuthClient {}))
    }
}

impl Deref for OAuth {
    type Target = Arc<dyn OAuthClient + Send + Sync + 'static>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl From<ZitadelConfig> for OAuth {
    fn from(value: ZitadelConfig) -> Self {
        Self::new_zitadel(value)
    }
}

// -- Noop
#[derive(clap::Args, Clone)]
pub struct NoopOAuthClient;
#[async_trait]
impl OAuthClient for NoopOAuthClient {
    async fn get_token(&self) -> anyhow::Result<()> {
        Ok(())
    }
    async fn authorize_url(&self) -> anyhow::Result<Url> {
        Ok(Url::parse("http://localhost:3000/auth/zitadel").unwrap())
    }

    async fn exchange(&self, _code: &str) -> anyhow::Result<String> {
        Ok(String::new())
    }
}

// -- Zitadel

#[derive(Clone)]
pub struct ZitadelConfig {
    auth_url: String,
    client_id: String,
    client_secret: String,
    redirect_url: String,
    token_url: String,
    authority_url: String,
}

pub struct ZitadelOAuthClient {
    client: BasicClient,
}

impl ZitadelOAuthClient {
    pub fn new(
        client_id: impl Into<String>,
        client_secret: impl Into<String>,
        redirect_url: impl Into<String>,
        auth_url: impl Into<String>,
        token_url: impl Into<String>,
        authority_url: impl Into<String>,
    ) -> Self {
        Self {
            client: Self::oauth_client(ZitadelConfig {
                client_id: client_id.into(),
                client_secret: client_secret.into(),
                redirect_url: redirect_url.into(),
                auth_url: auth_url.into(),
                token_url: token_url.into(),
                authority_url: authority_url.into(),
            }),
        }
    }

    fn oauth_client(config: ZitadelConfig) -> BasicClient {
        BasicClient::new(
            ClientId::new(config.client_id),
            Some(ClientSecret::new(config.client_secret)),
            AuthUrl::new(config.auth_url).unwrap(),
            Some(TokenUrl::new(config.token_url).unwrap()),
        )
        .set_redirect_uri(RedirectUrl::new(config.redirect_url).unwrap())
    }
}

impl From<ZitadelConfig> for ZitadelOAuthClient {
    fn from(value: ZitadelConfig) -> Self {
        Self::new(
            value.client_id,
            value.client_secret,
            value.redirect_url,
            value.auth_url,
            value.token_url,
            value.authority_url,
        )
    }
}

impl TryFrom<ZitadelClap> for ZitadelConfig {
    type Error = anyhow::Error;

    fn try_from(value: ZitadelClap) -> Result<Self, Self::Error> {
        Ok(Self {
            auth_url: value
                .auth_url
                .ok_or(anyhow::anyhow!("auth_url was not set"))?,
            client_id: value
                .client_id
                .ok_or(anyhow::anyhow!("client_id was not set"))?,
            client_secret: value
                .client_secret
                .ok_or(anyhow::anyhow!("client_secret was not set"))?,
            redirect_url: value
                .redirect_url
                .ok_or(anyhow::anyhow!("redirect_url was not set"))?,
            token_url: value
                .token_url
                .ok_or(anyhow::anyhow!("token_url was not set"))?,
            authority_url: value
                .authority_url
                .ok_or(anyhow::anyhow!("authority_url was not set"))?,
        })
    }
}

#[async_trait]
impl OAuthClient for ZitadelOAuthClient {
    async fn get_token(&self) -> anyhow::Result<()> {
        Ok(())
    }
    async fn authorize_url(&self) -> anyhow::Result<Url> {
        let (auth_url, _csrf_token) = self
            .client
            .authorize_url(CsrfToken::new_random)
            .add_scope(Scope::new("identify".to_string()))
            .add_scope(Scope::new("openid".to_string()))
            .url();

        Ok(auth_url)
    }

    async fn exchange(&self, code: &str) -> anyhow::Result<String> {
        let token = self
            .client
            .exchange_code(AuthorizationCode::new(code.to_string()))
            .request_async(async_http_client)
            .await?;

        Ok(token.access_token().secret().clone())
    }
}

#[cfg(test)]
mod tests {
    use crate::ZitadelClap;
    use clap::Parser;
    use sealed_test::prelude::*;

    #[derive(Parser)]
    #[command(author, version, about, long_about = None)]
    pub struct Cli {
        #[clap(flatten)]
        options: ZitadelClap,
    }

    #[derive(Parser, Debug)]
    #[command(author, version, about, long_about = None)]
    pub struct CliSubCommand {
        #[command(subcommand)]
        command: Commands,
    }

    #[derive(clap::Subcommand, Clone, Debug, Eq, PartialEq)]
    pub enum Commands {
        One {
            #[clap(flatten)]
            options: ZitadelClap,
        },
    }

    #[tokio::test]
    async fn test_parse_clap_zitadel() {
        let cli: Cli = Cli::parse_from(&[
            "base",
            "--zitadel-client-id=something",
            "--zitadel-client-secret=something",
            "--zitadel-auth-url=https://something",
            "--zitadel-redirect-url=https://something",
            "--zitadel-token-url=https://something",
            "--zitadel-authority-url=https://something",
        ]);
        println!("{:?}", cli.options);

        pretty_assertions::assert_eq!(
            cli.options,
            ZitadelClap {
                auth_url: Some("https://something".into()),
                client_id: Some("something".into()),
                client_secret: Some("something".into()),
                redirect_url: Some("https://something".into()),
                token_url: Some("https://something".into()),
                authority_url: Some("https://something".into()),
            }
        );
    }

    #[test]
    fn test_parse_clap_zitadel_fails_require_all() {
        let cli = CliSubCommand::try_parse_from(&[
            "base",
            "one",
            // "--zitadel-client-id=something", // We want to trigger missing variable
            "--zitadel-client-secret=something",
            "--zitadel-auth-url=https://something",
            "--zitadel-redirect-url=https://something",
            "--zitadel-token-url=https://something",
            "--zitadel-authority-url=https://something",
        ]);

        pretty_assertions::assert_eq!(cli.is_err(), true);
    }

    #[sealed_test]
    fn test_parse_clap_env_zitadel() {
        std::env::set_var("ZITADEL_CLIENT_ID", "something");
        std::env::set_var("ZITADEL_CLIENT_SECRET", "something");
        std::env::set_var("ZITADEL_AUTH_URL", "https://something");
        std::env::set_var("ZITADEL_REDIRECT_URL", "https://something");
        std::env::set_var("ZITADEL_TOKEN_URL", "https://something");
        std::env::set_var("ZITADEL_AUTHORITY_URL", "https://something");

        let cli = CliSubCommand::parse_from(&["base", "one"]);

        pretty_assertions::assert_eq!(
            cli.command,
            Commands::One {
                options: ZitadelClap {
                    auth_url: Some("https://something".into()),
                    client_id: Some("something".into()),
                    client_secret: Some("something".into()),
                    redirect_url: Some("https://something".into()),
                    token_url: Some("https://something".into()),
                    authority_url: Some("https://something".into()),
                }
            }
        );
    }
    #[test]
    fn test_parse_clap_defaults_to_noop() {
        let cli = CliSubCommand::parse_from(&["base", "one"]);

        pretty_assertions::assert_eq!(
            cli.command,
            Commands::One {
                options: ZitadelClap {
                    auth_url: None,
                    client_id: None,
                    client_secret: None,
                    redirect_url: None,
                    token_url: None,
                    authority_url: None,
                },
            }
        );
    }
}
@@ -1,120 +0,0 @@
use std::{ops::Deref, sync::Arc};

use async_sqlx_session::PostgresSessionStore;
use async_trait::async_trait;
use axum_sessions::async_session::{Session as AxumSession, SessionStore as AxumSessionStore};
use serde::{Deserialize, Serialize};

use crate::{AuthClap, SessionBackend};

#[derive(clap::Args, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct SessionClap {
    #[clap(flatten)]
    pub postgresql: PostgresqlSessionClap,
}

#[derive(clap::Args, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct PostgresqlSessionClap {
    #[arg(env = "SESSION_POSTGRES_CONN", long = "session-postgres-conn")]
    pub conn: Option<String>,
}

#[async_trait]
pub trait Session {
    async fn insert_user(&self, id: &str, user_id: &str) -> anyhow::Result<String>;
    async fn get_user(&self, cookie: &str) -> anyhow::Result<Option<String>>;
}

pub struct SessionService(Arc<dyn Session + Send + Sync + 'static>);
impl SessionService {
    pub async fn new(config: &AuthClap) -> anyhow::Result<Self> {
        match config.session_backend {
            SessionBackend::InMemory => Ok(Self(Arc::new(InMemorySessionService {}))),
            SessionBackend::Postgresql => {
                let postgres_session = PostgresSessionStore::new(
                    config
                        .session
                        .postgresql
                        .conn
                        .as_ref()
                        .expect("SESSION_POSTGRES_CONN to be set"),
                )
                .await?;

                Ok(Self(Arc::new(PostgresSessionService {
                    store: postgres_session,
                })))
            }
        }
    }
}

impl Deref for SessionService {
    type Target = Arc<dyn Session + Send + Sync + 'static>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

pub struct PostgresSessionService {
    store: PostgresSessionStore,
}

#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct User {
    pub id: String,
}

#[async_trait]
impl Session for PostgresSessionService {
    async fn insert_user(&self, _id: &str, user_id: &str) -> anyhow::Result<String> {
        let mut session = AxumSession::new();
        session.insert(
            "user",
            User {
                id: user_id.to_string(),
            },
        )?;

        let cookie = self
            .store
            .store_session(session)
            .await?
            .ok_or(anyhow::anyhow!("failed to store session"))?;

        Ok(cookie)
    }
    async fn get_user(&self, cookie: &str) -> anyhow::Result<Option<String>> {
        if let Some(session) = self.store.load_session(cookie.to_string()).await.unwrap() {
            if let Some(user) = session.get::<User>("user") {
                tracing::debug!(
                    "UserFromSession: session decoded success, user_id={:?}",
                    user.id
                );
                Ok(Some(user.id))
            } else {
                Ok(None)
            }
        } else {
            tracing::debug!(
                "UserIdFromSession: err session not exists in store, {}",
                cookie
            );
            Err(anyhow::anyhow!("No session found for cookie"))
        }
    }
}

pub struct InMemorySessionService {}

#[async_trait]
impl Session for InMemorySessionService {
    async fn insert_user(&self, _id: &str, _user_id: &str) -> anyhow::Result<String> {
        todo!()
    }

    async fn get_user(&self, _cookie: &str) -> anyhow::Result<Option<String>> {
        todo!()
    }
}
@@ -1,23 +0,0 @@
[package]
name = "como_bin"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
como_gql.workspace = true
como_core.workspace = true
como_domain.workspace = true
como_infrastructure.workspace = true
como_api.workspace = true


axum.workspace = true
serde_json.workspace = true
tokio.workspace = true
anyhow.workspace = true
dotenv.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
clap.workspace = true
@@ -1,22 +0,0 @@
use axum::{http::StatusCode, response::IntoResponse, Json};
use serde_json::json;

#[allow(dead_code)]
#[derive(Debug)]
pub enum AppError {
    WrongCredentials,
    InternalServerError,
}

impl IntoResponse for AppError {
    fn into_response(self) -> axum::response::Response {
        let (status, err_msg) = match self {
            Self::WrongCredentials => (StatusCode::BAD_REQUEST, "invalid credentials"),
            Self::InternalServerError => (
                StatusCode::INTERNAL_SERVER_ERROR,
                "something went wrong with your request",
            ),
        };
        (status, Json(json!({ "error": err_msg }))).into_response()
    }
}
@@ -1,41 +0,0 @@
use std::sync::Arc;

mod error;

use clap::Parser;

use anyhow::Context;

use como_api::router::Api;
use como_infrastructure::{
    configs::AppConfig, database::ConnectionPoolManager, register::ServiceRegister,
};

use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    tracing::info!("Loading dotenv");
    dotenv::dotenv()?;

    let config = Arc::new(AppConfig::parse());

    tracing_subscriber::registry()
        .with(tracing_subscriber::EnvFilter::new(&config.rust_log))
        .with(tracing_subscriber::fmt::layer())
        .init();

    let pool = ConnectionPoolManager::new_pool(&config.database_url, true).await?;

    let service_register = ServiceRegister::new(pool, config.clone()).await?;

    Api::new(
        config.api_port,
        &config.cors_origin,
        service_register.clone(),
    )
    .await
    .context("could not initialize API")?;

    Ok(())
}
@@ -1,12 +0,0 @@
[package]
name = "como_core"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
como_domain.workspace = true

async-trait.workspace = true
anyhow.workspace = true
@@ -1,5 +0,0 @@
{
    "version": 0,
    "root_name": "Workspace",
    "workspace_crates": {}
}
@@ -1,31 +0,0 @@
use std::sync::Arc;

use async_trait::async_trait;
use como_domain::{
    item::{
        queries::{GetItemQuery, GetItemsQuery},
        requests::{CreateItemDto, UpdateItemDto},
        responses::CreatedItemDto,
        ItemDto,
    },
    Context,
};

pub type DynItemService = Arc<dyn ItemService + Send + Sync>;

#[async_trait]
pub trait ItemService {
    async fn add_item(
        &self,
        context: &Context,
        item: CreateItemDto,
    ) -> anyhow::Result<CreatedItemDto>;
    async fn get_item(&self, context: &Context, query: GetItemQuery) -> anyhow::Result<ItemDto>;
    async fn get_items(
        &self,
        context: &Context,
        query: GetItemsQuery,
    ) -> anyhow::Result<Vec<ItemDto>>;

    async fn update_item(&self, context: &Context, item: UpdateItemDto) -> anyhow::Result<ItemDto>;
}
@@ -1,3 +0,0 @@
pub mod items;
pub mod projects;
pub mod users;
@@ -1,24 +0,0 @@
use std::sync::Arc;

use async_trait::async_trait;
use como_domain::{
    projects::{mutation::CreateProjectMutation, queries::GetProjectQuery, ProjectDto},
    Context,
};

pub type DynProjectService = Arc<dyn ProjectService + Send + Sync>;

#[async_trait]
pub trait ProjectService {
    async fn get_project(
        &self,
        context: &Context,
        query: GetProjectQuery,
    ) -> anyhow::Result<ProjectDto>;
    async fn get_projects(&self, context: &Context) -> anyhow::Result<Vec<ProjectDto>>;
    async fn create_project(
        &self,
        context: &Context,
        name: CreateProjectMutation,
    ) -> anyhow::Result<ProjectDto>;
}
@@ -1,22 +0,0 @@
use std::sync::Arc;

use async_trait::async_trait;
use como_domain::Context;

pub type DynUserService = Arc<dyn UserService + Send + Sync>;

#[async_trait]
pub trait UserService {
    async fn add_user(
        &self,
        context: &Context,
        username: String,
        password: String,
    ) -> anyhow::Result<String>;
    async fn validate_user(
        &self,
        context: &Context,
        username: String,
        password: String,
    ) -> anyhow::Result<Option<String>>;
}
@@ -1,12 +0,0 @@
[package]
name = "como_domain"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
async-graphql.workspace = true
serde.workspace = true
serde_json.workspace = true
uuid.workspace = true
@@ -1,37 +0,0 @@
pub mod user;

use std::collections::BTreeMap;

#[derive(Debug, Clone)]
pub struct Context {
    values: BTreeMap<String, String>,
}

impl Context {
    pub fn new() -> Self {
        Self {
            values: Default::default(),
        }
    }

    pub fn with_value(&self, key: impl Into<String>, value: impl Into<String>) -> Self {
        let mut values = self.values.clone();

        let _ = values.insert(key.into(), value.into());

        Self { values }
    }

    pub fn with_value_mut(
        &mut self,
        key: impl Into<String>,
        value: impl Into<String>,
    ) -> &mut Self {
        self.values.insert(key.into(), value.into());
        self
    }

    pub fn get(&self, key: impl AsRef<str>) -> Option<&str> {
        self.values.get(key.as_ref()).map(|s| s.as_str())
    }
}
@@ -1,23 +0,0 @@
use crate::Context;

pub trait ContextUserExt {
    fn set_user_id(&self, user_id: impl Into<String>) -> Context;
    fn set_user_id_mut(&mut self, user_id: impl Into<String>) -> &mut Context;
    fn get_user_id(&self) -> Option<String>;
}

const USER_ID_KEY: &str = "user_id";

impl ContextUserExt for Context {
    fn set_user_id(&self, user_id: impl Into<String>) -> Context {
        self.with_value(USER_ID_KEY, user_id)
    }

    fn set_user_id_mut(&mut self, user_id: impl Into<String>) -> &mut Context {
        self.with_value_mut(USER_ID_KEY, user_id)
    }

    fn get_user_id(&self) -> Option<String> {
        self.get(USER_ID_KEY).map(|s| s.to_string())
    }
}
@@ -1,24 +0,0 @@
pub mod queries;
pub mod requests;
pub mod responses;

use async_graphql::{Enum, InputObject};
use serde::{Deserialize, Serialize};
use uuid::Uuid;

#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq, Enum, Copy)]
pub enum ItemState {
    Created,
    Done,
    Archived,
    Deleted,
}

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, InputObject)]
pub struct ItemDto {
    pub id: Uuid,
    pub title: String,
    pub description: Option<String>,
    pub state: ItemState,
    pub project_id: Uuid,
}
@@ -1,13 +0,0 @@
use async_graphql::InputObject;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, InputObject)]
pub struct GetItemQuery {
    pub item_id: Uuid,
}

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, InputObject)]
pub struct GetItemsQuery {
    pub project_id: Uuid,
}
@ -1,21 +0,0 @@
use async_graphql::InputObject;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

use super::ItemState;

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, InputObject)]
pub struct CreateItemDto {
    pub title: String,
    pub description: Option<String>,
    pub project_id: Uuid,
}

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, InputObject)]
pub struct UpdateItemDto {
    pub id: Uuid,
    pub title: Option<String>,
    pub description: Option<String>,
    pub state: Option<ItemState>,
    pub project_id: Option<Uuid>,
}
@ -1,3 +0,0 @@
use super::ItemDto;

pub type CreatedItemDto = ItemDto;
@ -1,6 +0,0 @@
pub mod common;
pub mod item;
pub mod projects;
pub mod users;

pub use common::*;
@ -1,17 +0,0 @@
pub mod mutation;
pub mod queries;
pub mod requests;
pub mod responses;

use async_graphql::{InputObject, SimpleObject};
use serde::{Deserialize, Serialize};
use uuid::Uuid;

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, InputObject, SimpleObject)]
pub struct ProjectDto {
    pub id: Uuid,
    pub name: String,
    pub description: Option<String>,

    pub user_id: String,
}
@ -1,8 +0,0 @@
use async_graphql::InputObject;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, InputObject)]
pub struct CreateProjectMutation {
    pub name: String,
    pub description: Option<String>,
}
@ -1,8 +0,0 @@
use async_graphql::InputObject;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, InputObject)]
pub struct GetProjectQuery {
    pub project_id: Uuid,
}
@ -1,6 +0,0 @@
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
pub struct CreateProjectDto {
    pub name: String,
}
@ -1,3 +0,0 @@
use super::ProjectDto;

pub type CreatedProjectDto = ProjectDto;
@ -1,17 +0,0 @@
pub mod requests;
pub mod responses;

use serde::{Deserialize, Serialize};
use uuid::Uuid;

#[derive(Serialize, Deserialize, Debug)]
pub struct UserDto {
    pub id: Uuid,
    pub username: String,
    pub email: String,
}

#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct User {
    pub id: String,
}
@ -1,8 +0,0 @@
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
pub struct CreateUserDto {
    pub username: String,
    pub email: String,
    pub password: String,
}
@ -1,3 +0,0 @@
use super::UserDto;

pub type UserCreatedDto = UserDto;
@ -1,16 +0,0 @@
[package]
name = "como_gql"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
como_core.workspace = true
como_domain.workspace = true
como_infrastructure.workspace = true

anyhow.workspace = true
async-trait.workspace = true
async-graphql.workspace = true
uuid.workspace = true
@ -1,94 +0,0 @@
|
||||
use crate::common::*;
|
||||
use crate::items::{CreatedItem, Item};
|
||||
use crate::projects::Project;
|
||||
use async_graphql::{Context, EmptySubscription, Object, Schema};
|
||||
use como_domain::item::queries::{GetItemQuery, GetItemsQuery};
|
||||
use como_domain::item::requests::{CreateItemDto, UpdateItemDto};
|
||||
use como_domain::projects::mutation::CreateProjectMutation;
|
||||
use como_domain::projects::queries::GetProjectQuery;
|
||||
use como_domain::projects::ProjectDto;
|
||||
|
||||
pub type ComoSchema = Schema<QueryRoot, MutationRoot, EmptySubscription>;
|
||||
|
||||
pub struct MutationRoot;
|
||||
|
||||
#[Object]
|
||||
impl MutationRoot {
|
||||
async fn create_item(
|
||||
&self,
|
||||
ctx: &Context<'_>,
|
||||
item: CreateItemDto,
|
||||
) -> anyhow::Result<CreatedItem> {
|
||||
let created_item = item_service(ctx)
|
||||
.add_item(get_domain_context(ctx), item)
|
||||
.await?;
|
||||
|
||||
Ok(CreatedItem {
|
||||
id: created_item.id,
|
||||
})
|
||||
}
|
||||
|
||||
async fn create_project(
|
||||
&self,
|
||||
ctx: &Context<'_>,
|
||||
request: CreateProjectMutation,
|
||||
) -> anyhow::Result<ProjectDto> {
|
||||
let project = project_service(ctx)
|
||||
.create_project(get_domain_context(ctx), request)
|
||||
.await?;
|
||||
|
||||
Ok(project)
|
||||
}
|
||||
|
||||
async fn update_item(&self, ctx: &Context<'_>, item: UpdateItemDto) -> anyhow::Result<Item> {
|
||||
let updated_item = item_service(ctx)
|
||||
.update_item(get_domain_context(ctx), item)
|
||||
.await?;
|
||||
|
||||
Ok(updated_item.into())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct QueryRoot;
|
||||
|
||||
#[Object]
|
||||
impl QueryRoot {
|
||||
async fn get_item(&self, ctx: &Context<'_>, query: GetItemQuery) -> anyhow::Result<Item> {
|
||||
let item = item_service(ctx)
|
||||
.get_item(get_domain_context(ctx), query)
|
||||
.await?;
|
||||
|
||||
Ok(Item::from(item))
|
||||
}
|
||||
|
||||
async fn get_items(
|
||||
&self,
|
||||
ctx: &Context<'_>,
|
||||
query: GetItemsQuery,
|
||||
) -> anyhow::Result<Vec<Item>> {
|
||||
let items = item_service(ctx)
|
||||
.get_items(get_domain_context(ctx), query)
|
||||
.await?;
|
||||
|
||||
Ok(items.iter().map(|i| Item::from(i.clone())).collect())
|
||||
}
|
||||
|
||||
// Projects
|
||||
async fn get_project(
|
||||
&self,
|
||||
ctx: &Context<'_>,
|
||||
query: GetProjectQuery,
|
||||
) -> anyhow::Result<Project> {
|
||||
project_service(ctx)
|
||||
.get_project(get_domain_context(ctx), query)
|
||||
.await
|
||||
.map(|p| p.into())
|
||||
}
|
||||
|
||||
async fn get_projects(&self, ctx: &Context<'_>) -> anyhow::Result<Vec<Project>> {
|
||||
project_service(ctx)
|
||||
.get_projects(get_domain_context(ctx))
|
||||
.await
|
||||
.map(|p| p.into_iter().map(|p| p.into()).collect())
|
||||
}
|
||||
}
|
@ -1,80 +0,0 @@
|
||||
use crate::common::*;
|
||||
use async_graphql::{Context, Object};
|
||||
use como_domain::{
|
||||
item::{queries::GetItemQuery, ItemDto, ItemState},
|
||||
projects::queries::GetProjectQuery,
|
||||
};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::projects::Project;
|
||||
|
||||
pub struct CreatedItem {
|
||||
pub id: Uuid,
|
||||
}
|
||||
|
||||
#[Object]
|
||||
impl CreatedItem {
|
||||
pub async fn item(&self, ctx: &Context<'_>) -> anyhow::Result<Item> {
|
||||
let item = item_service(ctx)
|
||||
.get_item(get_domain_context(ctx), GetItemQuery { item_id: self.id })
|
||||
.await?;
|
||||
|
||||
Ok(item.into())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Item {
|
||||
pub id: Uuid,
|
||||
pub title: String,
|
||||
pub description: Option<String>,
|
||||
pub state: ItemState,
|
||||
pub project_id: Uuid,
|
||||
}
|
||||
|
||||
#[Object]
|
||||
impl Item {
|
||||
pub async fn id(&self, _ctx: &Context<'_>) -> anyhow::Result<Uuid> {
|
||||
return Ok(self.id);
|
||||
}
|
||||
|
||||
pub async fn title(&self, _ctx: &Context<'_>) -> anyhow::Result<String> {
|
||||
return Ok(self.title.clone());
|
||||
}
|
||||
|
||||
pub async fn description(&self, _ctx: &Context<'_>) -> anyhow::Result<Option<String>> {
|
||||
return Ok(self.description.clone());
|
||||
}
|
||||
|
||||
pub async fn state(&self, _ctx: &Context<'_>) -> anyhow::Result<ItemState> {
|
||||
return Ok(self.state);
|
||||
}
|
||||
|
||||
pub async fn project(&self, ctx: &Context<'_>) -> anyhow::Result<Project> {
|
||||
let project = project_service(ctx)
|
||||
.get_project(
|
||||
get_domain_context(ctx),
|
||||
GetProjectQuery {
|
||||
project_id: self.project_id,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(project.into())
|
||||
}
|
||||
|
||||
pub async fn project_id(&self, _ctx: &Context<'_>) -> anyhow::Result<Uuid> {
|
||||
return Ok(self.project_id);
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ItemDto> for Item {
|
||||
fn from(dto: ItemDto) -> Self {
|
||||
Self {
|
||||
id: dto.id,
|
||||
title: dto.title,
|
||||
description: dto.description,
|
||||
state: dto.state,
|
||||
project_id: dto.project_id,
|
||||
}
|
||||
}
|
||||
}
|
@ -1,33 +0,0 @@
|
||||
pub mod graphql;
|
||||
mod items;
|
||||
mod projects;
|
||||
|
||||
pub mod common {
|
||||
use async_graphql::Context;
|
||||
use como_core::items::DynItemService;
|
||||
use como_core::projects::DynProjectService;
|
||||
use como_infrastructure::register::ServiceRegister;
|
||||
|
||||
#[inline(always)]
|
||||
pub(crate) fn get_domain_context<'a>(ctx: &Context<'a>) -> &'a como_domain::Context {
|
||||
ctx.data_unchecked::<como_domain::Context>()
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[inline(always)]
|
||||
pub(crate) fn get_service_register<'a>(ctx: &Context<'a>) -> &'a ServiceRegister {
|
||||
ctx.data_unchecked::<ServiceRegister>()
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub(crate) fn project_service<'a>(ctx: &Context<'a>) -> DynProjectService {
|
||||
ctx.data_unchecked::<ServiceRegister>()
|
||||
.project_service
|
||||
.clone()
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub(crate) fn item_service<'a>(ctx: &Context<'a>) -> DynItemService {
|
||||
ctx.data_unchecked::<ServiceRegister>().item_service.clone()
|
||||
}
|
||||
}
|
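The helpers above expect a `ServiceRegister` to be attached to the GraphQL context. A sketch of how the schema could be assembled so `data_unchecked` finds it; the real wiring lives outside this diff, and the per-request `como_domain::Context` would be injected separately:

```rust
use async_graphql::{EmptySubscription, Schema};
use como_gql::graphql::{ComoSchema, MutationRoot, QueryRoot};
use como_infrastructure::register::ServiceRegister;

// Build the schema once at startup and give every resolver access to the services.
fn build_schema(register: ServiceRegister) -> ComoSchema {
    Schema::build(QueryRoot, MutationRoot, EmptySubscription)
        .data(register)
        .finish()
}
```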
@ -1,48 +0,0 @@
|
||||
use crate::common::*;
|
||||
use async_graphql::{Context, Object};
|
||||
use como_domain::projects::ProjectDto;
|
||||
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::items::Item;
|
||||
|
||||
pub struct Project {
|
||||
pub id: Uuid,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[Object]
|
||||
impl Project {
|
||||
async fn id(&self) -> &Uuid {
|
||||
&self.id
|
||||
}
|
||||
|
||||
async fn name(&self) -> &String {
|
||||
&self.name
|
||||
}
|
||||
|
||||
async fn items(&self, ctx: &Context<'_>) -> anyhow::Result<Vec<Item>> {
|
||||
let items = item_service(ctx)
|
||||
.get_items(
|
||||
get_domain_context(ctx),
|
||||
como_domain::item::queries::GetItemsQuery {
|
||||
project_id: self.id,
|
||||
},
|
||||
)
|
||||
.await?
|
||||
.iter()
|
||||
.map(|i| Item::from(i.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(items)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ProjectDto> for Project {
|
||||
fn from(dto: ProjectDto) -> Self {
|
||||
Self {
|
||||
id: dto.id,
|
||||
name: dto.name,
|
||||
}
|
||||
}
|
||||
}
|
@ -1,27 +0,0 @@
[package]
name = "como_infrastructure"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
como_core.workspace = true
como_domain.workspace = true
como_auth.workspace = true

axum.workspace = true
async-trait.workspace = true
uuid.workspace = true
anyhow.workspace = true
sqlx.workspace = true
chrono.workspace = true
serde_json.workspace = true

async-sqlx-session.workspace = true

tokio.workspace = true
clap.workspace = true
tracing.workspace = true
argon2.workspace = true
rand_core.workspace = true
@ -1,9 +0,0 @@
fn main() {
    println!("cargo:rustc-env=SQLX_OFFLINE_DIR='./.sqlx'");
    // When building in docs.rs, we want to set SQLX_OFFLINE mode to true
    if std::env::var_os("DOCS_RS").is_some() {
        println!("cargo:rustc-env=SQLX_OFFLINE=true");
    } else if std::env::var_os("DOCKER_BUILD").is_some() {
        println!("cargo:rustc-env=SQLX_OFFLINE=true");
    }
}
@ -1,8 +0,0 @@
-- Add migration script here
CREATE TABLE IF NOT EXISTS users (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    username varchar not null,
    password_hash varchar not null
);

CREATE unique index users_username_idx on users(username)
@ -1,10 +0,0 @@
-- Add migration script here

CREATE TABLE IF NOT EXISTS projects (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name varchar not null,
    description varchar default null,
    user_id varchar not null,
    created_at timestamp not null,
    updated_at timestamp not null
);
@ -1,17 +0,0 @@
-- Add migration script here

create table if not exists items (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    title varchar not null,
    description varchar default null,
    state integer not null,
    user_id varchar not null,
    project_id UUID not null,
    created_at timestamp not null,
    updated_at timestamp not null,
    CONSTRAINT fk_project
        FOREIGN KEY(project_id)
            REFERENCES projects(id)
            ON DELETE CASCADE
)
@ -1,4 +0,0 @@
-- Add migration script here

ALTER TABLE items ALTER COLUMN state TYPE varchar(255);
@ -1,346 +0,0 @@
|
||||
{
|
||||
"db": "PostgreSQL",
|
||||
"05d0a7901f0481d7443f125655df26eeacd63f2b023723a0c09c662617e0baf5": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 1,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 2,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "state",
|
||||
"ordinal": 3,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "project_id",
|
||||
"ordinal": 4,
|
||||
"type_info": "Uuid"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid",
|
||||
"Text"
|
||||
]
|
||||
}
|
||||
},
|
||||
"query": "\n SELECT id, title, description, state, project_id\n FROM items\n WHERE id = $1 AND user_id = $2\n "
|
||||
},
|
||||
"3b4484c5ccfd4dcb887c4e978fe6e45d4c9ecc2a73909be207dced79ddf17d87": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Uuid"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Varchar",
|
||||
"Varchar"
|
||||
]
|
||||
}
|
||||
},
|
||||
"query": "\n INSERT INTO users (username, password_hash) \n VALUES ( $1, $2 ) \n RETURNING id\n "
|
||||
},
|
||||
"4ec32ebd0ee991cec625d9de51de0d3e0ddfc8afda0568327fa9c818bde08e1f": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Uuid"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid",
|
||||
"Varchar",
|
||||
"Varchar",
|
||||
"Varchar",
|
||||
"Timestamp",
|
||||
"Timestamp"
|
||||
]
|
||||
}
|
||||
},
|
||||
"query": "\n INSERT INTO projects (id, name, description, user_id, created_at, updated_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n RETURNING id\n "
|
||||
},
|
||||
"7901e81b1f1f08f0c7e72a967a8116efb62f40d99f80900f1e56cd13ad4f6bb2": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 1,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 2,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "state",
|
||||
"ordinal": 3,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "project_id",
|
||||
"ordinal": 4,
|
||||
"type_info": "Uuid"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid",
|
||||
"Varchar",
|
||||
"Varchar",
|
||||
"Varchar",
|
||||
"Uuid",
|
||||
"Varchar"
|
||||
]
|
||||
}
|
||||
},
|
||||
"query": "\n INSERT INTO items (id, title, description, state, project_id, user_id, created_at, updated_at)\n VALUES ($1, $2, $3, $4, $5, $6, now(), now())\n RETURNING id, title, description, state, project_id\n "
|
||||
},
|
||||
"a188dc748025cf3311820d16002b111a75f571d18f44f54b730ac14e9b2e10ea": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 1,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 2,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 3,
|
||||
"type_info": "Varchar"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid",
|
||||
"Text"
|
||||
]
|
||||
}
|
||||
},
|
||||
"query": "\n SELECT id, name, description, user_id\n FROM projects\n WHERE id = $1 and user_id = $2\n "
|
||||
},
|
||||
"b930a7123d22d543e4d8ed70a1bc10477362127969ceca9653e445f26670003a": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 1,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 2,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 3,
|
||||
"type_info": "Varchar"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Text"
|
||||
]
|
||||
}
|
||||
},
|
||||
"query": "\n SELECT id, name, description, user_id\n FROM projects\n WHERE user_id = $1\n LIMIT 500\n "
|
||||
},
|
||||
"bacf3c8a2f302d50991483fa36a06965c3536c2ef3837c19c6e6361eff312848": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 1,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 2,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "state",
|
||||
"ordinal": 3,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "project_id",
|
||||
"ordinal": 4,
|
||||
"type_info": "Uuid"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Varchar",
|
||||
"Varchar",
|
||||
"Varchar",
|
||||
"Uuid",
|
||||
"Uuid",
|
||||
"Text"
|
||||
]
|
||||
}
|
||||
},
|
||||
"query": "\n UPDATE items\n SET \n title = COALESCE($1, title), \n description = COALESCE($2, description), \n state = COALESCE($3, state), \n project_id = COALESCE($4, project_id), \n updated_at = now()\n WHERE id = $5 AND user_id = $6\n RETURNING id, title, description, state, project_id\n "
|
||||
},
|
||||
"bd2407ffb9637afcff3ffe1101e7c1920b8cf0be423ab0313d14acc9c76e0f93": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 1,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 2,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "state",
|
||||
"ordinal": 3,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "project_id",
|
||||
"ordinal": 4,
|
||||
"type_info": "Uuid"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Text",
|
||||
"Uuid"
|
||||
]
|
||||
}
|
||||
},
|
||||
"query": "\n SELECT id, title, description, state, project_id\n FROM items\n WHERE user_id = $1 and project_id = $2\n LIMIT 500\n "
|
||||
},
|
||||
"d3f222cf6c3d9816705426fdbed3b13cb575bb432eb1f33676c0b414e67aecaf": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"name": "username",
|
||||
"ordinal": 1,
|
||||
"type_info": "Varchar"
|
||||
},
|
||||
{
|
||||
"name": "password_hash",
|
||||
"ordinal": 2,
|
||||
"type_info": "Varchar"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Text"
|
||||
]
|
||||
}
|
||||
},
|
||||
"query": "\n SELECT * from users\n where username=$1\n "
|
||||
}
|
||||
}
|
@ -1,29 +0,0 @@
use clap::ValueEnum;
use como_auth::AuthClap;

#[derive(clap::Parser)]
pub struct AppConfig {
    #[clap(long, env)]
    pub database_url: String,
    #[clap(long, env, default_value = "postgres")]
    pub database_type: DatabaseType,
    #[clap(long, env)]
    pub rust_log: String,
    #[clap(long, env, default_value = "3001")]
    pub api_port: u32,
    #[clap(long, env, default_value = "true")]
    pub run_migrations: bool,
    #[clap(long, env, default_value = "false")]
    pub seed: bool,
    #[clap(long, env)]
    pub cors_origin: String,

    #[clap(flatten)]
    pub auth: AuthClap,
}

#[derive(Clone, Debug, ValueEnum)]
pub enum DatabaseType {
    Postgres,
    InMemory,
}
@ -1,33 +0,0 @@
use anyhow::Context;
use sqlx::{postgres::PgPoolOptions, Pool, Postgres};
use tracing::log::info;

pub type ConnectionPool = Pool<Postgres>;

pub struct ConnectionPoolManager;

impl ConnectionPoolManager {
    pub async fn new_pool(
        connection_string: &str,
        run_migrations: bool,
    ) -> anyhow::Result<ConnectionPool> {
        info!("initializing the database connection pool");
        let pool = PgPoolOptions::new()
            .max_connections(5)
            .connect(connection_string)
            .await
            .context("error while initializing the database connection pool")?;

        if run_migrations {
            info!("migrations enabled");
            info!("migrating database");

            sqlx::migrate!()
                .run(&pool)
                .await
                .context("error while running database migrations")?;
        }

        Ok(pool)
    }
}
@ -1,5 +0,0 @@
pub mod configs;
pub mod database;
pub mod register;
pub mod repositories;
pub mod services;
@ -1,74 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_sqlx_session::PostgresSessionStore;
|
||||
use como_auth::{AuthService, SessionService};
|
||||
use como_core::{items::DynItemService, projects::DynProjectService, users::DynUserService};
|
||||
use tracing::log::info;
|
||||
|
||||
use crate::{
|
||||
configs::{AppConfig, DatabaseType},
|
||||
database::ConnectionPool,
|
||||
services::{
|
||||
item_service::{DefaultItemService, MemoryItemService},
|
||||
project_service::{DefaultProjectService, MemoryProjectService},
|
||||
user_service::DefaultUserService,
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ServiceRegister {
|
||||
pub item_service: DynItemService,
|
||||
pub project_service: DynProjectService,
|
||||
pub user_service: DynUserService,
|
||||
pub session_store: PostgresSessionStore,
|
||||
pub auth_service: AuthService,
|
||||
}
|
||||
|
||||
impl ServiceRegister {
|
||||
pub async fn new(pool: ConnectionPool, config: Arc<AppConfig>) -> anyhow::Result<Self> {
|
||||
info!("creating services");
|
||||
|
||||
let session = SessionService::new(&config.auth).await?;
|
||||
let auth = AuthService::new(&config.auth, session).await?;
|
||||
|
||||
let s = match config.database_type {
|
||||
DatabaseType::Postgres => {
|
||||
let item_service =
|
||||
Arc::new(DefaultItemService::new(pool.clone())) as DynItemService;
|
||||
let project_service =
|
||||
Arc::new(DefaultProjectService::new(pool.clone())) as DynProjectService;
|
||||
let user_service =
|
||||
Arc::new(DefaultUserService::new(pool.clone())) as DynUserService;
|
||||
let store = PostgresSessionStore::new(&config.database_url).await?;
|
||||
store.migrate().await?;
|
||||
|
||||
Self {
|
||||
item_service,
|
||||
user_service,
|
||||
project_service,
|
||||
session_store: store,
|
||||
auth_service: auth,
|
||||
}
|
||||
}
|
||||
DatabaseType::InMemory => {
|
||||
let item_service = Arc::new(MemoryItemService::new()) as DynItemService;
|
||||
let project_service = Arc::new(MemoryProjectService::new()) as DynProjectService;
|
||||
let user_service =
|
||||
Arc::new(DefaultUserService::new(pool.clone())) as DynUserService;
|
||||
let store = PostgresSessionStore::new(&config.database_url).await?;
|
||||
store.migrate().await?;
|
||||
|
||||
Self {
|
||||
item_service,
|
||||
user_service,
|
||||
project_service,
|
||||
session_store: store,
|
||||
auth_service: auth,
|
||||
}
|
||||
}
|
||||
};
|
||||
info!("services created succesfully");
|
||||
|
||||
Ok(s)
|
||||
}
|
||||
}
|
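A minimal wiring sketch for the pieces above, based only on the signatures in this diff; the actual startup code in the binary crate is not part of this commit:

```rust
use std::sync::Arc;

use clap::Parser;
use como_infrastructure::{
    configs::AppConfig, database::ConnectionPoolManager, register::ServiceRegister,
};

async fn wire_services() -> anyhow::Result<ServiceRegister> {
    // AppConfig derives clap::Parser above, so flags and env vars fill it in.
    let config = Arc::new(AppConfig::parse());

    // Open the Postgres pool, optionally run migrations, then build the register.
    let pool =
        ConnectionPoolManager::new_pool(&config.database_url, config.run_migrations).await?;
    ServiceRegister::new(pool, config).await
}
```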
@ -1 +0,0 @@
|
||||
|
@ -1,221 +0,0 @@
|
||||
use crate::database::ConnectionPool;
|
||||
use async_trait::async_trait;
|
||||
use como_core::items::ItemService;
|
||||
use como_domain::{
|
||||
item::{
|
||||
queries::{GetItemQuery, GetItemsQuery},
|
||||
requests::{CreateItemDto, UpdateItemDto},
|
||||
responses::CreatedItemDto,
|
||||
ItemDto,
|
||||
},
|
||||
user::ContextUserExt,
|
||||
Context,
|
||||
};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
use uuid::Uuid;
|
||||
|
||||
pub struct DefaultItemService {
|
||||
pool: ConnectionPool,
|
||||
}
|
||||
|
||||
impl DefaultItemService {
|
||||
pub fn new(connection_pool: ConnectionPool) -> Self {
|
||||
Self {
|
||||
pool: connection_pool,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ItemService for DefaultItemService {
|
||||
async fn add_item(
|
||||
&self,
|
||||
context: &Context,
|
||||
item: CreateItemDto,
|
||||
) -> anyhow::Result<CreatedItemDto> {
|
||||
let state = serde_json::to_string(&como_domain::item::ItemState::Created {})?;
|
||||
let user_id = context.get_user_id().ok_or(anyhow::anyhow!("no user id"))?;
|
||||
|
||||
let rec = sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO items (id, title, description, state, project_id, user_id, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, now(), now())
|
||||
RETURNING id, title, description, state, project_id
|
||||
"#,
|
||||
Uuid::new_v4(),
|
||||
item.title,
|
||||
item.description,
|
||||
state,
|
||||
item.project_id,
|
||||
user_id,
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(CreatedItemDto {
|
||||
id: rec.id,
|
||||
title: rec.title,
|
||||
description: rec.description,
|
||||
state: como_domain::item::ItemState::Created {},
|
||||
project_id: rec.project_id,
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_item(&self, context: &Context, query: GetItemQuery) -> anyhow::Result<ItemDto> {
|
||||
let user_id = context.get_user_id().ok_or(anyhow::anyhow!("no user id"))?;
|
||||
|
||||
let rec = sqlx::query!(
|
||||
r#"
|
||||
SELECT id, title, description, state, project_id
|
||||
FROM items
|
||||
WHERE id = $1 AND user_id = $2
|
||||
"#,
|
||||
query.item_id,
|
||||
user_id,
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(ItemDto {
|
||||
id: rec.id,
|
||||
title: rec.title,
|
||||
description: rec.description,
|
||||
state: serde_json::from_str(&rec.state)?,
|
||||
project_id: rec.project_id,
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_items(
|
||||
&self,
|
||||
context: &Context,
|
||||
query: GetItemsQuery,
|
||||
) -> anyhow::Result<Vec<ItemDto>> {
|
||||
let user_id = context.get_user_id().ok_or(anyhow::anyhow!("no user id"))?;
|
||||
|
||||
let recs = sqlx::query!(
|
||||
r#"
|
||||
SELECT id, title, description, state, project_id
|
||||
FROM items
|
||||
WHERE user_id = $1 and project_id = $2
|
||||
LIMIT 500
|
||||
"#,
|
||||
user_id,
|
||||
query.project_id,
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(recs
|
||||
.into_iter()
|
||||
.map(|rec| ItemDto {
|
||||
id: rec.id,
|
||||
title: rec.title,
|
||||
description: rec.description,
|
||||
state: serde_json::from_str(&rec.state).unwrap(),
|
||||
project_id: rec.project_id,
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
async fn update_item(&self, context: &Context, item: UpdateItemDto) -> anyhow::Result<ItemDto> {
|
||||
let state = item.state.map(|s| serde_json::to_string(&s)).transpose()?;
|
||||
let user_id = context.get_user_id().ok_or(anyhow::anyhow!("no user id"))?;
|
||||
|
||||
let rec = sqlx::query!(
|
||||
r#"
|
||||
UPDATE items
|
||||
SET
|
||||
title = COALESCE($1, title),
|
||||
description = COALESCE($2, description),
|
||||
state = COALESCE($3, state),
|
||||
project_id = COALESCE($4, project_id),
|
||||
updated_at = now()
|
||||
WHERE id = $5 AND user_id = $6
|
||||
RETURNING id, title, description, state, project_id
|
||||
"#,
|
||||
item.title,
|
||||
item.description,
|
||||
state,
|
||||
item.project_id,
|
||||
item.id,
|
||||
user_id,
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(ItemDto {
|
||||
id: rec.id,
|
||||
title: rec.title,
|
||||
description: rec.description,
|
||||
state: serde_json::from_str(&rec.state)?,
|
||||
project_id: rec.project_id,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MemoryItemService {
|
||||
item_store: Arc<Mutex<HashMap<String, ItemDto>>>,
|
||||
}
|
||||
|
||||
impl MemoryItemService {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
item_store: Arc::new(Mutex::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ItemService for MemoryItemService {
|
||||
async fn add_item(
|
||||
&self,
|
||||
_context: &Context,
|
||||
create_item: CreateItemDto,
|
||||
) -> anyhow::Result<CreatedItemDto> {
|
||||
if let Ok(mut item_store) = self.item_store.lock() {
|
||||
let item = ItemDto {
|
||||
id: Uuid::new_v4(),
|
||||
title: create_item.title,
|
||||
description: create_item.description,
|
||||
state: como_domain::item::ItemState::Created,
|
||||
project_id: create_item.project_id,
|
||||
};
|
||||
|
||||
item_store.insert(item.id.to_string(), item.clone());
|
||||
|
||||
return Ok(item);
|
||||
} else {
|
||||
Err(anyhow::anyhow!("could not unlock item_store"))
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_item(&self, _context: &Context, query: GetItemQuery) -> anyhow::Result<ItemDto> {
|
||||
if let Ok(item_store) = self.item_store.lock() {
|
||||
let item = item_store
|
||||
.get(&query.item_id.to_string())
|
||||
.ok_or(anyhow::anyhow!("could not find item"))?;
|
||||
return Ok(item.clone());
|
||||
} else {
|
||||
Err(anyhow::anyhow!("could not unlock item_store"))
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_items(
|
||||
&self,
|
||||
_context: &Context,
|
||||
_query: GetItemsQuery,
|
||||
) -> anyhow::Result<Vec<ItemDto>> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
async fn update_item(
|
||||
&self,
|
||||
_context: &Context,
|
||||
_item: UpdateItemDto,
|
||||
) -> anyhow::Result<ItemDto> {
|
||||
todo!()
|
||||
}
|
||||
}
|
@ -1,3 +0,0 @@
pub mod item_service;
pub mod project_service;
pub mod user_service;
@ -1,167 +0,0 @@
|
||||
use std::{collections::HashMap, sync::Arc};
|
||||
|
||||
use axum::async_trait;
|
||||
use como_core::projects::ProjectService;
|
||||
use como_domain::{
|
||||
projects::{mutation::CreateProjectMutation, queries::GetProjectQuery, ProjectDto},
|
||||
user::ContextUserExt,
|
||||
Context,
|
||||
};
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
use crate::database::ConnectionPool;
|
||||
|
||||
pub struct DefaultProjectService {
|
||||
pool: ConnectionPool,
|
||||
}
|
||||
|
||||
impl DefaultProjectService {
|
||||
pub fn new(connection_pool: ConnectionPool) -> Self {
|
||||
Self {
|
||||
pool: connection_pool,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ProjectService for DefaultProjectService {
|
||||
async fn get_project(
|
||||
&self,
|
||||
context: &Context,
|
||||
query: GetProjectQuery,
|
||||
) -> anyhow::Result<ProjectDto> {
|
||||
let user_id = context.get_user_id().ok_or(anyhow::anyhow!("no user id"))?;
|
||||
|
||||
let rec = sqlx::query!(
|
||||
r#"
|
||||
SELECT id, name, description, user_id
|
||||
FROM projects
|
||||
WHERE id = $1 and user_id = $2
|
||||
"#,
|
||||
query.project_id,
|
||||
&user_id
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(ProjectDto {
|
||||
id: rec.id,
|
||||
name: rec.name,
|
||||
description: rec.description,
|
||||
user_id: rec.user_id,
|
||||
})
|
||||
}
|
||||
async fn get_projects(&self, context: &Context) -> anyhow::Result<Vec<ProjectDto>> {
|
||||
let user_id = context.get_user_id().ok_or(anyhow::anyhow!("no user id"))?;
|
||||
|
||||
let recs = sqlx::query!(
|
||||
r#"
|
||||
SELECT id, name, description, user_id
|
||||
FROM projects
|
||||
WHERE user_id = $1
|
||||
LIMIT 500
|
||||
"#,
|
||||
&user_id
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(recs
|
||||
.into_iter()
|
||||
.map(|rec| ProjectDto {
|
||||
id: rec.id,
|
||||
name: rec.name,
|
||||
description: rec.description,
|
||||
user_id: rec.user_id,
|
||||
})
|
||||
.collect::<_>())
|
||||
}
|
||||
async fn create_project(
|
||||
&self,
|
||||
context: &Context,
|
||||
request: CreateProjectMutation,
|
||||
) -> anyhow::Result<ProjectDto> {
|
||||
let user_id = context.get_user_id().ok_or(anyhow::anyhow!("no user id"))?;
|
||||
|
||||
let rec = sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO projects (id, name, description, user_id, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6)
|
||||
RETURNING id
|
||||
"#,
|
||||
uuid::Uuid::new_v4(),
|
||||
request.name,
|
||||
request.description,
|
||||
&user_id,
|
||||
chrono::Utc::now().naive_utc(),
|
||||
chrono::Utc::now().naive_utc(),
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(ProjectDto {
|
||||
id: rec.id,
|
||||
name: request.name,
|
||||
description: request.description,
|
||||
user_id: user_id.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MemoryProjectService {
|
||||
project_store: Arc<Mutex<HashMap<String, ProjectDto>>>,
|
||||
}
|
||||
|
||||
impl MemoryProjectService {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
project_store: Arc::new(Mutex::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ProjectService for MemoryProjectService {
|
||||
async fn get_project(
|
||||
&self,
|
||||
_context: &Context,
|
||||
query: GetProjectQuery,
|
||||
) -> anyhow::Result<ProjectDto> {
|
||||
let ps = self.project_store.lock().await;
|
||||
Ok(ps
|
||||
.get(&query.project_id.to_string())
|
||||
.ok_or(anyhow::anyhow!("could not find project"))?
|
||||
.clone())
|
||||
}
|
||||
async fn get_projects(&self, context: &Context) -> anyhow::Result<Vec<ProjectDto>> {
|
||||
let user_id = context.get_user_id().ok_or(anyhow::anyhow!("no user id"))?;
|
||||
|
||||
Ok(self
|
||||
.project_store
|
||||
.lock()
|
||||
.await
|
||||
.values()
|
||||
.filter(|p| p.user_id == user_id)
|
||||
.cloned()
|
||||
.collect::<_>())
|
||||
}
|
||||
|
||||
async fn create_project(
|
||||
&self,
|
||||
context: &Context,
|
||||
mutation: CreateProjectMutation,
|
||||
) -> anyhow::Result<ProjectDto> {
|
||||
let user_id = context.get_user_id().ok_or(anyhow::anyhow!("no user id"))?;
|
||||
|
||||
let mut ps = self.project_store.lock().await;
|
||||
let project = ProjectDto {
|
||||
id: uuid::Uuid::new_v4(),
|
||||
name: mutation.name,
|
||||
description: None,
|
||||
user_id,
|
||||
};
|
||||
|
||||
ps.insert(project.id.to_string(), project.clone());
|
||||
Ok(project)
|
||||
}
|
||||
}
|
@ -1,95 +0,0 @@
|
||||
use argon2::{password_hash::SaltString, Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
|
||||
use axum::async_trait;
|
||||
use como_core::users::UserService;
|
||||
use como_domain::Context;
|
||||
use rand_core::OsRng;
|
||||
|
||||
use crate::database::ConnectionPool;
|
||||
|
||||
pub struct DefaultUserService {
|
||||
pool: ConnectionPool,
|
||||
}
|
||||
|
||||
impl DefaultUserService {
|
||||
pub fn new(pool: ConnectionPool) -> Self {
|
||||
Self { pool }
|
||||
}
|
||||
|
||||
fn hash_password(&self, _context: &Context, password: String) -> anyhow::Result<String> {
|
||||
let salt = SaltString::generate(&mut OsRng);
|
||||
let argon2 = Argon2::default();
|
||||
|
||||
let password_hash = argon2
|
||||
.hash_password(password.as_bytes(), &salt)
|
||||
.map_err(|e| anyhow::anyhow!(e))?
|
||||
.to_string();
|
||||
|
||||
Ok(password_hash)
|
||||
}
|
||||
|
||||
fn validate_password(
|
||||
&self,
|
||||
_context: &Context,
|
||||
password: String,
|
||||
hashed_password: String,
|
||||
) -> anyhow::Result<bool> {
|
||||
let argon2 = Argon2::default();
|
||||
|
||||
let parsed_hash = PasswordHash::new(&hashed_password).map_err(|e| anyhow::anyhow!(e))?;
|
||||
match argon2.verify_password(password.as_bytes(), &parsed_hash) {
|
||||
Ok(..) => Ok(true),
|
||||
Err(..) => Ok(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl UserService for DefaultUserService {
|
||||
async fn add_user(
|
||||
&self,
|
||||
context: &Context,
|
||||
username: String,
|
||||
password: String,
|
||||
) -> anyhow::Result<String> {
|
||||
let hashed_password = self.hash_password(context, password)?;
|
||||
|
||||
let rec = sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO users (username, password_hash)
|
||||
VALUES ( $1, $2 )
|
||||
RETURNING id
|
||||
"#,
|
||||
username,
|
||||
hashed_password
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(rec.id.to_string())
|
||||
}
|
||||
|
||||
async fn validate_user(
|
||||
&self,
|
||||
context: &Context,
|
||||
username: String,
|
||||
password: String,
|
||||
) -> anyhow::Result<Option<String>> {
|
||||
let rec = sqlx::query!(
|
||||
r#"
|
||||
SELECT * from users
|
||||
where username=$1
|
||||
"#,
|
||||
username,
|
||||
)
|
||||
.fetch_optional(&self.pool)
|
||||
.await?;
|
||||
|
||||
match rec {
|
||||
Some(user) => match self.validate_password(context, password, user.password_hash)? {
|
||||
true => Ok(Some(user.id.to_string())),
|
||||
false => Ok(None),
|
||||
},
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
}
|
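A sketch of the intended signup/login flow through `UserService`, based only on the trait shown earlier; the username and password are placeholders:

```rust
use como_core::users::UserService;
use como_domain::Context;

async fn signup_then_login(users: &dyn UserService, ctx: &Context) -> anyhow::Result<()> {
    // add_user hashes the password with Argon2 and returns the new row's id.
    let user_id = users.add_user(ctx, "alice".into(), "hunter2".into()).await?;

    // validate_user returns Some(id) only when the stored hash matches.
    let logged_in = users.validate_user(ctx, "alice".into(), "hunter2".into()).await?;
    assert_eq!(logged_in.as_deref(), Some(user_id.as_str()));
    Ok(())
}
```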
14 crates/como_web/.gitignore vendored
@ -1,14 +0,0 @@
|
||||
# Generated by Cargo
|
||||
# will have compiled files and executables
|
||||
/target/
|
||||
pkg
|
||||
|
||||
# These are backup files generated by rustfmt
|
||||
**/*.rs.bk
|
||||
|
||||
# node e2e test tools and outputs
|
||||
node_modules/
|
||||
test-results/
|
||||
end2end/playwright-report/
|
||||
playwright/.cache/
|
||||
.cuddle/
|
2980 crates/como_web/Cargo.lock generated
File diff suppressed because it is too large
@ -1,108 +0,0 @@
|
||||
[package]
|
||||
name = "como_web"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
console_error_panic_hook = "0.1"
|
||||
console_log = "0.2"
|
||||
cfg-if = "1"
|
||||
lazy_static = "1"
|
||||
leptos = { version = "*", features = ["serde"] }
|
||||
leptos_dom = { version = "*" }
|
||||
leptos_meta = { version = "*" }
|
||||
leptos_axum = { version = "*", optional = true }
|
||||
leptos_router = { version = "*" }
|
||||
log = "0.4"
|
||||
simple_logger = "4"
|
||||
thiserror = "1"
|
||||
axum = { version = "0.6.1", optional = true }
|
||||
tower = { version = "0.4.13", optional = true }
|
||||
tower-http = { version = "0.3.4", features = ["fs"], optional = true }
|
||||
tokio = { version = "1", features = ["rt-multi-thread", "macros", "time"], optional = true }
|
||||
wasm-bindgen = "0.2"
|
||||
tracing-subscriber = { version = "0.3.16", optional = true, features = [
|
||||
"env-filter",
|
||||
] }
|
||||
tracing = { version = "0.1.37", features = ["log"], optional = true }
|
||||
anyhow = { version = "1.0.71" }
|
||||
|
||||
serde = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
uuid = { workspace = true, features = ["v4", "wasm-bindgen", "js", "serde"] }
|
||||
graphql_client = { version = "0.13.0", features = ["reqwest"] }
|
||||
reqwasm = "0.5.0"
|
||||
serde_json = "1.0.96"
|
||||
|
||||
[features]
|
||||
hydrate = ["leptos/hydrate", "leptos_meta/hydrate", "leptos_router/hydrate"]
|
||||
ssr = [
|
||||
"dep:axum",
|
||||
"dep:tower",
|
||||
"dep:tower-http",
|
||||
"dep:tokio",
|
||||
"leptos/ssr",
|
||||
"leptos_meta/ssr",
|
||||
"leptos_router/ssr",
|
||||
"leptos_dom/ssr",
|
||||
"dep:leptos_axum",
|
||||
"dep:tracing-subscriber",
|
||||
"dep:tracing",
|
||||
]
|
||||
|
||||
[package.metadata.cargo-all-features]
|
||||
denylist = ["axum", "tower", "tower-http", "tokio", "sqlx", "leptos_axum"]
|
||||
skip_feature_sets = [["ssr", "hydrate"]]
|
||||
|
||||
[package.metadata.leptos]
|
||||
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
|
||||
output-name = "como_web"
|
||||
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
|
||||
site-root = "target/site"
|
||||
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
|
||||
# Defaults to pkg
|
||||
site-pkg-dir = "pkg"
|
||||
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
|
||||
style-file = "style/output.css"
|
||||
# Assets source dir. All files found here will be copied and synchronized to site-root.
|
||||
# The assets-dir cannot have a sub directory with the same name/path as site-pkg-dir.
|
||||
#
|
||||
# Optional. Env: LEPTOS_ASSETS_DIR.
|
||||
assets-dir = "assets"
|
||||
# The IP and port (ex: 127.0.0.1:3000) where the server serves the content. Use it in your server setup.
|
||||
site-addr = "127.0.0.1:3000"
|
||||
# The port to use for automatic reload monitoring
|
||||
reload-port = 3002
|
||||
# [Optional] Command to use when running end2end tests. It will run in the end2end dir.
|
||||
# [Windows] for non-WSL use "npx.cmd playwright test"
|
||||
# This binary name can be checked in Powershell with Get-Command npx
|
||||
end2end-cmd = "npx playwright test"
|
||||
end2end-dir = "end2end"
|
||||
# The browserlist query used for optimizing the CSS.
|
||||
browserquery = "defaults"
|
||||
# Set by cargo-leptos watch when building with that tool. Controls whether autoreload JS will be included in the head
|
||||
watch = false
|
||||
# The environment Leptos will run in, usually either "DEV" or "PROD"
|
||||
env = "DEV"
|
||||
# The features to use when compiling the bin target
|
||||
#
|
||||
# Optional. Can be over-ridden with the command line parameter --bin-features
|
||||
bin-features = ["ssr"]
|
||||
|
||||
# If the --no-default-features flag should be used when compiling the bin target
|
||||
#
|
||||
# Optional. Defaults to false.
|
||||
bin-default-features = false
|
||||
|
||||
# The features to use when compiling the lib target
|
||||
#
|
||||
# Optional. Can be over-ridden with the command line parameter --lib-features
|
||||
lib-features = ["hydrate"]
|
||||
|
||||
# If the --no-default-features flag should be used when compiling the lib target
|
||||
#
|
||||
# Optional. Defaults to false.
|
||||
lib-default-features = false
|
@ -1,67 +0,0 @@
|
||||
<picture>
|
||||
<source srcset="https://raw.githubusercontent.com/leptos-rs/leptos/main/docs/logos/Leptos_logo_Solid_White.svg" media="(prefers-color-scheme: dark)">
|
||||
<img src="https://raw.githubusercontent.com/leptos-rs/leptos/main/docs/logos/Leptos_logo_RGB.svg" alt="Leptos Logo">
|
||||
</picture>
|
||||
|
||||
# Leptos Starter Template
|
||||
|
||||
This is a template for use with the [Leptos](https://github.com/leptos-rs/leptos) web framework and the [cargo-leptos](https://github.com/akesson/cargo-leptos) tool.
|
||||
|
||||
## Creating your template repo
|
||||
|
||||
If you don't have `cargo-leptos` installed you can install it with
|
||||
|
||||
`cargo install cargo-leptos`
|
||||
|
||||
Then run
|
||||
|
||||
`cargo leptos new --git leptos-rs/start`
|
||||
|
||||
to generate a new project template.
|
||||
|
||||
`cd {projectname}`
|
||||
|
||||
to go to your newly created project.
|
||||
|
||||
Of course you should explore around the project structure, but the best place to start with your application code is in `src/app.rs`.
|
||||
|
||||
## Running your project
|
||||
|
||||
`cargo leptos watch`
|
||||
|
||||
## Installing Additional Tools
|
||||
|
||||
By default, `cargo-leptos` uses `nightly` Rust, `cargo-generate`, and `sass`. If you run into any trouble, you may need to install one or more of these tools.
|
||||
|
||||
1. `rustup toolchain install nightly --allow-downgrade` - make sure you have Rust nightly
|
||||
2. `rustup target add wasm32-unknown-unknown` - add the ability to compile Rust to WebAssembly
|
||||
3. `cargo install cargo-generate` - install `cargo-generate` binary (should be installed automatically in future)
|
||||
4. `npm install -g sass` - install `dart-sass` (should be optional in future)
|
||||
|
||||
## Executing a Server on a Remote Machine Without the Toolchain
|
||||
After running a `cargo leptos build --release` the minimum files needed are:
|
||||
|
||||
1. The server binary located in `target/server/release`
|
||||
2. The `site` directory and all files within located in `target/site`
|
||||
|
||||
Copy these files to your remote server. The directory structure should be:
|
||||
```text
|
||||
como_web
|
||||
site/
|
||||
```
|
||||
Set the following environment variables (updating for your project as needed):
|
||||
```text
|
||||
LEPTOS_OUTPUT_NAME="como_web"
|
||||
LEPTOS_SITE_ROOT="site"
|
||||
LEPTOS_SITE_PKG_DIR="pkg"
|
||||
LEPTOS_SITE_ADDR="127.0.0.1:3000"
|
||||
LEPTOS_RELOAD_PORT="3001"
|
||||
```
|
||||
Finally, run the server binary.
|
||||
|
||||
## Notes about SSG and Trunk:
|
||||
Although it is not recommended, you can also run your project without server integration using the feature `csr` and `trunk serve`:
|
||||
|
||||
`trunk serve --open --features csr`
|
||||
|
||||
This may be useful for integrating external tools which require a static site, e.g. `tauri`.
|
Binary file not shown.
@ -1,27 +0,0 @@
|
||||
yaml-language-server: $schema=https://git.front.kjuulh.io/kjuulh/cuddle/raw/branch/main/schemas/base.json
|
||||
|
||||
base: "git@git.front.kjuulh.io:kjuulh/cuddle-rust-plan.git"
|
||||
|
||||
vars:
|
||||
service: "como-web"
|
||||
deployments: "git@git.front.kjuulh.io:como/deployments.git"
|
||||
|
||||
scripts:
|
||||
render_como_templates:
|
||||
type: shell
|
||||
local_up:
|
||||
type: shell
|
||||
local_down:
|
||||
type: shell
|
||||
"tailwind:watch":
|
||||
type: "shell"
|
||||
"tailwind:build":
|
||||
type: "shell"
|
||||
"leptos:dev":
|
||||
type: "shell"
|
||||
"dev":
|
||||
type: "shell"
|
||||
"nodev":
|
||||
type: "shell"
|
||||
"refresh:schema":
|
||||
type: "shell"
|
74 crates/como_web/end2end/package-lock.json generated
@ -1,74 +0,0 @@
|
||||
{
|
||||
"name": "end2end",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "end2end",
|
||||
"version": "1.0.0",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.28.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@playwright/test": {
|
||||
"version": "1.28.0",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.28.0.tgz",
|
||||
"integrity": "sha512-vrHs5DFTPwYox5SGKq/7TDn/S4q6RA1zArd7uhO6EyP9hj3XgZBBM12ktMbnDQNxh/fL1IUKsTNLxihmsU38lQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"playwright-core": "1.28.0"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "18.11.9",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.9.tgz",
|
||||
"integrity": "sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/playwright-core": {
|
||||
"version": "1.28.0",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.28.0.tgz",
|
||||
"integrity": "sha512-nJLknd28kPBiCNTbqpu6Wmkrh63OEqJSFw9xOfL9qxfNwody7h6/L3O2dZoWQ6Oxcm0VOHjWmGiCUGkc0X3VZA==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@playwright/test": {
|
||||
"version": "1.28.0",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.28.0.tgz",
|
||||
"integrity": "sha512-vrHs5DFTPwYox5SGKq/7TDn/S4q6RA1zArd7uhO6EyP9hj3XgZBBM12ktMbnDQNxh/fL1IUKsTNLxihmsU38lQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/node": "*",
|
||||
"playwright-core": "1.28.0"
|
||||
}
|
||||
},
|
||||
"@types/node": {
|
||||
"version": "18.11.9",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.9.tgz",
|
||||
"integrity": "sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg==",
|
||||
"dev": true
|
||||
},
|
||||
"playwright-core": {
|
||||
"version": "1.28.0",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.28.0.tgz",
|
||||
"integrity": "sha512-nJLknd28kPBiCNTbqpu6Wmkrh63OEqJSFw9xOfL9qxfNwody7h6/L3O2dZoWQ6Oxcm0VOHjWmGiCUGkc0X3VZA==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
{
|
||||
"name": "end2end",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.28.0"
|
||||
}
|
||||
}
|
@ -1,107 +0,0 @@
|
||||
import type { PlaywrightTestConfig } from "@playwright/test";
|
||||
import { devices } from "@playwright/test";
|
||||
|
||||
/**
|
||||
* Read environment variables from file.
|
||||
* https://github.com/motdotla/dotenv
|
||||
*/
|
||||
// require('dotenv').config();
|
||||
|
||||
/**
|
||||
* See https://playwright.dev/docs/test-configuration.
|
||||
*/
|
||||
const config: PlaywrightTestConfig = {
|
||||
testDir: "./tests",
|
||||
/* Maximum time one test can run for. */
|
||||
timeout: 30 * 1000,
|
||||
expect: {
|
||||
/**
|
||||
* Maximum time expect() should wait for the condition to be met.
|
||||
* For example in `await expect(locator).toHaveText();`
|
||||
*/
|
||||
timeout: 5000,
|
||||
},
|
||||
/* Run tests in files in parallel */
|
||||
fullyParallel: true,
|
||||
/* Fail the build on CI if you accidentally left test.only in the source code. */
|
||||
forbidOnly: !!process.env.CI,
|
||||
/* Retry on CI only */
|
||||
retries: process.env.CI ? 2 : 0,
|
||||
/* Opt out of parallel tests on CI. */
|
||||
workers: process.env.CI ? 1 : undefined,
|
||||
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
|
||||
reporter: "html",
|
||||
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
|
||||
use: {
|
||||
/* Maximum time each action such as `click()` can take. Defaults to 0 (no limit). */
|
||||
actionTimeout: 0,
|
||||
/* Base URL to use in actions like `await page.goto('/')`. */
|
||||
// baseURL: 'http://localhost:3000',
|
||||
|
||||
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
|
||||
trace: "on-first-retry",
|
||||
},
|
||||
|
||||
/* Configure projects for major browsers */
|
||||
projects: [
|
||||
{
|
||||
name: "chromium",
|
||||
use: {
|
||||
...devices["Desktop Chrome"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "firefox",
|
||||
use: {
|
||||
...devices["Desktop Firefox"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "webkit",
|
||||
use: {
|
||||
...devices["Desktop Safari"],
|
||||
},
|
||||
},
|
||||
|
||||
/* Test against mobile viewports. */
|
||||
// {
|
||||
// name: 'Mobile Chrome',
|
||||
// use: {
|
||||
// ...devices['Pixel 5'],
|
||||
// },
|
||||
// },
|
||||
// {
|
||||
// name: 'Mobile Safari',
|
||||
// use: {
|
||||
// ...devices['iPhone 12'],
|
||||
// },
|
||||
// },
|
||||
|
||||
/* Test against branded browsers. */
|
||||
// {
|
||||
// name: 'Microsoft Edge',
|
||||
// use: {
|
||||
// channel: 'msedge',
|
||||
// },
|
||||
// },
|
||||
// {
|
||||
// name: 'Google Chrome',
|
||||
// use: {
|
||||
// channel: 'chrome',
|
||||
// },
|
||||
// },
|
||||
],
|
||||
|
||||
/* Folder for test artifacts such as screenshots, videos, traces, etc. */
|
||||
// outputDir: 'test-results/',
|
||||
|
||||
/* Run your local dev server before starting the tests */
|
||||
// webServer: {
|
||||
// command: 'npm run start',
|
||||
// port: 3000,
|
||||
// },
|
||||
};
|
||||
|
||||
export default config;
|
@ -1,9 +0,0 @@
|
||||
import { test, expect } from "@playwright/test";
|
||||
|
||||
test("homepage has title and links to intro page", async ({ page }) => {
|
||||
await page.goto("http://localhost:3000/");
|
||||
|
||||
await expect(page).toHaveTitle("Welcome to Leptos");
|
||||
|
||||
await expect(page.locator("h1")).toHaveText("Welcome to Leptos!");
|
||||
});
|
@ -1,26 +0,0 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
html, body {
|
||||
scroll-behavior: smooth;
|
||||
min-height: 100%;
|
||||
|
||||
@apply bg-gray-100 dark:bg-gray-900 text-gray-900 dark:text-gray-100;
|
||||
}
|
||||
|
||||
.feature-case {
|
||||
@apply m-8 border-blue-700 border-2 rounded-lg p-4;
|
||||
}
|
||||
|
||||
.dashboard-list-item {
|
||||
@apply flex flex-col justify-center hover:dark:bg-blue-900 cursor-pointer select-none px-4 py-2 border-y border-y-gray-800;
|
||||
}
|
||||
|
||||
.dashboard-list-project {
|
||||
@apply dark:bg-gray-800 hover:dark:bg-blue-900 text-gray-300;
|
||||
}
|
||||
|
||||
.dashboard-item {
|
||||
@apply pl-6
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
max_width = 100
|
||||
tab_spaces = 4
|
||||
attr_value_brace_style = "WhenRequired" # "Always", "AlwaysUnlessLit", "WhenRequired" or "Preserve"
|
@ -1,3 +0,0 @@
|
||||
|
||||
[toolchain]
|
||||
channel = "nightly"
|
@ -1,6 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
zellij run -- sh -c "cuddle x leptos:dev"
|
||||
|
||||
zellij run -- sh -c "cuddle x tailwind:watch"
|
||||
|
@ -1,3 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
cargo leptos watch
|
@ -1,3 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
tmux kill-window -t dev || true
|
@ -1,22 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
graphql-client introspect-schema \
|
||||
http://localhost:3001/graphql \
|
||||
--header "Authorization: Basic $COMO_GATEWAY_PAT" \
|
||||
--output src/api/graphql/schema/schema.json
|
||||
|
||||
graphql-client generate \
|
||||
--schema-path src/api/graphql/schema/schema.json \
|
||||
src/features/navbar_projects/graphql/queries.graphql \
|
||||
--output-directory src/features/navbar_projects/gen \
|
||||
--custom-scalars-module='crate::common::graphql' \
|
||||
--variables-derives='Clone,Debug' \
|
||||
--response-derives='Clone,Debug'
|
||||
|
||||
graphql-client generate \
|
||||
--schema-path src/api/graphql/schema/schema.json \
|
||||
src/features/dashboard_list_view/graphql/queries.graphql \
|
||||
--output-directory src/features/dashboard_list_view/gen \
|
||||
--custom-scalars-module='crate::common::graphql' \
|
||||
--variables-derives='Clone,Debug' \
|
||||
--response-derives='Clone,Debug'
|
@ -1,3 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
npx tailwindcss -i ./input.css -o ./style/output.css
|
@ -1,3 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
npx tailwindcss -i ./input.css -o ./style/output.css --watch
|
File diff suppressed because it is too large
@ -1,2 +0,0 @@
|
||||
#[cfg(feature = "ssr")]
|
||||
pub fn register() {}
|
@ -1,55 +0,0 @@
use leptos::*;
use leptos_meta::*;
use leptos_router::*;

use crate::common::layout::DashboardLayout;
use crate::routes::dash::home::DashHomePage;
use crate::routes::features_view::FeaturesView;
use crate::routes::home::HomePage;

#[component]
pub fn App() -> impl IntoView {
    // Provides context that manages stylesheets, titles, meta tags, etc.
    provide_meta_context();

    view! {
        <Stylesheet id="leptos" href="/pkg/como_web.css"/>
        <Router>
            <main>
                <Routes>
                    <Route
                        path=""
                        view=|| {
                            view! { <HomePage/> }
                        }
                    />
                    <Route
                        path="/dash"
                        view=|| {
                            view! { <DashboardLayout/> }
                        }
                    >
                        <Route
                            path=""
                            view=|| {
                                view! { <DashHomePage/> }
                            }
                        />
                        <Route
                            path="home"
                            view=|| {
                                view! { <DashHomePage/> }
                            }
                        />
                    </Route>
                    <Route
                        path="/features"
                        view=|| {
                            view! { <FeaturesView/> }
                        }
                    />
                </Routes>
            </main>
        </Router>
    }
}
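// Routing note: the child routes declared under "/dash" render into the
// `<Outlet/>` that `DashboardLayout` (in `common::layout`) nests inside its
// `<CommandLine>` wrapper, so every dashboard page gets the nav and the
// command line.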
@ -1,2 +0,0 @@
pub mod layout;
pub mod graphql;
@ -1 +0,0 @@
// Custom scalar mapping used by the generated GraphQL client code; the codegen
// script points `--custom-scalars-module='crate::common::graphql'` at this module.
pub type UUID = uuid::Uuid;
@ -1,49 +0,0 @@
use leptos::*;
use leptos_router::*;

use crate::features::command_line::CommandLine;
use crate::features::navbar_projects::NavbarProjects;

#[component]
pub fn DashNav() -> impl IntoView {
    view! {
        <nav class="min-w-[200px] p-4 space-y-4 h-screen sticky top-0 select-none bg-gray-800">
            <div>
                <a href="/dash/home" class="text-xl">
                    "como"
                </a>
            </div>
            <div>
                <a href="/dash/current" class="">
                    "inbox"
                </a>
            </div>
            <div>
                <p class="text-sm mb-0.5 dark:text-gray-500">"Favorites"</p>
                <a href="/dash/current" class="dark:text-gray-300 pl-2">
                    "inbox"
                </a>
            </div>
            <div>
                <p class="text-sm mb-0.5 dark:text-gray-500">"Projects"</p>
                <div class="pl-2 dark:text-gray-300">
                    <NavbarProjects/>
                </div>
            </div>
        </nav>
    }
}

#[component]
pub fn DashboardLayout() -> impl IntoView {
    view! {
        <div class="flex flex-row">
            <DashNav/>
            <div id="content" class="px-0.5 flex-grow">
                <CommandLine>
                    <Outlet/>
                </CommandLine>
            </div>
        </div>
    }
}
@ -1,43 +0,0 @@
use cfg_if::cfg_if;

cfg_if! { if #[cfg(feature = "ssr")] {
    use axum::{
        body::{boxed, Body, BoxBody},
        extract::State,
        response::IntoResponse,
        http::{Request, Response, StatusCode, Uri},
    };
    use axum::response::Response as AxumResponse;
    use tower::ServiceExt;
    use tower_http::services::ServeDir;
    use leptos::{LeptosOptions, view};
    use crate::app::App;

    pub async fn file_and_error_handler(uri: Uri, State(options): State<LeptosOptions>, req: Request<Body>) -> AxumResponse {
        let root = options.site_root.clone();
        let res = get_static_file(uri.clone(), &root).await.unwrap();

        if res.status() == StatusCode::OK {
            res.into_response()
        } else {
            let handler = leptos_axum::render_app_to_stream(
                options.to_owned(),
                move || view! { <App/> }
            );
            handler(req).await.into_response()
        }
    }

    async fn get_static_file(uri: Uri, root: &str) -> Result<Response<BoxBody>, (StatusCode, String)> {
        let req = Request::builder().uri(uri.clone()).body(Body::empty()).unwrap();
        // `ServeDir` implements `tower::Service` so we can call it with `tower::ServiceExt::oneshot`
        // This path is relative to the cargo root
        match ServeDir::new(root).oneshot(req).await {
            Ok(res) => Ok(res.map(boxed)),
            Err(err) => Err((
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Something went wrong: {err}"),
            )),
        }
    }
}}
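// Serving note: requests are first resolved against the static assets in
// `site_root`; anything the file service does not answer with `200 OK` falls
// back to server-side rendering the `App` via `leptos_axum::render_app_to_stream`.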
@ -1,3 +0,0 @@
pub mod command_line;
pub mod dashboard_list_view;
pub mod navbar_projects;
@ -1,56 +0,0 @@
use leptos::*;

#[derive(Clone, Debug)]
pub struct CommandLineState {
    hidden: bool,
}

#[component]
pub fn CommandLineModalView() -> impl IntoView {
    view! { <div>"modal"</div> }
}

#[component]
pub fn CommandLineModal() -> impl IntoView {
    let state =
        use_context::<RwSignal<CommandLineState>>().expect("command line state must be provided");

    let (hidden, _) = create_slice(state, |state| state.hidden, |state, n| state.hidden = n);

    view! {
        {move || {
            if !hidden.get() {
                view! { <CommandLineModalView/> }
            } else {
                view! { }.into_view()
            }
        }}
    }
}

#[component]
pub fn CommandLine(children: Children) -> impl IntoView {
    let state = create_rw_signal(CommandLineState { hidden: true });
    provide_context(state);
    let (hidden, set_hidden) =
        create_slice(state, |state| state.hidden, |state, n| state.hidden = n);

    leptos_dom::helpers::window_event_listener(ev::keypress, move |event| {
        if event.ctrl_key() {
            match event.code().as_str() {
                "KeyK" => {
                    set_hidden.set(!hidden.get());
                    //log!("toggle command")
                }
                _ => {}
            }
        }
    });

    view! {
        <div>
            <div>{children()}</div>
            <CommandLineModal/>
        </div>
    }
}
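// A minimal sketch of toggling the palette from the UI as well: a hypothetical
// `CommandLineToggle` button (not a component in this crate), assumed to live
// next to `CommandLine` so it can reuse the `CommandLineState` context and the
// same `create_slice` wiring shown above.
#[component]
pub fn CommandLineToggle() -> impl IntoView {
    let state =
        use_context::<RwSignal<CommandLineState>>().expect("command line state must be provided");
    // Read and write the shared `hidden` flag, mirroring the Ctrl+K handler.
    let (hidden, set_hidden) =
        create_slice(state, |state| state.hidden, |state, n| state.hidden = n);

    view! {
        <button on:click=move |_| set_hidden.set(!hidden.get())>"command line"</button>
    }
}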
@ -1,4 +0,0 @@
pub mod dashboard_list_view;
pub mod gen;

pub use dashboard_list_view::*;
@ -1,93 +0,0 @@
use graphql_client::{GraphQLQuery, Response};
use leptos::*;

use crate::features::dashboard_list_view::gen::queries::get_projects_list_view::GetProjectsListViewGetProjectsItems;

use super::gen::queries::get_projects_list_view::{
    GetProjectsListViewGetProjects, ResponseData, Variables,
};
use super::gen::queries::GetProjectsListView;

pub async fn get_projects_list() -> anyhow::Result<Vec<GetProjectsListViewGetProjects>> {
    let request_body = GetProjectsListView::build_query(Variables {});
    let payload = serde_json::to_string(&request_body)?;
    let res = reqwasm::http::Request::post("http://localhost:3001/graphql")
        .credentials(reqwasm::http::RequestCredentials::Include)
        .body(payload)
        .send()
        .await?;
    let response_body: Response<ResponseData> = res.json().await?;
    Ok(response_body
        .data
        .ok_or(anyhow::anyhow!("failed to get projects list"))?
        .get_projects)
}

#[component]
pub fn DashboardItemView(item: GetProjectsListViewGetProjectsItems) -> impl IntoView {
    view! { <div class="dashboard-list-item dashboard-item">{item.title}</div> }
}

#[component]
pub fn DashboardProjectItemView(project: GetProjectsListViewGetProjects) -> impl IntoView {
    view! {
        <div class="dashboard-list-item dashboard-list-project">
            <a href=format!("/dash/project/{}", &project.id) class="project-item flex flex-row">
                <div class="space-x-2">
                    <span>{&project.name}</span>
                    <span class="text-gray-50">{project.items.len()}</span>
                    <span class="flex-grow"></span>
                </div>
            </a>
        </div>
    }
}

#[component]
pub fn DashboardListView(
    projects: Resource<(), Vec<GetProjectsListViewGetProjects>>,
) -> impl IntoView {
    let projects_view = move || {
        projects.with(|projects| {
            if projects.is_none() {
                return Vec::new();
            }
            let projects = projects.as_ref().unwrap();

            if projects.is_empty() {
                return vec![view! { <div class="project-item">"No projects"</div> }.into_any()];
            }

            projects
                .into_iter()
                .filter(|project| !project.items.is_empty())
                .map(|project| {
                    view! {
                        <div>
                            <DashboardProjectItemView project=project.clone()/>
                            {&project
                                .items
                                .clone()
                                .into_iter()
                                .map(|item| {
                                    view! { <DashboardItemView item=item/> }
                                })
                                .collect::<Vec<_>>()
                                .into_view()}
                        </div>
                    }
                    .into_any()
                })
                .collect::<Vec<_>>()
        })
    };

    view! { <div class="project-items">{projects_view}</div> }
}

#[component]
pub fn DashboardList() -> impl IntoView {
    let projects = create_local_resource(|| (), |_| async { get_projects_list().await.unwrap() });

    view! { <DashboardListView projects=projects/> }
}
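// The `GetProjectsListView` query itself lives in `graphql/queries.graphql`
// (not shown in this hunk); judging from the field accesses above it appears
// to select roughly `getProjects { id name items { title } }`.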
Some files were not shown because too many files have changed in this diff