6 Commits

24 changed files with 178 additions and 308 deletions

.dockerignore Normal file (6 lines changed)

@@ -0,0 +1,6 @@
**
!src
!Cargo.toml
!Cargo.lock
!flake.nix
!flake.lock

.github/workflows/docker.yml vendored Normal file (23 lines changed)

@@ -0,0 +1,23 @@
name: Docker
on:
  push:
    branches: ["main", "feature/docker"]
  pull_request:
    branches: ["main"]
env:
  CARGO_TERM_COLOR: always
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: hustcer/setup-nu@v3
      - name: Get Cargo version
        run: version=$(nu -c "cargo metadata --format-version=1 --no-deps | from json | get packages | first | get version")
      - name: Build docker image
        run: docker build -t docki:$version .
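
Note on the two steps above: a shell variable set in one `run` step is not visible to later steps, so `$version` would normally have to be exported through `$GITHUB_ENV` for the build step to see it; the removed deploy workflow below uses exactly that mechanism with a `CARGO_VERSION` variable. A minimal sketch of that pattern, not part of the diff, reusing the same nu-based extraction and step names:

```yaml
- name: Get Cargo version
  run: |
    # persist the value for later steps via GITHUB_ENV (sketch, assumes the same nu pipeline)
    version=$(nu -c "cargo metadata --format-version=1 --no-deps | from json | get packages | first | get version")
    echo "CARGO_VERSION=$version" >> "$GITHUB_ENV"
- name: Build docker image
  run: docker build -t "docki:$CARGO_VERSION" .
```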

View File

@@ -1,40 +0,0 @@
name: Deploy Docker Image to Github Container Registry
on:
  push:
    branches: ["main"]
    paths:
      - "Cargo.toml"
env:
  CARGO_TERM_COLOR: always
jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      packages: write
      contents: read
      attestations: write
      id-token: write
    steps:
      - uses: actions/checkout@v4
      - uses: hustcer/setup-nu@v3
      - uses: cachix/install-nix-action@v31
        with:
          nix_path: nixpkgs=channel:nixos-unstable
      - run: nix build .#docker
      - run: |
          export CARGO_VERSION=$(nu -c "cargo metadata --format-version=1 --no-deps | from json | get packages | first | get version")
          echo "CARGO_VERSION=$CARGO_VERSION" >> $GITHUB_ENV
      - run: docker load -i result
      - name: Log in to registry
        run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
      - run: docker tag docki:latest "ghcr.io/quirinecker/docki:$CARGO_VERSION"
      - run: docker tag docki:latest "ghcr.io/quirinecker/docki:latest"
      - run: docker push ghcr.io/quirinecker/docki:$CARGO_VERSION
      - run: docker push ghcr.io/quirinecker/docki:latest

.github/workflows/rust.yml vendored Normal file (29 lines changed)

@@ -0,0 +1,29 @@
name: Rust
on:
  push:
    branches: [ "main" ]
    paths:
      - 'Cargo.toml'
  pull_request:
    branches: [ "main" ]
env:
  CARGO_TERM_COLOR: always
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Build
        run: cargo build --verbose
      - name: Run tests
        run: cargo test --verbose
      - name: Publish to crates.io
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_TOKEN }}
        run: |
          cargo publish --verbose
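
Note: with the triggers above, the publish step also runs on pull requests. If publishing is only meant to happen on pushes to main, a step-level condition is the usual guard; a sketch, not part of the diff (the `if:` expression is the standard GitHub Actions syntax, everything else is copied from the step above):

```yaml
- name: Publish to crates.io
  # hypothetical guard: skip publishing on pull_request runs
  if: github.event_name == 'push'
  env:
    CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_TOKEN }}
  run: cargo publish --verbose
```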

View File

@@ -1,26 +0,0 @@
name: Deploy Rust to Crates.io
on:
  push:
    branches: ["main"]
    paths:
      - "Cargo.toml"
env:
  CARGO_TERM_COLOR: always
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Build
        run: cargo build --verbose
      - name: Run tests
        run: cargo test --verbose
      - name: Publish to crates.io
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_TOKEN }}
        run: |
          cargo publish --verbose

View File

@@ -1,24 +0,0 @@
name: Test Rust
on:
  push:
    branches: ["main", "develop"]
    paths:
      - "./src/**"
      - "./Cargo.toml"
  pull_request:
    branches: ["main", "develop"]
env:
  CARGO_TERM_COLOR: always
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Build
        run: cargo build --verbose
      - name: Run tests
        run: cargo test --verbose

.gitlab-ci.yml Normal file (20 lines changed)

@@ -0,0 +1,20 @@
workflow:
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main"'
default:
  image: 'quirinecker/rust-openssl'
build:
  script:
    - cargo build
test:
  script:
    - cargo test
# publish:
#   script:
#     - export CARGO_REGISTRY_TOKEN=$CARGO_TOKEN
#     - cargo publish

View File

Binary image changed (Before: 18 MiB, After: 18 MiB).

Cargo.lock generated (39 lines changed)

@@ -630,6 +630,17 @@ dependencies = [
 "unicode-width",
]
+[[package]]
+name = "colored"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd"
+dependencies = [
+ "atty",
+ "lazy_static",
+ "winapi 0.3.9",
+]
[[package]]
name = "concurrent-queue"
version = "2.1.0"
@@ -848,16 +859,16 @@ checksum = "212d0f5754cb6769937f4501cc0e67f4f4483c8d2c3e1e922ee9edbe4ab4c7c0"
[[package]]
name = "docki"
-version = "1.2.3"
+version = "1.2.1"
dependencies = [
 "bytes",
 "clap 4.1.8",
 "clap_complete",
+ "colored",
 "futures",
 "home",
 "live-server",
 "notify 5.1.0",
- "nu-ansi-term",
 "regex",
 "reqwest",
 "text_io",
@@ -1861,15 +1872,6 @@ dependencies = [
 "windows-sys 0.42.0",
]
-[[package]]
-name = "nu-ansi-term"
-version = "0.50.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
-dependencies = [
- "windows-sys 0.61.2",
-]
[[package]]
name = "num-integer"
version = "0.1.45"
@@ -3204,12 +3206,6 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
-[[package]]
-name = "windows-link"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-sys"
version = "0.42.0"
@@ -3234,15 +3230,6 @@ dependencies = [
 "windows-targets",
]
-[[package]]
-name = "windows-sys"
-version = "0.61.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
-dependencies = [
- "windows-link",
-]
[[package]]
name = "windows-targets"
version = "0.42.1"

View File

@@ -1,17 +1,20 @@
[package]
name = "docki"
-version = "1.2.3"
+version = "1.2.1"
edition = "2021"
description = "cli for building and publishing documentation using asciidoctor"
license-file = "LICENSE.txt"
authors = ["Quirin Ecker"]
-exclude = [".gitlab", ".github"]
+exclude = [
+    ".gitlab"
+]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
bytes = "1.4.0"
clap = { version = "4.1.8", features = ["derive"] }
+colored = "2.0.0"
futures = "0.3.26"
home = "0.5.4"
live-server = "0.6.0"
@@ -22,4 +25,3 @@ text_io = "0.1.12"
tokio = { version = "1.26.0", features = ["full"] }
zip-extract = "0.1.1"
clap_complete = "4.1.4"
-nu-ansi-term = "0.50.3"

Dockerfile Normal file (11 lines changed)

@@ -0,0 +1,11 @@
FROM nixos/nix AS build
WORKDIR /app
COPY . /app
RUN nix --extra-experimental-features nix-command --extra-experimental-features flakes build \
&& nix --extra-experimental-features nix-command --extra-experimental-features flakes store gc
RUN mkdir /out && cp result/bin/docki .

View File

@@ -6,7 +6,7 @@
## Preview
-![screencast](.github/assets/screencast.gif)
+![screencast](.gitlab/screencast.gif)
Docki is cli for converting asciidoctor files into html files.
@@ -29,26 +29,29 @@ docki serve
## Installation
-### Homebrew
+### Nix
+This installation method is recommended, because it will include both asciidoctor and asciidoctor_revealjs.
+Note: This is the most basic installation. If you are already more experienced, you might want to add it to your shell or home manager configuration.
```shell
-brew tap quirinecker/homebrew-docki https://github.com/quirinecker/homebrew-docki
+nix profile install github:quirinecker/docki
+```
+### Homebrew
+> [!NOTE]
+> Installing it via homebrew will not include asciidoctor_revealjs. It can be installed afterwards with `docki install-reveal`
+```shell
+brew tap quirinecker/docki-homebrew https://github.com/quirinecker/docki-homebrew
```
```
brew install docki
```
-### Nix
-If you just want to try it out real quick and the nix package manager is available on your system you can use the following command.
-```shell
-nix develop github:quirinecker/docki#preview
-```
-This will open a shell evnironment with docki installed. If you want to install it permanently with nix, i would recommend following the instructions in the [Nix (Advanced, Flake)](#nix-advanced-flake) section.
### Cargo
> [!NOTE]
@@ -58,19 +61,6 @@ This will open a shell evnironment with docki installed. If you want to install
cargo install docki
```
-### Docker
-There is also a docker image available to use. It is primarily used for the gh actions.
-```shell
-docker pull ghcr.io/quirinecker/docki:latest
-```
-You can also build it yourself with nix.
-```
-nix build .#docker && docker load -i result
-```
### Nix (Advanced, Flake)

View File

@@ -120,26 +120,6 @@
          naerskLib = naerskLib;
          pkgs = pkgs;
        };
-        docker = pkgs.dockerTools.buildImage {
-          name = "docki";
-          tag = "latest";
-          config = {
-            WorkingDir = "/app";
-          };
-          copyToRoot = pkgs.buildEnv {
-            name = "docki-docker";
-            paths = [
-              pkgs.coreutils
-              pkgs.bash
-              pkgs.cacert
-              (build_docki {
-                naerskLib = naerskLib;
-                pkgs = pkgs;
-              })
-            ];
-          };
-        };
      }
    );
  };

View File

@@ -1,11 +0,0 @@
= My cool presentation
:author: John Doe
:email: john@doe.com
== First slide
Content
== Second slide
Content

View File

@@ -16,15 +16,10 @@ pub enum ShellArg {
#[derive(Subcommand)]
pub enum CommandArg {
    /// Builds the documentation into a dist folder
-    Build {
-        /// When set to true, docki will download revealjs before building the documentation.
-        /// Otherwise it will use the cdn for revealjs
-        #[arg(short, long)]
-        offline_reveal: bool,
-    },
+    Build,
    /// Checks if everything required for docki is installed
    Health,
-    /// Deprecated: Helper command for installing asciidoctor-reveal-js
+    /// Helper command for installing asciidoctor-reveal-js
    InstallReveal,
    /// Starts a Webserver with the live preview of the Documentation
    Serve {

View File

@@ -1,4 +1,3 @@
-use regex::Regex;
use std::process;
fn exec_command(command: &mut process::Command) -> Result<(), String> {
@@ -28,14 +27,9 @@ fn asciidoctor_docs(in_path: &str, out_path: &str) -> process::Command {
    return command;
}
-fn asciidoctor_slides(in_path: &str, out_path: &str, offline_reveal: bool) -> process::Command {
+fn asciidoctor_slides(in_path: &str, out_path: &str) -> process::Command {
    let mut command = process::Command::new(format!("asciidoctor-revealjs"));
-    let out_dir = parent_path(out_path);
-    let revealjs_path = if offline_reveal {
-        path_between(out_dir.to_string(), "./dist/slides/revealjs".to_string())
-    } else {
-        "https://cdn.jsdelivr.net/npm/reveal.js@5.2.1".to_string()
-    };
+    let revealjs_path = "/slides/revealjs";
    command
        .arg(format!("{in_path}"))
@@ -46,67 +40,13 @@ fn asciidoctor_slides(in_path: &str, out_path: &str, offline_reveal: bool) -> pr
    return command;
}
-fn parent_path(child_path: &str) -> String {
-    let split: Vec<&str> = child_path.split("/").collect();
-    let slice = &split[..split.len() - 1];
-    return slice.join("/");
-}
-pub fn path_between(from: String, to: String) -> String {
-    let from_segments = transform_input_to_clone_split(&from);
-    let to_segments = transform_input_to_clone_split(&to);
-    let last_matching_index = matching_from_start(&from_segments, &to_segments);
-    let number_of_backs = from_segments.len() - last_matching_index;
-    let mut path_between = path_back(number_of_backs);
-    let path_to_to_path = &to_segments[last_matching_index..];
-    path_between.push_str(&path_to_to_path.join("/"));
-    return path_between;
-}
-fn transform_input_to_clone_split(input: &String) -> Vec<String> {
-    let regex = Regex::new(r"/$").unwrap();
-    let first_transformation = input.clone().replace("./", "");
-    return regex
-        .replace_all(&first_transformation, "")
-        .to_string()
-        .split("/")
-        .collect::<Vec<&str>>()
-        .iter()
-        .map(|s| s.to_string())
-        .collect();
-}
-fn path_back(count: usize) -> String {
-    let mut path = "".to_string();
-    for _ in 0..count {
-        path.push_str("../");
-    }
-    return path;
-}
-pub fn matching_from_start(from_segments: &Vec<String>, to_segments: &Vec<String>) -> usize {
-    for (index, from_segment) in from_segments.iter().enumerate() {
-        if let Some(to_segment) = to_segments.get(index) {
-            if from_segment != to_segment {
-                return index;
-            }
-        } else {
-            return index;
-        }
-    }
-    return from_segments.len();
-}
pub fn build_doc(in_path: &str, out_path: &str) -> Result<(), String> {
    let mut command = asciidoctor_docs(in_path, out_path);
    return exec_command(&mut command);
}
-pub fn build_slide(in_path: &str, out_path: &str, offline_reveal: bool) -> Result<(), String> {
-    let mut command = asciidoctor_slides(in_path, out_path, offline_reveal);
+pub fn build_slide(in_path: &str, out_path: &str) -> Result<(), String> {
+    let mut command = asciidoctor_slides(in_path, out_path);
    return exec_command(&mut command);
}

View File

@@ -6,12 +6,12 @@ use super::fs_util;
pub mod asciidoctor;
-pub fn docki_build(in_path: &str, offline_reveal: bool) -> DockiBuildResult {
+pub fn docki_build(in_path: &str) -> DockiBuildResult {
    let out_path = in_path.replace("/docs/", "/dist/");
    let convert_out_path = out_path.replace(".adoc", ".html");
    if in_path.starts_with("./docs/slides/") && in_path.ends_with(".adoc") {
-        if let Err(err) = build_slide(&in_path, &convert_out_path, offline_reveal) {
+        if let Err(err) = build_slide(&in_path, &convert_out_path) {
            return DockiBuildResult::Err(err);
        }
@@ -23,10 +23,6 @@ pub fn docki_build(in_path: &str, offline_reveal: bool) -> DockiBuildResult {
        DockiBuildResult::Doc(convert_out_path)
    } else {
-        if in_path.starts_with("./docs/slides/revealjs") && !offline_reveal {
-            return DockiBuildResult::Silent;
-        }
        if let Err(err) = copy(&in_path, &out_path) {
            return DockiBuildResult::Err(err);
        }
@@ -50,5 +46,4 @@ pub enum DockiBuildResult {
    Doc(String),
    Copy(String),
    Err(String),
-    Silent,
}

View File

@@ -1,6 +1,6 @@
use super::executions::build_execution::BuildExecution;
-pub async fn build(offline_reveal: bool) -> () {
+pub async fn build() -> () {
    let mut build_execution = BuildExecution::new();
-    build_execution.execute(offline_reveal).await.expect("build failed")
+    build_execution.execute().await.expect("build failed")
}

View File

@@ -21,7 +21,7 @@ impl BuildExecution {
        };
    }
-    pub async fn execute(&mut self, offline_reveal: bool) -> Result<(), String> {
+    pub async fn execute(&mut self) -> Result<(), String> {
        let path = "./docs/".to_string();
        if !fs_util::directory_exists(&path) {
@@ -30,25 +30,20 @@
            );
        }
-        if let Err(error) = Self::prepare(offline_reveal).await {
+        if let Err(error) = Self::prepare().await {
            return Err(error);
        }
-        return self.build_dir(&path, offline_reveal);
+        return self.build_dir(&path);
    }
-    async fn prepare(offline_reveal: bool) -> Result<(), String> {
-        if !offline_reveal {
-            return Ok(())
-        }
-        let reveal_version = "5.2.1";
+    async fn prepare() -> Result<(), String> {
+        let reveal_version = "3.9.2";
        let target = format!("https://github.com/hakimel/reveal.js/archive/{reveal_version}.zip");
        create_dir_recursive("./docs/slides");
-        reqwest::get(target.clone()).await.unwrap();
        let Ok(response) = reqwest::get(target).await else {
            return Err("could not downlaod revealjs".to_string())
        };
@@ -66,7 +61,7 @@
        return Ok(());
    }
-    fn build_dir(&mut self, path: &str, offline_reveal: bool) -> Result<(), String> {
+    fn build_dir(&mut self, path: &str) -> Result<(), String> {
        let result = fs_util::fetch_paths_recursive(&path);
        let Ok(paths) = result else {
@@ -76,7 +71,7 @@
        for (index, in_path) in paths.iter().enumerate() {
            self.progress = index + 1;
            self.goal = paths.len();
-            let result = docki_build(&in_path, offline_reveal);
+            let result = docki_build(&in_path);
            match result {
                DockiBuildResult::Err(err) => {
@@ -85,8 +80,7 @@
                },
                DockiBuildResult::Copy(out_path) => self.display_building_status("Copy", &in_path, &out_path),
                DockiBuildResult::Slide(out_path) => self.display_building_status("Slide", &in_path, &out_path),
-                DockiBuildResult::Doc(out_path) => self.display_building_status("Doc", &in_path, &out_path),
-                DockiBuildResult::Silent => ()
+                DockiBuildResult::Doc(out_path) => self.display_building_status("Doc", &in_path, &out_path)
            }
        }

View File

@@ -1,5 +1,5 @@
-use nu_ansi_term::Color::{LightGray, LightGreen, LightRed};
-use std::{io::ErrorKind, process::Command};
+use std::{process::Command, io::ErrorKind};
+use colored::Colorize;
const INFO_ASCIIDOC: &str = "
Install the binary with your package manager!
@@ -37,7 +37,7 @@ fn check_reveal() -> () {
}
fn reveal_is_installed() -> bool {
-    return check_command("asciidoctor-revealjs");
+    return check_command("asciidoctor-revealjs")
}
fn check_asciidoc() -> () {
@@ -49,21 +49,23 @@ fn check_asciidoc() -> () {
}
fn asciidoc_is_installed() -> bool {
-    return check_command("asciidoctor");
+    return check_command("asciidoctor")
}
fn check_command(command: &str) -> bool {
-    return match Command::new(command).output() {
+    return match Command::new(command)
+        .output() {
        Ok(_) => true,
-        Err(e) => ErrorKind::NotFound != e.kind(),
-    };
+        Err(e) => ErrorKind::NotFound != e.kind()
+    }
}
fn print_health_ok(name: &str) {
-    println!("- ✔️ {}", LightGreen.paint(name));
+    println!("- ✔️ {}", name.bright_green());
}
fn print_health_not_ok(name: &str, info: &str) {
-    println!("- ❗{}", LightRed.paint(name));
-    println!("{}", LightGray.paint(info))
+    println!("- ❗{}", name.bright_red());
+    println!("{}", info.bright_black())
}

View File

@@ -1,4 +1,4 @@
-use nu_ansi_term::Color::Green;
+use colored::Colorize;
use futures::StreamExt;
use live_server::listen;
use notify::{
@@ -11,7 +11,7 @@ use crate::app::{ watcher::watcher, build::{docki_build, DockiBuildResult}, comm
pub async fn serve(port: Option<u16>) {
-    build(false).await;
+    build().await;
    tokio::join!(watch_and_build(), start_server(port));
}
@@ -21,18 +21,15 @@
        .expect("something went wrong")
}
async fn start_server(port: Option<u16>) {
-    let port = port.unwrap_or(8080);
-    let link = &format!("http://localhost:{}", port);
-    let hyperlink = Green.paint(link).hyperlink(link);
+    let unwrapped_port = port.unwrap_or(8080);
    println!(
-        "\nServing at {}",
-        hyperlink
+        "\nServing at {}{} ",
+        "http://localhost:".bold(),
+        unwrapped_port.to_string().bold()
    );
-    let Ok(()) = listen("localhost", port, "./dist").await else {
+    let Ok(()) = listen("localhost", port.unwrap_or(8080), "./dist").await else {
        panic!("could not start server")
    };
}
@@ -62,13 +59,13 @@ fn build_file(paths: Vec<std::path::PathBuf>) {
    let in_path = paths
        .first()
        .expect(invalid_path_message)
-        .strip_prefix(&current_dir())
-        .expect(invalid_path_message)
        .to_str()
-        .expect(invalid_path_message);
-    let in_path = format!("./{}", in_path);
+        .expect(invalid_path_message)
+        .replace(&current_dir(), "")
+        .replace("/./", "./");
-    let result = docki_build(&in_path, false);
+    let result = docki_build(&in_path);
    match result {
        DockiBuildResult::Slide(out_path) => display_rebuilding_status("Slide", &in_path, &out_path),
@@ -78,7 +75,6 @@ fn build_file(paths: Vec<std::path::PathBuf>) {
            display_rebuilding_status("Error", &in_path, "");
            println!("{}", err);
        },
-        DockiBuildResult::Silent => ()
    }
}

View File

@@ -1,19 +1,20 @@
-use nu_ansi_term::Color::LightGreen;
-use nu_ansi_term::Color::LightRed;
-use nu_ansi_term::Style;
+use colored::Colorize;
pub fn display_status(context1: &str, context2: &str, in_path: &str, out_path: &str) {
-    let colored_context = if context2 == "Error" {
-        LightRed.paint(context2)
-    } else {
-        LightGreen.paint(context2)
-    };
+    let colored_context = color_context(context2);
    println!(
        "({}) [{}] {} -> {}",
-        Style::new().paint(context1),
+        context1.bold(),
        colored_context,
        in_path,
        out_path
    );
}
+fn color_context(context: &str) -> colored::ColoredString {
+    if context == "Error" {
+        return context.bright_red()
+    } else {
+        return context.bright_green()
+    }
+}

View File

@@ -23,7 +23,7 @@ impl App {
        Self::setup_environment_variables();
        match args.command {
-            CommandArg::Build { offline_reveal } => build(offline_reveal).await,
+            CommandArg::Build => build().await,
            CommandArg::Health => health(),
            CommandArg::InstallReveal => install_reveal().await,
            CommandArg::Serve { port } => serve(port).await,

View File

@@ -5,5 +5,5 @@ fn test_fetch_asciidoctor_paths_recursive() {
    let paths = fs_util::fetch_paths_recursive("res/test/docs").unwrap();
    let len = paths.len();
    dbg!(paths);
-    assert_eq!(len, 6);
+    assert_eq!(len, 5);
}