Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
117 changes: 53 additions & 64 deletions src/core.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
use std::error::Error;
use std::fmt;
use std::fs;
use std::io::{self, prelude::*, Error, Write};
use std::io::{self, prelude::*};
use std::path::{Path, PathBuf};
use std::process;
use std::time::Duration;

use encoding_rs::Encoding;
Expand All @@ -18,6 +19,31 @@ use crate::html::{
};
use crate::url::{clean_url, create_data_url, get_referer_url, parse_data_url, resolve_url};

/// Error type returned by monolith's document-creation routines.
///
/// Carries a plain human-readable message, printed verbatim by `Display`.
#[derive(Debug)]
pub struct MonolithError {
    details: String,
}

impl MonolithError {
    /// Creates a new `MonolithError` carrying the given message.
    fn new(msg: &str) -> MonolithError {
        MonolithError {
            details: msg.to_string(),
        }
    }
}

impl fmt::Display for MonolithError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.details)
    }
}

// `Debug` + `Display` satisfy the `Error` trait; the `description()`
// method is deprecated (its default implementation defers to `Display`
// since Rust 1.27), so no override is needed.
impl Error for MonolithError {}

#[derive(Default)]
pub struct Options {
pub base_url: Option<String>,
Expand Down Expand Up @@ -46,42 +72,6 @@ pub struct Options {
pub user_agent: Option<String>,
}

enum Output {
Stdout(io::Stdout),
File(fs::File),
}

impl Output {
fn new(file_path: &str) -> Result<Output, Error> {
if file_path.is_empty() || file_path.eq("-") {
Ok(Output::Stdout(io::stdout()))
} else {
Ok(Output::File(fs::File::create(file_path)?))
}
}

fn write(&mut self, bytes: &Vec<u8>) -> Result<(), Error> {
match self {
Output::Stdout(stdout) => {
stdout.write_all(bytes)?;
// Ensure newline at end of output
if bytes.last() != Some(&b"\n"[0]) {
stdout.write(b"\n")?;
}
stdout.flush()
}
Output::File(file) => {
file.write_all(bytes)?;
// Ensure newline at end of output
if bytes.last() != Some(&b"\n"[0]) {
file.write(b"\n")?;
}
file.flush()
}
}
}
}

const ANSI_COLOR_RED: &'static str = "\x1b[31m";
const ANSI_COLOR_RESET: &'static str = "\x1b[0m";
const FILE_SIGNATURES: [[&[u8]; 2]; 18] = [
Expand Down Expand Up @@ -113,20 +103,25 @@ const PLAINTEXT_MEDIA_TYPES: &[&str] = &[
"image/svg+xml",
];

pub fn create_monolithic_file(options: &Options, mut cache: &mut Cache) {
pub fn create_monolithic_document(
options: &Options,
mut cache: &mut Cache,
) -> Result<Vec<u8>, MonolithError> {
// Check if target was provided
if options.target.len() == 0 {
if !options.silent {
eprintln!("No target specified");
}
process::exit(1);

return Err(MonolithError::new("no target specified"));
}

// Check if custom encoding value is acceptable
if let Some(custom_encoding) = options.encoding.clone() {
if !Encoding::for_label_no_replacement(custom_encoding.as_bytes()).is_some() {
eprintln!("Unknown encoding: {}", &custom_encoding);
process::exit(1);

return Err(MonolithError::new("unknown encoding specified"));
}
}

Expand All @@ -146,7 +141,8 @@ pub fn create_monolithic_file(options: &Options, mut cache: &mut Cache) {
if !options.silent {
eprintln!("Unsupported target URL type: {}", unsupported_scheme);
}
process::exit(1)

return Err(MonolithError::new("unsupported target URL type"));
}
},
Err(_) => {
Expand All @@ -165,15 +161,19 @@ pub fn create_monolithic_file(options: &Options, mut cache: &mut Cache) {
&target
);
}
process::exit(1);

return Err(MonolithError::new(
"could not generate file URL out of given path",
));
}
}
}
false => {
if !options.silent {
eprintln!("Local target is not a file: {}", &target);
}
process::exit(1);

return Err(MonolithError::new("local target is not a file"));
}
},
false => {
Expand Down Expand Up @@ -225,21 +225,11 @@ pub fn create_monolithic_file(options: &Options, mut cache: &mut Cache) {
{
match retrieve_asset(&mut cache, &client, &target_url, &target_url, &options) {
Ok((retrieved_data, final_url, media_type, charset)) => {
// Provide output as text without processing it, the way browsers do
// Provide output as text (without processing it, the way browsers do)
if !media_type.eq_ignore_ascii_case("text/html")
&& !media_type.eq_ignore_ascii_case("application/xhtml+xml")
{
// Define output
let mut output =
Output::new(&options.output).expect("Could not prepare output");

// Write retrieved data into STDOUT or file
output
.write(&retrieved_data)
.expect("Could not write output");

// Nothing else to do past this point
process::exit(0);
return Ok(retrieved_data);
}

if options
Expand All @@ -258,11 +248,12 @@ pub fn create_monolithic_file(options: &Options, mut cache: &mut Cache) {
if !options.silent {
eprintln!("Could not retrieve target document");
}
process::exit(1);

return Err(MonolithError::new("could not retrieve target document"));
}
}
} else {
process::exit(1);
return Err(MonolithError::new("unsupported target"));
}

// Initial parse
Expand Down Expand Up @@ -321,7 +312,9 @@ pub fn create_monolithic_file(options: &Options, mut cache: &mut Cache) {
custom_base_url
);
}
process::exit(1);
return Err(MonolithError::new(
"could not map given path to base URL",
));
}
}
}
Expand Down Expand Up @@ -373,11 +366,7 @@ pub fn create_monolithic_file(options: &Options, mut cache: &mut Cache) {
result.splice(0..0, metadata_comment.as_bytes().to_vec());
}

// Define output
let mut output = Output::new(&options.output).expect("Could not prepare output");

// Write result into STDOUT or file
output.write(&result).expect("Could not write output");
Ok(result)
}

pub fn detect_media_type(data: &[u8], url: &Url) -> String {
Expand Down
56 changes: 52 additions & 4 deletions src/main.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,50 @@
use std::env;
use std::fs::read_to_string;
use std::fs;
use std::io::{self, Error as IoError, Write};
use std::process;

use clap::{App, Arg, ArgAction};
use tempfile::Builder;

use monolith::cache::Cache;
use monolith::cookies::parse_cookie_file_contents;
use monolith::core::{create_monolithic_file, Options};
use monolith::core::{create_monolithic_document, Options};

/// Destination for the generated document: STDOUT or a file on disk.
enum Output {
    Stdout(io::Stdout),
    File(fs::File),
}

impl Output {
    /// Chooses the output destination based on `file_path`:
    /// an empty string or `"-"` selects STDOUT; anything else
    /// creates (or truncates) a file at that path.
    ///
    /// Returns an I/O error if the file cannot be created.
    fn new(file_path: &str) -> Result<Output, IoError> {
        if file_path.is_empty() || file_path.eq("-") {
            Ok(Output::Stdout(io::stdout()))
        } else {
            Ok(Output::File(fs::File::create(file_path)?))
        }
    }

    /// Writes `bytes` to the destination, appends a trailing newline
    /// if one is not already present, then flushes.
    fn write(&mut self, bytes: &[u8]) -> Result<(), IoError> {
        match self {
            Output::Stdout(stdout) => {
                stdout.write_all(bytes)?;
                // Ensure newline at end of output; write_all (not write)
                // so a partial write cannot drop the newline silently.
                if bytes.last() != Some(&b'\n') {
                    stdout.write_all(b"\n")?;
                }
                stdout.flush()
            }
            Output::File(file) => {
                file.write_all(bytes)?;
                // Ensure newline at end of output
                if bytes.last() != Some(&b'\n') {
                    file.write_all(b"\n")?;
                }
                file.flush()
            }
        }
    }
}

const ASCII: &'static str = " \
_____ ______________ __________ ___________________ ___
Expand Down Expand Up @@ -154,7 +191,7 @@ fn main() {

// Read and parse cookie file
if let Some(opt_cookie_file) = cookie_file_path.clone() {
match read_to_string(opt_cookie_file) {
match fs::read_to_string(opt_cookie_file) {
Ok(str) => match parse_cookie_file_contents(&str) {
Ok(parsed_cookies_from_file) => {
options.cookies = parsed_cookies_from_file;
Expand All @@ -171,5 +208,16 @@ fn main() {
}
}

create_monolithic_file(&options, &mut cache);
match create_monolithic_document(&options, &mut cache) {
Ok(result) => {
// Define output
let mut output = Output::new(&options.output).expect("Could not prepare output");

// Write result into STDOUT or file
output.write(&result).expect("Could not write output");
}
Err(_) => {
process::exit(1);
}
}
}