diff --git a/src/main.rs b/src/main.rs
index 388d044..afd65f9 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,35 +1,23 @@
-extern crate string_format;
-extern crate clap;
-extern crate walkdir;
-extern crate string_parser;
-extern crate dirs;
-extern crate glob;
-extern crate chrono;
-use glob::glob;
-use colored::Colorize;
-use clap::{Arg, App, SubCommand};
-use chrono::NaiveDate;
-
-//local files
-mod parser;
-mod regex;
-mod token;
-use crate::parser::*;
-use crate::regex::regex_parser;
-use crate::token::Token;
-
-//std
 use std::io::Write;
 use std::fs::OpenOptions;
 use std::env;
 use std::path::Path;
 use std::fs::File;
-use std::io::{self, BufRead};
+use std::io::{ self, BufRead };
+use glob::glob;
+use colored::Colorize;
+use clap::{ Arg, App, SubCommand };
+use chrono::NaiveDate;
+mod parser;
+mod regex;
+mod token;
+use crate::parser::*;
+use crate::regex::regex_parser;
+use crate::token::Token;
 
 fn main() -> std::io::Result<()> {
-
     let matches = App::new("Cargo-todo")
         .author("Clément Guiton ")
         .about("cargo tool to find TODOs in your code")
@@ -169,7 +157,7 @@ fn main() -> std::io::Result<()> {
         let path = entry.unwrap();
         let path = Path::new(&path).strip_prefix(env::current_dir().unwrap().to_str().unwrap()).unwrap();
         // println!("{}", path.to_str().unwrap());
-        if !path.starts_with("target/"){
+        if !path.starts_with("target/") {
             let path = path.to_str().unwrap();
             if matches.occurrences_of("verbose") == 0 || matches.occurrences_of("verbose") == 2{
@@ -181,7 +169,7 @@ fn main() -> std::io::Result<()> {
                 }
             }
             else {
-                match regex_parser(path, regex.clone(), 1){
+                match regex_parser(path, regex.clone(), 1) {
                     Ok(mut t) => {
                         tokens.append(&mut t);
                     },
@@ -193,8 +181,8 @@ fn main() -> std::io::Result<()> {
         }
     }
 
-    if matches.is_present("sort"){
-        if matches.value_of("sort").unwrap() == "priority"{
+    if matches.is_present("sort") {
+        if matches.value_of("sort").unwrap() == "priority" {
 
             fn token_priority_sort(t : &Token) -> String {
                 if t.priority.is_none() {
@@ -206,7 +194,7 @@ fn main() -> std::io::Result<()> {
             }
             tokens.sort_unstable_by_key(token_priority_sort);
         }
-        else if matches.value_of("sort").unwrap() == "deadline"{
+        else if matches.value_of("sort").unwrap() == "deadline" {
 
             fn token_deadline_sort(t : &Token) -> NaiveDate {
                 if t.date.is_none() {
@@ -218,7 +206,7 @@ fn main() -> std::io::Result<()> {
             }
             tokens.sort_unstable_by_key(token_deadline_sort);
         }
-        else if matches.value_of("sort").unwrap() == "member"{
+        else if matches.value_of("sort").unwrap() == "member" {
 
             fn token_member_sort(t : &Token) -> String {
                 if t.priority.is_none() {
@@ -232,8 +220,8 @@ fn main() -> std::io::Result<()> {
         }
     }
 
-    if matches.is_present("list"){
-        let lines = match matches.value_of("list").unwrap().parse::<usize>(){
+    if matches.is_present("list") {
+        let lines = match matches.value_of("list").unwrap().parse::<usize>() {
             Ok(lines) => lines,
             Err(_) => {
                 eprintln!("{}", "list argument should be a valid number!".red());
@@ -242,18 +230,16 @@ fn main() -> std::io::Result<()> {
 
         let mut new_tokens : Vec<Token> = Vec::new();
         for i in tokens{
-            if new_tokens.len() < lines{
+            if new_tokens.len() < lines {
                 &new_tokens.push(i.clone());
-            }
-            else
-            {
+            } else {
                 break;
             }
         }
         tokens = new_tokens;
     }
 
-    if matches.is_present("member"){
+    if matches.is_present("member") {
         let filters : Vec<&str> = matches.values_of("member").unwrap().collect();
         let mut new_tokens : Vec<Token> = Vec::new();
         for i in tokens{
@@ -269,10 +255,10 @@ fn main() -> std::io::Result<()> {
         tokens = new_tokens;
     }
 
-    if matches.is_present("filter"){
+    if matches.is_present("filter") {
         let filters : Vec<&str> = matches.values_of("filter").unwrap().collect();
         let mut new_tokens : Vec<Token> = Vec::new();
-        for i in tokens{
+        for i in tokens {
             for y in &filters {
                 if i.keyword == String::from(*y){
                     &new_tokens.push(i.clone());
@@ -284,12 +270,12 @@ fn main() -> std::io::Result<()> {
         // tokens = new.into_iter().filter(|t| t.keyword == String::from(matches.value_of("filter").unwrap())).collect();
     }
 
-    if matches.is_present("exclude"){
+    if matches.is_present("exclude") {
         let excludes : Vec<&str> = matches.values_of("exclude").unwrap().collect();
-        let mut new_tokens : Vec<Token> = Vec::new();
+        let mut new_tokens : Vec<Token> = vec![];
         for i in tokens{
             for y in 0..excludes.len() {
-                if i.keyword == String::from(excludes[y]){
+                if i.keyword == String::from(excludes[y]) {
                     break;
                 }
                 else if y == excludes.len() -1{
@@ -301,12 +287,11 @@ fn main() -> std::io::Result<()> {
         tokens = new_tokens;
         // tokens = new.into_iter().filter(|t| t.keyword == String::from(matches.value_of("filter").unwrap())).collect();
     }
-    if matches.is_present("inline"){
-        for i in tokens{
+    if matches.is_present("inline") {
+        for i in tokens {
             i.inline();
         }
-    }
-    else {
+    } else {
         for i in tokens {
             println!("{}", i);
         }
diff --git a/src/parser.rs b/src/parser.rs
index b009dad..fd86ac2 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -1,25 +1,24 @@
-extern crate string_parser;
 use string_parser::string_parser_with_file;
 use colored::Colorize;
 
 pub struct Parser{
-    keyword : String,
-    end_filter : Box) -> bool>,
-    callback : Box,
+    keyword: String,
+    end_filter: Box) -> bool>,
+    callback: Box,
 }
 
 impl Parser {
-    pub fn new(keyword : String, end_filter : Box) -> bool>) -> Parser{
+    pub fn new(keyword: String, end_filter: Box) -> bool>) -> Parser {
         let callback = Box::from(|text : String, line : usize, file : &str| {
             // let path = Path::new(file).strip_prefix(env::current_dir().unwrap().to_str().unwrap()).unwrap();
-            println!("{} {} {} {} : {}",file,"TODO".green() ,"Line ".green(), line.to_string().green(), text.blue());
+            println!("{} {} {} {} : {}", file, "TODO".green() ,"Line ".green(), line.to_string().green(), text.blue());
        });
-        Parser{keyword: keyword, end_filter : end_filter, callback}
+        Parser{keyword: keyword, end_filter: end_filter, callback}
     }
-    pub fn new_callback(keyword : String, end_filter : Box) -> bool>, callback : Box) -> Parser{
+    pub fn new_callback(keyword: String, end_filter: Box) -> bool>, callback: Box) -> Parser {
 
-        Parser{keyword: keyword, end_filter : end_filter, callback}
+        Parser{ keyword: keyword, end_filter : end_filter, callback }
     }
 
     fn get_keyword(&self) -> String {
@@ -34,7 +33,7 @@ impl Parser {
         &self.callback
     }
 
-    pub fn parse(&self, path : &str) {
+    pub fn parse(&self, path: &str) {
         string_parser_with_file(path, self.get_keyword().as_str(), self.get_end_filter(), self.get_callback()).expect("failed to open file");
     }
 }
\ No newline at end of file
diff --git a/src/regex.rs b/src/regex.rs
index 0c6564b..0cf5a57 100644
--- a/src/regex.rs
+++ b/src/regex.rs
@@ -1,29 +1,24 @@
-extern crate regex;
-
 use std::fs::File;
-use std::io::{self, BufRead};
+use std::io::{ self, BufRead };
 use std::path::Path;
-
 use regex::{RegexSet};
 use crate::token::*;
 
 // The output is wrapped in a Result to allow matching on errors
 // Returns an Iterator to the Reader of the lines of the file.
-fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
-where P: AsRef<Path>, {
+fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>> where P: AsRef<Path>, {
     let file = File::open(filename)?;
     Ok(io::BufReader::new(file).lines())
 }
 
-pub fn regex_parser(path : &str, regex : Vec<String>, verbosity : i8) -> Result<Vec<Token>, io::Error>{
-
+pub fn regex_parser(path : &str, regex : Vec<String>, verbosity : i8) -> Result<Vec<Token>, io::Error> {
     let set = RegexSet::new(regex).unwrap();
-    let mut tokens = Vec::new();
+    let mut tokens = vec![];
     let mut line_cpt = 0;
     for line in read_lines(path)? {
         line_cpt +=1;
         let line = line.unwrap();
-        if set.is_match(line.to_lowercase().as_str()){
+        if set.is_match(line.to_lowercase().as_str()) {
             tokens.push(Token::new(path.to_string(), line_cpt, line, verbosity));
             // println!("{}", t);
         }
diff --git a/src/token.rs b/src/token.rs
index ca2aefc..defe718 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -1,7 +1,3 @@
-extern crate string_format;
-extern crate regex;
-extern crate chrono;
-
 use chrono::NaiveDate;
 use string_format::string_format;
 use std::fmt;
@@ -10,18 +6,18 @@ use regex::Regex;
 
 #[derive(Clone)]
 pub struct Token{
-    file : String,
-    line : usize,
-    pub keyword : String,
-    pub comment : Option<String>,
-    pub priority : Option<String>,
-    pub date : Option<NaiveDate>,
-    pub member : Option<String>,
-    verbosity : i8,
+    file: String,
+    line: usize,
+    pub keyword: String,
+    pub comment: Option<String>,
+    pub priority: Option<String>,
+    pub date: Option<NaiveDate>,
+    pub member: Option<String>,
+    verbosity: i8,
 }
 
 impl Token {
-    pub fn new (file : String, line : usize, s : String, verbosity : i8) -> Token{
+    pub fn new(file: String, line: usize, s: String, verbosity: i8) -> Token {
         // println!("{}", s);
         let fields : Vec<&str>= s.split_whitespace().collect();
         let number_regex = Regex::new("\\b[1-9]\\b").unwrap();
@@ -35,43 +31,38 @@ impl Token {
         // }
 
         let mut t = Token {
-            file : file,
-            line : line,
-            keyword: "todo".to_string(),
-            comment : None,
-            priority : None,
-            date : None,
-            member : None,
-            verbosity : verbosity
-        };
+            file: file,
+            line: line,
+            keyword: "todo".to_string(),
+            comment: None,
+            priority: None,
+            date: None,
+            member: None,
+            verbosity: verbosity
+        };
 
         for i in 0..fields.len() {
-            if i == 0{
+            if i == 0 {
                 t.keyword = fields[0].to_string().to_lowercase();
-            }
-            else if number_regex.is_match(fields[i]) {
+            } else if number_regex.is_match(fields[i]) {
                 t.priority = Some(fields[i].to_string());
-            }
-            else if date_regex.is_match(fields[i]){
+            } else if date_regex.is_match(fields[i]) {
                 let date : Vec<&str> = fields[i].split("/").collect();
                 t.date = NaiveDate::from_ymd_opt(date[0].parse::<i32>().unwrap(), date[1].parse::<u32>().unwrap(), date[2].parse::<u32>().unwrap());
                 // t.date = Some(fields[i].to_string());
-            }
-            else if member_regex.is_match(fields[i]){
+            } else if member_regex.is_match(fields[i]) {
                 let mut member = String::new(); //from(fields[i].clone()).chars().next().map(|c| &s[c.len_utf8()..]).unwrap();
                 let it = fields[i].chars().skip(1);
-                for i in it{
+                for i in it {
                     member.push(i);
                 }
                 t.member = Some(member);
-            }
-            else {
-                if t.comment.is_none(){
+            } else {
+                if t.comment.is_none() {
                     t.comment = Some(fields[i].to_string());
-                }
-                else{
-                    t.comment = Some(string_format!("{} {}".to_string(),t.comment.unwrap(), fields[i].to_string()));
+                } else {
+                    t.comment = Some(string_format!("{} {}".to_string(),t.comment.unwrap(), fields[i].to_string()));
                 }
             }
         }
@@ -82,13 +73,13 @@ impl Token {
     pub fn inline(&self) {
         let mut s;
         s = string_format!("{} line: {} {} ".to_string(), self.file.clone(), self.line.to_string().green().to_string(), self.keyword.clone().green().to_string());
-        if self.member.is_some(){
+        if self.member.is_some() {
             s = string_format!("{} Member: {}".to_string(),s ,self.member.clone().unwrap().red().to_string());
         }
-        if self.priority.is_some(){
+        if self.priority.is_some() {
             s = string_format!("{} Priority: {}".to_string(), s, self.priority.clone().unwrap().red().to_string());
         }
-        if self.date.is_some(){
+        if self.date.is_some() {
             s = string_format!("{} Deadline: {}".to_string(), s, self.date.clone().unwrap().to_string().red().to_string());
         }
         if self.comment.is_some() {
@@ -113,19 +104,19 @@ impl fmt::Display for Token {
             }
         }
         else {
-            if self.member.is_some(){
-                s = string_format!("{}Member: {}\n".to_string(),s ,self.member.clone().unwrap().red().to_string());
+            if self.member.is_some() {
+                s = string_format!("{}Member: {}\n".to_string(),s ,self.member.clone().unwrap().red().to_string());
+            }
+            if self.priority.is_some() {
+                s = string_format!("{}Priority: {}\n".to_string(), s, self.priority.clone().unwrap().red().to_string());
+            }
+            if self.date.is_some() {
+                s = string_format!("{}Deadline: {}\n".to_string(), s, self.date.clone().unwrap().to_string().red().to_string());
+            }
+            if self.comment.is_some() {
+                s = string_format!("{}{}\n".to_string(), s, self.comment.clone().unwrap().blue().to_string());
+            }
         }
-            if self.priority.is_some(){
-                s = string_format!("{}Priority: {}\n".to_string(), s, self.priority.clone().unwrap().red().to_string());
-            }
-            if self.date.is_some(){
-                s = string_format!("{}Deadline: {}\n".to_string(), s, self.date.clone().unwrap().to_string().red().to_string());
-            }
-            if self.comment.is_some() {
-                s = string_format!("{}{}\n".to_string(), s, self.comment.clone().unwrap().blue().to_string());
-            }
-        }
         write!(f, "{}", s)?;
         Ok(())