style: code style
144
src/main.rs
@@ -4,6 +4,7 @@ use std::env;
 use std::path::Path;
 use std::fs::File;
 use std::io::{ self, BufRead };
+use std::collections::HashSet;
 use glob::glob;
 use colored::Colorize;
 use clap::{ Arg, App, SubCommand };
@@ -115,15 +116,16 @@ fn main() -> std::io::Result<()> {
         }
     }

 {
 {
     let path = entry.unwrap();
     let path = Path::new(&path).strip_prefix(env::current_dir().unwrap().to_str().unwrap()).unwrap();
     // println!("{}", path.to_str().unwrap());
     if !path.starts_with("target/") {
         let path = path.to_str().unwrap();

-        if matches.occurrences_of("verbose") == 0 || matches.occurrences_of("verbose") == 2 {
-            match regex_parser(path, regex.clone(), 2){
+        let verbose_count = matches.occurrences_of("verbose");
+        if verbose_count == 0 || verbose_count == 2 {
+            match regex_parser(path, regex.clone(), 2) {
                 Ok(mut t) => {
                     tokens.append(&mut t);
                 },
@@ -140,120 +142,76 @@ fn main() -> std::io::Result<()> {
             }
         }
     }

-    if matches.is_present("sort") {
-        if matches.value_of("sort").unwrap() == "priority" {
-            fn token_priority_sort(t : &Token) -> String {

+    if let Some(sort) = matches.value_of("sort") {
+        match sort {
+            "priority" => tokens.sort_unstable_by_key(|t : &Token| -> String {
                 t.priority.clone().unwrap_or_else(|| "z".into())
-            }
-            tokens.sort_unstable_by_key(token_priority_sort);
-        }
-        else if matches.value_of("sort").unwrap() == "deadline" {
-            fn token_deadline_sort(t : &Token) -> NaiveDate {
-                t.date.clone().unwrap_or_else(|| NaiveDate::from_ymd(3000, 1, 1))
-            }
-            tokens.sort_unstable_by_key(token_deadline_sort);
-        }
-        else if matches.value_of("sort").unwrap() == "member" {
-            fn token_member_sort(t : &Token) -> String {
-                t.priority.clone().unwrap_or_else(|| "z".into())
-            }
-            tokens.sort_unstable_by_key(token_member_sort);
+            }),
+            "deadline" => tokens.sort_unstable_by_key(|t : &Token| -> NaiveDate {
+                t.date.clone().unwrap_or_else(|| NaiveDate::from_ymd(9999, 1, 1))
+            }),
+            "member" => tokens.sort_unstable_by_key(|t : &Token| -> String {
+                t.member.clone().unwrap_or_else(|| "zzzzzzzzzzzzz".into())
+            }),
+            _ => {}, // IGNORE
         }
     }

if matches.is_present("list") {
|
||||
let lines = match matches.value_of("list").unwrap().parse::<usize>() {
|
||||
Ok(lines) => lines,
|
||||
Err(_) => {
|
||||
|
||||
if let Some(list) = matches.value_of("list") {
|
||||
let lines = match list.parse::<usize>() {
|
||||
Ok(lines) => lines, Err(_) => {
|
||||
eprintln!("{}", "list argument should be a valid number!".red());
|
||||
panic!()
|
||||
}
|
||||
};
|
||||
|
||||
let mut new_tokens : Vec<Token> = vec![];
|
||||
for i in tokens {
|
||||
if new_tokens.len() < lines {
|
||||
new_tokens.push(i.clone());
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
tokens = new_tokens;
|
||||
};
|
||||
tokens = tokens.iter().take(lines).map(|t| t.clone()).collect::<Vec<Token>>();
|
||||
}
|
||||
|
||||
if matches.is_present("member") {
|
||||
let filters : Vec<&str> = matches.values_of("member").unwrap().collect();
|
||||
let mut new_tokens : Vec<Token> = vec![];
|
||||
for i in tokens{
|
||||
// println!("{}", i);
|
||||
for y in &filters {
|
||||
if i.member.clone().is_some() && i.member.clone().unwrap() == *y.to_string() {
|
||||
println!("pushing");
|
||||
new_tokens.push(i.clone());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
tokens = new_tokens;
|
||||
if let Some(members) = matches.values_of("member") {
|
||||
let members = members.collect::<HashSet<&str>>();
|
||||
tokens = tokens.iter()
|
||||
.filter(|t| t.member.as_ref().map(|m| members.contains(m.as_str())).unwrap_or(false))
|
||||
.map(|t| t.clone())
|
||||
.collect::<Vec<Token>>();
|
||||
}
|
||||
|
||||
if matches.is_present("filter") {
|
||||
let filters : Vec<&str> = matches.values_of("filter").unwrap().collect();
|
||||
let mut new_tokens : Vec<Token> = vec![];
|
||||
for i in tokens {
|
||||
for y in &filters {
|
||||
if i.keyword == *y {
|
||||
new_tokens.push(i.clone());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
tokens = new_tokens;
|
||||
// tokens = new.into_iter().filter(|t| t.keyword == String::from(matches.value_of("filter").unwrap())).collect();
|
||||
if let Some(filters) = matches.values_of("filter") {
|
||||
let filters = filters.collect::<HashSet<&str>>();
|
||||
tokens = tokens.iter()
|
||||
.filter(|t| filters.contains(t.keyword.as_str()))
|
||||
.map(|t| t.clone())
|
||||
.collect::<Vec<Token>>();
|
||||
}
|
||||
|
||||
if matches.is_present("exclude") {
|
||||
let excludes : Vec<&str> = matches.values_of("exclude").unwrap().collect();
|
||||
let mut new_tokens : Vec<Token> = vec![];
|
||||
for i in tokens{
|
||||
for y in 0..excludes.len() {
|
||||
if i.keyword == excludes[y] {
|
||||
break;
|
||||
}
|
||||
else if y == excludes.len() -1 {
|
||||
new_tokens.push(i.clone());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
tokens = new_tokens;
|
||||
// tokens = new.into_iter().filter(|t| t.keyword == String::from(matches.value_of("filter").unwrap())).collect();
|
||||
if let Some(excludes) = matches.values_of("exclude") {
|
||||
let excludes = excludes.collect::<HashSet<&str>>();
|
||||
tokens = tokens.iter()
|
||||
.filter(|t| !excludes.contains(t.keyword.as_str()))
|
||||
.map(|t| t.clone())
|
||||
.collect::<Vec<Token>>();
|
||||
}
|
||||
|
||||
if matches.is_present("inline") {
|
||||
for i in tokens {
|
||||
i.inline();
|
||||
}
|
||||
tokens.iter().for_each(|i| i.inline());
|
||||
} else {
|
||||
for i in tokens {
|
||||
println!("{}", i);
|
||||
}
|
||||
tokens.iter().for_each(|i| println!("{}", i));
|
||||
}
|
||||
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
// test zone
|
||||
//TODO refactor
|
||||
//todo implement 2001/11/01 #3 getters !clement
|
||||
//todo implement 2001/11/01 #3 getters !thomas
|
||||
//fix implement 18/11/2001 getters !jht5945
|
||||
//todo implement @2001/11/01 #3 getters !clement
|
||||
//todo implement @2001-11-01 #3 getters !thomas
|
||||
//fix implement @18/11/2001 getters !jht5945
|
||||
//4
|
||||
//10/10/10
|
||||
fn test(){
|
||||
todo!("implements getters");
|
||||
}
|
||||
|
||||
//todo implement 2020/08/14 #5 getters !clement
|
||||
//todo implement @2020/08/14 #5 getters !clement
|
||||
@@ -33,6 +33,6 @@ impl Parser {
     }

     pub fn parse(&self, path: &str) {
-        string_parser_with_file(path, self.get_keyword().as_str(), self.get_end_filter(), self.get_callback()).expect("failed to open file");
+        string_parser_with_file(path, &self.get_keyword(), self.get_end_filter(), self.get_callback()).expect("failed to open file");
     }
 }
@@ -1,17 +1,17 @@
 use std::fs::File;
-use std::io::{ self, BufRead, BufReader };
+use std::io::{ self, Lines, BufRead, BufReader };
 use std::path::Path;
 use regex::{RegexSet};
 use crate::token::*;

 // The output is wrapped in a Result to allow matching on errors
 // Returns an Iterator to the Reader of the lines of the file.
-fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>> where P: AsRef<Path>, {
+fn read_lines<P>(filename: P) -> io::Result<Lines<BufReader<File>>> where P: AsRef<Path>, {
     let file = File::open(filename)?;
     Ok(BufReader::new(file).lines())
 }

-pub fn regex_parser(path : &str, regex : Vec<String>, verbosity : i8) -> Result<Vec<Token>, io::Error> {
+pub fn regex_parser(path: &str, regex: Vec<String>, verbosity: i8) -> Result<Vec<Token>, io::Error> {
     let set = RegexSet::new(regex).unwrap();
     let mut tokens = vec![];
     for (line_cpt, line) in (read_lines(path)?).enumerate() {
34
src/token.rs
@@ -17,15 +17,12 @@ pub struct Token{

 impl Token {
     pub fn new(file: String, line: usize, s: String, verbosity: i8) -> Token {
-        println!(">>>>>>>>>>>{}", s);
+        // println!(">>>>>>>>>>>{}", s);
         let fields: Vec<&str>= s.split_whitespace().collect();
-        let number_regex = Regex::new("#[0-9]").unwrap();
-        let date_regex = Regex::new("(\\d+/\\d+/\\d+)").unwrap();
-        let member_regex = Regex::new("![\\w]+").unwrap();
-        if date_regex.is_match("5") {
-            panic!("regex");
-        }
-        println!("///////////{:?}", fields);
+        let number_regex = Regex::new(r"#[0-9]").unwrap();
+        let date_regex = Regex::new(r"@(\d{4})[/\-](\d{2})[/\-](\d{2})").unwrap();
+        let member_regex = Regex::new(r"![\w]+").unwrap();
+        // println!("///////////{:?}", fields);

         let mut t = Token {
             file,
@@ -44,11 +41,18 @@ impl Token {
         } else if number_regex.is_match(field) {
             t.priority = Some(field.chars().skip(1).collect::<String>());
         } else if date_regex.is_match(field) {
-            let date: Vec<&str> = field.split('/').collect();
-            let year = date[0].parse::<i32>().unwrap();
-            let month = date[1].parse::<u32>().unwrap();
-            let day = date[2].parse::<u32>().unwrap();
-            t.date = NaiveDate::from_ymd_opt(year, month, day);
+            if let Some(capture) = date_regex.captures(field) {
+                let year = capture.get(1).unwrap().as_str().parse::<i32>().unwrap();
+                let month = capture.get(2).unwrap().as_str().parse::<u32>().unwrap();
+                let day = capture.get(3).unwrap().as_str().parse::<u32>().unwrap();
+
+                t.date = NaiveDate::from_ymd_opt(year, month, day);
+            }
+            // let date: Vec<&str> = field.split('/').collect();
+            // let year = date[0].parse::<i32>().unwrap();
+            // let month = date[1].parse::<u32>().unwrap();
+            // let day = date[2].parse::<u32>().unwrap();
+            // t.date = NaiveDate::from_ymd_opt(year, month, day);
         } else if member_regex.is_match(field) {
             t.member = Some(field.chars().skip(1).collect::<String>());
         } else {
@@ -64,7 +68,7 @@ impl Token {

     pub fn inline(&self) {
         let mut inline_msg = vec![
-            format!("{} line: {} {:<6} ", self.file, self.line.to_string().green(), self.keyword.clone().green())
+            format!("{} line: {:>4} {:<6} ", self.file, self.line.to_string().green(), self.keyword.clone().green())
         ];
         if let Some(member) = &self.member {
             inline_msg.push(format!("member: {}", member.red()));
@@ -82,8 +86,6 @@ impl Token {
     }
 }

-// To use the `{}` marker, the trait `fmt::Display` must be implemented
-// manually for the type.
 impl fmt::Display for Token {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let mut multiline_msg = vec![