shell: rework parsing (again)

This commit is contained in:
Mark Poliakov 2025-01-12 20:09:56 +02:00
parent 8454fec183
commit 6859e70651
3 changed files with 604 additions and 138 deletions

View File

@ -1,13 +1,35 @@
use std::{
collections::HashMap,
env,
fmt::{self, Write as FmtWrite},
fs::File,
io::Write as IoWrite,
path::{Path, PathBuf},
process::ExitCode,
};
use crate::{Error, Outcome};
pub type BuiltinCommand = fn(&[String], &mut HashMap<String, String>) -> Result<Outcome, Error>;
/// Writable stream handles passed to a builtin command invocation.
pub struct Io<'a> {
    /// Sink for the builtin's normal output.
    pub stdout: Output<'a>,
    /// Sink for diagnostics and usage errors.
    pub stderr: Output<'a>,
}
/// A borrowed output sink for a builtin: either a redirect-target file
/// or the process's default stream writer.
pub enum Output<'a> {
    File(&'a mut File),
    Default(&'a mut dyn IoWrite),
}
/// Adapts both sink variants to `fmt::Write` so builtins can use
/// `write!`/`writeln!` uniformly. Any underlying I/O error is collapsed
/// into `fmt::Error`; the original error detail is discarded.
impl FmtWrite for Output<'_> {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        match self {
            Self::File(file) => file.write_all(s.as_bytes()).map_err(|_| fmt::Error),
            Self::Default(file) => file.write_all(s.as_bytes()).map_err(|_| fmt::Error),
        }
    }
}
pub type BuiltinCommand = fn(&[String], &mut HashMap<String, String>, Io) -> Result<Outcome, Error>;
static BUILTINS: &[(&str, BuiltinCommand)] = &[
("echo", b_echo),
@ -26,7 +48,11 @@ pub fn get_builtin(name: &str) -> Option<BuiltinCommand> {
.find_map(|&(key, value)| if key == name { Some(value) } else { None })
}
fn b_which(args: &[String], _envs: &mut HashMap<String, String>) -> Result<Outcome, Error> {
fn b_which(
args: &[String],
_envs: &mut HashMap<String, String>,
mut io: Io,
) -> Result<Outcome, Error> {
fn find_in_path(path: &str, program: &str) -> Option<String> {
for entry in path.split(':') {
let full_path = PathBuf::from(entry).join(program);
@ -39,7 +65,7 @@ fn b_which(args: &[String], _envs: &mut HashMap<String, String>) -> Result<Outco
}
if args.len() != 1 {
eprintln!("which usage: which PROGRAM");
writeln!(io.stderr, "which usage: which PROGRAM").ok();
return Ok(Outcome::Exited(1));
}
@ -59,45 +85,45 @@ fn b_which(args: &[String], _envs: &mut HashMap<String, String>) -> Result<Outco
match resolution {
Some(path) => {
println!("{}: {}", program, path);
writeln!(io.stdout, "{}: {}", program, path).ok();
Ok(Outcome::Exited(0))
}
_ => Ok(Outcome::Exited(1)),
}
}
fn b_set(args: &[String], envs: &mut HashMap<String, String>) -> Result<Outcome, Error> {
fn b_set(args: &[String], envs: &mut HashMap<String, String>, mut io: Io) -> Result<Outcome, Error> {
if args.len() != 2 {
eprintln!("set usage: set VAR VALUE");
writeln!(io.stderr, "set usage: set VAR VALUE").ok();
return Ok(Outcome::Exited(1));
}
envs.insert(args[0].clone(), args[1].clone());
Ok(Outcome::ok())
}
fn b_echo(args: &[String], _envs: &mut HashMap<String, String>) -> Result<Outcome, Error> {
fn b_echo(args: &[String], _envs: &mut HashMap<String, String>, mut io: Io) -> Result<Outcome, Error> {
for (i, arg) in args.iter().enumerate() {
if i != 0 {
print!(" ");
write!(io.stdout, " ").ok();
}
print!("{}", arg);
write!(io.stdout, "{}", arg).ok();
}
println!();
writeln!(io.stdout).ok();
Ok(Outcome::ok())
}
fn b_exit(args: &[String], _envs: &mut HashMap<String, String>) -> Result<Outcome, Error> {
fn b_exit(args: &[String], _envs: &mut HashMap<String, String>, mut io: Io) -> Result<Outcome, Error> {
match args.len() {
0 => Ok(Outcome::ExitShell(ExitCode::SUCCESS)),
_ => {
eprintln!("Usage: exit [CODE]");
writeln!(io.stderr, "Usage: exit [CODE]").ok();
Ok(Outcome::Exited(1))
}
}
}
#[cfg(target_os = "yggdrasil")]
fn b_cd(args: &[String], _envs: &mut HashMap<String, String>) -> Result<Outcome, Error> {
fn b_cd(args: &[String], _envs: &mut HashMap<String, String>, _io: Io) -> Result<Outcome, Error> {
let path = if args.is_empty() {
"/"
} else {
@ -108,14 +134,14 @@ fn b_cd(args: &[String], _envs: &mut HashMap<String, String>) -> Result<Outcome,
}
#[cfg(target_os = "yggdrasil")]
fn b_pwd(args: &[String], _envs: &mut HashMap<String, String>) -> Result<Outcome, Error> {
fn b_pwd(args: &[String], _envs: &mut HashMap<String, String>, mut io: Io) -> Result<Outcome, Error> {
if !args.is_empty() {
eprintln!("Usage: pwd");
writeln!(io.stderr, "Usage: pwd").ok();
return Ok(Outcome::Exited(1));
}
let pwd = yggdrasil_rt::io::current_directory_string().map_err(Error::RtError)?;
println!("{pwd}");
writeln!(io.stdout, "{pwd}").ok();
Ok(Outcome::Exited(0))
}

View File

@ -4,14 +4,14 @@ use std::{
collections::HashMap,
env,
fs::File,
io::{self, stdin, stdout, BufRead, BufReader, Stdin, Write},
io::{self, stderr, stdin, stdout, BufRead, BufReader, Stdin, Write},
os::fd::{FromRawFd, IntoRawFd},
path::Path,
process::{Child, ExitCode, Stdio},
process::{self, Child, ExitCode, Stdio},
};
use clap::Parser;
use parser::Command;
use parser::{Command, CommandOutput};
mod builtins;
mod parser;
@ -112,7 +112,7 @@ pub fn exec(
//
// Pipe count: command count - 1
if command.commands.is_empty() {
if command.pipeline.is_empty() {
return Ok(Outcome::ok());
}
@ -121,18 +121,30 @@ pub fn exec(
} else {
None
};
let stdout = if let Some(path) = command.stdout.as_ref() {
Some(File::create(path)?)
} else {
None
let mut stdout = match command.stdout.as_ref() {
Some(CommandOutput::Path(path)) => Some(File::create(path)?),
Some(CommandOutput::Fd(_fd)) => None,
None => None,
};
if command.commands.len() == 1 {
let command = &command.commands[0];
let (cmd, args) = command.words.split_first().unwrap();
if command.pipeline.len() == 1 {
let command = &command.pipeline[0];
let (cmd, args) = command.0.split_first().unwrap();
if let Some(builtin) = builtins::get_builtin(cmd) {
return builtin(args, env);
let mut default_stdout = io::stdout();
let mut default_stderr = io::stderr();
let stdout = match stdout.as_mut() {
Some(file) => builtins::Output::File(file),
None => builtins::Output::Default(&mut default_stdout)
};
let stderr = builtins::Output::Default(&mut default_stderr);
let io = builtins::Io {
stdout,
stderr,
};
return builtin(args, env, io);
}
}
@ -144,7 +156,7 @@ pub fn exec(
} else {
inputs.push(Stdio::inherit());
}
for _ in 1..command.commands.len() {
for _ in 1..command.pipeline.len() {
let pipe = sys::create_pipe()?;
let read_fd = pipe.read.into_raw_fd();
@ -163,13 +175,13 @@ pub fn exec(
}
assert_eq!(inputs.len(), outputs.len());
assert_eq!(inputs.len(), command.commands.len());
assert_eq!(inputs.len(), command.pipeline.len());
let mut elements = vec![];
let ios = inputs.drain(..).zip(outputs.drain(..));
for (command, (input, output)) in command.commands.iter().zip(ios) {
let (cmd, args) = command.words.split_first().unwrap();
for (command, (input, output)) in command.pipeline.iter().zip(ios) {
let (cmd, args) = command.0.split_first().unwrap();
let element = PipelineElement {
command: cmd,
@ -210,10 +222,9 @@ fn run(mut input: Input, vars: &mut HashMap<String, String>) -> Result<ExitCode,
continue;
};
let line = line.trim();
let line = match line.split_once('#') {
Some((line, _)) => line.trim(),
None => line,
};
if line.starts_with('#') || line.is_empty() {
continue;
}
let cmd = match parser::parse_line(vars, line) {
Ok(cmd) => cmd,
Err(error) if input.is_interactive() => {
@ -258,6 +269,47 @@ fn run_stdin(env: &mut HashMap<String, String>) -> Result<ExitCode, Error> {
run(Input::Interactive(stdin()), env)
}
// Sets up builtin variables
/// Populates `vars` with the shell's special variables before anything
/// runs:
/// - `SHELL`: this binary's argv[0] (removed from the map if unavailable)
/// - `#`: number of positional arguments
/// - `0`: the script path, falling back to argv[0]
/// - `1`..`N`: the positional arguments themselves
/// - `*` and `@`: all arguments joined with single spaces
///   (`@` should eventually be an array type — see TODO below)
/// - `$`: this process's PID
/// Finally mirrors `PATH` into the real process environment so child
/// process lookup sees it.
fn setup_env(vars: &mut HashMap<String, String>, script: &Option<String>, args: &[String]) {
    let pid = process::id();
    let bin_name = env::args().next();
    if let Some(bin_name) = bin_name.as_ref() {
        vars.insert("SHELL".into(), bin_name.clone());
    } else {
        // No argv[0]: drop any stale value inherited from the parent env.
        vars.remove("SHELL");
    }
    vars.insert("#".into(), format!("{}", args.len()));
    if let Some(script) = script {
        vars.insert("0".into(), script.clone());
    } else if let Some(bin_name) = bin_name {
        vars.insert("0".into(), bin_name);
    } else {
        vars.remove("0");
    }
    // Build "$*" while also inserting the numbered positionals $1..$N.
    let mut args_string = String::new();
    for (i, arg) in args.iter().enumerate() {
        if i != 0 {
            args_string.push(' ');
        }
        args_string.push_str(arg);
        vars.insert(format!("{}", i + 1), arg.clone());
    }
    vars.insert("*".into(), args_string.clone());
    // TODO array types
    vars.insert("@".into(), args_string);
    vars.insert("$".into(), format!("{pid}"));
    // Insert PATH to current process env
    if let Some(path) = vars.get("PATH") {
        env::set_var("PATH", path);
    }
}
fn main() -> ExitCode {
let args = Args::parse();
let mut vars = HashMap::new();
@ -266,15 +318,12 @@ fn main() -> ExitCode {
vars.insert(key, value);
}
setup_env(&mut vars, &args.script, &args.args);
if args.login {
run_file("/etc/profile", &mut vars).ok();
}
// Insert PATH to current process env
if let Some(path) = vars.get("PATH") {
env::set_var("PATH", path);
}
let result = if let Some(script) = &args.script {
run_file(script, &mut vars)
} else {

View File

@ -1,143 +1,534 @@
use std::{collections::HashMap, mem, path::PathBuf};
use std::{collections::HashMap, path::PathBuf};
use nom::{
branch::alt,
bytes::complete::{is_a, tag},
character::complete::{alphanumeric1, space0, u8 as num_u8},
combinator::{map, recognize, value},
multi::many1,
sequence::{preceded, terminated},
IResult, Parser,
bytes::complete::{is_a, is_not, tag, take_while1},
character::complete::{char, space0, u8 as num_u8},
combinator::{map, recognize, verify},
multi::{fold_many0, many0, many1, separated_list1},
sequence::{delimited, pair, preceded, separated_pair, terminated},
IResult,
};
#[derive(Debug, PartialEq)]
pub struct PipelineElement {
pub words: Vec<String>,
}
/// Parser error type; currently only wraps a nom lexing failure, with
/// the offending input owned as a `String` so the error can outlive the
/// borrowed line.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error("{0}")]
    Lex(#[from] nom::Err<nom::error::Error<String>>),
}
#[derive(Debug, PartialEq)]
pub struct Command {
pub commands: Vec<PipelineElement>,
pub pipeline: Vec<CommandPipelineElement>,
pub stdin: Option<PathBuf>,
pub stdout: Option<PathBuf>,
pub stderr: Option<PathBuf>,
pub stdout: Option<CommandOutput>,
pub stderr: Option<CommandOutput>
}
#[derive(Debug, Clone)]
enum Token<'a> {
Word(&'a str),
Pipe,
Output(u8),
Input,
/// One fully-expanded pipeline stage: the command word followed by its
/// arguments.
pub struct CommandPipelineElement(pub Vec<String>);
/// Where a command's output stream goes after expansion: another file
/// descriptor (e.g. `>&2`) or a file path.
pub enum CommandOutput {
    Fd(u8),
    Path(PathBuf),
}
fn lex_word(i: &str) -> IResult<&str, Token> {
/// Variable expansion: consumes a borrowed parse-tree node and produces
/// its owned runtime form, substituting values from the shell variable
/// map.
trait Expand {
    type Output;
    fn expand(self, env: &HashMap<String, String>) -> Self::Output;
}
/// A piece of a double-quoted string: literal text or a `$var` reference.
#[derive(Clone, Debug, PartialEq)]
enum QuoteFragment<'a> {
    Text(&'a str),
    Var(&'a str),
}
/// A piece of an unquoted word: literal text, a `$var` reference, or a
/// whole double-quoted segment.
#[derive(Clone, Debug, PartialEq)]
enum WordToken<'a> {
    Text(&'a str),
    Var(&'a str),
    Quote(Vec<QuoteFragment<'a>>),
}
/// One shell word (argument): a concatenation of adjacent tokens,
/// borrowing from the input line.
#[derive(Clone, Debug, PartialEq)]
struct Word<'a>(Vec<WordToken<'a>>);
/// One pipeline stage before expansion: its words in order.
#[derive(Clone, Debug, PartialEq)]
struct ParsedPipelineElement<'a>(Vec<Word<'a>>);
/// A `|`-separated sequence of stages before expansion.
#[derive(Clone, Debug, PartialEq)]
struct ParsedPipeline<'a>(Vec<ParsedPipelineElement<'a>>);
/// A whole parsed (but not yet expanded) command line.
#[derive(Clone, Debug, PartialEq)]
struct ParsedCommand<'a> {
    pipeline: ParsedPipeline<'a>,
    redirects: Redirects<'a>,
}
/// Redirects collected from a command line, one optional slot per stream.
#[derive(Clone, Debug, PartialEq)]
struct Redirects<'a> {
    stdin: Option<Word<'a>>,
    stdout: Option<OutputTarget<'a>>,
    stderr: Option<OutputTarget<'a>>,
}
/// Target of an output redirect: a path word or a file descriptor (`&N`).
#[derive(Clone, Debug, PartialEq)]
enum OutputTarget<'a> {
    Path(Word<'a>),
    Fd(u8),
}
/// An output redirect: which source fd is redirected, and where to.
#[derive(Clone, Debug, PartialEq)]
struct OutputRedirect<'a> {
    fd: u8,
    target: OutputTarget<'a>,
}
/// Any redirect: stdin from a path, or an output redirect.
#[derive(Clone, Debug, PartialEq)]
enum Redirect<'a> {
    Input(Word<'a>),
    Output(OutputRedirect<'a>),
}
/// True for characters allowed in a variable identifier: alphanumerics
/// and `_`.
fn is_ident_tail(c: char) -> bool {
    c.is_alphanumeric() || c == '_'
}
/// Shell punctuation characters that terminate a bare word.
fn is_punctuation(c: char) -> bool {
    [
        ';', '|', '{', '}', '(', ')', ',', '<', '>', '$', '"', '\'', '\\',
    ]
    .contains(&c)
}
/// True for characters allowed in an unquoted word (filename): anything
/// that is neither whitespace nor shell punctuation.
fn is_filename(c: char) -> bool {
    !c.is_whitespace() && !is_punctuation(c)
}
/// Lexes a variable name: either a run of the special-variable
/// characters (`#*@?!$-`) or an identifier of alphanumerics/underscores.
fn lex_name(i: &str) -> IResult<&str, &str> {
    alt((recognize(is_a("#*@?!$-")), take_while1(is_ident_tail)))(i)
}
/// Lexes a braced variable reference, yielding just the name.
fn lex_var_braced(i: &str) -> IResult<&str, &str> {
    // ${ABCD}
    delimited(tag("${"), lex_name, tag("}"))(i)
}
/// Lexes an unbraced variable reference (`$` + name), yielding the name.
fn lex_var_unbraced(i: &str) -> IResult<&str, &str> {
    // $ABCD $# $* $@ $? $! $$ $-
    preceded(tag("$"), lex_name)(i)
}
/// Lexes either form of variable reference (`${name}` or `$name`) and
/// returns the bare name.
fn lex_var(i: &str) -> IResult<&str, &str> {
    alt((lex_var_braced, lex_var_unbraced))(i)
}
/// Lexes a maximal non-empty run of word (filename) characters.
fn lex_filename(i: &str) -> IResult<&str, &str> {
    take_while1(is_filename)(i)
}
fn lex_quoted_literal(i: &str) -> IResult<&str, QuoteFragment> {
let not_quote_slash = is_not("$\"\\");
map(
recognize(many1(alt((alphanumeric1, is_a("_-+=%!@/.[]:"))))),
Token::Word,
)
.parse(i)
verify(not_quote_slash, |s: &str| !s.is_empty()),
QuoteFragment::Text,
)(i)
}
fn lex_output(i: &str) -> IResult<&str, Token> {
/// Lexes a `$var` reference inside a double-quoted string.
fn lex_quoted_var(i: &str) -> IResult<&str, QuoteFragment> {
    map(lex_var, QuoteFragment::Var)(i)
}
/// Lexes a double-quoted string into a sequence of literal-text and
/// variable fragments; actual substitution happens later via `Expand`.
fn lex_quoted(i: &str) -> IResult<&str, WordToken> {
    // "abcdef $abcdef"
    map(
        delimited(
            char('"'),
            many0(alt((lex_quoted_var, lex_quoted_literal))),
            char('"'),
        ),
        WordToken::Quote,
    )(i)
}
fn lex_word_token(i: &str) -> IResult<&str, WordToken> {
alt((
map(terminated(num_u8, tag(">")), Token::Output),
value(Token::Output(1), tag(">")),
))
.parse(i)
lex_quoted,
map(lex_var, WordToken::Var),
map(lex_filename, WordToken::Text),
))(i)
}
fn lex_input(i: &str) -> IResult<&str, Token> {
value(Token::Input, tag("<")).parse(i)
/// Lexes a word: one or more adjacent tokens (text, `$var`, quoted
/// segments) that will concatenate into a single argument.
fn lex_word(i: &str) -> IResult<&str, Word> {
    map(many1(lex_word_token), Word)(i)
}
fn lex_pipe(i: &str) -> IResult<&str, Token> {
value(Token::Pipe, tag("|")).parse(i)
/// Lexes one pipeline stage: a space-separated sequence of one or more
/// words, consuming surrounding spaces; stops at punctuation such as `|`.
fn lex_pipeline_element(i: &str) -> IResult<&str, ParsedPipelineElement> {
    map(
        preceded(space0, many1(terminated(lex_word, space0))),
        ParsedPipelineElement,
    )(i)
}
fn lex_token(i: &str) -> IResult<&str, Token> {
preceded(space0, alt((lex_output, lex_word, lex_input, lex_pipe))).parse(i)
/// Lexes a full pipeline: one or more stages separated by `|`.
fn lex_pipeline(i: &str) -> IResult<&str, ParsedPipeline> {
    map(
        separated_list1(preceded(space0, char('|')), lex_pipeline_element),
        ParsedPipeline,
    )(i)
}
fn parse_command(_env: &HashMap<String, String>, input: &[Token]) -> Result<Command, Error> {
let mut elements = vec![];
let mut stdin = None;
let mut stdout = None;
let mut stderr = None;
/// Lexes the target of an output redirect: a file-descriptor reference
/// (`&2`) or a path word (which may contain variables). The fd form is
/// tried first so `&` is not misread as part of a path.
fn lex_output_target(i: &str) -> IResult<&str, OutputTarget> {
    // abcdef $a
    // &2
    preceded(space0, alt((
        map(preceded(char('&'), num_u8), OutputTarget::Fd),
        map(lex_word, OutputTarget::Path),
    )))(i)
}
let mut current = vec![];
/// Lexes an output redirect. The source fd may be given explicitly
/// (`2>target`) or defaults to 1 (stdout) for a bare `>target`.
fn lex_output_redirect(i: &str) -> IResult<&str, OutputRedirect> {
    // >$a
    // 2>&1
    alt((
        map(
            separated_pair(num_u8, char('>'), lex_output_target),
            |(fd, target)| OutputRedirect { fd, target },
        ),
        map(preceded(char('>'), lex_output_target), |target| {
            OutputRedirect { fd: 1, target }
        }),
    ))(i)
}
let mut it = input.iter();
while let Some(token) = it.next() {
match token {
&Token::Word(word) => {
current.push(word.into());
}
Token::Pipe => {
if current.is_empty() {
todo!();
}
/// Lexes a stdin redirect: `<` followed immediately by the source path
/// word.
fn lex_input_redirect(i: &str) -> IResult<&str, Word> {
    preceded(char('<'), lex_word)(i)
}
elements.push(PipelineElement {
words: mem::take(&mut current)
});
/// Lexes a single redirect of either direction (input or output).
fn lex_redirect(i: &str) -> IResult<&str, Redirect> {
    alt((
        map(lex_input_redirect, Redirect::Input),
        map(lex_output_redirect, Redirect::Output),
    ))(i)
}
fn lex_redirects(i: &str) -> IResult<&str, Redirects> {
fold_many0(
preceded(space0, lex_redirect),
|| Redirects {
stdin: None,
stdout: None,
stderr: None,
},
|mut acc, redirect| match redirect {
Redirect::Input(path) => {
acc.stdin = Some(path);
acc
}
Token::Output(1) => {
// TODO ok_or
let path = it.next().unwrap();
let Token::Word(word) = path else {
todo!();
};
stdout = Some(PathBuf::from(word));
Redirect::Output(redirect) if redirect.fd == 1 => {
acc.stdout = Some(redirect.target);
acc
}
Token::Output(2) => {
// TODO ok_or
let path = it.next().unwrap();
let Token::Word(word) = path else {
todo!();
};
stderr = Some(PathBuf::from(word));
}
Token::Input => {
// TODO ok_or
let path = it.next().unwrap();
let Token::Word(word) = path else {
todo!();
};
stdin = Some(PathBuf::from(word));
}
Token::Output(_) => {
todo!();
Redirect::Output(redirect) if redirect.fd == 2 => {
acc.stderr = Some(redirect.target);
acc
}
_ => acc,
},
)(i)
}
/// Lexes a complete command line: a pipeline followed by any number of
/// redirects, with trailing spaces consumed.
fn lex_command(i: &str) -> IResult<&str, ParsedCommand> {
    map(
        terminated(pair(lex_pipeline, lex_redirects), space0),
        |(pipeline, redirects)| ParsedCommand {
            pipeline,
            redirects,
        },
    )(i)
}
/// Expands variables throughout a parsed command, producing the final
/// `Command` handed to execution.
impl Expand for ParsedCommand<'_> {
    type Output = Command;
    fn expand(self, env: &HashMap<String, String>) -> Self::Output {
        let pipeline = self.pipeline.expand(env);
        let Redirects { stdin, stdout, stderr } = self.redirects;
        // stdin is always a path word; stdout/stderr may also be fd targets.
        let stdin = stdin.map(|e| e.expand(env)).map(PathBuf::from);
        let stdout = stdout.map(|e| e.expand(env));
        let stderr = stderr.map(|e| e.expand(env));
        Command {
            pipeline,
            stdin,
            stdout,
            stderr
        }
    }
}
if !current.is_empty() {
elements.push(PipelineElement { words: current });
/// Expands an output redirect target: fd targets pass through unchanged,
/// path targets get variable expansion before conversion to a path.
impl Expand for OutputTarget<'_> {
    type Output = CommandOutput;
    fn expand(self, env: &HashMap<String, String>) -> Self::Output {
        match self {
            Self::Fd(fd) => CommandOutput::Fd(fd),
            Self::Path(path) => CommandOutput::Path(path.expand(env).into())
        }
    }
}
Ok(Command {
commands: elements,
stdin,
stdout,
stderr,
})
/// Expands a word into a single string by concatenating its tokens in
/// order. Unset variables expand to the empty string rather than erroring.
impl Expand for Word<'_> {
    type Output = String;
    fn expand(self, env: &HashMap<String, String>) -> Self::Output {
        let mut word = String::new();
        for token in &self.0 {
            match token {
                &WordToken::Var(var) => {
                    let val = env.get(var).map_or("", |s| s.as_str());
                    word.push_str(val);
                }
                &WordToken::Text(text) => {
                    word.push_str(text);
                }
                WordToken::Quote(frags) => {
                    // Quoting only affected lexing; fragments expand the
                    // same way as unquoted tokens here.
                    for fragment in frags {
                        match *fragment {
                            QuoteFragment::Var(var) => {
                                let val = env.get(var).map_or("", |s| s.as_str());
                                word.push_str(val);
                            }
                            QuoteFragment::Text(text) => {
                                word.push_str(text);
                            }
                        }
                    }
                }
            }
        }
        word
    }
}
/// Expands every stage of the pipeline in order.
impl Expand for ParsedPipeline<'_> {
    type Output = Vec<CommandPipelineElement>;
    fn expand(self, env: &HashMap<String, String>) -> Self::Output {
        self.0.into_iter().map(|e| e.expand(env)).collect()
    }
}
/// Expands every word of a single pipeline stage.
impl Expand for ParsedPipelineElement<'_> {
    type Output = CommandPipelineElement;
    fn expand(self, env: &HashMap<String, String>) -> Self::Output {
        CommandPipelineElement(self.0.into_iter().map(|e| e.expand(env)).collect::<Vec<_>>())
    }
}
pub fn parse_line(env: &HashMap<String, String>, input: &str) -> Result<Command, Error> {
let mut input = input;
let mut tokens = vec![];
while !input.is_empty() {
let (tail, token) = lex_token(input).map_err(|error| error.map_input(String::from))?;
tokens.push(token);
input = tail;
let (rest, command) = lex_command(input).map_err(|e| e.map_input(ToOwned::to_owned))?;
if !rest.is_empty() {
todo!("Trailing characters: {rest:?}")
}
let command = command.expand(env);
Ok(command)
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use crate::parser::{
lex_filename, lex_quoted, lex_word, lex_word_token, OutputTarget, ParsedPipeline,
ParsedPipelineElement, Redirects,
};
use super::{
lex_pipeline, lex_pipeline_element, lex_redirects, lex_var, QuoteFragment,
Word, WordToken, Expand
};
#[test]
fn test_lex_var() {
let tests = [
("$A1_", "A1_"),
("$1", "1"),
("${A1_}", "A1_"),
("${1}", "1"),
("$#", "#"),
];
for (input, expect) in tests {
let (rest, output) = lex_var(input).unwrap();
assert!(rest.is_empty());
assert_eq!(output, expect);
}
}
parse_command(env, &tokens)
#[test]
fn test_lex_filename() {
let tests = [
("abcdef", "abcdef"),
("abcdef1", "abcdef1"),
("1", "1"),
("_", "_"),
("[", "["),
("/a/b/c", "/a/b/c"),
];
for (input, expect) in tests {
let (rest, output) = lex_filename(input).unwrap();
assert!(rest.is_empty());
assert_eq!(output, expect);
}
}
#[test]
fn test_lex_word_token() {
let tests = [("a", WordToken::Text("a")), ("$b", WordToken::Var("b"))];
for (input, expect) in tests {
let (rest, output) = lex_word_token(input).unwrap();
assert!(rest.is_empty());
assert_eq!(output, expect);
}
}
#[test]
fn test_lex_quoted() {
let tests = [
(
"\"abcdef ghijkl\"",
WordToken::Quote(vec![QuoteFragment::Text("abcdef ghijkl")]),
),
(
"\"abcdef$ghijkl 123\"",
WordToken::Quote(vec![
QuoteFragment::Text("abcdef"),
QuoteFragment::Var("ghijkl"),
QuoteFragment::Text(" 123"),
]),
),
];
for (input, expect) in tests {
let (rest, output) = lex_quoted(input).unwrap();
assert!(rest.is_empty());
assert_eq!(output, expect);
}
}
#[test]
fn test_lex_word() {
let tests = [
(
"a$a$b",
Word(vec![
WordToken::Text("a"),
WordToken::Var("a"),
WordToken::Var("b"),
]),
),
(
"a$1\"b$c d\"e${f}",
Word(vec![
WordToken::Text("a"),
WordToken::Var("1"),
WordToken::Quote(vec![
QuoteFragment::Text("b"),
QuoteFragment::Var("c"),
QuoteFragment::Text(" d"),
]),
WordToken::Text("e"),
WordToken::Var("f"),
]),
),
];
for (input, expect) in tests {
let (rest, output) = lex_word(input).unwrap();
assert_eq!(rest, "");
assert_eq!(output, expect);
}
}
#[test]
fn test_lex_pipeline_element() {
let input = "a 1 $c d\"e $f g\" | ...";
let (rest, output) = lex_pipeline_element(input).unwrap();
assert_eq!(rest, "| ...");
assert_eq!(
output,
ParsedPipelineElement(vec![
Word(vec![WordToken::Text("a")]),
Word(vec![WordToken::Text("1")]),
Word(vec![WordToken::Var("c")]),
Word(vec![
WordToken::Text("d"),
WordToken::Quote(vec![
QuoteFragment::Text("e "),
QuoteFragment::Var("f"),
QuoteFragment::Text(" g")
])
]),
])
);
}
#[test]
fn test_lex_pipeline() {
let input = "a b $c | d $e f g | h 1";
let (rest, output) = lex_pipeline(input).unwrap();
assert_eq!(rest, "");
assert_eq!(
output,
ParsedPipeline(vec![
ParsedPipelineElement(vec![
Word(vec![WordToken::Text("a")]),
Word(vec![WordToken::Text("b")]),
Word(vec![WordToken::Var("c")]),
]),
ParsedPipelineElement(vec![
Word(vec![WordToken::Text("d")]),
Word(vec![WordToken::Var("e")]),
Word(vec![WordToken::Text("f")]),
Word(vec![WordToken::Text("g")]),
]),
ParsedPipelineElement(vec![
Word(vec![WordToken::Text("h")]),
Word(vec![WordToken::Text("1")]),
]),
])
);
}
#[test]
fn test_lex_redirects() {
let input = "2>$c >&2 <\"$d\"";
let (rest, output) = lex_redirects(input).unwrap();
assert_eq!(rest, "");
assert_eq!(
output,
Redirects {
stdin: Some(Word(vec![WordToken::Quote(vec![QuoteFragment::Var("d")])])),
stdout: Some(OutputTarget::Fd(2)),
stderr: Some(OutputTarget::Path(Word(vec![WordToken::Var("c")]))),
}
);
}
#[test]
fn test_expand_word() {
let word = Word(vec![
WordToken::Text("a"),
WordToken::Var("b"),
WordToken::Quote(vec![
QuoteFragment::Text(" my_text "),
QuoteFragment::Var("c"),
]),
]);
let env = HashMap::from_iter([("b".to_owned(), "my_var".to_owned())]);
let result = word.expand(&env);
assert_eq!(result, "amy_var my_text ");
}
}