//! Compilation process, from AST to Sema to Sequences of Insts.

use std::path::Path;
use std::sync::Arc;

use crate::ast::Def;
use crate::error::Errors;
use crate::files::Files;
use crate::{ast, codegen, overlap, recursion, sema};

11/// Compile the given AST definitions into Rust source code.
12pub fn compile(
13    files: Arc<Files>,
14    defs: &[ast::Def],
15    options: &codegen::CodegenOptions,
16) -> Result<String, Errors> {
17    let mut type_env = match sema::TypeEnv::from_ast(defs) {
18        Ok(type_env) => type_env,
19        Err(errs) => return Err(Errors::new(errs, files)),
20    };
21    let term_env = match sema::TermEnv::from_ast(&mut type_env, defs, true) {
22        Ok(term_env) => term_env,
23        Err(errs) => return Err(Errors::new(errs, files)),
24    };
25    let terms = match overlap::check(&term_env) {
26        Ok(terms) => terms,
27        Err(errs) => return Err(Errors::new(errs, files)),
28    };
29    recursion::check(&terms, &term_env).map_err(|errs| Errors::new(errs, files.clone()))?;
30
31    Ok(codegen::codegen(
32        files, &type_env, &term_env, &terms, options,
33    ))
34}
36/// Compile the given files into Rust source code.
37pub fn from_files<P: AsRef<Path>>(
38    inputs: impl IntoIterator<Item = P>,
39    options: &codegen::CodegenOptions,
40) -> Result<String, Errors> {
41    let files = match Files::from_paths(inputs, &options.prefixes) {
42        Ok(files) => files,
43        Err((path, err)) => {
44            return Err(Errors::from_io(
45                err,
46                format!("cannot read file {}", path.display()),
47            ));
48        }
49    };
50
51    let files = Arc::new(files);
52
53    let mut defs = Vec::new();
54    for (file, src) in files.file_texts.iter().enumerate() {
55        let lexer = match crate::lexer::Lexer::new(file, src) {
56            Ok(lexer) => lexer,
57            Err(err) => return Err(Errors::new(vec![err], files)),
58        };
59
60        match crate::parser::parse(lexer) {
61            Ok(mut ds) => defs.append(&mut ds),
62            Err(err) => return Err(Errors::new(vec![err], files)),
63        }
64    }
65
66    compile(files, &defs, options)
67}
69/// Construct the ISLE type and term environments for further analysis
70/// (i.e., verification), without going all the way through codegen.
71pub fn create_envs(
72    inputs: Vec<std::path::PathBuf>,
73) -> Result<(sema::TypeEnv, sema::TermEnv, Vec<Def>), Errors> {
74    let files = match Files::from_paths(inputs, &[]) {
75        Ok(files) => files,
76        Err((path, err)) => {
77            return Err(Errors::from_io(
78                err,
79                format!("cannot read file {}", path.display()),
80            ));
81        }
82    };
83    let files = Arc::new(files);
84    let mut defs = Vec::new();
85    for (file, src) in files.file_texts.iter().enumerate() {
86        let lexer = match crate::lexer::Lexer::new(file, src) {
87            Ok(lexer) => lexer,
88            Err(err) => return Err(Errors::new(vec![err], files)),
89        };
90
91        match crate::parser::parse(lexer) {
92            Ok(mut ds) => defs.append(&mut ds),
93            Err(err) => return Err(Errors::new(vec![err], files)),
94        }
95    }
96    let mut type_env = match sema::TypeEnv::from_ast(&defs) {
97        Ok(type_env) => type_env,
98        Err(errs) => return Err(Errors::new(errs, files)),
99    };
100    // We want to allow annotations on terms with internal extractors,
101    // so we avoid expanding them within the sema rules.
102    let term_env = match sema::TermEnv::from_ast(&mut type_env, &defs, false) {
103        Ok(term_env) => term_env,
104        Err(errs) => return Err(Errors::new(errs, files)),
105    };
106    Ok((type_env, term_env, defs))
107}