//! glsl_lang_lexer/full/str.rs

use lang_util::{position::LexerPosition, FileId};

use glsl_lang_pp::{
    exts::{Registry, DEFAULT_REGISTRY},
    last::{self, Event},
    processor::{
        self,
        str::{ExpandStr, ProcessStrError},
        ProcessorState,
    },
};

use crate::{HasLexerError, LangLexer, LangLexerIterator, ParseContext, ParseOptions, Token};

use super::{
    core::{self, HandleTokenResult, LexerCore},
    Directives, LexicalError,
};
21
/// Preprocessing-aware GLSL lexer over an in-memory string source.
///
/// Created via [`LangLexer::new`] (or [`Lexer::new_with_state`] for a custom
/// registry/preprocessor state), then turned into a token iterator with
/// [`LangLexer::run`].
pub struct Lexer<'i> {
    /// Tokenizer over the preprocessor's expansion of the source string.
    inner: last::Tokenizer<'i, ExpandStr>,
    /// Pending items/events left over from handling a previous token.
    handle_token: HandleTokenResult<ProcessStrError>,
    /// Parse options this lexer was created with (copied; consumed when
    /// building the iterator in `with_context`).
    opts: ParseOptions,
}
28
29impl<'i> Lexer<'i> {
30 pub(crate) fn new_with_state(
31 source: &'i str,
32 registry: &'i Registry,
33 opts: &ParseOptions,
34 state: ProcessorState,
35 ) -> Self {
36 Self {
37 inner: processor::str::process(source, state).tokenize(
38 opts.default_version,
39 opts.target_vulkan,
40 registry,
41 ),
42 handle_token: Default::default(),
43 opts: *opts,
44 }
45 }
46
47 fn with_context(self, ctx: ParseContext) -> LexerIterator<'i> {
48 LexerIterator {
49 inner: self.inner,
50 core: LexerCore::new(&self.opts, ctx),
51 handle_token: self.handle_token,
52 source_id: self.opts.source_id,
53 }
54 }
55}
56
/// Iterator over the tokens produced by a [`Lexer`] once it has been paired
/// with a [`ParseContext`].
pub struct LexerIterator<'i> {
    /// Tokenizer over the preprocessor's expansion of the source string.
    inner: last::Tokenizer<'i, ExpandStr>,
    /// Lexer core driving token and directive handling.
    core: LexerCore,
    /// Pending items/events left over from handling a previous token.
    handle_token: HandleTokenResult<ProcessStrError>,
    /// Identifier of the source, used to attribute errors to a file.
    source_id: FileId,
}
64
impl LexerIterator<'_> {
    /// Consume the iterator and return the directives collected by the lexer
    /// core during iteration.
    pub fn into_directives(self) -> Directives {
        self.core.into_directives()
    }
}
70
71impl<'i> Iterator for LexerIterator<'i> {
72 type Item = core::Item<ProcessStrError>;
73
74 fn next(&mut self) -> Option<Self::Item> {
75 loop {
76 if let Some(item) = self.handle_token.pop_item() {
78 return Some(item);
79 }
80
81 if let Some(result) = self.handle_token.pop_event().or_else(|| self.inner.next()) {
82 match result {
83 Ok(event) => match event {
84 Event::Error { mut error, masked } => {
85 if !masked {
86 error.set_current_file(self.source_id);
87 return Some(Err(error.into()));
88 }
89 }
90
91 Event::EnterFile { .. } => {
92 }
94
95 Event::Token {
96 source_token,
97 token_kind,
98 state,
99 } => {
100 self.core.handle_token(
101 source_token,
102 token_kind,
103 state,
104 &mut self.inner,
105 &mut self.handle_token,
106 );
107 }
108
109 Event::Directive { directive, masked } => {
110 if let Err(errors) = self.core.handle_directive(directive, masked) {
111 self.handle_token.push_errors(errors);
112 }
113 }
114 },
115
116 Err(err) => {
117 return Some(Err(LexicalError::Io(err)));
118 }
119 }
120 } else {
121 return None;
122 }
123 }
124 }
125}
126
/// Errors from [`Lexer`] are lexical errors wrapping string-processing failures.
impl HasLexerError for Lexer<'_> {
    type Error = LexicalError<ProcessStrError>;
}
130
impl<'i> LangLexer<'i> for Lexer<'i> {
    type Input = &'i str;
    type Iter = LexerIterator<'i>;

    /// Create a lexer for `source`, using the default extension registry and a
    /// default preprocessor state.
    fn new(source: Self::Input, opts: &ParseOptions) -> Self {
        Self::new_with_state(source, &DEFAULT_REGISTRY, opts, ProcessorState::default())
    }

    /// Start lexing with the given parse context.
    fn run(self, ctx: ParseContext) -> Self::Iter {
        self.with_context(ctx)
    }
}
143
/// The iterator reports the same error type as the [`Lexer`] that produced it.
impl HasLexerError for LexerIterator<'_> {
    type Error = LexicalError<ProcessStrError>;
}
147
148impl<'i> LangLexerIterator for LexerIterator<'i> {
149 fn resolve_err(
150 &self,
151 err: lalrpop_util::ParseError<LexerPosition, Token, Self::Error>,
152 ) -> lang_util::error::ParseError<Self::Error> {
153 let location = self.inner.location();
154 let (_file_id, lexer) = lang_util::error::error_location(&err);
155
156 lang_util::error::ParseError::<Self::Error>::builder()
157 .pos(lexer)
158 .current_file(self.source_id)
159 .resolve(location)
160 .finish(err.into())
161 }
162}