Skip to content

Commit d0ffdf9

Browse files
committed
This commit causes an ICE (internal compiler error).
1 parent 1b8f61b commit d0ffdf9

File tree

8 files changed

+437
-398
lines changed

8 files changed

+437
-398
lines changed

src/libsyntax/ext/qquote.rs

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import visit::*;
55
import ext::base::*;
66
import ext::build::*;
77
import parse::parser;
8+
import parse::lexer::{reader,lexer};
89
import parse::parser::parse_from_source_str;
910
import dvec::{dvec, extensions};
1011

@@ -163,21 +164,21 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
163164
};
164165
}
165166

166-
fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod([]) }
167-
fn parse_ty(p: parser) -> @ast::ty { p.parse_ty(false) }
168-
fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt([]) }
169-
fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() }
170-
fn parse_pat(p: parser) -> @ast::pat { p.parse_pat() }
167+
fn parse_crate(p: parser<reader>) -> @ast::crate { p.parse_crate_mod([]) }
168+
fn parse_ty(p: parser<reader>) -> @ast::ty { p.parse_ty(false) }
169+
fn parse_stmt(p: parser<reader>) -> @ast::stmt { p.parse_stmt([]) }
170+
fn parse_expr(p: parser<reader>) -> @ast::expr { p.parse_expr() }
171+
fn parse_pat(p: parser<reader>) -> @ast::pat { p.parse_pat() }
171172

172-
fn parse_item(p: parser) -> @ast::item {
173+
fn parse_item(p: parser<reader>) -> @ast::item {
173174
alt p.parse_item([], ast::public) {
174175
some(item) { item }
175176
none { fail "parse_item: parsing an item failed"; }
176177
}
177178
}
178179

179180
fn finish<T: qq_helper>
180-
(ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: parser) -> T)
181+
(ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: parser<reader>) -> T)
181182
-> @ast::expr
182183
{
183184
let cm = ecx.codemap();

src/libsyntax/parse.rs

Lines changed: 14 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ import attr::parser_attr;
1717
import common::parser_common;
1818
import ast::node_id;
1919
import util::interner;
20-
import lexer::reader;
20+
import lexer::{reader,lexer};
2121

2222
type parse_sess = @{
2323
cm: codemap::codemap,
@@ -44,13 +44,13 @@ fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
4444
sess: parse_sess) -> @ast::crate {
4545
let p = new_parser_from_file(sess, cfg, input, parser::CRATE_FILE);
4646
let lo = p.span.lo;
47-
let prefix = path::dirname(p.reader.filemap.name);
47+
let prefix = path::dirname(input);
4848
let leading_attrs = p.parse_inner_attrs_and_next();
4949
let crate_attrs = leading_attrs.inner;
5050
let first_cdir_attr = leading_attrs.next;
5151
let cdirs = p.parse_crate_directives(token::EOF, first_cdir_attr);
5252
sess.chpos = p.reader.chpos;
53-
sess.byte_pos = sess.byte_pos + p.reader.pos;
53+
sess.byte_pos = sess.byte_pos + p.reader.bpos();
5454
let cx =
5555
@{sess: sess,
5656
cfg: p.cfg};
@@ -71,17 +71,17 @@ fn parse_crate_from_source_file(input: str, cfg: ast::crate_cfg,
7171
let p = new_parser_from_file(sess, cfg, input, parser::SOURCE_FILE);
7272
let r = p.parse_crate_mod(cfg);
7373
sess.chpos = p.reader.chpos;
74-
sess.byte_pos = sess.byte_pos + p.reader.pos;
74+
sess.byte_pos = sess.byte_pos + p.reader.bpos();
7575
ret r;
7676
}
7777

7878
fn parse_crate_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
7979
sess: parse_sess) -> @ast::crate {
80-
let p = new_parser_from_source_str(
81-
sess, cfg, name, codemap::fss_none, source);
80+
let p = new_parser_from_source_str(sess, cfg, name, codemap::fss_none,
81+
source);
8282
let r = p.parse_crate_mod(cfg);
8383
sess.chpos = p.reader.chpos;
84-
sess.byte_pos = sess.byte_pos + p.reader.pos;
84+
sess.byte_pos = sess.byte_pos + p.reader.bpos();
8585
ret r;
8686
}
8787

@@ -91,7 +91,7 @@ fn parse_expr_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
9191
sess, cfg, name, codemap::fss_none, source);
9292
let r = p.parse_expr();
9393
sess.chpos = p.reader.chpos;
94-
sess.byte_pos = sess.byte_pos + p.reader.pos;
94+
sess.byte_pos = sess.byte_pos + p.reader.bpos();
9595
ret r;
9696
}
9797

@@ -102,11 +102,11 @@ fn parse_item_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
102102
sess, cfg, name, codemap::fss_none, source);
103103
let r = p.parse_item(attrs, vis);
104104
sess.chpos = p.reader.chpos;
105-
sess.byte_pos = sess.byte_pos + p.reader.pos;
105+
sess.byte_pos = sess.byte_pos + p.reader.bpos();
106106
ret r;
107107
}
108108

109-
fn parse_from_source_str<T>(f: fn (p: parser) -> T,
109+
fn parse_from_source_str<T>(f: fn (p: parser<reader>) -> T,
110110
name: str, ss: codemap::file_substr,
111111
source: @str, cfg: ast::crate_cfg,
112112
sess: parse_sess)
@@ -118,7 +118,7 @@ fn parse_from_source_str<T>(f: fn (p: parser) -> T,
118118
p.reader.fatal("expected end-of-string");
119119
}
120120
sess.chpos = p.reader.chpos;
121-
sess.byte_pos = sess.byte_pos + p.reader.pos;
121+
sess.byte_pos = sess.byte_pos + p.reader.bpos();
122122
ret r;
123123
}
124124

@@ -132,7 +132,7 @@ fn next_node_id(sess: parse_sess) -> node_id {
132132

133133
fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
134134
name: str, ss: codemap::file_substr,
135-
source: @str) -> parser {
135+
source: @str) -> parser<reader> {
136136
let ftype = parser::SOURCE_FILE;
137137
let filemap = codemap::new_filemap_w_substr
138138
(name, ss, source, sess.chpos, sess.byte_pos);
@@ -145,7 +145,7 @@ fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
145145

146146
fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: str,
147147
ftype: parser::file_type) ->
148-
parser {
148+
parser<reader> {
149149
let src = alt io::read_whole_file_str(path) {
150150
result::ok(src) {
151151
// FIXME: This copy is unfortunate (#2319)
@@ -155,8 +155,7 @@ fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: str,
155155
sess.span_diagnostic.handler().fatal(e)
156156
}
157157
};
158-
let filemap = codemap::new_filemap(path, src,
159-
sess.chpos, sess.byte_pos);
158+
let filemap = codemap::new_filemap(path, src, sess.chpos, sess.byte_pos);
160159
sess.cm.files.push(filemap);
161160
let itr = @interner::mk(str::hash, str::eq);
162161
let rdr = lexer::new_reader(sess.span_diagnostic, filemap, itr);

src/libsyntax/parse/attr.rs

Lines changed: 3 additions & 110 deletions
Original file line numberDiff line numberDiff line change
@@ -9,118 +9,11 @@ export parser_attr;
99
// extensions, which both begin with token.POUND
1010
type attr_or_ext = option<either<[ast::attribute], @ast::expr>>;
1111

12-
impl parser_attr for parser {
1312

14-
fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute])
15-
-> attr_or_ext
16-
{
17-
let expect_item_next = vec::is_not_empty(first_item_attrs);
18-
if self.token == token::POUND {
19-
let lo = self.span.lo;
20-
if self.look_ahead(1u) == token::LBRACKET {
21-
self.bump();
22-
let first_attr =
23-
self.parse_attribute_naked(ast::attr_outer, lo);
24-
ret some(left([first_attr] + self.parse_outer_attributes()));
25-
} else if !(self.look_ahead(1u) == token::LT
26-
|| self.look_ahead(1u) == token::LBRACKET
27-
|| self.look_ahead(1u) == token::POUND
28-
|| expect_item_next) {
29-
self.bump();
30-
ret some(right(self.parse_syntax_ext_naked(lo)));
31-
} else { ret none; }
32-
} else { ret none; }
33-
}
13+
type parser_attr = int;
14+
/*impl parser_attr<L: lexer> for parser<L> {
3415
35-
// Parse attributes that appear before an item
36-
fn parse_outer_attributes() -> [ast::attribute] {
37-
let mut attrs: [ast::attribute] = [];
38-
while self.token == token::POUND
39-
&& self.look_ahead(1u) == token::LBRACKET {
40-
attrs += [self.parse_attribute(ast::attr_outer)];
41-
}
42-
ret attrs;
43-
}
44-
45-
fn parse_attribute(style: ast::attr_style) -> ast::attribute {
46-
let lo = self.span.lo;
47-
self.expect(token::POUND);
48-
ret self.parse_attribute_naked(style, lo);
49-
}
50-
51-
fn parse_attribute_naked(style: ast::attr_style, lo: uint) ->
52-
ast::attribute {
53-
self.expect(token::LBRACKET);
54-
let meta_item = self.parse_meta_item();
55-
self.expect(token::RBRACKET);
56-
let mut hi = self.span.hi;
57-
ret spanned(lo, hi, {style: style, value: *meta_item});
58-
}
59-
60-
// Parse attributes that appear after the opening of an item, each
61-
// terminated by a semicolon. In addition to a vector of inner attributes,
62-
// this function also returns a vector that may contain the first outer
63-
// attribute of the next item (since we can't know whether the attribute
64-
// is an inner attribute of the containing item or an outer attribute of
65-
// the first contained item until we see the semi).
66-
fn parse_inner_attrs_and_next() ->
67-
{inner: [ast::attribute], next: [ast::attribute]} {
68-
let mut inner_attrs: [ast::attribute] = [];
69-
let mut next_outer_attrs: [ast::attribute] = [];
70-
while self.token == token::POUND {
71-
if self.look_ahead(1u) != token::LBRACKET {
72-
// This is an extension
73-
break;
74-
}
75-
let attr = self.parse_attribute(ast::attr_inner);
76-
if self.token == token::SEMI {
77-
self.bump();
78-
inner_attrs += [attr];
79-
} else {
80-
// It's not really an inner attribute
81-
let outer_attr =
82-
spanned(attr.span.lo, attr.span.hi,
83-
{style: ast::attr_outer, value: attr.node.value});
84-
next_outer_attrs += [outer_attr];
85-
break;
86-
}
87-
}
88-
ret {inner: inner_attrs, next: next_outer_attrs};
89-
}
90-
91-
fn parse_meta_item() -> @ast::meta_item {
92-
let lo = self.span.lo;
93-
let ident = self.parse_ident();
94-
alt self.token {
95-
token::EQ {
96-
self.bump();
97-
let lit = self.parse_lit();
98-
let mut hi = self.span.hi;
99-
ret @spanned(lo, hi, ast::meta_name_value(ident, lit));
100-
}
101-
token::LPAREN {
102-
let inner_items = self.parse_meta_seq();
103-
let mut hi = self.span.hi;
104-
ret @spanned(lo, hi, ast::meta_list(ident, inner_items));
105-
}
106-
_ {
107-
let mut hi = self.span.hi;
108-
ret @spanned(lo, hi, ast::meta_word(ident));
109-
}
110-
}
111-
}
112-
113-
fn parse_meta_seq() -> [@ast::meta_item] {
114-
ret self.parse_seq(token::LPAREN, token::RPAREN,
115-
seq_sep(token::COMMA),
116-
{|p| p.parse_meta_item()}).node;
117-
}
118-
119-
fn parse_optional_meta() -> [@ast::meta_item] {
120-
alt self.token { token::LPAREN { ret self.parse_meta_seq(); }
121-
_ { ret []; } }
122-
}
123-
}
16+
}*/
12417

12518
//
12619
// Local Variables:

src/libsyntax/parse/comments.rs

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import io::reader_util;
22
import io::println;//XXXXXXXXxxx
33
import util::interner;
4-
import lexer::{ reader, new_reader, next_token, is_whitespace };
4+
import lexer::{ reader, nextch, new_reader, is_whitespace };
55

66
export cmnt;
77
export lit;
@@ -68,7 +68,7 @@ fn read_line_comments(rdr: reader, code_to_the_left: bool) -> cmnt {
6868
#debug(">>> line comments");
6969
let p = rdr.chpos;
7070
let mut lines: [str] = [];
71-
while rdr.curr == '/' && rdr.next() == '/' {
71+
while rdr.curr == '/' && nextch(rdr) == '/' {
7272
let line = read_one_line_comment(rdr);
7373
log(debug, line);
7474
lines += [line];
@@ -117,13 +117,13 @@ fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
117117
rdr.bump();
118118
} else {
119119
str::push_char(curr_line, rdr.curr);
120-
if rdr.curr == '/' && rdr.next() == '*' {
120+
if rdr.curr == '/' && nextch(rdr) == '*' {
121121
rdr.bump();
122122
rdr.bump();
123123
curr_line += "*";
124124
level += 1;
125125
} else {
126-
if rdr.curr == '*' && rdr.next() == '/' {
126+
if rdr.curr == '*' && nextch(rdr) == '/' {
127127
rdr.bump();
128128
rdr.bump();
129129
curr_line += "/";
@@ -145,18 +145,18 @@ fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
145145
}
146146

147147
fn peeking_at_comment(rdr: reader) -> bool {
148-
ret ((rdr.curr == '/' && rdr.next() == '/') ||
149-
(rdr.curr == '/' && rdr.next() == '*')) ||
150-
(rdr.curr == '#' && rdr.next() == '!');
148+
ret ((rdr.curr == '/' && nextch(rdr) == '/') ||
149+
(rdr.curr == '/' && nextch(rdr) == '*')) ||
150+
(rdr.curr == '#' && nextch(rdr) == '!');
151151
}
152152

153153
fn consume_comment(rdr: reader, code_to_the_left: bool, &comments: [cmnt]) {
154154
#debug(">>> consume comment");
155-
if rdr.curr == '/' && rdr.next() == '/' {
155+
if rdr.curr == '/' && nextch(rdr) == '/' {
156156
comments += [read_line_comments(rdr, code_to_the_left)];
157-
} else if rdr.curr == '/' && rdr.next() == '*' {
157+
} else if rdr.curr == '/' && nextch(rdr) == '*' {
158158
comments += [read_block_comment(rdr, code_to_the_left)];
159-
} else if rdr.curr == '#' && rdr.next() == '!' {
159+
} else if rdr.curr == '#' && nextch(rdr) == '!' {
160160
comments += [read_shebang_comment(rdr, code_to_the_left)];
161161
} else { fail; }
162162
#debug("<<< consume comment");
@@ -189,7 +189,7 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
189189
}
190190
break;
191191
}
192-
let tok = next_token(rdr);
192+
let tok = rdr.next_token();
193193
if token::is_lit(tok.tok) {
194194
let s = rdr.get_str_from(tok.bpos);
195195
literals += [{lit: s, pos: tok.chpos}];

0 commit comments

Comments
 (0)