4 changes: 4 additions & 0 deletions src/libcore/hashmap.rs
@@ -825,6 +825,10 @@ pub impl <T:Hash + Eq> HashSet<T> {
     fn consume(&mut self, f: &fn(T)) {
         self.map.consume(|k, _| f(k))
     }
+
+    fn contains_equiv<Q:Hash + Equiv<T>>(&self, value: &Q) -> bool {
+        self.map.contains_key_equiv(value)
+    }
 }
 
 #[cfg(test)]
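The new `contains_equiv` mirrors `HashMap::contains_key_equiv`: a `HashSet<T>` can be probed with any `Q: Hash + Equiv<T>`, so a `HashSet<~str>` can be queried with a borrowed `&str` without allocating an owned string for the lookup. A minimal usage sketch, not part of the PR, assuming the contemporary `core::hashmap` and `core::str` impls:

```rust
// Sketch only: HashSet::new/insert and the &str-to-~str Equiv impl are
// assumed from the contemporary libcore, not shown in this diff.
use core::hashmap::HashSet;

fn main() {
    let mut keywords = HashSet::new();
    keywords.insert(~"true");
    keywords.insert(~"false");

    // &str is Hash + Equiv<~str>, so no ~str is built just for the query.
    assert!(keywords.contains_equiv(&"true"));
    assert!(!keywords.contains_equiv(&"loop"));
}
```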
4 changes: 2 additions & 2 deletions src/libsyntax/ext/trace_macros.rs
@@ -34,9 +34,9 @@ pub fn expand_trace_macros(cx: @ext_ctxt,
         rdr.dup()
     );
 
-    if rust_parser.is_keyword(&~"true") {
+    if rust_parser.is_keyword("true") {
         cx.set_trace_macros(true);
-    } else if rust_parser.is_keyword(&~"false") {
+    } else if rust_parser.is_keyword("false") {
         cx.set_trace_macros(false);
     } else {
         cx.span_fatal(sp, ~"trace_macros! only accepts `true` or `false`")
42 changes: 21 additions & 21 deletions src/libsyntax/parse/common.rs
@@ -137,27 +137,27 @@ pub impl Parser {
 
     // A sanity check that the word we are asking for is a known keyword
     // NOTE: this could be done statically....
-    fn require_keyword(&self, word: &~str) {
-        if !self.keywords.contains(word) {
-            self.bug(fmt!("unknown keyword: %s", *word));
+    fn require_keyword(&self, word: &str) {
+        if !self.keywords.contains_equiv(&word) {
+            self.bug(fmt!("unknown keyword: %s", word));
         }
     }
 
     // return true when this token represents the given string, and is not
     // followed immediately by :: .
-    fn token_is_word(&self, word: &~str, tok: &token::Token) -> bool {
+    fn token_is_word(&self, word: &str, tok: &token::Token) -> bool {
         match *tok {
-            token::IDENT(sid, false) => { *self.id_to_str(sid) == *word }
+            token::IDENT(sid, false) => { word == *self.id_to_str(sid) }
             _ => { false }
         }
     }
 
-    fn token_is_keyword(&self, word: &~str, tok: &token::Token) -> bool {
+    fn token_is_keyword(&self, word: &str, tok: &token::Token) -> bool {
         self.require_keyword(word);
         self.token_is_word(word, tok)
     }
 
-    fn is_keyword(&self, word: &~str) -> bool {
+    fn is_keyword(&self, word: &str) -> bool {
         self.token_is_keyword(word, &copy *self.token)
     }
 
@@ -177,10 +177,10 @@ pub impl Parser {
     // if the given word is not a keyword, signal an error.
     // if the next token is the given keyword, eat it and return
     // true. Otherwise, return false.
-    fn eat_keyword(&self, word: &~str) -> bool {
+    fn eat_keyword(&self, word: &str) -> bool {
         self.require_keyword(word);
         let is_kw = match *self.token {
-            token::IDENT(sid, false) => *word == *self.id_to_str(sid),
+            token::IDENT(sid, false) => word == *self.id_to_str(sid),
             _ => false
         };
         if is_kw { self.bump() }
@@ -190,63 +190,63 @@ pub impl Parser {
     // if the given word is not a keyword, signal an error.
     // if the next token is not the given word, signal an error.
     // otherwise, eat it.
-    fn expect_keyword(&self, word: &~str) {
+    fn expect_keyword(&self, word: &str) {
         self.require_keyword(word);
         if !self.eat_keyword(word) {
             self.fatal(
                 fmt!(
                     "expected `%s`, found `%s`",
-                    *word,
+                    word,
                     self.this_token_to_str()
                 )
             );
         }
     }
 
     // return true if the given string is a strict keyword
-    fn is_strict_keyword(&self, word: &~str) -> bool {
-        self.strict_keywords.contains(word)
+    fn is_strict_keyword(&self, word: &str) -> bool {
+        self.strict_keywords.contains_equiv(&word)
     }
 
     // signal an error if the current token is a strict keyword
     fn check_strict_keywords(&self) {
         match *self.token {
             token::IDENT(_, false) => {
                 let w = token_to_str(self.reader, &copy *self.token);
-                self.check_strict_keywords_(&w);
+                self.check_strict_keywords_(w);
             }
             _ => ()
         }
     }
 
     // signal an error if the given string is a strict keyword
-    fn check_strict_keywords_(&self, w: &~str) {
+    fn check_strict_keywords_(&self, w: &str) {
         if self.is_strict_keyword(w) {
             self.span_err(*self.last_span,
-                          fmt!("found `%s` in ident position", *w));
+                          fmt!("found `%s` in ident position", w));
         }
     }
 
     // return true if this is a reserved keyword
-    fn is_reserved_keyword(&self, word: &~str) -> bool {
-        self.reserved_keywords.contains(word)
+    fn is_reserved_keyword(&self, word: &str) -> bool {
+        self.reserved_keywords.contains_equiv(&word)
     }
 
     // signal an error if the current token is a reserved keyword
     fn check_reserved_keywords(&self) {
         match *self.token {
             token::IDENT(_, false) => {
                 let w = token_to_str(self.reader, &copy *self.token);
-                self.check_reserved_keywords_(&w);
+                self.check_reserved_keywords_(w);
             }
             _ => ()
         }
     }
 
     // signal an error if the given string is a reserved keyword
-    fn check_reserved_keywords_(&self, w: &~str) {
+    fn check_reserved_keywords_(&self, w: &str) {
         if self.is_reserved_keyword(w) {
-            self.fatal(fmt!("`%s` is a reserved keyword", *w));
+            self.fatal(fmt!("`%s` is a reserved keyword", w));
         }
     }
 
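With the signatures changed from `&~str` to `&str`, call sites can pass string literals directly (e.g. `self.eat_keyword("priv")`), and the keyword sets, which store `~str`, are probed through the `Equiv` bound instead of requiring an owned string per check. A hypothetical standalone equivalent of that lookup shape (the helper name and imports are illustrative, not from the PR, and assume the contemporary `core::cmp::Equiv` and `core::hash::Hash` traits):

```rust
use core::cmp::{Eq, Equiv};
use core::hash::Hash;
use core::hashmap::HashSet;

// Generic over the stored type T and the lookup type Q, matching the
// bound on contains_equiv / contains_key_equiv.
fn is_known<T: Hash + Eq, Q: Hash + Equiv<T>>(set: &HashSet<T>, word: &Q) -> bool {
    set.contains_equiv(word)
}

fn main() {
    let mut strict = HashSet::new();
    strict.insert(~"fn");
    strict.insert(~"priv");

    // The literal stays a &str; no ~str allocation per keyword check.
    assert!(is_known(&strict, &"fn"));
    assert!(!is_known(&strict, &"banana"));
}
```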
4 changes: 2 additions & 2 deletions src/libsyntax/parse/obsolete.rs
@@ -302,9 +302,9 @@ pub impl Parser {
     }
 
     fn try_parse_obsolete_priv_section(&self, attrs: &[attribute]) -> bool {
-        if self.is_keyword(&~"priv") && self.look_ahead(1) == token::LBRACE {
+        if self.is_keyword("priv") && self.look_ahead(1) == token::LBRACE {
             self.obsolete(copy *self.span, ObsoletePrivSection);
-            self.eat_keyword(&~"priv");
+            self.eat_keyword("priv");
             self.bump();
             while *self.token != token::RBRACE {
                 self.parse_single_struct_field(ast::private, attrs.to_owned());