[Pkg-puppet-devel] [SCM] Puppet packaging for Debian branch, master, updated. debian/0.24.6-1-356-g5718585
Brice Figureau
brice-puppet at daysofwonder.com
Fri Jan 23 14:21:35 UTC 2009
The following commit has been merged into the master branch:
commit 2d37f09aa093b10cb64b9b649f0066217c53d48f
Author: Brice Figureau <brice-puppet at daysofwonder.com>
Date: Tue Oct 28 14:17:12 2008 +0100
Fix #1402 - Allow multiline comments
Signed-off-by: Brice Figureau <brice-puppet at daysofwonder.com>
diff --git a/lib/puppet/parser/lexer.rb b/lib/puppet/parser/lexer.rb
index 9226434..dd6c29d 100644
--- a/lib/puppet/parser/lexer.rb
+++ b/lib/puppet/parser/lexer.rb
@@ -157,6 +157,11 @@ class Puppet::Parser::Lexer
TOKENS.add_token :COMMENT, %r{#.*}, :skip => true
+ TOKENS.add_token :MLCOMMENT, %r{/\*(.*?)\*/}m do |lexer, value|
+ lexer.line += value.count("\n")
+ [nil,nil]
+ end
+
TOKENS.add_token :RETURN, "\n", :skip => true, :incr_line => true, :skip_text => true
TOKENS.add_token :SQUOTE, "'" do |lexer, value|
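
For context, here is a small standalone Ruby sketch (not part of the patch, using no Puppet internals; the variable names are illustrative) of how the %r{/\*(.*?)\*/}m pattern added above behaves. The /m flag lets "." match newlines, the non-greedy ".*?" stops at the first "*/", and the count of "\n" in the match is what the convert block adds to the lexer's current line number:

    pattern = %r{/\*(.*?)\*/}m   # same pattern as the MLCOMMENT token above

    source = "/*\n this spans\n two lines\n*/ notify { 'demo': }"

    if match = pattern.match(source)
      puts match[1].inspect       # captured body  => "\n this spans\n two lines\n"
      puts match[0].count("\n")   # lines spanned  => 3
    end

    # Non-greedy matching keeps two comments on one line separate:
    puts "/* a */ x /* b */".scan(pattern).inspect   # => [[" a "], [" b "]]

Returning [nil,nil] from the convert block appears to be what makes the lexer drop the comment entirely, playing the same role as the :skip => true option on the single-line :COMMENT token.
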
diff --git a/spec/unit/parser/lexer.rb b/spec/unit/parser/lexer.rb
index 3b0df96..d62d992 100755
--- a/spec/unit/parser/lexer.rb
+++ b/spec/unit/parser/lexer.rb
@@ -201,7 +201,7 @@ describe Puppet::Parser::Lexer::TOKENS do
end
# These tokens' strings don't matter, just that the tokens exist.
- [:DQTEXT, :SQTEXT, :BOOLEAN, :NAME, :NUMBER, :COMMENT, :RETURN, :SQUOTE, :DQUOTE, :VARIABLE].each do |name|
+ [:DQTEXT, :SQTEXT, :BOOLEAN, :NAME, :NUMBER, :COMMENT, :MLCOMMENT, :RETURN, :SQUOTE, :DQUOTE, :VARIABLE].each do |name|
it "should have a token named #{name.to_s}" do
Puppet::Parser::Lexer::TOKENS[name].should_not be_nil
end
@@ -287,6 +287,34 @@ describe Puppet::Parser::Lexer::TOKENS[:COMMENT] do
end
end
+describe Puppet::Parser::Lexer::TOKENS[:MLCOMMENT] do
+ before do
+ @token = Puppet::Parser::Lexer::TOKENS[:MLCOMMENT]
+ @lexer = stub 'lexer', :line => 0
+ end
+
+ it "should match against lines enclosed with '/*' and '*/'" do
+ @token.regex.should =~ "/* this is a comment */"
+ end
+
+ it "should match multiple lines enclosed with '/*' and '*/'" do
+ @token.regex.should =~ """/*
+ this is a comment
+ */"""
+ end
+
+ it "should increase the lexer current line number by the amount of lines spanned by the comment" do
+ @lexer.expects(:line=).with(2)
+ @token.convert(@lexer, "1\n2\n3")
+ end
+
+ it "should not greedily match comments" do
+ match = @token.regex.match("/* first */ word /* second */")
+ match[1].should == " first "
+ end
+
+end
+
describe Puppet::Parser::Lexer::TOKENS[:RETURN] do
before { @token = Puppet::Parser::Lexer::TOKENS[:RETURN] }
diff --git a/test/data/snippets/multilinecomments.pp b/test/data/snippets/multilinecomments.pp
new file mode 100644
index 0000000..816baeb
--- /dev/null
+++ b/test/data/snippets/multilinecomments.pp
@@ -0,0 +1,6 @@
+
+/*
+file {
+ "/tmp/multilinecomments": content => "pouet"
+}
+*/
diff --git a/test/language/snippets.rb b/test/language/snippets.rb
index 1003ded..c4318ee 100755
--- a/test/language/snippets.rb
+++ b/test/language/snippets.rb
@@ -30,6 +30,13 @@ class TestSnippets < Test::Unit::TestCase
end
end
+ def assert_not_file(path, msg = nil)
+ if file = @file[path]
+ msg ||= "File %s exists!" % path
+ raise msg
+ end
+ end
+
def assert_mode_equal(mode, path)
unless file = @file[path]
raise "Could not find file %s" % path
@@ -464,6 +471,10 @@ class TestSnippets < Test::Unit::TestCase
assert_file("/tmp/multipleclassone", "one")
assert_file("/tmp/multipleclasstwo", "two")
end
+
+ def snippet_multilinecomments
+ assert_not_file("/tmp/multilinecomments","Did create a commented resource");
+ end
# Iterate across each of the snippets and create a test.
Dir.entries(snippetdir).sort.each { |file|
--
Puppet packaging for Debian