


[linker] Allow asserts in sections


This patch does two things:

1) Allow ASSERT statements inside output section descriptions.  This means
one no longer has to write the awful

  foo : {
    . = . + (ASSERT (foo, bar) & 0);
  }

any more, where the "& 0" masks the assertion's value so the location
counter is unchanged while the ASSERT still fires if foo is zero.
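With the patch applied, the same check can be written directly as a
statement inside the output section.  A minimal sketch, using made-up
section and symbol names:

  foo : {
    ASSERT (foo, "foo is zero");
    *(.text)
  }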


2) Change the lexer so that ',' is not allowed in NAMEs when in expression
mode.  I kept getting tripped up by

  ASSERT (FOO, "FOO zero");

which lexes the first token as "FOO,".  I was also recently bitten by
EXTERN, which allows both commas and whitespace to separate the symbol
names.  Thus

  EXTERN (FOO, BAR, BAZ)

parses OK, but does not pull in the symbols you think it might, because the
commas end up as part of the names.
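With extern_name_list now lexed in expression mode, the comma becomes a
token of its own rather than part of the preceding name, so (assuming the
usual EXTERN syntax) both of these should pull in the same three symbols:

  EXTERN (FOO BAR BAZ)
  EXTERN (FOO, BAR, BAZ)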


Tested on m68k-elf.  OK?

nathan
--
Nathan Sidwell    ::   http://www.codesourcery.com   ::         CodeSourcery
nathan@codesourcery.com    ::     http://www.planetfall.pwp.blueyonder.co.uk

2007-05-22  Nathan Sidwell  <nathan@codesourcery.com>

	* ldlex.l: ASSERT is recognized in SCRIPT env.  NAMES cannot
	contain commas in EXP env.
	* ldgram.y (extern_name_list): Push to EXP env, move body to ...
	(extern_name_list_body): ... here.
	(script_file, ifile_list): Reformat.
	(statement): Add ASSERT.

	testsuite/
	* ld-scripts/assert.t: Add additional cases.

Index: testsuite/ld-scripts/assert.t
===================================================================
--- testsuite/ld-scripts/assert.t	(revision 171892)
+++ testsuite/ld-scripts/assert.t	(working copy)
@@ -1,5 +1,9 @@
 SECTIONS
 {
-  .empty : {}
+  .empty : {
+  here = !.;
+  ASSERT (!., "dot is not zero");
+  ASSERT (here, "here is zero");
+  }
   ASSERT (!SIZEOF(.empty), "Empty is not empty")
 }
Index: ldlex.l
===================================================================
--- ldlex.l	(revision 171892)
+++ ldlex.l	(working copy)
@@ -256,7 +256,7 @@ V_IDENTIFIER [*?.$_a-zA-Z\[\]\-\!\^\\]([
 <EXPRESSION,BOTH,SCRIPT>"LOADADDR"	{ RTOKEN(LOADADDR);}
 <EXPRESSION,BOTH>"MAX"			{ RTOKEN(MAX_K); }
 <EXPRESSION,BOTH>"MIN"			{ RTOKEN(MIN_K); }
-<EXPRESSION,BOTH>"ASSERT"		{ RTOKEN(ASSERT_K); }
+<EXPRESSION,BOTH,SCRIPT>"ASSERT"	{ RTOKEN(ASSERT_K); }
 <BOTH,SCRIPT>"ENTRY"			{ RTOKEN(ENTRY);}
 <BOTH,SCRIPT,MRI>"EXTERN"		{ RTOKEN(EXTERN);}
 <EXPRESSION,BOTH,SCRIPT>"NEXT"		{ RTOKEN(NEXT);}
@@ -363,11 +363,19 @@ V_IDENTIFIER [*?.$_a-zA-Z\[\]\-\!\^\\]([
 				}
 
 
-<BOTH,EXPRESSION>{FILENAMECHAR1}{FILENAMECHAR}*	{
+<BOTH>{FILENAMECHAR1}{FILENAMECHAR}*	{
 				 yylval.name = xstrdup (yytext);
 				  return NAME;
 				}
-<BOTH,EXPRESSION>"-l"{FILENAMECHAR}+ {
+<BOTH>"-l"{FILENAMECHAR}+ {
+				  yylval.name = xstrdup (yytext + 2);
+				  return LNAME;
+				}
+<EXPRESSION>{FILENAMECHAR1}{NOCFILENAMECHAR}*	{
+				 yylval.name = xstrdup (yytext);
+				  return NAME;
+				}
+<EXPRESSION>"-l"{NOCFILENAMECHAR}+ {
 				  yylval.name = xstrdup (yytext + 2);
 				  return LNAME;
 				}
Index: ldgram.y
===================================================================
--- ldgram.y	(revision 171892)
+++ ldgram.y	(working copy)
@@ -280,7 +280,13 @@ casesymlist:
 	| casesymlist ',' NAME
 	;
 
+/* Parsed as expressions so that commas separate entries */
 extern_name_list:
+	{ ldlex_expression (); }
+	extern_name_list_body
+	{ ldlex_popstate (); }
+
+extern_name_list_body:
 	  NAME
 			{ ldlang_add_undef ($1); }
 	| extern_name_list NAME
@@ -290,23 +296,17 @@ extern_name_list:
 	;
 
 script_file:
-	{
-	 ldlex_both();
-	}
-       ifile_list
-	{
-	ldlex_popstate();
-	}
+	{ ldlex_both(); }
+	ifile_list
+	{ ldlex_popstate(); }
         ;
 
-
 ifile_list:
-       ifile_list ifile_p1
+	ifile_list ifile_p1
         |
 	;
 
 
-
 ifile_p1:
 		memory
 	|	sections
@@ -573,6 +573,9 @@ statement:
 			{
 			  lang_add_fill ($3);
 			}
+	| ASSERT_K  {ldlex_expression ();} '(' exp ',' NAME ')' end
+			{ ldlex_popstate ();
+			  lang_add_assignment (exp_assert ($4, $6)); }
 	;
 
 statement_list:
