[geany/geany] d96a31: Rust: Change/simplify how the string tokens are handled.

SiegeLord git-noreply at xxxxx
Tue Jul 29 13:23:42 UTC 2014


Branch:      refs/heads/master
Author:      SiegeLord <slabode at aim.com>
Committer:   SiegeLord <slabode at aim.com>
Date:        Tue, 29 Jul 2014 13:23:42 UTC
Commit:      d96a314a68a6ca779f3e447dd25724df4881a93a
             https://github.com/geany/geany/commit/d96a314a68a6ca779f3e447dd25724df4881a93a

Log Message:
-----------
Rust: Change/simplify how the string tokens are handled.

Previously, only the string contents were stored in lexerState::token_str (i.e.
not including the delimiters). Now, the delimiters are stored as well, thus
preserving them. This also simplifies the code a bit.

A new function is added to handle the character storage; it is now also used
for normal identifiers. To that end, MAX_STRING_LENGTH was raised to 256 so
that all reasonably sized identifiers fit.
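
For illustration, a minimal standalone sketch of the same pattern follows. It
is not the ctags code itself (which works on vString/lexerState and the tag
manager's file input); the names and the in-memory input buffer here are
hypothetical, chosen only to show how a single bounded store-and-advance
helper keeps the delimiters in the stored token.

#include <stdio.h>

#define MAX_STRING_LENGTH 256

/* Simplified stand-in for the lexer state: a position in an in-memory
 * buffer instead of the tag manager's file input. */
typedef struct {
	const char *input;                      /* remaining input */
	char token_str[MAX_STRING_LENGTH + 1];  /* stored token */
	size_t token_len;
} miniLexer;

/* Store the current character in token_str if there is space, then
 * advance to the next character -- the same pattern as the new
 * advanceAndStoreChar(), minus the vString/file plumbing. */
static void advanceAndStore (miniLexer *lexer)
{
	if (lexer->token_len < MAX_STRING_LENGTH)
		lexer->token_str[lexer->token_len++] = *lexer->input;
	lexer->input++;
}

/* Scan a double-quoted string, keeping the delimiters in token_str. */
static void scanQuotedString (miniLexer *lexer)
{
	lexer->token_len = 0;
	advanceAndStore(lexer);            /* opening '"' */
	while (*lexer->input != '\0' && *lexer->input != '"')
	{
		if (*lexer->input == '\\' && lexer->input[1] == '"')
			advanceAndStore(lexer);    /* keep the backslash of \" */
		advanceAndStore(lexer);
	}
	if (*lexer->input == '"')
		advanceAndStore(lexer);        /* closing '"' */
	lexer->token_str[lexer->token_len] = '\0';
}

int main (void)
{
	miniLexer lexer = { "\"hello \\\"world\\\"\" rest", "", 0 };
	scanQuotedString(&lexer);
	/* Prints the token with its quotes: "hello \"world\"" */
	printf("%s\n", lexer.token_str);
	return 0;
}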


Modified Paths:
--------------
    tagmanager/ctags/rust.c

Modified: tagmanager/ctags/rust.c
49 lines changed, 20 insertions(+), 29 deletions(-)
===================================================================
@@ -24,7 +24,7 @@
 /*
 *   MACROS
 */
-#define MAX_STRING_LENGTH 64
+#define MAX_STRING_LENGTH 256
 
 /*
 *   DATA DECLARATIONS
@@ -117,9 +117,7 @@ static void writeCurTokenToStr (lexerState *lexer, vString *out_str)
 			vStringCat(out_str, lexer->token_str);
 			break;
 		case TOKEN_STRING:
-			vStringPut(out_str, '"');
 			vStringCat(out_str, lexer->token_str);
-			vStringPut(out_str, '"');
 			break;
 		case TOKEN_WHITESPACE:
 			vStringPut(out_str, ' ');
@@ -152,6 +150,14 @@ static void advanceNChar (lexerState *lexer, int n)
 		advanceChar(lexer);
 }
 
+/* Store the current character in lexerState::token_str if there is space
+ * (set by MAX_STRING_LENGTH), and then read the next character from the file */
+static void advanceAndStoreChar (lexerState *lexer)
+{
+	if (vStringLength(lexer->token_str) < MAX_STRING_LENGTH)
+		vStringPut(lexer->token_str, (char) lexer->cur_c);
+	advanceChar(lexer);
+}
 
 static boolean isWhitespace (int c)
 {
@@ -224,8 +230,7 @@ static void scanIdentifier (lexerState *lexer)
 	vStringClear(lexer->token_str);
 	do
 	{
-		vStringPut(lexer->token_str, (char) lexer->cur_c);
-		advanceChar(lexer);
+		advanceAndStoreChar(lexer);
 	} while(lexer->cur_c != EOF && isIdentifierContinue(lexer->cur_c));
 }
 
@@ -237,16 +242,14 @@ static void scanIdentifier (lexerState *lexer)
 static void scanString (lexerState *lexer)
 {
 	vStringClear(lexer->token_str);
-	advanceChar(lexer);
+	advanceAndStoreChar(lexer);
 	while (lexer->cur_c != EOF && lexer->cur_c != '"')
 	{
 		if (lexer->cur_c == '\\' && lexer->next_c == '"')
-			advanceChar(lexer);
-		if (vStringLength(lexer->token_str) < MAX_STRING_LENGTH)
-			vStringPut(lexer->token_str, (char) lexer->cur_c);
-		advanceChar(lexer);
+			advanceAndStoreChar(lexer);
+		advanceAndStoreChar(lexer);
 	}
-	advanceChar(lexer);
+	advanceAndStoreChar(lexer);
 }
 
 /* Raw strings look like this: r"" or r##""## where the number of
@@ -255,48 +258,36 @@ static void scanRawString (lexerState *lexer)
 {
 	size_t num_initial_hashes = 0;
 	vStringClear(lexer->token_str);
-	advanceChar(lexer);
+	advanceAndStoreChar(lexer);
 	/* Count how many leading hashes there are */
 	while (lexer->cur_c == '#')
 	{
 		num_initial_hashes++;
-		advanceChar(lexer);
+		advanceAndStoreChar(lexer);
 	}
 	if (lexer->cur_c != '"')
 		return;
-	advanceChar(lexer);
+	advanceAndStoreChar(lexer);
 	while (lexer->cur_c != EOF)
 	{
-		if (vStringLength(lexer->token_str) < MAX_STRING_LENGTH)
-			vStringPut(lexer->token_str, (char) lexer->cur_c);
 		/* Count how many trailing hashes there are. If the number is equal or more
 		 * than the number of leading hashes, break. */
 		if (lexer->cur_c == '"')
 		{
 			size_t num_trailing_hashes = 0;
-			advanceChar(lexer);
+			advanceAndStoreChar(lexer);
 			while (lexer->cur_c == '#' && num_trailing_hashes < num_initial_hashes)
 			{
 				num_trailing_hashes++;
 
-				if (vStringLength(lexer->token_str) < MAX_STRING_LENGTH)
-					vStringPut(lexer->token_str, (char) lexer->cur_c);
-				advanceChar(lexer);
+				advanceAndStoreChar(lexer);
 			}
 			if (num_trailing_hashes == num_initial_hashes)
-			{
-				/* Strip the trailing hashes and quotes */
-				if (vStringLength(lexer->token_str) < MAX_STRING_LENGTH && vStringLength(lexer->token_str) > num_trailing_hashes + 1)
-				{
-					lexer->token_str->length = vStringLength(lexer->token_str) - num_trailing_hashes - 1;
-					lexer->token_str->buffer[lexer->token_str->length] = '\0';
-				}
 				break;
-			}
 		}
 		else
 		{
-			advanceChar(lexer);
+			advanceAndStoreChar(lexer);
 		}
 	}
 }



--------------
This E-Mail was brought to you by github_commit_mail.py (Source: https://github.com/geany/infrastructure).

