b4n requested changes on this pull request.
Comments on the uctags/geany diff:
```diff
+static bool createTagsWithFallback1 (const langType language,
+                                     passStartCallback passCallback, void *userData)
+{
+    int lastPromise = getLastPromise ();
+    unsigned int passCount = 0;
+    rescanReason whyRescan;
+
+    if (LanguageTable [language]->useCork)
+        corkTagFile();
```
Doesn't this miss `initializeParser (language);`?
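If it is indeed needed, I'd picture something along these lines (just a sketch, assuming nothing upstream already initializes the parser for us):

```c
    initializeParser (language);    /* make sure the parser is set up before we poke at it */

    if (LanguageTable [language]->useCork)
        corkTagFile();
```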
---

In `runParserInNarrowedInputStream()`: shouldn't more of it be guarded? Here the meat of the function is disabled with `CTAGS_LIB`, but not all of it.
---

```diff
-static void attachEndFieldMaybe (int macroCorkIndex)
-{
-    if (macroCorkIndex != CORK_NIL)
-    {
-        tagEntryInfo *tag;
-
-        tag = getEntryInCorkQueue (macroCorkIndex);
-        tag->extensionFields.endLine = getInputLineNumber ();
-    }
-}
-
```
Would be nice to have that :)
---

In `directiveDefine()`: why use a completely different signature collection logic than CTags'? I get that it might be related to c.c's handling of it, but I'm not sure what affects what.
```diff
@@ -38,7 +38,14 @@ AC_PROG_LN_S
 # autoscan start
 # Checks for header files.
-AC_CHECK_HEADERS([fcntl.h fnmatch.h glob.h stdlib.h sys/time.h])
+AC_CHECK_HEADERS([fcntl.h glob.h stdlib.h sys/time.h errno.h limits.h])
+
+# Checks for dependencies needed by ctags
+AC_CHECK_HEADERS([fnmatch.h direct.h io.h sys/dir.h])
+AH_TEMPLATE([USE_STDBOOL_H], [whether or not to use <stdbool.h>.])
+AC_DEFINE([USE_STDBOOL_H])
+AH_TEMPLATE([CTAGS_LIB], [compile ctags as a library.])
+AC_DEFINE([CTAGS_LIB])
```
Should rather use the `AC_DEFINE(symbol, value, description)` form than call `AH_TEMPLATE` manually IMO:

```autoconf
AC_DEFINE([USE_STDBOOL_H], [1], [whether or not to use <stdbool.h>.])
AC_DEFINE([CTAGS_LIB], [1], [compile ctags as a library.])
```
```diff
 	while (1)
 	{
 		nl = nestingLevelsGetCurrent(nestingLevels);
-		if (nl && nl->type >= kind)
+		e = getEntryOfNestingLevel (nl);
+		if ((nl && (e == NULL)) || (e && (e->kind - AsciidocKinds) >= kind))
```
why do that when `e == NULL`?
```diff
 	{
-		e.extensionFields.scopeKind = &(AsciidocKinds [nl->type]);
-		e.extensionFields.scopeName = vStringValue (nl->name);
+		e.extensionFields.scopeKind = &(AsciidocKinds [parent->kind - AsciidocKinds]);
+		e.extensionFields.scopeName = parent->name;
```
isn't the whole scope part dealt with by Cork anyway?
```diff
 	{
-		e.extensionFields.scopeKind = &(AsciidocKinds [nl->type]);
-		e.extensionFields.scopeName = vStringValue (nl->name);
+		e.extensionFields.scopeKind = &(AsciidocKinds [parent->kind - AsciidocKinds]);
```
seems like a convoluted way to say `parent->kind` :)
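Unless I'm missing something, it should boil down to just this (assuming `parent->kind` already points into `AsciidocKinds`):

```c
	e.extensionFields.scopeKind = parent->kind;
```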
```diff
@@ -290,12 +289,12 @@ static kindOption CKinds [] = {
 	{ true, 'g', "enum", "enumeration names"},
 	{ true, 'm', "member", "class, struct, and union members"},
 	{ true, 'n', "namespace", "namespaces"},
-	{ false, 'p', "prototype", "function prototypes"},
+	{ true, 'p', "prototype", "function prototypes"},
```
that's odd… but I would have imagined those were enabled before. Weird.
```diff
@@ -1221,11 +1219,10 @@ static void addOtherFields (tagEntryInfo* const tag, const tagType type,
 	{
 		tag->extensionFields.access = accessField (st);
 	}
-	if ((true == st->gotArgs) && (true == Option.extensionFields.argList) &&
```
is that option gone? Not that we actually need it, but still.
```diff
 	contextual_fake_count = 0;
 	Assert (passCount < 3);
-	cppInit ((bool) (passCount > 1), isInputLanguage (Lang_csharp), isInputLanguage (Lang_cpp), &(CKinds [CK_DEFINE]));
+
+	kind_for_define = CKinds+CK_DEFINE;
+
+	cppInit ((bool) (passCount > 1), isInputLanguage (Lang_csharp), isInputLanguage(Lang_cpp),
+		kind_for_define);
```
why introduce the `kind_for_define` variable?
```diff
 	exception = (exception_t) setjmp (Exception);
-	retry = false;
```
shouldn't `rescan` be reset to `RESCAN_NONE` here? `setjmp()` might lead to jumping there directly.
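I'd have expected something along these lines (sketch only):

```c
	exception = (exception_t) setjmp (Exception);
	rescan = RESCAN_NONE;	/* reset even when we come back here through longjmp() */
```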
```diff
 			vStringCatS(result, ".");
 		else
 			vStringCatS(result, "/"); */
 		}
-		vStringCat(result, nl->name);
-		is_class = (nl->type == K_CLASS);
+
+		e = getEntryOfNestingLevel (nl);
+		if (e)
+		{
+			vStringCatS(result, e->name);
+			is_class = ((e->kind - PythonKinds) == K_CLASS);
+		}
+		else
+			is_class = K_FUNCTION; /* ??? */
```
shouldn't that be merely `false`?
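i.e. (if I read the intent correctly):

```c
		e = getEntryOfNestingLevel (nl);
		if (e)
		{
			vStringCatS(result, e->name);
			is_class = ((e->kind - PythonKinds) == K_CLASS);
		}
		else
			is_class = false;	/* no entry at this level, so certainly not a class */
```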
```diff
 		prev = nl;
 	}
 	return is_class;
 }
 /* Check indentation level and truncate nesting levels accordingly */
-static void checkIndent(NestingLevels *nls, int indent)
+static void checkIndent(NestingLevels *nls, int indent, bool eof)
```
unused last (new) param
```diff
@@ -27,6 +27,17 @@
 /*
 *   DATA DEFINITIONS
 */
+
+struct corkPair {
+	int index;
+};
```
that struct looks useless.
```diff
@@ -825,5 +859,6 @@ extern parserDefinition *PythonParser (void)
 	def->kindCount = ARRAY_SIZE (PythonKinds);
 	def->extensions = extensions;
 	def->parser = findPythonTags;
+	def->useCork = true;
 	return def;
 }
```
well, I guess all changes to *python.c* are not too relevant so long as the parser still works, as I plan to import my new rewritten parser from UCtags once Cork is available on our side.
```diff
 	while (1)
 	{
 		nl = nestingLevelsGetCurrent(nestingLevels);
-		if (nl && nl->type >= kind)
+		e = getEntryOfNestingLevel (nl);
+		if ((nl && (e == NULL)) || (e && (e->kind - RestKinds) >= kind))
```
same question as above: why handle `e == NULL` on the left?
```diff
 	{
-		e.extensionFields.scopeKind = &(RestKinds [nl->type]);
-		e.extensionFields.scopeName = vStringValue (nl->name);
+		e.extensionFields.scopeKind = &(RestKinds [parent->kind - RestKinds]);
+		e.extensionFields.scopeName = parent->name;
```
again, same as for asciidoc
```diff
-	{
-		GStatBuf s;
-
-		/* load file to memory and parse it from memory unless the file is too big */
-		if (g_stat(file_name, &s) != 0 || s.st_size > 10*1024*1024)
-			parse_file = TRUE;
-		else
-		{
-			if (!g_file_get_contents(file_name, (gchar**)&text_buf, (gsize*)&buf_size, NULL))
-			{
-				g_warning("Unable to open %s", file_name);
-				return FALSE;
-			}
-			free_buf = TRUE;
-		}
-	}
```
shouldn't we keep this?
```diff
@@ -1,4 +1,5 @@
 # format=tagmanager
-ENTSEQNO�16�(seq)�0�FUNCSTS
-MEMTXT�16�(form_msg)�0�
-MEMTXT�1024�(form_msg)�0�
+ENTSEQNO�16�(eq)�0�FUNCSTS
```
should really be `(seq)` if anything
```diff
@@ -1,4 +1,5 @@
 # format=tagmanager
-ENTSEQNO�16�(seq)�0�FUNCSTS
-MEMTXT�16�(form_msg)�0�
-MEMTXT�1024�(form_msg)�0�
+ENTSEQNO�16�(eq)�0�FUNCSTS
+MEMTXT�16�(mail)�0�
+MEMTXT�1024�(orm_msg)�0�FUNCSTS
```
here too, should be `(form_msg)`
```diff
@@ -20,5 +20,5 @@ obj
 qar�64�Struct.Union�0�int
 quxx�64�Struct.Union�0�bool
 test.simple�256�0
-tfun�16�(T)�Class�0�auto
+tfun�16�(T v)�Class�0�auto
```
not sure if it's better or worse. The source line is:
```d
auto tfun(T)(T v) { return v; }
```
```diff
@@ -114,6 +150,14 @@ extern const char *tagFileName (void)
 *   Pseudo tag support
 */
+extern void abort_if_ferror(MIO *const mio)
+{
+#ifndef CTAGS_LIB
+	if (mio_error (mio))
+		error (FATAL | PERROR, "cannot write tag file");
+#endif
```
It may be worth still doing *something*? Warn, maybe?
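E.g. something like this, assuming `error()` with `WARNING` still does something sensible when built as a library:

```c
extern void abort_if_ferror(MIO *const mio)
{
	if (mio_error (mio))
#ifndef CTAGS_LIB
		error (FATAL | PERROR, "cannot write tag file");
#else
		error (WARNING | PERROR, "cannot write tag file");
#endif
}
```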
```diff
-	if (includeExtensionFlags ()
+	    && isXtagEnabled (XTAG_QUALIFIED_TAGS)
+	    && doesInputLanguageRequestAutomaticFQTag ())
+		buildFqTagCache (tag);
+
+#ifdef CTAGS_LIB
+	getTagScopeInformation((tagEntryInfo *)tag, NULL, NULL);
+
+	if (TagEntryFunction != NULL)
+	{
+		ctagsTag t;
+
+		initCtagsTag(&t, tag);
+		length = TagEntryFunction(&t, TagEntryUserData);
+	}
+#else
```
Couldn't we simply have our own writer? Might be more work, but maybe less divergence?
```diff
 		return NULL;
+
+	start = strdup (vStringValue (signature));
```
not standard, maybe use `eStrdup()`?
```diff
 extern bool isExcludedFile (const char* const name);
+extern bool isIncludeFile (const char *const fileName);
+/* GEANY DIFF */
+/* extern const ignoredTokenInfo * isIgnoreToken (const char *const name); */
 extern bool isIgnoreToken (const char *const name, bool *const pIgnoreParens, const char **const replacement);
```
why not use the new ctags impl? I mean, it seems to do the same thing, just differently; couldn't we use that?