From c66230582da1f72c18e2ea77b220235939bc29bc Mon Sep 17 00:00:00 2001
From: Anonymous Maarten <[EMAIL REDACTED]>
Date: Tue, 27 Jun 2023 20:12:30 +0200
Subject: [PATCH] cmake: clone manpage scripts from SDL repo
---
CMakeLists.txt | 4 +-
build-scripts/wikiheaders.pl | 1663 ++++++++++++++++++++++++++++++++++
cmake/sdlmanpages.cmake | 61 ++
3 files changed, 1727 insertions(+), 1 deletion(-)
create mode 100755 build-scripts/wikiheaders.pl
create mode 100644 cmake/sdlmanpages.cmake
diff --git a/CMakeLists.txt b/CMakeLists.txt
index f74a334..be2ed45 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -18,6 +18,7 @@ project(SDL3_ttf
)
include(PrivateSdlFunctions)
+include(sdlmanpages)
sdl_calculate_derived_version_variables(${MAJOR_VERSION} ${MINOR_VERSION} ${MICRO_VERSION})
message(STATUS "Configuring ${PROJECT_NAME} ${PROJECT_VERSION}")
@@ -336,9 +337,10 @@ if(SDL3TTF_INSTALL)
COMPONENT library
)
- if(SDL3TTF_INSTALL_MAN AND COMMAND SDL_generate_manpages)
+ if(SDL3TTF_INSTALL_MAN)
SDL_generate_manpages(
SYMBOL "TTF_Init"
+ WIKIHEADERS_PL_PATH "${CMAKE_CURRENT_SOURCE_DIR}/build-scripts/wikiheaders.pl"
)
endif()
endif()
diff --git a/build-scripts/wikiheaders.pl b/build-scripts/wikiheaders.pl
new file mode 100755
index 0000000..bb09faa
--- /dev/null
+++ b/build-scripts/wikiheaders.pl
@@ -0,0 +1,1663 @@
+#!/usr/bin/perl -w
+
+use warnings;
+use strict;
+use File::Path;
+use Text::Wrap;
+
+$Text::Wrap::huge = 'overflow';
+
+my $projectfullname = 'Simple Directmedia Layer';
+my $projectshortname = 'SDL';
+my $wikisubdir = '';
+my $incsubdir = 'include';
+my $readmesubdir = undef;
+my $apiprefixregex = undef;
+my $versionfname = 'include/SDL_version.h';
+my $versionmajorregex = '\A\#define\s+SDL_MAJOR_VERSION\s+(\d+)\Z';
+my $versionminorregex = '\A\#define\s+SDL_MINOR_VERSION\s+(\d+)\Z';
+my $versionpatchregex = '\A\#define\s+SDL_PATCHLEVEL\s+(\d+)\Z';
+my $mainincludefname = 'SDL.h';
+my $selectheaderregex = '\ASDL.*?\.h\Z';
+my $projecturl = 'https://libsdl.org/';
+my $wikiurl = 'https://wiki.libsdl.org';
+my $bugreporturl = 'https://github.com/libsdl-org/sdlwiki/issues/new';
+my $srcpath = undef;
+my $wikipath = undef;
+my $wikireadmesubdir = 'README';
+my $warn_about_missing = 0;
+my $copy_direction = 0;
+my $optionsfname = undef;
+my $wikipreamble = undef;
+my $changeformat = undef;
+my $manpath = undef;
+
+foreach (@ARGV) {
+ $warn_about_missing = 1, next if $_ eq '--warn-about-missing';
+ $copy_direction = 1, next if $_ eq '--copy-to-headers';
+ $copy_direction = 1, next if $_ eq '--copy-to-header';
+ $copy_direction = -1, next if $_ eq '--copy-to-wiki';
+ $copy_direction = -2, next if $_ eq '--copy-to-manpages';
+ if (/\A--options=(.*)\Z/) {
+ $optionsfname = $1;
+ next;
+ } elsif (/\A--changeformat=(.*)\Z/) {
+ $changeformat = $1;
+ next;
+ } elsif (/\A--manpath=(.*)\Z/) {
+ $manpath = $1;
+ next;
+ }
+ $srcpath = $_, next if not defined $srcpath;
+ $wikipath = $_, next if not defined $wikipath;
+}
+
+my $default_optionsfname = '.wikiheaders-options';
+$default_optionsfname = "$srcpath/$default_optionsfname" if defined $srcpath;
+
+if ((not defined $optionsfname) && (-f $default_optionsfname)) {
+ $optionsfname = $default_optionsfname;
+}
+
+if (defined $optionsfname) {
+ open OPTIONS, '<', $optionsfname or die("Failed to open options file '$optionsfname': $!\n");
+ while (<OPTIONS>) {
+ chomp;
+ if (/\A(.*?)\=(.*)\Z/) {
+ my $key = $1;
+ my $val = $2;
+ $key =~ s/\A\s+//;
+ $key =~ s/\s+\Z//;
+ $val =~ s/\A\s+//;
+ $val =~ s/\s+\Z//;
+ $warn_about_missing = int($val), next if $key eq 'warn_about_missing';
+ $srcpath = $val, next if $key eq 'srcpath';
+ $wikipath = $val, next if $key eq 'wikipath';
+ $apiprefixregex = $val, next if $key eq 'apiprefixregex';
+ $projectfullname = $val, next if $key eq 'projectfullname';
+ $projectshortname = $val, next if $key eq 'projectshortname';
+ $wikisubdir = $val, next if $key eq 'wikisubdir';
+ $incsubdir = $val, next if $key eq 'incsubdir';
+ $readmesubdir = $val, next if $key eq 'readmesubdir';
+ $versionmajorregex = $val, next if $key eq 'versionmajorregex';
+ $versionminorregex = $val, next if $key eq 'versionminorregex';
+ $versionpatchregex = $val, next if $key eq 'versionpatchregex';
+ $versionfname = $val, next if $key eq 'versionfname';
+ $mainincludefname = $val, next if $key eq 'mainincludefname';
+ $selectheaderregex = $val, next if $key eq 'selectheaderregex';
+ $projecturl = $val, next if $key eq 'projecturl';
+ $wikiurl = $val, next if $key eq 'wikiurl';
+ $bugreporturl = $val, next if $key eq 'bugreporturl';
+ $wikipreamble = $val, next if $key eq 'wikipreamble';
+ }
+ }
+ close(OPTIONS);
+}
+
+my $wordwrap_mode = 'mediawiki';
+sub wordwrap_atom { # don't call this directly.
+ my $str = shift;
+ my $retval = '';
+
+ # wordwrap but leave links intact, even if they overflow.
+ if ($wordwrap_mode eq 'mediawiki') {
+ while ($str =~ s/(.*?)\s*(\[https?\:\/\/.*?\s+.*?\])\s*//ms) {
+ $retval .= fill('', '', $1); # wrap it.
+ $retval .= "\n$2\n"; # don't wrap it.
+ }
+ } elsif ($wordwrap_mode eq 'md') {
+ while ($str =~ s/(.*?)\s*(\[.*?\]\(https?\:\/\/.*?\))\s*//ms) {
+ $retval .= fill('', '', $1); # wrap it.
+ $retval .= "\n$2\n"; # don't wrap it.
+ }
+ }
+
+ return $retval . fill('', '', $str);
+}
+
+sub wordwrap_with_bullet_indent { # don't call this directly.
+ my $bullet = shift;
+ my $str = shift;
+ my $retval = '';
+
+ #print("WORDWRAP BULLET ('$bullet'):\n\n$str\n\n");
+
+ # You _can't_ (at least with Pandoc) have a bullet item with a newline in
+ # MediaWiki, so _remove_ wrapping!
+ if ($wordwrap_mode eq 'mediawiki') {
+ $retval = "$bullet$str";
+ $retval =~ s/\n/ /gms;
+ $retval =~ s/\s+$//gms;
+ #print("WORDWRAP BULLET DONE:\n\n$retval\n\n");
+ return "$retval\n";
+ }
+
+ my $bulletlen = length($bullet);
+
+ # wrap it and then indent each line to be under the bullet.
+ $Text::Wrap::columns -= $bulletlen;
+ my @wrappedlines = split /\n/, wordwrap_atom($str);
+ $Text::Wrap::columns += $bulletlen;
+
+ my $prefix = $bullet;
+ my $usual_prefix = ' ' x $bulletlen;
+
+ foreach (@wrappedlines) {
+ s/\s*\Z//;
+ $retval .= "$prefix$_\n";
+ $prefix = $usual_prefix;
+ }
+
+ return $retval;
+}
+
+sub wordwrap_one_paragraph { # don't call this directly.
+ my $retval = '';
+ my $p = shift;
+ #print "\n\n\nPARAGRAPH: [$p]\n\n\n";
+ if ($p =~ s/\A([\*\-] )//) { # bullet list, starts with "* " or "- ".
+ my $bullet = $1;
+ my $item = '';
+ my @items = split /\n/, $p;
+ foreach (@items) {
+ if (s/\A([\*\-] )//) {
+ $retval .= wordwrap_with_bullet_indent($bullet, $item);
+ $item = '';
+ }
+ s/\A\s*//;
+ $item .= "$_\n"; # accumulate lines until we hit the end or another bullet.
+ }
+ if ($item ne '') {
+ $retval .= wordwrap_with_bullet_indent($bullet, $item);
+ }
+ } else {
+ $retval = wordwrap_atom($p) . "\n";
+ }
+
+ return $retval;
+}
+
+sub wordwrap_paragraphs { # don't call this directly.
+ my $str = shift;
+ my $retval = '';
+ my @paragraphs = split /\n\n/, $str;
+ foreach (@paragraphs) {
+ next if $_ eq '';
+ $retval .= wordwrap_one_paragraph($_);
+ $retval .= "\n";
+ }
+ return $retval;
+}
+
+my $wordwrap_default_columns = 76;
+sub wordwrap {
+ my $str = shift;
+ my $columns = shift;
+
+ $columns = $wordwrap_default_columns if not defined $columns;
+ $columns += $wordwrap_default_columns if $columns < 0;
+ $Text::Wrap::columns = $columns;
+
+ my $retval = '';
+
+ #print("\n\nWORDWRAP:\n\n$str\n\n\n");
+
+ $str =~ s/\A\n+//ms;
+
+ while ($str =~ s/(.*?)(\`\`\`.*?\`\`\`|\<syntaxhighlight.*?\<\/syntaxhighlight\>)//ms) {
+ #print("\n\nWORDWRAP BLOCK:\n\n$1\n\n ===\n\n$2\n\n\n");
+ $retval .= wordwrap_paragraphs($1); # wrap it.
+ $retval .= "$2\n\n"; # don't wrap it.
+ }
+
+ $retval .= wordwrap_paragraphs($str); # wrap what's left.
+ $retval =~ s/\n+\Z//ms;
+
+ #print("\n\nWORDWRAP DONE:\n\n$retval\n\n\n");
+ return $retval;
+}
+
+# This assumes you're moving from Markdown (in the Doxygen data) to Wiki, which
+# is why the 'md' section is so sparse.
+sub wikify_chunk {
+ my $wikitype = shift;
+ my $str = shift;
+ my $codelang = shift;
+ my $code = shift;
+
+ #print("\n\nWIKIFY CHUNK:\n\n$str\n\n\n");
+
+ if ($wikitype eq 'mediawiki') {
+ # convert `code` things first, so they aren't mistaken for other markdown items.
+ my $codedstr = '';
+ while ($str =~ s/\A(.*?)\`(.*?)\`//ms) {
+ my $codeblock = $2;
+ $codedstr .= wikify_chunk($wikitype, $1, undef, undef);
+ if (defined $apiprefixregex) {
+ # Convert obvious API things to wikilinks, even inside `code` blocks.
+ $codeblock =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[[$1]]/gms;
+ }
+ $codedstr .= "<code>$codeblock</code>";
+ }
+
+ # Convert obvious API things to wikilinks.
+ if (defined $apiprefixregex) {
+ $str =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[[$1]]/gms;
+ }
+
+ # Make some Markdown things into MediaWiki...
+
+ # links
+ $str =~ s/\[(.*?)\]\((https?\:\/\/.*?)\)/\[$2 $1\]/g;
+
+ # bold+italic
+ $str =~ s/\*\*\*(.*?)\*\*\*/'''''$1'''''/gms;
+
+ # bold
+ $str =~ s/\*\*(.*?)\*\*/'''$1'''/gms;
+
+ # italic
+ $str =~ s/\*(.*?)\*/''$1''/gms;
+
+ # bullets
+ $str =~ s/^\- /* /gm;
+
+ $str = $codedstr . $str;
+
+ if (defined $code) {
+ $str .= "<syntaxhighlight lang='$codelang'>$code<\/syntaxhighlight>";
+ }
+ } elsif ($wikitype eq 'md') {
+ # convert `code` things first, so they aren't mistaken for other markdown items.
+ my $codedstr = '';
+ while ($str =~ s/\A(.*?)(\`.*?\`)//ms) {
+ my $codeblock = $2;
+ $codedstr .= wikify_chunk($wikitype, $1, undef, undef);
+ if (defined $apiprefixregex) {
+ # Convert obvious API things to wikilinks, even inside `code` blocks,
+ # BUT ONLY IF the entire code block is the API thing,
+ # So something like "just call `SDL_Whatever`" will become
+ # "just call [`SDL_Whatever`](SDL_Whatever)", but
+ # "just call `SDL_Whatever(7)`" will not. It's just the safest
+ # way to do this without resorting to wrapping things in html <code> tags.
+ $codeblock =~ s/\A\`($apiprefixregex[a-zA-Z0-9_]+)\`\Z/[`$1`]($1)/gms;
+ }
+ $codedstr .= $codeblock;
+ }
+
+ # Convert obvious API things to wikilinks.
+ if (defined $apiprefixregex) {
+ $str =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[$1]($1)/gms;
+ }
+
+ $str = $codedstr . $str;
+
+ if (defined $code) {
+ $str .= "```$codelang$code```";
+ }
+ }
+
+ #print("\n\nWIKIFY CHUNK DONE:\n\n$str\n\n\n");
+
+ return $str;
+}
+
+sub wikify {
+ my $wikitype = shift;
+ my $str = shift;
+ my $retval = '';
+
+ #print("WIKIFY WHOLE:\n\n$str\n\n\n");
+
+ while ($str =~ s/\A(.*?)\`\`\`(c\+\+|c)(.*?)\`\`\`//ms) {
+ $retval .= wikify_chunk($wikitype, $1, $2, $3);
+ }
+ $retval .= wikify_chunk($wikitype, $str, undef, undef);
+
+ #print("WIKIFY WHOLE DONE:\n\n$retval\n\n\n");
+
+ return $retval;
+}
+
+
+my $dewikify_mode = 'md';
+my $dewikify_manpage_code_indent = 1;
+
+sub dewikify_chunk {
+ my $wikitype = shift;
+ my $str = shift;
+ my $codelang = shift;
+ my $code = shift;
+
+ #print("\n\nDEWIKIFY CHUNK:\n\n$str\n\n\n");
+
+ if ($dewikify_mode eq 'md') {
+ if ($wikitype eq 'mediawiki') {
+ # Doxygen supports Markdown (and it just simply looks better than MediaWiki
+ # when looking at the raw headers), so do some conversions here as necessary.
+
+ # Dump obvious wikilinks.
+ if (defined $apiprefixregex) {
+ $str =~ s/\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]/$1/gms;
+ }
+
+ # links
+ $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\[$2\]\($1\)/g;
+
+ # <code></code> is also popular. :/
+ $str =~ s/\<code>(.*?)<\/code>/`$1`/gms;
+
+ # bold+italic
+ $str =~ s/'''''(.*?)'''''/***$1***/gms;
+
+ # bold
+ $str =~ s/'''(.*?)'''/**$1**/gms;
+
+ # italic
+ $str =~ s/''(.*?)''/*$1*/gms;
+
+ # bullets
+ $str =~ s/^\* /- /gm;
+ } elsif ($wikitype eq 'md') {
+ # Dump obvious wikilinks. The rest can just passthrough.
+ if (defined $apiprefixregex) {
+ $str =~ s/\[(\`?$apiprefixregex[a-zA-Z0-9_]+\`?)\]\($apiprefixregex[a-zA-Z0-9_]+\)/$1/gms;
+ }
+ }
+
+ if (defined $code) {
+ $str .= "```$codelang$code```";
+ }
+ } elsif ($dewikify_mode eq 'manpage') {
+ $str =~ s/\./\\[char46]/gms; # make sure these can't become control codes.
+ if ($wikitype eq 'mediawiki') {
+ # Dump obvious wikilinks.
+ if (defined $apiprefixregex) {
+ $str =~ s/\s*\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]\s*/\n.BR $1\n/gms;
+ }
+
+ # links
+ $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\n.URL "$1" "$2"\n/g;
+
+ # <code></code> is also popular. :/
+ $str =~ s/\s*\<code>(.*?)<\/code>\s*/\n.BR $1\n/gms;
+
+ # bold+italic (this looks bad, just make it bold).
+ $str =~ s/\s*'''''(.*?)'''''\s*/\n.B $1\n/gms;
+
+ # bold
+ $str =~ s/\s*'''(.*?)'''\s*/\n.B $1\n/gms;
+
+ # italic
+ $str =~ s/\s*''(.*?)''\s*/\n.I $1\n/gms;
+
+ # bullets
+ $str =~ s/^\* /\n\\\(bu /gm;
+ } elsif ($wikitype eq 'md') {
+ # Dump obvious wikilinks.
+ if (defined $apiprefixregex) {
+ $str =~ s/\[(\`?$apiprefixregex[a-zA-Z0-9_]+\`?)\]\($apiprefixregex[a-zA-Z0-9_]+\)/\n.BR $1\n/gms;
+ }
+
+ # links
+ $str =~ s/\[(.*?)]\((https?\:\/\/.*?)\)/\n.URL "$2" "$1"\n/g;
+
+ # <code></code> is also popular. :/
+ $str =~ s/\s*\`(.*?)\`\s*/\n.BR $1\n/gms;
+
+ # bold+italic (this looks bad, just make it bold).
+ $str =~ s/\s*\*\*\*(.*?)\*\*\*\s*/\n.B $1\n/gms;
+
+ # bold
+ $str =~ s/\s*\*\*(.*?)\*\*\s*/\n.B $1\n/gms;
+
+ # italic
+ $str =~ s/\s*\*(.*?)\*\s*/\n.I $1\n/gms;
+
+ # bullets
+ $str =~ s/^\- /\n\\\(bu /gm;
+
+ } else {
+ die("Unexpected wikitype when converting to manpages\n"); # !!! FIXME: need to handle Markdown wiki pages.
+ }
+
+ if (defined $code) {
+ $code =~ s/\A\n+//gms;
+ $code =~ s/\n+\Z//gms;
+ if ($dewikify_manpage_code_indent) {
+ $str .= "\n.IP\n"
+ } else {
+ $str .= "\n.PP\n"
+ }
+ $str .= ".EX\n$code\n.EE\n.PP\n";
+ }
+ } else {
+ die("Unexpected dewikify_mode\n");
+ }
+
+ #print("\n\nDEWIKIFY CHUNK DONE:\n\n$str\n\n\n");
+
+ return $str;
+}
+
+sub dewikify {
+ my $wikitype = shift;
+ my $str = shift;
+ return '' if not defined $str;
+
+ #print("DEWIKIFY WHOLE:\n\n$str\n\n\n");
+
+ $str =~ s/\A[\s\n]*\= .*? \=\s*?\n+//ms;
+ $str =~ s/\A[\s\n]*\=\= .*? \=\=\s*?\n+//ms;
+
+ my $retval = '';
+ while ($str =~ s/\A(.*?)<syntaxhighlight lang='?(.*?)'?>(.*?)<\/syntaxhighlight\>//ms) {
+ $retval .= dewikify_chunk($wikitype, $1, $2, $3);
+ }
+ $retval .= dewikify_chunk($wikitype, $str, undef, undef);
+
+ #print("DEWIKIFY WHOLE DONE:\n\n$retval\n\n\n");
+
+ return $retval;
+}
+
+sub filecopy {
+ my $src = shift;
+ my $dst = shift;
+ my $endline = shift;
+ $endline = "\n" if not defined $endline;
+
+ open(COPYIN, '<', $src) or die("Failed to open '$src' for reading: $!\n");
+ open(COPYOUT, '>', $dst) or die("Failed to open '$dst' for writing: $!\n");
+ while (<COPYIN>) {
+ chomp;
+ s/[ \t\r\n]*\Z//;
+ print COPYOUT "$_$endline";
+ }
+ close(COPYOUT);
+ close(COPYIN);
+}
+
+sub usage {
+ die("USAGE: $0 <source code git clone path> <wiki git clone path> [--copy-to-headers|--copy-to-wiki|--copy-to-manpages] [--warn-about-missing] [--manpath=<man path>]\n\n");
+}
+
+usage() if not defined $srcpath;
+usage() if not defined $wikipath;
+#usage() if $copy_direction == 0;
+
+if (not defined $manpath) {
+ $manpath = "$srcpath/man";
+}
+
+my @standard_wiki_sections = (
+ 'Draft',
+ '[Brief]',
+ 'Deprecated',
+ 'Syntax',
+ 'Function Parameters',
+ 'Return Value',
+ 'Remarks',
+ 'Thread Safety',
+ 'Version',
+ 'Code Examples',
+ 'Related Functions'
+);
+
+# Sections that only ever exist in the wiki and shouldn't be deleted when
+# not found in the headers.
+my %only_wiki_sections = ( # The values don't mean anything; we just need to check for key existence.
+ 'Draft', 1,
+ 'Code Examples', 1
+);
+
+
+my %headers = (); # $headers{"SDL_audio.h"} -> reference to an array of all lines of text in SDL_audio.h.
+my %headerfuncs = (); # $headerfuncs{"SDL_OpenAudio"} -> string of header documentation for SDL_OpenAudio, with comment '*' bits stripped from the start. Newlines embedded!
+my %headerdecls = ();
+my %headerfuncslocation = (); # $headerfuncslocation{"SDL_OpenAudio"} -> name of header holding SDL_OpenAudio define ("SDL_audio.h" in this case).
+my %headerfuncschunk = (); # $headerfuncschunk{"SDL_OpenAudio"} -> offset in array in %headers that should be replaced for this function.
+my %headerfuncshasdoxygen = (); # $headerfuncshasdoxygen{"SDL_OpenAudio"} -> 1 if there was existing doxygen for this function.
+
+my $incpath = "$srcpath";
+$incpath .= "/$incsubdir" if $incsubdir ne '';
+
+my $wikireadmepath = "$wikipath/$wikireadmesubdir";
+my $readmepath = undef;
+if (defined $readmesubdir) {
+ $readmepath = "$srcpath/$readmesubdir";
+}
+
+opendir(DH, $incpath) or die("Can't opendir '$incpath': $!\n");
+while (my $d = readdir(DH)) {
+ my $dent = $d;
+ next if not $dent =~ /$selectheaderregex/; # just selected headers.
+ open(FH, '<', "$incpath/$dent") or die("Can't open '$incpath/$dent': $!\n");
+
+ my @contents = ();
+
+ while (<FH>) {
+ chomp;
+ my $decl;
+ my @templines;
+ my $str;
+ my $has_doxygen = 1;
+ if (/\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC/) { # a function declaration without a doxygen comment?
+ @templines = ();
+ $decl = $_;
+ $str = '';
+ $has_doxygen = 0;
+ } elsif (not /\A\/\*\*\s*\Z/) { # not doxygen comment start?
+ push @contents, $_;
+ next;
+ } else { # Start of a doxygen comment, parse it out.
+ @templines = ( $_ );
+ while (<FH>) {
+ chomp;
+ push @templines, $_;
+ last if /\A\s*\*\/\Z/;
+ if (s/\A\s*\*\s*\`\`\`/```/) { # this is a hack, but a lot of other code relies on the whitespace being trimmed, but we can't trim it in code blocks...
+ $str .= "$_\n";
+ while (<FH>) {
+ chomp;
+ push @templines, $_;
+ s/\A\s*\*\s?//;
+ if (s/\A\s*\`\`\`/```/) {
+ $str .= "$_\n";
+ last;
+ } else {
+ $str .= "$_\n";
+ }
+ }
+ } else {
+ s/\A\s*\*\s*//;
+ $str .= "$_\n";
+ }
+ }
+
+ $decl = <FH>;
+ $decl = '' if not defined $decl;
+ chomp($decl);
+ if (not $decl =~ /\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC/) {
+ #print "Found doxygen but no function sig:\n$str\n\n";
+ foreach (@templines) {
+ push @contents, $_;
+ }
+ push @contents, $decl;
+ next;
+ }
+ }
+
+ my @decllines = ( $decl );
+
+ if (not $decl =~ /\)\s*;/) {
+ while (<FH>) {
+ chomp;
+ push @decllines, $_;
+ s/\A\s+//;
+ s/\s+\Z//;
+ $decl .= " $_";
+ last if /\)\s*;/;
+ }
+ }
+
+ $decl =~ s/\s+\);\Z/);/;
+ $decl =~ s/\s+\Z//;
+ #print("DECL: [$decl]\n");
+
+ my $fn = '';
+ if ($decl =~ /\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(const\s+|)(unsigned\s+|)(.*?)\s*(\*?)\s*SDLCALL\s+(.*?)\s*\((.*?)\);/) {
+ $fn = $6;
+ #$decl =~ s/\A\s*extern\s+DECLSPEC\s+(.*?)\s+SDLCALL/$1/;
+ } else {
+ #print "Found doxygen but no function sig:\n$str\n\n";
+ foreach (@templines) {
+ push @contents, $_;
+ }
+ foreach (@decllines) {
+ push @contents, $_;
+ }
+ next;
+ }
+
+ $decl = ''; # build this with the line breaks, since it looks better for syntax highlighting.
+ foreach (@decllines) {
+ if ($decl eq '') {
+ $decl = $_;
+ $decl =~ s/\Aextern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(.*?)\s+(\*?)SDLCALL\s+/$2$3 /;
+ } else {
+ my $trimmed = $_;
+ # !!! FIXME: trim space for SDL_DEPRECATED if it was used, too.
+ $trimmed =~ s/\A\s{24}//; # 24 for shrinking to match the removed "extern DECLSPEC SDLCALL "
+ $decl .= $trimmed;
+ }
+ $decl .= "\n";
+ }
+
+ #print("$fn:\n$str\n\n");
+
+ # There might be multiple declarations of a function due to #ifdefs,
+ # and only one of them will have documentation. If we hit an
+ # undocumented one before, delete the placeholder line we left for
+ # it so it doesn't accumulate a new blank line on each run.
+ my $skipfn = 0;
+ if (defined $headerfuncshasdoxygen{$fn}) {
+ if ($headerfuncshasdoxygen{$fn} == 0) { # An undocumented declaration already exists, nuke its placeholder line.
+ delete $contents[$headerfuncschunk{$fn}]; # delete DOES NOT RENUMBER existing elements!
+ } else { # documented function already existed?
+ $skipfn = 1; # don't add this copy to the list of functions.
+ if ($has_doxygen) {
+ print STDERR "WARNING: Function '$fn' appears to be documented in multiple locations. Only keeping the first one we saw!\n";
+ }
+ push @contents, join("\n", @decllines); # just put the existing declaration in as-is.
+ }
+ }
+
+ if (!$skipfn) {
+ $headerfuncs{$fn} = $str;
+ $headerdecls{$fn} = $decl;
+ $headerfuncslocation{$fn} = $dent;
+ $headerfuncschunk{$fn} = scalar(@contents);
+ $headerfuncshasdoxygen{$fn} = $has_doxygen;
+ push @contents, join("\n", @templines);
+ push @contents, join("\n", @decllines);
+ }
+
+ }
+ close(FH);
+
+ $headers{$dent} = \@contents;
+}
+closedir(DH);
+
+
+# !!! FIXME: we need to parse enums and typedefs and structs and defines and and and and and...
+# !!! FIXME: (but functions are good enough for now.)
+
+my %wikitypes = (); # contains string of wiki page extension, like $wikitypes{"SDL_OpenAudio"} == 'mediawiki'
+my %wikifuncs = (); # contains references to hash of strings, each string being the full contents of a section of a wiki page, like $wikifuncs{"SDL_OpenAudio"}{"Remarks"}.
+my %wikisectionorder = (); # contains references to array, each array item being a key to a wikipage section in the correct order, like $wikisectionorder{"SDL_OpenAudio"}[2] == 'Remarks'
+opendir(DH, $wikipath) or die("Can't opendir '$wikipath': $!\n");
+while (my $d = readdir(DH)) {
+ my $dent = $d;
+ my $type = '';
+ if ($dent =~ /\.(md|mediawiki)\Z/) {
+ $type = $1;
+ } else {
+ next; # only dealing with wiki pages.
+ }
+
+ my $fn = $dent;
+ $fn =~ s/\..*\Z//;
+
+ # Ignore FrontPage.
+ next if $fn eq 'FrontPage';
+
+ # Ignore "Category*" pages.
+ next if ($fn =~ /\ACategory/);
+
+ open(FH, '<', "$wikipath/$dent") or die("Can't open '$wikipath/$dent': $!\n");
+
+ my $current_section = '[start]';
+ my @section_order = ( $current_section );
+ my %sections = ();
+ $sections{$current_section} = '';
+
+ my $firstline = 1;
+
+ while (<FH>) {
+ chomp;
+ my $orig = $_;
+ s/\A\s*//;
+ s/\s*\Z//;
+
+ if ($type eq 'mediawiki') {
+ if (defined($wikipreamble) && $firstline && /\A\=\=\=\=\=\= (.*?) \=\=\=\=\=\=\Z/ && ($1 eq $wikipreamble)) {
+ $firstline = 0; # skip this.
+ next;
+ } elsif (/\A\= (.*?) \=\Z/) {
+ $firstline = 0;
+ $current_section = ($1 eq $fn) ? '[Brief]' : $1;
+ die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
+ push @section_order, $current_section;
+ $sections{$current_section} = '';
+ } elsif (/\A\=\= (.*?) \=\=\Z/) {
+ $firstline = 0;
+ $current_section = ($1 eq $fn) ? '[Brief]' : $1;
+ die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
+ push @section_order, $current_section;
+ $sections{$current_section} = '';
+ next;
+ } elsif (/\A\-\-\-\-\Z/) {
+ $firstline = 0;
+ $current_section = '[footer]';
+ die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
+ push @section_order, $current_section;
+ $sections{$current_section} = '';
+ next;
+ }
+ } elsif ($type eq 'md') {
+ if (defined($wikipreamble) && $firstline && /\A\#\#\#\#\#\# (.*?)\Z/ && ($1 eq $wikipreamble)) {
+ $firstline = 0; # skip this.
+ next;
+ } elsif (/\A\#+ (.*?)\Z/) {
+ $firstline = 0;
+ $current_section = ($1 eq $fn) ? '[Brief]' : $1;
+ die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
+ push @section_order, $current_section;
+ $sections{$current_section} = '';
+ next;
+ } elsif (/\A\-\-\-\-\Z/) {
+ $firstline = 0;
+ $current_section = '[footer]';
+ die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
+ push @section_order, $current_section;
+ $sections{$current_section} = '';
+ next;
+ }
+ } else {
+ die("Unexpected wiki file type. Fixme!\n");
+ }
+
+ if ($firstline) {
+ $firstline = ($_ ne '');
+ }
+ if (!$firstline) {
+ $sections{$current_section} .= "$orig\n";
+ }
+ }
+ close(FH);
+
+ foreach (keys %sections) {
+ $sections{$_} =~ s/\A\n+//;
+ $sections{$_} =~ s/\n+\Z//;
+ $sections{$_} .= "\n";
+ }
+
+ if (0) {
+ foreach (@section_order) {
+ print("$fn SECTION '$_':\n");
+ print($sections{$_});
+ print("\n\n");
+ }
+ }
+
+ $wikitypes{$fn} = $type;
+ $wikifuncs{$fn} = \%sections;
+ $wikisectionorder{$fn} = \@section_order;
+}
+closedir(DH);
+
+
+if ($warn_about_missing) {
+ foreach (keys %wikifuncs) {
+ my $fn = $_;
+ if (not defined $headerfuncs{$fn}) {
+ print("WARNING: $fn defined in the wiki but not the headers!\n");
+ }
+ }
+
+ foreach (keys %headerfuncs) {
+ my $fn = $_;
+ if (not defined $wikifuncs{$fn}) {
+ print("WARNING: $fn defined in the headers but not the wiki!\n");
+ }
+ }
+}
+
+if ($copy_direction == 1) { # --copy-to-headers
+ my %changed_headers = ();
+
+ $dewikify_mode = 'md';
+ $wordwrap_mode = 'md'; # the headers use Markdown format.
+
+ foreach (keys %headerfuncs) {
+ my $fn = $_;
+ next if not defined $wikifuncs{$fn}; # don't have a page for that function, skip it.
+ my $wikitype = $wikitypes{$fn};
+ my $sectionsref = $wikifuncs{$fn};
+ my $remarks = $sectionsref->{'Remarks'};
+ my $params = $sectionsref->{'Function Parameters'};
+ my $returns = $sectionsref->{'Return Value'};
+ my $threadsafety = $sectionsref->{'Thread Safety'};
+ my $version = $sectionsref->{'Version'};
+ my $related = $sectionsref->{'Related Functions'};
+ my $deprecated = $sectionsref->{'Deprecated'};
+ my $brief = $sectionsref->{'[Brief]'};
+ my $addblank = 0;
+ my $str = '';
+
+ $headerfuncshasdoxygen{$fn} = 1; # Added/changed doxygen for this header.
+
+ $brief = dewikify($wikitype, $brief);
+ $brief =~ s/\A(.*?\.) /$1\n/; # \brief should only be one sentence, delimited by a period+space. Split if necessary.
+ my @briefsplit = split /\n/, $brief;
+ $brief = shift @briefsplit;
+
+ if (defined $remarks) {
+ $remarks = join("\n", @briefsplit) . dewikify($wikitype, $remarks);
+ }
+
+ if (defined $brief) {
+ $str .= "\n" if $addblank; $addblank = 1;
+ $str .= wordwrap($brief) . "\n";
+ }
+
+ if (defined $remarks) {
+ $str .= "\n" if $addblank; $addblank = 1;
+ $str .= wordwrap($remarks) . "\n";
+ }
+
+ if (defined $deprecated) {
+ # !!! FIXME: lots of code duplication in all of these.
+ $str .= "\n" if $addblank; $addblank = 1;
+ my $v = dewikify($wikitype, $deprecated);
+ my $whitespacelen = length("\\deprecated") + 1;
+ my $whitespace = ' ' x $whitespacelen;
+ $v = wordwrap($v, -$whitespacelen);
+ my @desclines = split /\n/, $v;
+ my $firstline = shift @desclines;
+ $str .= "\\deprecated $firstline\n";
+ foreach (@desclines) {
+ $str .= "${whitespace}$_\n";
+ }
+ }
+
+ if (defined $params) {
+ $str .= "\n" if $addblank; $addblank = (defined $returns) ? 0 : 1;
+
(Patch may be truncated, please check the link at the top of this post.)