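/*
 * A small dependency generator for C/C++ sources: it scans the given files for
 * #include directives, tracks enough of the preprocessor (-D/-I options,
 * #define/#undef, #if/#ifdef/#ifndef/#elif/#else/#endif) to skip includes that
 * are compiled out, and writes the resulting "object: header" rules into a
 * Makefile below a delimiter line.
 */
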
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <limits.h>
#include <unistd.h>
#include <map>
#include <set>
#include <stack>

#ifndef PATH_MAX
# define PATH_MAX 260
#endif

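/** Strict-weak-ordering comparator so plain C strings can be used as std::set/std::map keys. */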
struct StringCompare {
	/** Return whether 'a' sorts before 'b'. */
	bool operator () (const char *a, const char *b) const
	{
		return strcmp(a, b) < 0;
	}
};
/** Set of C strings ordered with StringCompare. */
typedef std::set<const char*, StringCompare> StringSet;
/** Map from a C string to a set of strings. */
typedef std::map<const char*, StringSet*, StringCompare> StringMap;
/** Pair type used when inserting into a StringMap. */
typedef std::pair<const char*, StringSet*> StringMapItem;

static StringSet _include_dirs; ///< Directories given with -I, searched when resolving includes.
static StringMap _files;        ///< Dependencies found per object file.
static StringMap _headers;      ///< Dependencies found per header file.
static StringSet _defines;      ///< Macros predefined with -D on the command line.

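/**
 * Read-only wrapper around a source file: opens the file on construction
 * (exiting on failure), hands out one character at a time and remembers
 * the directory part of the filename for resolving local includes.
 */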
class File {
public:
	/** Open the given file for reading; print an error and exit if that fails. */
	File(const char *filename) : filename(filename)
	{
		this->fp = fopen(filename, "r");
		if (this->fp == NULL) {
			fprintf(stderr, "Could not open %s for reading\n", filename);
			exit(1);
		}
		this->dirname = strdup(filename);
		char *last = strrchr(this->dirname, '/');
		if (last != NULL) {
			*last = '\0';
		} else {
			*this->dirname = '\0';
		}
	}

	~File()
	{
		fclose(this->fp);
		free(this->dirname);
	}

	/** Read the next character, or '\0' at end of file. */
	char GetChar() const
	{
		int c = fgetc(this->fp);
		return (c == EOF) ? '\0' : c;
	}

	/** Get the directory part of the filename ("" if it has none). */
	const char *GetDirname() const
	{
		return this->dirname;
	}

private:
	FILE *fp;             ///< Handle of the opened file.
	char *dirname;        ///< Directory part of the filename.
	const char *filename; ///< Name of the opened file.
};

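/** Tokens the lexer can produce. */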
enum Token {
	TOKEN_UNKNOWN,    ///< Unrecognised token.
	TOKEN_END,        ///< End of the file.
	TOKEN_EOL,        ///< End of a line.
	TOKEN_SHARP,      ///< '#' starting a preprocessor directive.
	TOKEN_LOCAL,      ///< "filename" in an include.
	TOKEN_GLOBAL,     ///< <filename> in an include.
	TOKEN_IDENTIFIER, ///< Identifier.
	TOKEN_DEFINE,     ///< (#)define.
	TOKEN_IF,         ///< (#)if.
	TOKEN_IFDEF,      ///< (#)ifdef.
	TOKEN_IFNDEF,     ///< (#)ifndef.
	TOKEN_ELIF,       ///< (#)elif.
	TOKEN_ELSE,       ///< (#)else.
	TOKEN_ENDIF,      ///< (#)endif.
	TOKEN_UNDEF,      ///< (#)undef.
	TOKEN_OR,         ///< '||'.
	TOKEN_AND,        ///< '&&'.
	TOKEN_DEFINED,    ///< 'defined'.
	TOKEN_OPEN,       ///< '('.
	TOKEN_CLOSE,      ///< ')'.
	TOKEN_NOT,        ///< '!'.
	TOKEN_ZERO,       ///< A numeric literal whose digits are all zero.
	TOKEN_INCLUDE,    ///< (#)include.
};

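/** Mapping of keyword strings to their tokens. */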
typedef std::map<const char*, Token, StringCompare> KeywordList;

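/**
 * Lexer of a file: tokenises just enough of the preprocessor language
 * (directives, include names, identifiers, '&&'/'||'/'!', parentheses,
 * comments and numeric literals) to drive the dependency scanner.
 *
 * Typical driving loop (illustrative sketch only):
 *
 *   File file("example.cpp");
 *   Lexer lexer(&file);
 *   lexer.Lex();
 *   while (lexer.GetToken() != TOKEN_END) {
 *       // inspect lexer.GetToken() / lexer.GetString(), then ...
 *       lexer.Lex();
 *   }
 */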
class Lexer {
public:
	/** Create the lexer for the given file, read its first character and set up the keyword table. */
	Lexer(const File *file) : file(file), current_char('\0'), string(NULL), token(TOKEN_UNKNOWN)
	{
		this->keywords["define"] = TOKEN_DEFINE;
		this->keywords["defined"] = TOKEN_DEFINED;
		this->keywords["if"] = TOKEN_IF;
		this->keywords["ifdef"] = TOKEN_IFDEF;
		this->keywords["ifndef"] = TOKEN_IFNDEF;
		this->keywords["include"] = TOKEN_INCLUDE;
		this->keywords["elif"] = TOKEN_ELIF;
		this->keywords["else"] = TOKEN_ELSE;
		this->keywords["endif"] = TOKEN_ENDIF;
		this->keywords["undef"] = TOKEN_UNDEF;

		/* Prime the lexer with the first character of the file. */
		this->Next();

		/* Allocate the working buffer for identifiers and strings; it grows on demand. */
		this->buf_len = 32;
		this->buf = (char*)malloc(sizeof(*this->buf) * this->buf_len);
	}

	~Lexer()
	{
		free(this->string);
		free(this->buf);
	}

	/** Read the next character into 'current_char'. */
	void Next()
	{
		this->current_char = this->file->GetChar();
	}

	/** Get the last token that was lexed. */
	Token GetToken() const
	{
		return this->token;
	}

	/** Get the string of the last identifier/include token, or NULL for other tokens. */
	const char *GetString() const
	{
		return this->string;
	}

	/** Lex the next token and store it so GetToken() and GetString() can return it. */
	void Lex()
	{
		for (;;) {
			free(this->string);
			this->string = NULL;
			this->token = TOKEN_UNKNOWN;

			switch (this->current_char) {
				/* '\0' means end of file. */
				case '\0': this->token = TOKEN_END; return;

				/* Skip whitespace (but not newlines). */
				case '\t': this->Next(); break;
				case '\r': this->Next(); break;
				case ' ': this->Next(); break;

				/* Line continuation: swallow the backslash and the following newline. */
				case '\\':
					this->Next();
					if (this->current_char == '\n') this->Next();
					break;

				case '\n':
					this->token = TOKEN_EOL;
					this->Next();
					return;

				case '#':
					this->token = TOKEN_SHARP;
					this->Next();
					return;

				case '"':
					this->ReadString('"', TOKEN_LOCAL);
					this->Next();
					return;

				case '<':
					this->ReadString('>', TOKEN_GLOBAL);
					this->Next();
					return;

				case '&':
					this->Next();
					if (this->current_char == '&') {
						this->Next();
						this->token = TOKEN_AND;
						return;
					}
					break;

				case '|':
					this->Next();
					if (this->current_char == '|') {
						this->Next();
						this->token = TOKEN_OR;
						return;
					}
					break;

				case '(':
					this->Next();
					this->token = TOKEN_OPEN;
					return;

				case ')':
					this->Next();
					this->token = TOKEN_CLOSE;
					return;

				case '!':
					this->Next();
					if (this->current_char != '=') {
						this->token = TOKEN_NOT;
						return;
					}
					break;

				/* Possible start of a comment. */
				case '/':
					this->Next();
					switch (this->current_char) {
						case '*': {
							/* Block comment: skip everything until it is closed. */
							this->Next();
							char previous_char = '\0';
							while ((this->current_char != '/' || previous_char != '*') && this->current_char != '\0') {
								previous_char = this->current_char;
								this->Next();
							}
							this->Next();
							break;
						}
						/* Line comment: skip the rest of the line. */
						case '/': while (this->current_char != '\n' && this->current_char != '\0') this->Next(); break;
						default: break;
					}
					break;

				default:
					if (isalpha(this->current_char) || this->current_char == '_') {
						/* Identifier or keyword. */
						this->ReadIdentifier();
						return;
					}
					if (isdigit(this->current_char)) {
						/* Numeric literal; we only care whether it is zero. */
						bool zero = this->current_char == '0';
						this->Next();
						if (this->current_char == 'x' || this->current_char == 'X') this->Next();
						while (isdigit(this->current_char) || this->current_char == '.' || (this->current_char >= 'a' && this->current_char <= 'f') || (this->current_char >= 'A' && this->current_char <= 'F')) {
							zero &= this->current_char == '0';
							this->Next();
						}
						if (zero) this->token = TOKEN_ZERO;
						return;
					}
					this->Next();
					break;
			}
		}
	}

private:
	/** Find whether a string is a keyword; return its token, or TOKEN_IDENTIFIER if it is not. */
	Token FindKeyword(const char *name) const
	{
		KeywordList::const_iterator it = this->keywords.find(name);
		if (it == this->keywords.end()) return TOKEN_IDENTIFIER;
		return (*it).second;
	}

	/** Read an identifier into 'string' and classify it via FindKeyword(). */
	void ReadIdentifier()
	{
		size_t count = 0;

		/* Read the rest of the identifier, growing the buffer as needed. */
		do {
			this->buf[count++] = this->current_char;
			this->Next();

			if (count >= this->buf_len) {
				this->buf_len *= 2;
				this->buf = (char *)realloc(this->buf, sizeof(*this->buf) * this->buf_len);
			}
		} while ((isalpha(this->current_char) || this->current_char == '_' || isdigit(this->current_char)));
		this->buf[count] = '\0';

		free(this->string);
		this->string = strdup(this->buf);
		this->token = FindKeyword(this->string);
	}

	/**
	 * Read a string up to the given ending character (e.g. '"' or '>') into
	 * 'string' and set 'token' to the given token type.
	 */
	void ReadString(char end, Token token)
	{
		size_t count = 0;
		this->Next();
		while (this->current_char != end && this->current_char != ')' && this->current_char != '\n' && this->current_char != '\0') {
			this->buf[count++] = this->current_char;
			this->Next();

			if (count >= this->buf_len) {
				this->buf_len *= 2;
				this->buf = (char *)realloc(this->buf, sizeof(*this->buf) * this->buf_len);
			}
		}
		this->buf[count] = '\0';
		free(this->string);
		this->string = strdup(this->buf);
		this->token = token;
	}

	const File *file;     ///< The file being lexed.
	char current_char;    ///< The current character of the file.
	char *string;         ///< String of the last identifier/include token.
	Token token;          ///< The last token that was lexed.
	char *buf;            ///< Working buffer for identifiers and strings.
	size_t buf_len;       ///< Length of the working buffer.
	KeywordList keywords; ///< Tokens of all keywords we know of.
};

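/**
 * Resolve an include name to an existing, readable file path.
 * For local ("file") includes the name is first tried as-is and then relative
 * to the including file's directory; after that (and for <file> includes) every
 * -I include directory is tried in turn. Leading "./" and "../" components of
 * the name are folded into the base directory, e.g. base "src/video" with
 * name "../window.h" yields the candidate "src/window.h".
 * @return a malloc'ed path the caller must free, or NULL when nothing was found.
 */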
const char *GeneratePath(const char *dirname, const char *filename, bool local)
{
	if (local) {
		if (access(filename, R_OK) == 0) return strdup(filename);

		char path[PATH_MAX];
		strcpy(path, dirname);
		const char *p = filename;

		/* Fold leading "./" and "../" components into the base directory. */
		while (*p == '.') {
			if (*(++p) == '.') {
				char *s = strrchr(path, '/');
				if (s != NULL) *s = '\0';
				p += 2;
			}
		}
		strcat(path, "/");
		strcat(path, p);

		if (access(path, R_OK) == 0) return strdup(path);
	}

	for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) {
		char path[PATH_MAX];
		strcpy(path, *it);
		const char *p = filename;

		/* Fold leading "./" and "../" components into the include directory. */
		while (*p == '.') {
			if (*(++p) == '.') {
				char *s = strrchr(path, '/');
				if (s != NULL) *s = '\0';
				p += 2;
			}
		}
		strcat(path, "/");
		strcat(path, p);

		if (access(path, R_OK) == 0) return strdup(path);
	}

	return NULL;
}

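/*
 * The functions below form a small recursive-descent evaluator for the
 * expressions behind #if/#elif, based only on which macros are known to be
 * defined: ExpressionOr handles '||', ExpressionAnd handles '&&',
 * ExpressionDefined handles 'defined' tests and ExpressionNot handles '!',
 * parentheses, the literal 0 and bare identifiers (assumed true).
 * For example "#if defined(FOO) && !defined(BAR)" is treated as true exactly
 * when FOO is in the define set and BAR is not.
 */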
bool ExpressionDefined(Lexer *lexer, StringSet *defines, bool verbose);

bool ExpressionOr(Lexer *lexer, StringSet *defines, bool verbose);

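/**
 * Lowest level of the expression evaluator: handles '!', parenthesised
 * sub-expressions, the literal 0, and bare identifiers (which are assumed
 * to be true since their values are not tracked).
 */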
bool ExpressionNot(Lexer *lexer, StringSet *defines, bool verbose)
{
	if (lexer->GetToken() == TOKEN_NOT) {
		if (verbose) fprintf(stderr, "!");
		lexer->Lex();
		bool value = !ExpressionDefined(lexer, defines, verbose);
		if (verbose) fprintf(stderr, "[%d]", value);
		return value;
	}

	if (lexer->GetToken() == TOKEN_OPEN) {
		if (verbose) fprintf(stderr, "(");
		lexer->Lex();
		bool value = ExpressionOr(lexer, defines, verbose);
		if (verbose) fprintf(stderr, ")[%d]", value);
		lexer->Lex();
		return value;
	}

	if (lexer->GetToken() == TOKEN_ZERO) {
		if (verbose) fprintf(stderr, "0");
		lexer->Lex();
		if (verbose) fprintf(stderr, "[0]");
		return false;
	}

	bool first = true;
	while (lexer->GetToken() == TOKEN_UNKNOWN || lexer->GetToken() == TOKEN_IDENTIFIER) {
		if (verbose && first) fprintf(stderr, "<assumed true>");
		first = false;
		lexer->Lex();
	}

	return true;
}

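/** Handle a 'defined(IDENTIFIER)' or 'defined IDENTIFIER' test, after first delegating to ExpressionNot. */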
bool ExpressionDefined(Lexer *lexer, StringSet *defines, bool verbose)
{
	bool value = ExpressionNot(lexer, defines, verbose);

	if (lexer->GetToken() != TOKEN_DEFINED) return value;
	lexer->Lex();
	if (verbose) fprintf(stderr, "defined");
	bool open = (lexer->GetToken() == TOKEN_OPEN);
	if (open) lexer->Lex();
	if (verbose) fprintf(stderr, open ? "(" : " ");
	if (lexer->GetToken() == TOKEN_IDENTIFIER) {
		if (verbose) fprintf(stderr, "%s", lexer->GetString());
		value = defines->find(lexer->GetString()) != defines->end();
	}
	if (open) {
		if (verbose) fprintf(stderr, ")");
		lexer->Lex();
	}
	lexer->Lex();
	if (verbose) fprintf(stderr, "[%d]", value);
	return value;
}

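/** Handle a chain of '&&' operators. */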
bool ExpressionAnd(Lexer *lexer, StringSet *defines, bool verbose)
{
	bool value = ExpressionDefined(lexer, defines, verbose);

	while (true) {
		if (lexer->GetToken() != TOKEN_AND) return value;
		if (verbose) fprintf(stderr, " && ");
		lexer->Lex();
		value = value && ExpressionDefined(lexer, defines, verbose);
	}
}

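/** Handle a chain of '||' operators; this is the entry point for evaluating a full #if/#elif expression. */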
bool ExpressionOr(Lexer *lexer, StringSet *defines, bool verbose)
{
	bool value = ExpressionAnd(lexer, defines, verbose);

	while (true) {
		if (lexer->GetToken() != TOKEN_OR) return value;
		if (verbose) fprintf(stderr, " || ");
		lexer->Lex();
		value = value || ExpressionAnd(lexer, defines, verbose);
	}
}

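/** How (whether) to ignore the current block of code while scanning. */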
enum Ignore {
	NOT_IGNORE,         ///< The current block is active.
	IGNORE_UNTIL_ELSE,  ///< The condition was false: ignore until a matching #else/#elif.
	IGNORE_UNTIL_ENDIF, ///< Ignore everything until the matching #endif.
};

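/**
 * Scan a file for #include directives, honouring the preprocessor conditionals
 * it can evaluate, and record the found dependencies: for headers under the
 * header's own entry in _headers, for sources under the derived object file
 * name in _files.
 * @param filename the file to scan
 * @param ext      extension (including the dot) for object files; ".o" when NULL
 * @param header   whether the file is a header pulled in by another file
 * @param verbose  whether to print what is being scanned to stderr
 */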
void ScanFile(const char *filename, const char *ext, bool header, bool verbose)
{
	static StringSet defines;         ///< Macros known to be defined at this point of the scan.
	static std::stack<Ignore> ignore; ///< Current nesting of conditional blocks.

	/* For top-level files, seed the define set with the -D macros from the command line. */
	if (!header) {
		for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) {
			defines.insert(strdup(*it));
		}
	}

	File file(filename);
	Lexer lexer(&file);

	/* Read the first token. */
	lexer.Lex();

	while (lexer.GetToken() != TOKEN_END) {
		switch (lexer.GetToken()) {
			/* We reached the end of the file. */
			case TOKEN_END: break;

			/* Handle the preprocessor directives. */
			case TOKEN_SHARP:
				lexer.Lex();
				switch (lexer.GetToken()) {
					case TOKEN_INCLUDE:
						if (verbose) fprintf(stderr, "%s #include ", filename);
						lexer.Lex();
						switch (lexer.GetToken()) {
							case TOKEN_LOCAL:
							case TOKEN_GLOBAL: {
								if (verbose) fprintf(stderr, "%s", lexer.GetString());
								if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
									if (verbose) fprintf(stderr, " (ignored)");
									break;
								}
								const char *h = GeneratePath(file.GetDirname(), lexer.GetString(), lexer.GetToken() == TOKEN_LOCAL);
								if (h != NULL) {
									StringMap::iterator it = _headers.find(h);
									if (it == _headers.end()) {
										/* First time we see this header: record it and scan it recursively. */
										it = (_headers.insert(StringMapItem(strdup(h), new StringSet()))).first;
										if (verbose) fprintf(stderr, "\n");
										ScanFile(h, ext, true, verbose);
									}
									StringMap::iterator curfile;
									if (header) {
										curfile = _headers.find(filename);
									} else {
										/* Replace the extension of the source file with the object extension. */
										char path[PATH_MAX];
										strcpy(path, filename);
										char *dot = strrchr(path, '.');
										if (dot != NULL) *dot = '\0';
										strcat(path, ext != NULL ? ext : ".o");
										curfile = _files.find(path);
										if (curfile == _files.end()) {
											curfile = (_files.insert(StringMapItem(strdup(path), new StringSet()))).first;
										}
									}
									/* Add the header's own dependencies, then the header itself. */
									if (it != _headers.end()) {
										for (StringSet::iterator header = it->second->begin(); header != it->second->end(); header++) {
											if (curfile->second->find(*header) == curfile->second->end()) curfile->second->insert(strdup(*header));
										}
									}
									if (curfile->second->find(h) == curfile->second->end()) curfile->second->insert(strdup(h));
									free((void*)h);
								}
								break;
							}
							default: break;
						}
						break;

					case TOKEN_DEFINE:
						if (verbose) fprintf(stderr, "%s #define ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							if (verbose) fprintf(stderr, "%s", lexer.GetString());
							if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
								if (verbose) fprintf(stderr, " (ignored)");
								break;
							}
							if (defines.find(lexer.GetString()) == defines.end()) defines.insert(strdup(lexer.GetString()));
							lexer.Lex();
						}
						break;

					case TOKEN_UNDEF:
						if (verbose) fprintf(stderr, "%s #undef ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							if (verbose) fprintf(stderr, "%s", lexer.GetString());
							if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
								if (verbose) fprintf(stderr, " (ignored)");
								break;
							}
							StringSet::iterator it = defines.find(lexer.GetString());
							if (it != defines.end()) {
								free((void*)*it);
								defines.erase(it);
							}
							lexer.Lex();
						}
						break;

					case TOKEN_ENDIF:
						if (verbose) fprintf(stderr, "%s #endif", filename);
						lexer.Lex();
						if (!ignore.empty()) ignore.pop();
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;

					case TOKEN_ELSE: {
						if (verbose) fprintf(stderr, "%s #else", filename);
						lexer.Lex();
						Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top();
						if (!ignore.empty()) ignore.pop();
						if (ignore.empty() || ignore.top() == NOT_IGNORE) {
							ignore.push(last == IGNORE_UNTIL_ELSE ? NOT_IGNORE : IGNORE_UNTIL_ENDIF);
						} else {
							ignore.push(IGNORE_UNTIL_ENDIF);
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;
					}

					case TOKEN_ELIF: {
						if (verbose) fprintf(stderr, "%s #elif ", filename);
						lexer.Lex();
						Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top();
						if (!ignore.empty()) ignore.pop();
						if (ignore.empty() || ignore.top() == NOT_IGNORE) {
							bool value = ExpressionOr(&lexer, &defines, verbose);
							ignore.push(last == IGNORE_UNTIL_ELSE ? (value ? NOT_IGNORE : IGNORE_UNTIL_ELSE) : IGNORE_UNTIL_ENDIF);
						} else {
							ignore.push(IGNORE_UNTIL_ENDIF);
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;
					}

					case TOKEN_IF: {
						if (verbose) fprintf(stderr, "%s #if ", filename);
						lexer.Lex();
						if (ignore.empty() || ignore.top() == NOT_IGNORE) {
							bool value = ExpressionOr(&lexer, &defines, verbose);
							ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
						} else {
							ignore.push(IGNORE_UNTIL_ENDIF);
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;
					}

					case TOKEN_IFDEF:
						if (verbose) fprintf(stderr, "%s #ifdef ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							bool value = defines.find(lexer.GetString()) != defines.end();
							if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value);
							if (ignore.empty() || ignore.top() == NOT_IGNORE) {
								ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
							} else {
								ignore.push(IGNORE_UNTIL_ENDIF);
							}
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;

					case TOKEN_IFNDEF:
						if (verbose) fprintf(stderr, "%s #ifndef ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							bool value = defines.find(lexer.GetString()) != defines.end();
							if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value);
							if (ignore.empty() || ignore.top() == NOT_IGNORE) {
								ignore.push(!value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
							} else {
								ignore.push(IGNORE_UNTIL_ENDIF);
							}
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;

					default:
						if (verbose) fprintf(stderr, "%s #<unknown>", filename);
						lexer.Lex();
						break;
				}
				if (verbose) fprintf(stderr, "\n");
				/* FALL THROUGH: the outer default skips the rest of the line. */

			default:
				/* Ignore the rest of the line. */
				while (lexer.GetToken() != TOKEN_EOL && lexer.GetToken() != TOKEN_END) lexer.Lex();
				lexer.Lex();
				break;
		}
	}

	/* For top-level files, reset the accumulated defines and the conditional stack. */
	if (!header) {
		for (StringSet::iterator it = defines.begin(); it != defines.end(); it++) {
			free((void*)*it);
		}
		defines.clear();
		while (!ignore.empty()) ignore.pop();
	}
}

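/**
 * Entry point. Arguments that do not start with '-' are scanned as source files;
 * the recognised options are:
 *   -a        keep the existing rules after the delimiter and append (default: replace them)
 *   -I<dir>   add an include directory to search (also as "-I <dir>")
 *   -D<name>  predefine a macro (any "=value" part is stripped)
 *   -f<file>  the Makefile to update (default "Makefile")
 *   -o<ext>   extension, including the dot, to use for object files (default ".o")
 *   -s<text>  delimiter line in the Makefile (default "# DO NOT DELETE")
 *   -v        verbose output on stderr
 *
 * Illustrative invocation (binary name, macro and paths are examples only):
 *   depend -Isrc -DWITH_FOO -fMakefile src/main.cpp src/video.cpp
 */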
int main(int argc, char *argv[])
{
	bool ignorenext = true; /* Skip argv[0], the program name. */
	char *filename = NULL;
	char *ext = NULL;
	char *delimiter = NULL;
	bool append = false;
	bool verbose = false;

	for (int i = 0; i < argc; i++) {
		if (ignorenext) {
			ignorenext = false;
			continue;
		}
		if (argv[i][0] == '-') {
			/* Append to the Makefile instead of replacing the dependency section. */
			if (strncmp(argv[i], "-a", 2) == 0) append = true;
			/* Include directory. */
			if (strncmp(argv[i], "-I", 2) == 0) {
				if (argv[i][2] == '\0') {
					/* The directory is the next argument; ignore the flag if that is missing. */
					if (i + 1 >= argc) continue;
					_include_dirs.insert(strdup(argv[++i]));
				} else {
					_include_dirs.insert(strdup(&argv[i][2]));
				}
				continue;
			}
			/* Define a macro (strip any "=value" part). */
			if (strncmp(argv[i], "-D", 2) == 0) {
				char *p = strchr(argv[i], '=');
				if (p != NULL) *p = '\0';
				_defines.insert(strdup(&argv[i][2]));
				continue;
			}
			/* The Makefile to update. */
			if (strncmp(argv[i], "-f", 2) == 0) {
				if (filename != NULL) continue;
				filename = strdup(&argv[i][2]);
				continue;
			}
			/* The extension for object files. */
			if (strncmp(argv[i], "-o", 2) == 0) {
				if (ext != NULL) continue;
				ext = strdup(&argv[i][2]);
				continue;
			}
			/* The delimiter line in the Makefile. */
			if (strncmp(argv[i], "-s", 2) == 0) {
				if (delimiter != NULL) continue;
				delimiter = strdup(&argv[i][2]);
				continue;
			}
			if (strncmp(argv[i], "-v", 2) == 0) verbose = true;
			continue;
		}
		ScanFile(argv[i], ext, false, verbose);
	}

	/* Default Makefile to update. */
	if (filename == NULL) filename = strdup("Makefile");

	/* Default delimiter below which the dependencies are written. */
	if (delimiter == NULL) delimiter = strdup("# DO NOT DELETE");

	char backup[PATH_MAX];
	strcpy(backup, filename);
	strcat(backup, ".bak");

	char *content = NULL;
	long size = 0;

	/* Read the existing Makefile into memory, so a backup can be written
	 * and the file rewritten in place. */
	FILE *src = fopen(filename, "rb");
	if (src != NULL) {
		fseek(src, 0, SEEK_END);
		size = ftell(src);
		rewind(src);
		content = (char*)malloc(size * sizeof(*content));
		fread(content, 1, size, src);
		fclose(src);
	}

	FILE *dst = fopen(filename, "w");
	if (dst == NULL) {
		fprintf(stderr, "Could not open %s for writing\n", filename);
		return 1;
	}
	bool found_delimiter = false;

	if (size != 0) {
		/* Save a backup of the original Makefile. */
		src = fopen(backup, "wb");
		fwrite(content, 1, size, src);
		fclose(src);

		/* Copy the original contents back, stopping after the delimiter line unless -a was given. */
		src = fopen(backup, "rb");
		while (fgets(content, size, src) != NULL) {
			fputs(content, dst);
			if (!strncmp(content, delimiter, strlen(delimiter))) found_delimiter = true;
			if (!append && found_delimiter) break;
		}
		fclose(src);
	}
	if (!found_delimiter) fprintf(dst, "\n%s\n", delimiter);

	/* Emit one "object: header" rule per recorded dependency. */
	for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) {
		for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
			fprintf(dst, "%s: %s\n", it->first, *h);
		}
	}

	fclose(dst);

	free(delimiter);
	free(filename);
	free(ext);
	free(content);

	for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) {
		for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
			free((void*)*h);
		}
		it->second->clear();
		delete it->second;
		free((void*)it->first);
	}
	_files.clear();

	for (StringMap::iterator it = _headers.begin(); it != _headers.end(); it++) {
		for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
			free((void*)*h);
		}
		it->second->clear();
		delete it->second;
		free((void*)it->first);
	}
	_headers.clear();

	for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) {
		free((void*)*it);
	}
	_defines.clear();

	for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) {
		free((void*)*it);
	}
	_include_dirs.clear();

	return 0;
}