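/*
 * depend: a small dependency generator for C/C++ sources.
 *
 * It scans source files for #include directives, evaluates a limited subset
 * of the preprocessor (#if/#ifdef/#ifndef/#elif/#else/#endif, #define, #undef,
 * defined(), '!', '&&' and '||') to skip conditionally excluded includes, and
 * writes the resulting "object: header" dependency lines to a Makefile.
 */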
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <limits.h>
#include <unistd.h>
#include <map>
#include <set>
#include <stack>
#include <cassert>

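/** Get the number of elements in a statically allocated array. */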
#define lengthof(x) (sizeof(x) / sizeof(x[0]))

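/** Get the last element of a statically allocated array. */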
#define lastof(x) (&x[lengthof(x) - 1])

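/**
 * Copy a string into a buffer, writing no further than 'last'.
 * Aborts the program when the string does not fit.
 * @param dst  start of the destination buffer
 * @param src  NUL-terminated source string
 * @param last pointer to the last usable byte of the destination buffer
 * @return pointer to the terminating NUL in the destination
 */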
char *strecpy(char *dst, const char *src, const char *last)
{
	assert(dst <= last);
	while (dst != last && *src != '\0') {
		*dst++ = *src++;
	}
	*dst = '\0';

	if (dst == last && *src != '\0') {
		fprintf(stderr, "String too long for destination buffer\n");
		exit(-3);
	}
	return dst;
}

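/**
 * Append a string to a buffer, writing no further than 'last'.
 * @param dst  NUL-terminated destination buffer
 * @param src  NUL-terminated string to append
 * @param last pointer to the last usable byte of the destination buffer
 * @return pointer to the terminating NUL in the destination
 */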
static char *strecat(char *dst, const char *src, const char *last)
{
	assert(dst <= last);
	while (*dst != '\0') {
		if (dst == last) return dst;
		dst++;
	}

	return strecpy(dst, src, last);
}

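/** Version of free() that accepts pointers to const, as used for the string containers below. */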
static inline void free(const void *ptr)
{
	free(const_cast<void *>(ptr));
}

#ifndef PATH_MAX
/* Fallback for platforms that do not define PATH_MAX. */
# define PATH_MAX 260
#endif

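/** Strict weak ordering for C strings, so they can be used as keys in std::map and std::set. */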
struct StringCompare {
	/** Compare two strings lexicographically. */
	bool operator () (const char *a, const char *b) const
	{
		return strcmp(a, b) < 0;
	}
};

/** A set of C strings. */
typedef std::set<const char*, StringCompare> StringSet;
/** A map from a C string to a set of C strings. */
typedef std::map<const char*, StringSet*, StringCompare> StringMap;
/** A single (key, value) pair of a StringMap. */
typedef std::pair<const char*, StringSet*> StringMapItem;

/** The include directories given on the command line with -I. */
static StringSet _include_dirs;
/** The scanned source files (keyed by their object file name), mapped to the headers they depend on. */
static StringMap _files;
/** The headers that have already been scanned, mapped to the headers they depend on. */
static StringMap _headers;
/** The defines given on the command line with -D. */
static StringSet _defines;

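/**
 * Helper around a file handle: opens the file for reading, remembers the
 * directory it lives in, and hands out one character at a time.
 */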
class File {
public:
	/**
	 * Open the given file for reading and determine its directory name.
	 * @param filename the file to read
	 */
	File(const char *filename)
	{
		this->fp = fopen(filename, "r");
		if (this->fp == NULL) {
			fprintf(stderr, "Could not open %s for reading\n", filename);
			exit(1);
		}
		this->dirname = strdup(filename);
		char *last = strrchr(this->dirname, '/');
		if (last != NULL) {
			*last = '\0';
		} else {
			*this->dirname = '\0';
		}
	}

	/** Close the file and free the stored directory name. */
	~File()
	{
		fclose(this->fp);
		free(this->dirname);
	}

	/**
	 * Read a single character from the file.
	 * @return the read character, or '\0' on end of file
	 */
	char GetChar() const
	{
		int c = fgetc(this->fp);
		return (c == EOF) ? '\0' : c;
	}

	/**
	 * Get the directory the file lives in.
	 * @return the directory name, or an empty string when the file has no directory component
	 */
	const char *GetDirname() const
	{
		return this->dirname;
	}

private:
	FILE *fp;      ///< The file to read from.
	char *dirname; ///< The directory the file lives in.
};

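/** The tokens the lexer can produce. */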
enum Token {
	TOKEN_UNKNOWN,    ///< Unknown token
	TOKEN_END,        ///< End of the file
	TOKEN_EOL,        ///< End of a line
	TOKEN_SHARP,      ///< '#', i.e. the start of a preprocessor directive
	TOKEN_LOCAL,      ///< Local include path, i.e. "path"
	TOKEN_GLOBAL,     ///< Global include path, i.e. <path>
	TOKEN_IDENTIFIER, ///< An identifier
	TOKEN_DEFINE,     ///< (#)define
	TOKEN_IF,         ///< (#)if
	TOKEN_IFDEF,      ///< (#)ifdef
	TOKEN_IFNDEF,     ///< (#)ifndef
	TOKEN_ELIF,       ///< (#)elif
	TOKEN_ELSE,       ///< (#)else
	TOKEN_ENDIF,      ///< (#)endif
	TOKEN_UNDEF,      ///< (#)undef
	TOKEN_OR,         ///< '||' in an expression
	TOKEN_AND,        ///< '&&' in an expression
	TOKEN_DEFINED,    ///< 'defined' in an expression
	TOKEN_OPEN,       ///< '(' in an expression
	TOKEN_CLOSE,      ///< ')' in an expression
	TOKEN_NOT,        ///< '!' in an expression
	TOKEN_ZERO,       ///< A numeric literal that is zero
	TOKEN_INCLUDE,    ///< (#)include
};

/** Mapping from keyword strings to their tokens. */
typedef std::map<const char*, Token, StringCompare> KeywordList;

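/**
 * A very simple lexer for C/C++ sources: it knows just enough to find
 * preprocessor directives, include paths, identifiers and the operators
 * used in preprocessor expressions, and skips everything else.
 */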
class Lexer {
public:
	/**
	 * Create the lexer and fill the keyword list.
	 * @param file the file to read tokens from
	 */
	Lexer(const File *file) : file(file), current_char('\0'), string(NULL), token(TOKEN_UNKNOWN)
	{
		this->keywords["define"] = TOKEN_DEFINE;
		this->keywords["defined"] = TOKEN_DEFINED;
		this->keywords["if"] = TOKEN_IF;
		this->keywords["ifdef"] = TOKEN_IFDEF;
		this->keywords["ifndef"] = TOKEN_IFNDEF;
		this->keywords["include"] = TOKEN_INCLUDE;
		this->keywords["elif"] = TOKEN_ELIF;
		this->keywords["else"] = TOKEN_ELSE;
		this->keywords["endif"] = TOKEN_ENDIF;
		this->keywords["undef"] = TOKEN_UNDEF;

		/* Read the first character. */
		this->Next();

		/* Allocate the initial buffer for identifiers and strings. */
		this->buf_len = 32;
		this->buf = (char*)malloc(sizeof(*this->buf) * this->buf_len);
	}

	/** Free the token buffer. */
	~Lexer()
	{
		free(this->buf);
	}

	/** Read the next character from the file. */
	void Next()
	{
		this->current_char = this->file->GetChar();
	}

	/**
	 * Get the current token.
	 * @return the token
	 */
	Token GetToken() const
	{
		return this->token;
	}

	/**
	 * Get the string of the current token, if it has one.
	 * @return the string, e.g. the identifier name or the include path
	 */
	const char *GetString() const
	{
		return this->string;
	}

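	/**
	 * Read the next token from the file, skipping whitespace, comments and
	 * everything else the dependency scanner does not care about.
	 */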
	void Lex()
	{
		for (;;) {
			free(this->string);
			this->string = NULL;
			this->token = TOKEN_UNKNOWN;

			switch (this->current_char) {
				/* '\0' means end of file. */
				case '\0': this->token = TOKEN_END; return;

				/* Skip whitespace. */
				case '\t': this->Next(); break;
				case '\r': this->Next(); break;
				case ' ':  this->Next(); break;

				/* Line continuation. */
				case '\\':
					this->Next();
					if (this->current_char == '\n') this->Next();
					break;

				case '\n':
					this->token = TOKEN_EOL;
					this->Next();
					return;

				case '#':
					this->token = TOKEN_SHARP;
					this->Next();
					return;

				case '"':
					this->ReadString('"', TOKEN_LOCAL);
					this->Next();
					return;

				case '<':
					this->ReadString('>', TOKEN_GLOBAL);
					this->Next();
					return;

				case '&':
					this->Next();
					if (this->current_char == '&') {
						this->Next();
						this->token = TOKEN_AND;
						return;
					}
					break;

				case '|':
					this->Next();
					if (this->current_char == '|') {
						this->Next();
						this->token = TOKEN_OR;
						return;
					}
					break;

				case '(':
					this->Next();
					this->token = TOKEN_OPEN;
					return;

				case ')':
					this->Next();
					this->token = TOKEN_CLOSE;
					return;

				case '!':
					this->Next();
					if (this->current_char != '=') {
						this->token = TOKEN_NOT;
						return;
					}
					break;

				/* Possible start of a comment. */
				case '/':
					this->Next();
					switch (this->current_char) {
						case '*': {
							this->Next();
							char previous_char = '\0';
							while ((this->current_char != '/' || previous_char != '*') && this->current_char != '\0') {
								previous_char = this->current_char;
								this->Next();
							}
							this->Next();
							break;
						}
						case '/': while (this->current_char != '\n' && this->current_char != '\0') this->Next(); break;
						default: break;
					}
					break;

				default:
					if (isalpha(this->current_char) || this->current_char == '_') {
						/* Anything starting with a letter or underscore is an identifier. */
						this->ReadIdentifier();
						return;
					}
					if (isdigit(this->current_char)) {
						/* Consume a numeric literal and remember whether it was all zeros. */
						bool zero = this->current_char == '0';
						this->Next();
						if (this->current_char == 'x' || this->current_char == 'X') Next();
						while (isdigit(this->current_char) || this->current_char == '.' || (this->current_char >= 'a' && this->current_char <= 'f') || (this->current_char >= 'A' && this->current_char <= 'F')) {
							zero &= this->current_char == '0';
							this->Next();
						}
						if (zero) this->token = TOKEN_ZERO;
						return;
					}
					this->Next();
					break;
			}
		}
	}

private:
	/**
	 * Look up whether the given name is a keyword.
	 * @param name the identifier to look up
	 * @return the keyword's token, or TOKEN_IDENTIFIER when it is not a keyword
	 */
	Token FindKeyword(const char *name) const
	{
		KeywordList::const_iterator it = this->keywords.find(name);
		if (it == this->keywords.end()) return TOKEN_IDENTIFIER;
		return (*it).second;
	}

	/** Read an identifier into the buffer and resolve it to a keyword token or TOKEN_IDENTIFIER. */
	void ReadIdentifier()
	{
		size_t count = 0;

		do {
			this->buf[count++] = this->current_char;
			this->Next();

			if (count >= buf_len) {
				/* Grow the buffer when needed. */
				this->buf_len *= 2;
				this->buf = (char *)realloc(this->buf, sizeof(*this->buf) * this->buf_len);
			}
		} while ((isalpha(this->current_char) || this->current_char == '_' || isdigit(this->current_char)));
		this->buf[count] = '\0';

		free(this->string);
		this->string = strdup(this->buf);
		this->token = FindKeyword(this->string);
	}

	/**
	 * Read a string up to the given closing character into the buffer.
	 * @param end   the character that ends the string, e.g. '"' or '>'
	 * @param token the token to produce when the string has been read
	 */
	void ReadString(char end, Token token)
	{
		size_t count = 0;
		this->Next();
		while (this->current_char != end && this->current_char != ')' && this->current_char != '\n' && this->current_char != '\0') {
			this->buf[count++] = this->current_char;
			this->Next();

			if (count >= this->buf_len) {
				/* Grow the buffer when needed. */
				this->buf_len *= 2;
				this->buf = (char *)realloc(this->buf, sizeof(*this->buf) * this->buf_len);
			}
		}
		this->buf[count] = '\0';
		free(this->string);
		this->string = strdup(this->buf);
		this->token = token;
	}

	const File *file;     ///< The file to read from.
	char current_char;    ///< The last read character.
	char *string;         ///< The string of the current token, if any.
	Token token;          ///< The current token.
	char *buf;            ///< Temporary buffer for identifiers and strings.
	size_t buf_len;       ///< Length of the temporary buffer.
	KeywordList keywords; ///< The keywords this lexer knows about.
};

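/**
 * Try to find an included header on disk.
 * @param dirname  the directory of the file that contains the #include
 * @param filename the included path as written in the source
 * @param local    whether the include was a local ("...") include
 * @return a newly allocated path to the header, or NULL when it could not be found
 */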
const char *GeneratePath(const char *dirname, const char *filename, bool local)
{
	if (local) {
		if (access(filename, R_OK) == 0) return strdup(filename);

		char path[PATH_MAX];
		strecpy(path, dirname, lastof(path));
		const char *p = filename;
		/* Resolve leading '..' components against the including file's directory. */
		while (*p == '.') {
			if (*(++p) == '.') {
				char *s = strrchr(path, '/');
				if (s != NULL) *s = '\0';
				p += 2;
			}
		}
		strecat(path, "/", lastof(path));
		strecat(path, p, lastof(path));

		if (access(path, R_OK) == 0) return strdup(path);
	}

	for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) {
		char path[PATH_MAX];
		strecpy(path, *it, lastof(path));
		const char *p = filename;
		/* Resolve leading '..' components against the include directory. */
		while (*p == '.') {
			if (*(++p) == '.') {
				char *s = strrchr(path, '/');
				if (s != NULL) *s = '\0';
				p += 2;
			}
		}
		strecat(path, "/", lastof(path));
		strecat(path, p, lastof(path));

		if (access(path, R_OK) == 0) return strdup(path);
	}

	return NULL;
}

/* Forward declarations for the recursive-descent parser of preprocessor expressions. */
bool ExpressionDefined(Lexer *lexer, StringSet *defines, bool verbose);
bool ExpressionOr(Lexer *lexer, StringSet *defines, bool verbose);

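/**
 * Parse a '!' (NOT) expression, a parenthesised expression, a literal zero, or
 * a bare identifier (which is assumed to be true).
 * @param lexer   the lexer to read tokens from
 * @param defines the set of currently known defines
 * @param verbose whether to print the parsed expression to stderr
 * @return the value of the expression
 */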
bool ExpressionNot(Lexer *lexer, StringSet *defines, bool verbose)
{
	if (lexer->GetToken() == TOKEN_NOT) {
		if (verbose) fprintf(stderr, "!");
		lexer->Lex();
		bool value = !ExpressionDefined(lexer, defines, verbose);
		if (verbose) fprintf(stderr, "[%d]", value);
		return value;
	}

	if (lexer->GetToken() == TOKEN_OPEN) {
		if (verbose) fprintf(stderr, "(");
		lexer->Lex();
		bool value = ExpressionOr(lexer, defines, verbose);
		if (verbose) fprintf(stderr, ")[%d]", value);
		lexer->Lex();
		return value;
	}

	if (lexer->GetToken() == TOKEN_ZERO) {
		if (verbose) fprintf(stderr, "0");
		lexer->Lex();
		if (verbose) fprintf(stderr, "[0]");
		return false;
	}

	/* Anything else is an unknown or unevaluated term; assume it is true. */
	bool first = true;
	while (lexer->GetToken() == TOKEN_UNKNOWN || lexer->GetToken() == TOKEN_IDENTIFIER) {
		if (verbose && first) fprintf(stderr, "<assumed true>");
		first = false;
		lexer->Lex();
	}

	return true;
}

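/**
 * Parse a 'defined(IDENTIFIER)' (or 'defined IDENTIFIER') expression.
 * @param lexer   the lexer to read tokens from
 * @param defines the set of currently known defines
 * @param verbose whether to print the parsed expression to stderr
 * @return the value of the expression
 */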
bool ExpressionDefined(Lexer *lexer, StringSet *defines, bool verbose)
{
	bool value = ExpressionNot(lexer, defines, verbose);

	if (lexer->GetToken() != TOKEN_DEFINED) return value;
	lexer->Lex();
	if (verbose) fprintf(stderr, "defined");
	bool open = (lexer->GetToken() == TOKEN_OPEN);
	if (open) lexer->Lex();
	if (verbose) fprintf(stderr, open ? "(" : " ");
	if (lexer->GetToken() == TOKEN_IDENTIFIER) {
		if (verbose) fprintf(stderr, "%s", lexer->GetString());
		value = defines->find(lexer->GetString()) != defines->end();
	}
	if (open) {
		if (verbose) fprintf(stderr, ")");
		lexer->Lex();
	}
	lexer->Lex();
	if (verbose) fprintf(stderr, "[%d]", value);
	return value;
}

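/**
 * Parse a '&&' (AND) chain of expressions.
 * @param lexer   the lexer to read tokens from
 * @param defines the set of currently known defines
 * @param verbose whether to print the parsed expression to stderr
 * @return the value of the expression
 */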
bool ExpressionAnd(Lexer *lexer, StringSet *defines, bool verbose)
{
	bool value = ExpressionDefined(lexer, defines, verbose);

	for (;;) {
		if (lexer->GetToken() != TOKEN_AND) return value;
		if (verbose) fprintf(stderr, " && ");
		lexer->Lex();
		value = value && ExpressionDefined(lexer, defines, verbose);
	}
}

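/**
 * Parse a '||' (OR) chain of expressions; this is the top level of the expression grammar.
 * @param lexer   the lexer to read tokens from
 * @param defines the set of currently known defines
 * @param verbose whether to print the parsed expression to stderr
 * @return the value of the expression
 */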
bool ExpressionOr(Lexer *lexer, StringSet *defines, bool verbose)
{
	bool value = ExpressionAnd(lexer, defines, verbose);

	for (;;) {
		if (lexer->GetToken() != TOKEN_OR) return value;
		if (verbose) fprintf(stderr, " || ");
		lexer->Lex();
		value = value || ExpressionAnd(lexer, defines, verbose);
	}
}

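/** The state of a conditional-compilation block while scanning. */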
enum Ignore {
	NOT_IGNORE,         ///< No ignoring: the current block is active
	IGNORE_UNTIL_ELSE,  ///< Ignore until a #else, #elif or #endif is reached
	IGNORE_UNTIL_ENDIF, ///< Ignore until a #endif is reached
};

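/**
 * Scan a file for includes, defines and the conditional directives that decide
 * which of them are active, and record the found dependencies in _files and _headers.
 * @param filename the file to scan
 * @param ext      the extension for object files, or NULL for ".o"
 * @param header   whether the file is a header (scanned recursively) or a source file
 * @param verbose  whether to print progress information to stderr
 */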
void ScanFile(const char *filename, const char *ext, bool header, bool verbose)
{
	static StringSet defines;
	static std::stack<Ignore> ignore;

	/* When starting on a source file, seed the defines with those from the command line. */
	if (!header) {
		for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) {
			defines.insert(strdup(*it));
		}
	}

	File file(filename);
	Lexer lexer(&file);

	/* Read the first token. */
	lexer.Lex();

	while (lexer.GetToken() != TOKEN_END) {
		switch (lexer.GetToken()) {
			/* We reached the end of the file. */
			case TOKEN_END: break;

			/* Beginning of a preprocessor directive. */
			case TOKEN_SHARP:
				lexer.Lex();
				switch (lexer.GetToken()) {
					case TOKEN_INCLUDE:
						if (verbose) fprintf(stderr, "%s #include ", filename);
						lexer.Lex();
						switch (lexer.GetToken()) {
							case TOKEN_LOCAL:
							case TOKEN_GLOBAL: {
								if (verbose) fprintf(stderr, "%s", lexer.GetString());
								if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
									if (verbose) fprintf(stderr, " (ignored)");
									break;
								}
								const char *h = GeneratePath(file.GetDirname(), lexer.GetString(), lexer.GetToken() == TOKEN_LOCAL);
								if (h != NULL) {
									StringMap::iterator it = _headers.find(h);
									if (it == _headers.end()) {
										it = (_headers.insert(StringMapItem(strdup(h), new StringSet()))).first;
										if (verbose) fprintf(stderr, "\n");
										ScanFile(h, ext, true, verbose);
									}
									StringMap::iterator curfile;
									if (header) {
										curfile = _headers.find(filename);
									} else {
										/* Replace the extension of the source file with the object extension. */
										char path[PATH_MAX];
										strecpy(path, filename, lastof(path));
										*(strrchr(path, '.')) = '\0';
										strecat(path, ext != NULL ? ext : ".o", lastof(path));
										curfile = _files.find(path);
										if (curfile == _files.end()) {
											curfile = (_files.insert(StringMapItem(strdup(path), new StringSet()))).first;
										}
									}
									if (it != _headers.end()) {
										for (StringSet::iterator header = it->second->begin(); header != it->second->end(); header++) {
											if (curfile->second->find(*header) == curfile->second->end()) curfile->second->insert(strdup(*header));
										}
									}
									if (curfile->second->find(h) == curfile->second->end()) curfile->second->insert(strdup(h));
									free(h);
								}
							}
							/* FALL THROUGH */
							default: break;
						}
						break;

					case TOKEN_DEFINE:
						if (verbose) fprintf(stderr, "%s #define ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							if (verbose) fprintf(stderr, "%s", lexer.GetString());
							if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
								if (verbose) fprintf(stderr, " (ignored)");
								break;
							}
							if (defines.find(lexer.GetString()) == defines.end()) defines.insert(strdup(lexer.GetString()));
							lexer.Lex();
						}
						break;

					case TOKEN_UNDEF:
						if (verbose) fprintf(stderr, "%s #undef ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							if (verbose) fprintf(stderr, "%s", lexer.GetString());
							if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
								if (verbose) fprintf(stderr, " (ignored)");
								break;
							}
							StringSet::iterator it = defines.find(lexer.GetString());
							if (it != defines.end()) {
								free(*it);
								defines.erase(it);
							}
							lexer.Lex();
						}
						break;

					case TOKEN_ENDIF:
						if (verbose) fprintf(stderr, "%s #endif", filename);
						lexer.Lex();
						if (!ignore.empty()) ignore.pop();
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;

					case TOKEN_ELSE: {
						if (verbose) fprintf(stderr, "%s #else", filename);
						lexer.Lex();
						Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top();
						if (!ignore.empty()) ignore.pop();
						if (ignore.empty() || ignore.top() == NOT_IGNORE) {
							ignore.push(last == IGNORE_UNTIL_ELSE ? NOT_IGNORE : IGNORE_UNTIL_ENDIF);
						} else {
							ignore.push(IGNORE_UNTIL_ENDIF);
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;
					}

					case TOKEN_ELIF: {
						if (verbose) fprintf(stderr, "%s #elif ", filename);
						lexer.Lex();
						Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top();
						if (!ignore.empty()) ignore.pop();
						if (ignore.empty() || ignore.top() == NOT_IGNORE) {
							bool value = ExpressionOr(&lexer, &defines, verbose);
							ignore.push(last == IGNORE_UNTIL_ELSE ? (value ? NOT_IGNORE : IGNORE_UNTIL_ELSE) : IGNORE_UNTIL_ENDIF);
						} else {
							ignore.push(IGNORE_UNTIL_ENDIF);
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;
					}

					case TOKEN_IF: {
						if (verbose) fprintf(stderr, "%s #if ", filename);
						lexer.Lex();
						if (ignore.empty() || ignore.top() == NOT_IGNORE) {
							bool value = ExpressionOr(&lexer, &defines, verbose);
							ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
						} else {
							ignore.push(IGNORE_UNTIL_ENDIF);
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;
					}

					case TOKEN_IFDEF:
						if (verbose) fprintf(stderr, "%s #ifdef ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							bool value = defines.find(lexer.GetString()) != defines.end();
							if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value);
							if (ignore.empty() || ignore.top() == NOT_IGNORE) {
								ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
							} else {
								ignore.push(IGNORE_UNTIL_ENDIF);
							}
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;

					case TOKEN_IFNDEF:
						if (verbose) fprintf(stderr, "%s #ifndef ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							bool value = defines.find(lexer.GetString()) != defines.end();
							if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value);
							if (ignore.empty() || ignore.top() == NOT_IGNORE) {
								ignore.push(!value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
							} else {
								ignore.push(IGNORE_UNTIL_ENDIF);
							}
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;

					default:
						if (verbose) fprintf(stderr, "%s #<unknown>", filename);
						lexer.Lex();
						break;
				}
				if (verbose) fprintf(stderr, "\n");
				/* FALL THROUGH */
			default:
				/* Ignore the rest of the line. */
				while (lexer.GetToken() != TOKEN_EOL && lexer.GetToken() != TOKEN_END) lexer.Lex();
				lexer.Lex();
				break;
		}
	}

	/* When done with a source file, clean up the defines and the ignore stack. */
	if (!header) {
		for (StringSet::iterator it = defines.begin(); it != defines.end(); it++) {
			free(*it);
		}
		defines.clear();
		while (!ignore.empty()) ignore.pop();
	}
}

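/**
 * Entry point: parse the command line, scan all given source files and
 * (re)write the dependency section of the Makefile.
 *
 * Recognised options:
 *   -I<dir>    add an include directory
 *   -D<define> add a preprocessor define (any "=value" part is stripped)
 *   -f<file>   the Makefile to update (default "Makefile")
 *   -o<ext>    the object file extension (default ".o")
 *   -s<delim>  the delimiter marking the dependency section (default "# DO NOT DELETE")
 *   -a         append to the Makefile instead of truncating after the delimiter
 *   -v         be verbose
 * Every other argument is treated as a source file to scan.
 */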
int main(int argc, char *argv[])
{
	bool ignorenext = true;
	char *filename = NULL;
	char *ext = NULL;
	char *delimiter = NULL;
	bool append = false;
	bool verbose = false;

	/* Parse the command line; the program name is skipped via ignorenext. */
	for (int i = 0; i < argc; i++) {
		if (ignorenext) {
			ignorenext = false;
			continue;
		}
		if (argv[i][0] == '-') {
			/* Append to the Makefile instead of truncating after the delimiter. */
			if (strncmp(argv[i], "-a", 2) == 0) append = true;
			/* Add an include directory. */
			if (strncmp(argv[i], "-I", 2) == 0) {
				if (argv[i][2] == '\0') {
					i++;
					_include_dirs.insert(strdup(argv[i]));
				} else {
					_include_dirs.insert(strdup(&argv[i][2]));
				}
				continue;
			}
			/* Add a define; any "=value" part is stripped. */
			if (strncmp(argv[i], "-D", 2) == 0) {
				char *p = strchr(argv[i], '=');
				if (p != NULL) *p = '\0';
				_defines.insert(strdup(&argv[i][2]));
				continue;
			}
			/* The name of the Makefile to update. */
			if (strncmp(argv[i], "-f", 2) == 0) {
				if (filename != NULL) continue;
				filename = strdup(&argv[i][2]);
				continue;
			}
			/* The extension of the object files. */
			if (strncmp(argv[i], "-o", 2) == 0) {
				if (ext != NULL) continue;
				ext = strdup(&argv[i][2]);
				continue;
			}
			/* The delimiter that marks the start of the dependency section. */
			if (strncmp(argv[i], "-s", 2) == 0) {
				if (delimiter != NULL) continue;
				delimiter = strdup(&argv[i][2]);
				continue;
			}
			/* Enable verbose output. */
			if (strncmp(argv[i], "-v", 2) == 0) verbose = true;
			continue;
		}
		ScanFile(argv[i], ext, false, verbose);
	}

	/* Default Makefile name when not specified. */
	if (filename == NULL) filename = strdup("Makefile");

	/* Default delimiter when not specified. */
	if (delimiter == NULL) delimiter = strdup("# DO NOT DELETE");

	char backup[PATH_MAX];
	strecpy(backup, filename, lastof(backup));
	strecat(backup, ".bak", lastof(backup));

	char *content = NULL;
	long size = 0;

	/* Read the current Makefile so it can be backed up and the part up to the
	 * delimiter can be restored. */
	FILE *src = fopen(filename, "rb");
	if (src != NULL) {
		fseek(src, 0, SEEK_END);
		if ((size = ftell(src)) < 0) {
			fprintf(stderr, "Could not read %s\n", filename);
			exit(-2);
		}
		rewind(src);
		content = (char*)malloc(size * sizeof(*content));
		if (fread(content, 1, size, src) != (size_t)size) {
			fprintf(stderr, "Could not read %s\n", filename);
			exit(-2);
		}
		fclose(src);
	}

	FILE *dst = fopen(filename, "w");
	bool found_delimiter = false;

	if (size != 0) {
		/* Write the backup of the old Makefile. */
		src = fopen(backup, "wb");
		if (fwrite(content, 1, size, src) != (size_t)size) {
			fprintf(stderr, "Could not write %s\n", backup);
			exit(-2);
		}
		fclose(src);

		/* Copy the old Makefile back, up to (and including) the delimiter. */
		src = fopen(backup, "rb");
		while (fgets(content, size, src) != NULL) {
			fputs(content, dst);
			if (!strncmp(content, delimiter, strlen(delimiter))) found_delimiter = true;
			if (!append && found_delimiter) break;
		}
		fclose(src);
	}
	if (!found_delimiter) fprintf(dst, "\n%s\n", delimiter);

	/* Write the found dependencies. */
	for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) {
		for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
			fprintf(dst, "%s: %s\n", it->first, *h);
		}
	}

	fclose(dst);

	free(delimiter);
	free(filename);
	free(ext);
	free(content);

	/* Clean up the global containers. */
	for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) {
		for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
			free(*h);
		}
		it->second->clear();
		delete it->second;
		free(it->first);
	}
	_files.clear();

	for (StringMap::iterator it = _headers.begin(); it != _headers.end(); it++) {
		for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
			free(*h);
		}
		it->second->clear();
		delete it->second;
		free(it->first);
	}
	_headers.clear();

	for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) {
		free(*it);
	}
	_defines.clear();

	for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) {
		free(*it);
	}
	_include_dirs.clear();

	return 0;
}