diff --git a/tokenizer.c b/tokenizer.c
index b2adae7..322ff35 100644
--- a/tokenizer.c
+++ b/tokenizer.c
@@ -1,28 +1,38 @@
+#ifndef TOKENIZER_H
+#define TOKENIZER_H
+
+void tokenize(char *input);
+
+#endif // TOKENIZER_H
+
 #include <stdio.h>
 #include <string.h>
 #include <stdlib.h>
+#include "tokenizer.h"
 
 #define MAX_TOKEN_SIZE 100
 
 void tokenize(char *input) {
-    char *token = strtok(input, " \t\n"); // Delimiters: space, tab, newline
+    char *token = strtok(input, " \t\n");
     while (token != NULL) {
         printf("Token: %s\n", token);
         token = strtok(NULL, " \t\n");
     }
 }
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include "tokenizer.h"
+
+#define MAX_TOKEN_SIZE 100
 
 int main() {
     char input[MAX_TOKEN_SIZE];
-    // Get input string
     printf("Enter a string to tokenize: ");
     fgets(input, MAX_TOKEN_SIZE, stdin);
-
-    // Remove newline character
     input[strcspn(input, "\n")] = '\0';
 
-    // Tokenize the input string
     tokenize(input);
 
     return 0;