/*
 * Pack the leading characters of a token into a long, six bits per
 * character.  ASCII bytes in [32, 95] map to (byte - 32), so
 * ' ' -> 0, 'A' -> 33, '_' -> 63; every other byte maps to 63.
 * Character i of the token occupies bits [6*i, 6*i + 5] of the result.
 * An empty token packs to 0.
 */
long token_to_packed(const char token[6])
{
	/* static const: build the table once, not on every call */
	static const char ascii_to_advent[128] = {
		63, 63, 63, 63, 63, 63, 63, 63,
		63, 63, 63, 63, 63, 63, 63, 63,
		63, 63, 63, 63, 63, 63, 63, 63,
		63, 63, 63, 63, 63, 63, 63, 63,

		0, 1, 2, 3, 4, 5, 6, 7,
		8, 9, 10, 11, 12, 13, 14, 15,
		16, 17, 18, 19, 20, 21, 22, 23,
		24, 25, 26, 27, 28, 29, 30, 31,
		32, 33, 34, 35, 36, 37, 38, 39,
		40, 41, 42, 43, 44, 45, 46, 47,
		48, 49, 50, 51, 52, 53, 54, 55,
		56, 57, 58, 59, 60, 61, 62, 63,

		63, 63, 63, 63, 63, 63, 63, 63,
		63, 63, 63, 63, 63, 63, 63, 63,
		63, 63, 63, 63, 63, 63, 63, 63,
		63, 63, 63, 63, 63, 63, 63, 63,
	};

	size_t t_len = strlen(token);
	long packed = 0;
	for (size_t i = 0; i < t_len; ++i) {
		/* Go through unsigned char: on signed-char platforms a
		 * byte >= 0x80 would otherwise index *before* the table
		 * (undefined behavior).  Bytes outside the 128-entry
		 * table map to 63 like other invalid input. */
		unsigned char c = (unsigned char) token[i];
		long mapped = (c < 128) ? ascii_to_advent[c] : 63;
		packed |= mapped << (6 * i);
	}
	return packed;
}
+
/*
 * Split the first two whitespace-delimited words of `raw` into up to
 * four 5-character chunks (two per word), uppercase them, and pack
 * each chunk with token_to_packed() into tokens[0..3].  Slots with no
 * corresponding input pack an empty chunk (value 0).
 *
 * `raw` must be a NUL-terminated string; it is not modified.
 */
void tokenize(char* raw, long tokens[4])
{
	// set each token to 0
	for (int i = 0; i < 4; ++i)
		tokens[i] = 0;

	// grab the first two words; a single word can be at most as
	// long as the whole input line, so strlen(raw)+1 bytes suffice
	size_t raw_len = strlen(raw);
	char* words[2];
	words[0] = (char*) xmalloc(raw_len + 1);
	words[1] = (char*) xmalloc(raw_len + 1);
	int word_count = sscanf(raw, "%s%s", words[0], words[1]);

	// make space for substrings and zero it out
	char chunk_data[4][6] = {{0}};

	// break the words into up to 4 5-char substrings.
	// Guard on word_count: with empty/whitespace-only input sscanf
	// returns 0 (or EOF) and words[0] stays uninitialized -- the
	// original read it unconditionally, which is undefined behavior.
	if (word_count >= 1)
		sscanf(words[0], "%5s%5s", chunk_data[0], chunk_data[1]);
	if (word_count == 2)
		sscanf(words[1], "%5s%5s", chunk_data[2], chunk_data[3]);
	free(words[0]);
	free(words[1]);

	// uppercase all the substrings; cast through unsigned char --
	// passing a negative plain char to toupper() is undefined behavior
	for (int i = 0; i < 4; ++i)
		for (size_t j = 0; chunk_data[i][j] != '\0'; ++j)
			chunk_data[i][j] =
			    (char) toupper((unsigned char) chunk_data[i][j]);

	// pack the substrings
	for (int i = 0; i < 4; ++i)
		tokens[i] = token_to_packed(chunk_data[i]);
}