day 6, task 2 solved

parent 8cb3eb85f5
commit d269b05042
1 changed file with 61 additions and 13 deletions
@@ -8,14 +8,21 @@
 %code requires {
   #include <stdbool.h>
 
+  #define BUFSIZE 14
+
   typedef void* yyscan_t;
 
   struct parser_state {
-    // for storing the most recently read 4 chars
-    char buffer[4];
+    // for storing the most recently read 14 chars
+    char buffer[BUFSIZE];
     // location tracker
     unsigned long loc;
-    bool found_start;
+
+    bool found_start_of_packet;
+    unsigned long packet_start;
+
+    bool found_start_of_msg;
+    unsigned long msg_start;
 
     bool check_matrix[26];
   };
@@ -76,32 +83,69 @@ msg
 
 bit
   : CHAR {
-    if (!state->found_start) {
+    if (!state->found_start_of_packet || !state->found_start_of_msg) {
       state->loc++;
 
       // left shift existing data
-      for (int i = 0; i < 3; i++) {
+      for (int i = 0; i < (BUFSIZE-1); i++) {
         state->buffer[i] = state->buffer[i+1];
       }
 
-      state->buffer[3] = $1;
+      state->buffer[BUFSIZE-1] = $1;
 
       // check whether condition is met
       // offset array indices by the base -- 'a'
       char offset = 'a';
 
-      state->found_start = true;
-      for (int i = 0; i < 4; i++) {
+      // Search for start of packet
+      bool search_sop = !state->found_start_of_packet; // sop - start of packet
+      bool search_som = !state->found_start_of_msg; // som - start of message
+
+      if (search_sop) {
+        state->found_start_of_packet = true;
+      }
+      if (search_som) {
+        state->found_start_of_msg = true;
+      }
+
+      // the start of packet only concerns itself with the last 4 characters read; the start of message with the last 14
+      // hence, we check the last 4 first, then the rest
+      for (int i = BUFSIZE - 4; i < BUFSIZE; i++) {
        if (!state->check_matrix[state->buffer[i] - offset]) {
          state->check_matrix[state->buffer[i] - offset] = true;
        } else {
-          state->found_start = false;
+          if (search_sop) {
+            state->found_start_of_packet = false;
+          }
+          if (search_som) {
+            state->found_start_of_msg = false;
+          }
           break;
         }
       }
 
+      // now check the remaining elements
+      for (int i = 0; i < BUFSIZE - 4; i++) {
+        if (!state->check_matrix[state->buffer[i] - offset]) {
+          state->check_matrix[state->buffer[i] - offset] = true;
+        } else {
+          if (search_som) {
+            state->found_start_of_msg = false;
+          }
+          break;
+        }
+      }
+
+
+      if (search_sop && state->found_start_of_packet) {
+        state->packet_start = state->loc;
+      }
+      if (search_som && state->found_start_of_msg) {
+        state->msg_start = state->loc;
+      }
+
       // housekeeping; reset the fields to false
-      for (int i = 0; i < 4; i++) {
+      for (int i = 0; i < BUFSIZE; i++) {
         state->check_matrix[state->buffer[i] - offset] = false;
       }
     }
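For reference, the duplicate check performed in the action above is a plain sliding-window distinctness test: the marker position is the number of characters read once the last 4 (packet) or last 14 (message) characters are pairwise distinct. A minimal standalone sketch of the same idea, decoupled from the flex/bison plumbing (the helper name find_marker and the test string are hypothetical and not part of this commit; lowercase input is assumed, as in the puzzle):

    #include <stdbool.h>
    #include <stdio.h>
    #include <string.h>

    // Return the 1-based position of the character that completes the first
    // window of `win` pairwise-distinct characters, or 0 if none exists.
    static unsigned long find_marker(const char *s, int win) {
        size_t n = strlen(s);
        for (size_t end = (size_t)win; end <= n; end++) {
            bool seen[26] = {false};
            bool unique = true;
            for (size_t i = end - (size_t)win; i < end; i++) {
                if (seen[s[i] - 'a']) { unique = false; break; }
                seen[s[i] - 'a'] = true;
            }
            if (unique) return (unsigned long)end;
        }
        return 0;
    }

    int main(void) {
        const char *stream = "abcabcdefghijklmnop"; // arbitrary test stream
        printf("start of packet:  %lu\n", find_marker(stream, 4));
        printf("start of message: %lu\n", find_marker(stream, 14));
        return 0;
    }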
@@ -113,11 +157,14 @@ bit
 
 int main(void) {
   struct parser_state parser_state;
-  for (int i = 0; i < 4; i++) {
+  for (int i = 0; i < BUFSIZE; i++) {
     parser_state.buffer[i] = 'a';
   }
   parser_state.loc = 0;
-  parser_state.found_start = false;
+  parser_state.packet_start = 0;
+  parser_state.msg_start = 0;
+  parser_state.found_start_of_packet = false;
+  parser_state.found_start_of_msg = false;
 
   yyscan_t scanner;
   if (yylex_init(&scanner)) {
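As a side note on the initialization above, the same setup can be written with a zero initializer plus one memset for the pre-filled window. A small illustrative sketch, assuming the struct layout from the %code requires block and made standalone so it compiles on its own (not part of this commit):

    #include <stdbool.h>
    #include <string.h>

    #define BUFSIZE 14

    struct parser_state {
        char buffer[BUFSIZE];          // most recently read 14 chars
        unsigned long loc;             // location tracker
        bool found_start_of_packet;
        unsigned long packet_start;
        bool found_start_of_msg;
        unsigned long msg_start;
        bool check_matrix[26];
    };

    int main(void) {
        struct parser_state parser_state = {0};     // zero every counter and flag
        memset(parser_state.buffer, 'a', BUFSIZE);  // pre-fill the window as the action expects
        return 0;
    }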
@@ -133,7 +180,8 @@ int main(void) {
 
   yylex_destroy(scanner);
 
-  printf("First start-of-packet marker detected at: %lu\n", parser_state.loc);
+  printf("First start-of-packet marker detected at: %lu\n", parser_state.packet_start);
+  printf("First start-of-message marker detected at: %lu\n", parser_state.msg_start);
 
   return 0;
 }