Lines Matching refs:input

505 	const char *input;  member
535 t->input = t->s.start;
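
The match at line 505 shows that input is a struct member, and line 535 shows it being pointed at the start of the source buffer (t->s.start); every later match then indexes it as t->input[t->offset]. A minimal sketch of the state this implies, with all field names other than input assumed for illustration:

    #include <stddef.h>

    /* Minimal sketch of the tokenizer state implied by the matches: a
     * borrowed pointer to the config text plus a cursor. Field names other
     * than `input` are assumptions; the real struct (line 505) has more
     * members. */
    typedef struct {
        const char *input;  /* start of the config source (line 535: t->s.start) */
        size_t offset;      /* current read position */
        size_t size;        /* total bytes available (checked at line 579) */
    } tokenizer_t;

Borrowing a pointer and walking an offset means the tokenizer never copies the input; and since the scans at lines 579, 725 and 843 all treat a NUL byte as end of input, the buffer is evidently NUL-terminated.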
556 assert(t->input[t->offset] == '\r' || t->input[t->offset] == '\n'); in config_skip_newline()
557 if (t->input[t->offset] == '\r' && t->input[t->offset + 1] == '\n') { in config_skip_newline()
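
Lines 556-557 are the heart of config_skip_newline(): assert that the cursor sits on a line terminator, then consume "\r\n" as a single newline. A sketch reconstructed around those two lines, assuming the minimal tokenizer state above:

    #include <assert.h>
    #include <stddef.h>

    typedef struct { const char *input; size_t offset; } tokenizer_t;

    /* Reconstruction of the newline skip suggested by lines 556-557:
     * consume "\r\n" as one newline, otherwise consume a lone '\r' or '\n'.
     * The function name matches the listing; the body is a sketch. */
    static void config_skip_newline(tokenizer_t *t) {
        assert(t->input[t->offset] == '\r' || t->input[t->offset] == '\n');
        if (t->input[t->offset] == '\r' && t->input[t->offset + 1] == '\n') {
            t->offset += 2;   /* CRLF counts as a single newline */
        } else {
            t->offset += 1;   /* bare LF or bare CR */
        }
    }

Treating CRLF as one unit presumably keeps line counting correct for configs edited on Windows.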
567 assert(t->input[t->offset] == '#'); in config_skip_comment()
568 for (i = 1; t->input[t->offset + i] && in config_skip_comment()
569 (t->input[t->offset + i] != '\n' && t->input[t->offset + i] != '\r'); in config_skip_comment()
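
Lines 567-569 show config_skip_comment() scanning from a '#' to the end of the line without consuming the terminator itself. A sketch; only the assertion and the loop condition come from the listing:

    #include <assert.h>
    #include <stddef.h>

    typedef struct { const char *input; size_t offset; } tokenizer_t;

    /* Reconstruction of the comment scan implied by lines 567-569: starting
     * at a '#', advance past every byte up to (but not including) the line
     * terminator or the end of the buffer. */
    static void config_skip_comment(tokenizer_t *t) {
        size_t i;
        assert(t->input[t->offset] == '#');
        for (i = 1; t->input[t->offset + i] &&
                    t->input[t->offset + i] != '\n' &&
                    t->input[t->offset + i] != '\r'; i++)
            ;
        t->offset += i; /* leave the newline for config_skip_newline() */
    }

Leaving the '\r'/'\n' in place lets config_skip_newline() handle every line ending uniformly.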
579 for (tid = 0; tid == 0 && t->offset < t->size && t->input[t->offset] ; ) { in config_tokenizer()
580 char c = t->input[t->offset]; in config_tokenizer()
586 if (t->input[t->offset + 1] == '>') { in config_tokenizer()
600 if (t->input[t->offset + 1] == '=') { in config_tokenizer()
606 } else if (t->input[t->offset + 1] == '~') { in config_tokenizer()
624 buffer_copy_string_len(token, t->input + t->offset, 1); in config_tokenizer()
639 if (t->input[t->offset + 1] == '=') { in config_tokenizer()
645 } else if (t->input[t->offset + 1] == '~') { in config_tokenizer()
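
Lines 586, 600, 606, 639 and 645 (and 791 further down) all use the same one-byte lookahead, t->input[t->offset + 1], to split two-character operators such as "=>", "==" and "=~" from their one-character prefixes. A sketch of that pattern for the '=' family; the token-id names here are invented for illustration:

    #include <stddef.h>

    typedef struct { const char *input; size_t offset; size_t size; } tokenizer_t;

    /* Hypothetical token ids; the real ones are not in the listing. */
    enum { TK_NONE, TK_ASSIGN, TK_EQ, TK_MATCH, TK_ARRAY_ASSIGN };

    /* One-byte lookahead, as in the matches: after seeing '=', inspect
     * input[offset + 1] to decide between "=>", "==", "=~" and plain "=". */
    static int scan_eq_operator(tokenizer_t *t) {
        if (t->input[t->offset + 1] == '>') {        /* "=>" */
            t->offset += 2;
            return TK_ARRAY_ASSIGN;
        } else if (t->input[t->offset + 1] == '=') { /* "==" */
            t->offset += 2;
            return TK_EQ;
        } else if (t->input[t->offset + 1] == '~') { /* "=~" */
            t->offset += 2;
            return TK_MATCH;
        }
        t->offset += 1;                              /* plain "=" */
        return TK_ASSIGN;
    }

The lookahead never reads out of bounds on a NUL-terminated buffer, because inside the main loop (line 579) offset always points at a non-NUL byte.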
679 switch (t->input[t->offset]) { in config_tokenizer()
722 start = t->input + t->offset + 1; in config_tokenizer()
725 for (i = 1; t->input[t->offset + i]; i++) { in config_tokenizer()
726 if (t->input[t->offset + i] == '\\' && in config_tokenizer()
727 t->input[t->offset + i + 1] == '"') { in config_tokenizer()
729 buffer_append_string_len(token, start, t->input + t->offset + i - start); in config_tokenizer()
731 start = t->input + t->offset + i + 1; in config_tokenizer()
739 if (t->input[t->offset + i] == '"') { in config_tokenizer()
742 buffer_append_string_len(token, start, t->input + t->offset + i - start); in config_tokenizer()
748 if (t->input[t->offset + i] == '\0') { in config_tokenizer()
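
Lines 722-748 outline the quoted-string scanner: remember where the content starts (line 722), walk forward until the closing quote (line 739), flush the pending segment whenever a \" escape appears (lines 726-731), and treat a NUL before the close as an unterminated string (line 748). A self-contained reconstruction with an invented function name; the original appends into a dynamic buffer via buffer_append_string_len(), and its escape handling copies up to the backslash and lets the quote ride along with the next segment, where this sketch appends the quote explicitly:

    #include <stddef.h>
    #include <string.h>

    typedef struct { const char *input; size_t offset; } tokenizer_t;

    /* Sketch of the quoted-string scan in lines 722-748, using a fixed
     * output array to stay self-contained. Escaped quotes (\") are
     * unescaped; any other byte is copied verbatim. Returns 0 on success,
     * -1 on overflow or an unterminated string. */
    static int scan_quoted_string(tokenizer_t *t, char *out, size_t outsz) {
        const char *start = t->input + t->offset + 1; /* skip opening '"' */
        size_t i, len = 0;

        for (i = 1; t->input[t->offset + i]; i++) {
            if (t->input[t->offset + i] == '\\' &&
                t->input[t->offset + i + 1] == '"') {
                /* flush the segment before the escape, then a literal quote */
                size_t seg = (size_t)(t->input + t->offset + i - start);
                if (len + seg + 1 >= outsz) return -1;
                memcpy(out + len, start, seg); len += seg;
                out[len++] = '"';
                start = t->input + t->offset + i + 2; /* resume past \" */
                i++;                                  /* skip the escaped '"' */
                continue;
            }
            if (t->input[t->offset + i] == '"') {     /* closing quote */
                size_t seg = (size_t)(t->input + t->offset + i - start);
                if (len + seg >= outsz) return -1;
                memcpy(out + len, start, seg); len += seg;
                out[len] = '\0';
                t->offset += i + 1;                   /* past the close */
                return 0;
            }
        }
        return -1; /* hit '\0': unterminated string (line 748) */
    }

Flushing whole segments between escapes, rather than copying byte by byte, keeps the common no-escape case a single copy.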
791 if (t->input[t->offset + 1] == '=') { in config_tokenizer()
843 for (i = 0; t->input[t->offset + i] && in config_tokenizer()
844 (isalpha((unsigned char)t->input[t->offset + i]) in config_tokenizer()
847 if (i && t->input[t->offset + i]) { in config_tokenizer()
849 buffer_copy_string_len(token, t->input + t->offset, i); in config_tokenizer()
863 for (i = 0; t->input[t->offset + i] && isdigit((unsigned char)t->input[t->offset + i]); i++); in config_tokenizer()
869 buffer_copy_string_len(token, t->input + t->offset, i); in config_tokenizer()
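
Lines 843-849 and 863-869 are two instances of the same idiom: measure a run of bytes in a character class (isalpha for keywords, isdigit for numbers), then copy out exactly that run. A generic sketch of the idiom; note the original additionally rejects a run that reaches the end of the buffer (line 847: i && t->input[t->offset + i]), which this sketch omits:

    #include <ctype.h>
    #include <stddef.h>
    #include <string.h>

    typedef struct { const char *input; size_t offset; } tokenizer_t;

    /* Sketch of the run scans in lines 843-869: count bytes matching a
     * class, then copy that run. The real code copies into a dynamic
     * `buffer`; a caller-provided array keeps this self-contained.
     * Returns the run length, or 0 if nothing matched or it won't fit. */
    static size_t scan_run(tokenizer_t *t, int (*in_class)(int),
                           char *out, size_t outsz) {
        size_t i;
        for (i = 0; t->input[t->offset + i] &&
                    in_class((unsigned char)t->input[t->offset + i]); i++)
            ;
        if (i == 0 || i >= outsz) return 0;
        memcpy(out, t->input + t->offset, i);
        out[i] = '\0';
        t->offset += i;
        return i;
    }

Usage would be scan_run(t, isalpha, buf, sizeof buf) for a keyword and scan_run(t, isdigit, buf, sizeof buf) for a number; passing isalpha directly is valid C because <ctype.h> guarantees a real function behind any macro.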
876 for (i = 0; t->input[t->offset + i] && in config_tokenizer()
877 (isalnum((unsigned char)t->input[t->offset + i]) || in config_tokenizer()
878 t->input[t->offset + i] == '.' || in config_tokenizer()
879 t->input[t->offset + i] == '_' || /* for env.* */ in config_tokenizer()
880 t->input[t->offset + i] == '-' in config_tokenizer()
883 if (i && t->input[t->offset + i]) { in config_tokenizer()
884 buffer_copy_string_len(token, t->input + t->offset, i); in config_tokenizer()
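
Lines 876-884 apply the same run scan to identifiers, widening the class to alphanumerics plus '.', '_' and '-' so that dotted names such as env.* tokenize as one unit. A sketch built around the exact loop condition from the listing:

    #include <ctype.h>
    #include <stddef.h>
    #include <string.h>

    typedef struct { const char *input; size_t offset; } tokenizer_t;

    /* Identifier scan per lines 876-884: alphanumerics plus '.', '_', '-'.
     * Returns the identifier length, or 0 if none matched or it won't fit. */
    static size_t scan_identifier(tokenizer_t *t, char *out, size_t outsz) {
        size_t i;
        for (i = 0; t->input[t->offset + i] &&
                    (isalnum((unsigned char)t->input[t->offset + i]) ||
                     t->input[t->offset + i] == '.' ||
                     t->input[t->offset + i] == '_' ||  /* for env.* */
                     t->input[t->offset + i] == '-'); i++)
            ;
        if (i == 0 || i >= outsz) return 0;
        memcpy(out, t->input + t->offset, i);
        out[i] = '\0';
        t->offset += i;
        return i;
    }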
972 static int tokenizer_init(tokenizer_t *t, const buffer *source, const char *input, size_t size) { in tokenizer_init() argument
975 t->input = input; in tokenizer_init()
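
Lines 972-975 show the constructor: tokenizer_init() takes the raw text and its size and stores the pointer, so the tokenizer borrows the caller's memory rather than copying it. The real signature also takes a const buffer *source (line 972), likely the origin used in error messages; the sketch below drops it, and the zeroing of the remaining (assumed) state is a guess:

    #include <stddef.h>
    #include <string.h>

    typedef struct {
        const char *input;  /* borrowed pointer into the config text */
        size_t offset;
        size_t size;
    } tokenizer_t;

    /* Sketch of tokenizer_init() from lines 972-975. Because `input` is
     * borrowed, it must stay alive for as long as the tokenizer is used. */
    static int tokenizer_init(tokenizer_t *t, const char *input, size_t size) {
        memset(t, 0, sizeof(*t));
        t->input = input;
        t->size = size;
        return 0;
    }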